diff --git a/.dockerignore b/.dockerignore index c32286be6..eababae16 100644 --- a/.dockerignore +++ b/.dockerignore @@ -47,3 +47,5 @@ contracts/.git !etc/env/consensus_secrets.yaml !etc/env/consensus_config.yaml !rust-toolchain + +!via_verifier \ No newline at end of file diff --git a/.gitignore b/.gitignore index a05869d89..21679c61a 100644 --- a/.gitignore +++ b/.gitignore @@ -36,6 +36,7 @@ Cargo.lock /etc/env/.current /etc/env/configs/* !/etc/env/configs/dev.toml +!/etc/env/configs/via*.toml !/etc/env/configs/dev_validium.toml !/etc/env/configs/dev_validium_docker.toml !/etc/env/configs/ext-node.toml @@ -117,3 +118,7 @@ configs/* era-observability/ core/tests/ts-integration/deployments-zk .env + +*.via + +celestia-keys/* \ No newline at end of file diff --git a/.gitmodules b/.gitmodules index 8ec15bfcb..3c352b775 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,3 +1,4 @@ [submodule "contracts"] path = contracts url = https://github.com/vianetwork/era-contracts.git +branch = devnet-2 diff --git a/.prettierignore b/.prettierignore index d58a7f3e8..6e1731a85 100644 --- a/.prettierignore +++ b/.prettierignore @@ -3,7 +3,8 @@ bellman-cuda sdk/zksync-rs/CHANGELOG.md CHANGELOG.md core/lib/dal/.sqlx -prover/lib/dal/.sqlx +prover/lib/dal/. +verifier/lib/verifier_dal/.sqlx node_modules # Ignore contract submodules diff --git a/Cargo.lock b/Cargo.lock index 071f8d160..cb1164f74 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2,6 +2,16 @@ # It is not intended for manual editing. 
version = 3 +[[package]] +name = "Inflector" +version = "0.11.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe438c63458706e03479442743baae6c88256498e6431708f6dfc520a26515d3" +dependencies = [ + "lazy_static", + "regex", +] + [[package]] name = "addchain" version = "0.2.0" @@ -41,7 +51,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d122413f284cf2d62fb1b7db97e02edb8cda96d769b16e443a4f6195e35662b0" dependencies = [ "crypto-common", - "generic-array", + "generic-array 0.14.7", ] [[package]] @@ -109,9 +119,9 @@ checksum = "5c6cb57a04249c6480766f7f7cef5467412af1490f8d1e243141daddada3264f" [[package]] name = "alloy-rlp" -version = "0.3.8" +version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26154390b1d205a4a7ac7352aa2eb4f81f391399d4e2f546fb81a2f8bb383f62" +checksum = "da0822426598f95e45dd1ea32a738dac057529a709ee645fcc516ffa4cbde08f" dependencies = [ "arrayvec 0.7.6", "bytes", @@ -404,15 +414,26 @@ dependencies = [ [[package]] name = "async-trait" -version = "0.1.81" +version = "0.1.83" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e0c28dcc82d7c8ead5cb13beb15405b57b8546e93215673ff8ca0349a028107" +checksum = "721cae7de5c34fbb2acd27e21e6d2cf7b886dce0c27388d46c4e6c47ea4318dd" dependencies = [ "proc-macro2 1.0.86", "quote 1.0.37", "syn 2.0.76", ] +[[package]] +name = "async_io_stream" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6d7b9decdf35d8908a7e3ef02f64c5e9b1695e230154c0e8de3969142d9b94c" +dependencies = [ + "futures 0.3.30", + "pharos", + "rustc_version 0.4.0", +] + [[package]] name = "atoi" version = "2.0.0" @@ -439,6 +460,18 @@ dependencies = [ "winapi", ] +[[package]] +name = "auto_impl" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7862e21c893d65a1650125d157eaeec691439379a1cee17ee49031b79236ada4" +dependencies = [ + 
"proc-macro-error", + "proc-macro2 1.0.86", + "quote 1.0.37", + "syn 1.0.109", +] + [[package]] name = "auto_impl" version = "1.2.0" @@ -485,12 +518,44 @@ dependencies = [ [[package]] name = "axum" -version = "0.7.5" +version = "0.6.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b829e4e32b91e643de6eafe82b1d90675f5874230191a4ffbc1b336dec4d6bf" +dependencies = [ + "async-trait", + "axum-core 0.3.4", + "bitflags 1.3.2", + "bytes", + "futures-util", + "http 0.2.12", + "http-body 0.4.6", + "hyper 0.14.30", + "itoa", + "matchit", + "memchr", + "mime", + "percent-encoding", + "pin-project-lite", + "rustversion", + "serde", + "serde_json", + "serde_path_to_error", + "serde_urlencoded", + "sync_wrapper 0.1.2", + "tokio", + "tower 0.4.13", + "tower-layer", + "tower-service", +] + +[[package]] +name = "axum" +version = "0.7.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a6c9af12842a67734c9a2e355436e5d03b22383ed60cf13cd0c18fbfe3dcbcf" +checksum = "edca88bc138befd0323b20752846e6587272d3b03b0343c8ea28a6f819e6e71f" dependencies = [ "async-trait", - "axum-core", + "axum-core 0.4.5", "bytes", "futures-util", "http 1.1.0", @@ -511,7 +576,7 @@ dependencies = [ "serde_urlencoded", "sync_wrapper 1.0.1", "tokio", - "tower", + "tower 0.5.2", "tower-layer", "tower-service", "tracing", @@ -519,9 +584,26 @@ dependencies = [ [[package]] name = "axum-core" -version = "0.4.3" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "759fa577a247914fd3f7f76d62972792636412fbfd634cd452f6a385a74d2d2c" +dependencies = [ + "async-trait", + "bytes", + "futures-util", + "http 0.2.12", + "http-body 0.4.6", + "mime", + "rustversion", + "tower-layer", + "tower-service", +] + +[[package]] +name = "axum-core" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a15c63fd72d41492dc4f497196f5da1fb04fb7529e631d73630d1b491e47a2e3" +checksum = 
"09f2bd6146b97ae3359fa0cc6d6b376d9539582c7b4220f041a33ec24c226199" dependencies = [ "async-trait", "bytes", @@ -532,7 +614,7 @@ dependencies = [ "mime", "pin-project-lite", "rustversion", - "sync_wrapper 0.1.2", + "sync_wrapper 1.0.1", "tower-layer", "tower-service", "tracing", @@ -583,6 +665,22 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf" +[[package]] +name = "base58" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5024ee8015f02155eee35c711107ddd9a9bf3cb689cf2a9089c97e79b6e1ae83" + +[[package]] +name = "base58check" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2ee2fe4c9a0c84515f136aaae2466744a721af6d63339c18689d9e995d74d99b" +dependencies = [ + "base58", + "sha2 0.8.2", +] + [[package]] name = "base58ck" version = "0.1.0" @@ -593,6 +691,12 @@ dependencies = [ "bitcoin_hashes", ] +[[package]] +name = "base64" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3441f0f7b02788e948e47f457ca01f1d7e6d92c693bc132c22b087d3141c03ff" + [[package]] name = "base64" version = "0.13.1" @@ -617,6 +721,12 @@ version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" +[[package]] +name = "bech32" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2dabbe35f96fb9507f7330793dc490461b2962659ac5d427181e451a623751d1" + [[package]] name = "bech32" version = "0.11.0" @@ -762,7 +872,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ea507acc1cd80fc084ace38544bbcf7ced7c2aa65b653b102de0ce718df668f6" dependencies = [ "base58ck", - "bech32", + "bech32 0.11.0", "bitcoin-internals", "bitcoin-io", "bitcoin-units", @@ -857,6 +967,16 @@ dependencies = [ 
"typenum", ] +[[package]] +name = "bitvec" +version = "0.17.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41262f11d771fd4a61aa3ce019fca363b4b6c282fca9da2a31186d3965a47a5c" +dependencies = [ + "either", + "radium 0.3.0", +] + [[package]] name = "bitvec" version = "1.0.1" @@ -864,7 +984,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c" dependencies = [ "funty", - "radium", + "radium 0.7.0", "tap", "wyz", ] @@ -877,7 +997,7 @@ checksum = "0a4e37d16930f5459780f5621038b6382b9bb37c19016f39fb6b5808d831f174" dependencies = [ "crypto-mac", "digest 0.9.0", - "opaque-debug", + "opaque-debug 0.3.1", ] [[package]] @@ -942,14 +1062,26 @@ dependencies = [ "constant_time_eq", ] +[[package]] +name = "block-buffer" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0940dc441f31689269e10ac70eb1002a3a1d3ad1390e030043662eb7fe4688b" +dependencies = [ + "block-padding 0.1.5", + "byte-tools", + "byteorder", + "generic-array 0.12.4", +] + [[package]] name = "block-buffer" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4" dependencies = [ - "block-padding", - "generic-array", + "block-padding 0.2.1", + "generic-array 0.14.7", ] [[package]] @@ -958,7 +1090,16 @@ version = "0.10.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" dependencies = [ - "generic-array", + "generic-array 0.14.7", +] + +[[package]] +name = "block-padding" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa79dedbb091f449f1f39e53edf88d5dbe95f895dae6135a8d7b881fb5af73f5" +dependencies = [ + "byte-tools", ] [[package]] @@ -1041,6 +1182,37 @@ dependencies = [ "unroll", ] +[[package]] +name = 
"boojum" +version = "0.30.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52c986d4816d76dddb81594b94a59d2c2978a8a5e5695d6de1fdc3edd833e026" +dependencies = [ + "arrayvec 0.7.6", + "bincode", + "blake2 0.10.6", + "const_format", + "convert_case", + "crossbeam 0.8.4", + "crypto-bigint 0.5.5", + "derivative", + "ethereum-types", + "firestorm", + "itertools 0.10.5", + "lazy_static", + "num-modular", + "num_cpus", + "rand 0.8.5", + "rayon", + "serde", + "sha2 0.10.8", + "sha3_ce", + "smallvec", + "unroll", + "zksync_cs_derive", + "zksync_pairing", +] + [[package]] name = "borsh" version = "1.5.1" @@ -1065,6 +1237,12 @@ dependencies = [ "syn_derive", ] +[[package]] +name = "bs58" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "771fe0050b883fcc3ea2359b1a96bcfbc090b7116eae7c3c512c7a083fdf23d3" + [[package]] name = "bs58" version = "0.5.1" @@ -1092,6 +1270,12 @@ version = "1.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c3ac9f8b63eca6fd385229b3675f6cc0dc5c8a5c8a54a59d4f52ffd670d87b0c" +[[package]] +name = "byte-tools" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3b5ca7a04898ad4bcd41c90c5285445ff5b791899bb1b0abdd2a2aa791211d7" + [[package]] name = "bytecheck" version = "0.6.12" @@ -1183,6 +1367,20 @@ dependencies = [ "serde_json", ] +[[package]] +name = "cargo_metadata" +version = "0.15.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eee4243f1f26fc7a42710e7439c149e2b10b05472f88090acce52632f231a73a" +dependencies = [ + "camino", + "cargo-platform", + "semver 1.0.23", + "serde", + "serde_json", + "thiserror", +] + [[package]] name = "cast" version = "0.3.0" @@ -1280,7 +1478,7 @@ version = "0.4.0" source = "git+https://github.com/eigerco/lumina.git#f11959550851afb8c2062b30e38cc87242467cdc" dependencies = [ "base64 0.22.1", - "bech32", + "bech32 0.11.0", "blockstore", 
"bytes", "celestia-proto", @@ -1424,6 +1622,19 @@ dependencies = [ "zeroize", ] +[[package]] +name = "circuit_definitions" +version = "0.150.16" +source = "git+https://github.com/matter-labs/zksync-protocol.git?tag=v0.150.16#fb09cb3bd949af68572d98886905f55e9ae77eaf" +dependencies = [ + "circuit_encodings 0.150.16", + "crossbeam 0.8.4", + "derivative", + "seq-macro", + "serde", + "snark_wrapper", +] + [[package]] name = "circuit_encodings" version = "0.140.1" @@ -1472,6 +1683,17 @@ dependencies = [ "zkevm_circuits 0.150.4", ] +[[package]] +name = "circuit_encodings" +version = "0.150.16" +source = "git+https://github.com/matter-labs/zksync-protocol.git?tag=v0.150.16#fb09cb3bd949af68572d98886905f55e9ae77eaf" +dependencies = [ + "derivative", + "serde", + "zk_evm 0.150.16", + "zkevm_circuits 0.150.16", +] + [[package]] name = "circuit_sequencer_api" version = "0.133.0" @@ -1645,6 +1867,63 @@ dependencies = [ "indexmap 1.9.3", ] +[[package]] +name = "coins-bip32" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "634c509653de24b439672164bbf56f5f582a2ab0e313d3b0f6af0b7345cf2560" +dependencies = [ + "bincode", + "bs58 0.4.0", + "coins-core", + "digest 0.10.7", + "getrandom", + "hmac", + "k256 0.11.6", + "lazy_static", + "serde", + "sha2 0.10.8", + "thiserror", +] + +[[package]] +name = "coins-bip39" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a11892bcac83b4c6e95ab84b5b06c76d9d70ad73548dd07418269c5c7977171" +dependencies = [ + "bitvec 0.17.4", + "coins-bip32", + "getrandom", + "hex", + "hmac", + "pbkdf2", + "rand 0.8.5", + "sha2 0.10.8", + "thiserror", +] + +[[package]] +name = "coins-core" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c94090a6663f224feae66ab01e41a2555a8296ee07b5f20dab8888bdefc9f617" +dependencies = [ + "base58check", + "base64 0.12.3", + "bech32 0.7.3", + "blake2 0.10.6", + "digest 0.10.7", + 
"generic-array 0.14.7", + "hex", + "ripemd", + "serde", + "serde_derive", + "sha2 0.10.8", + "sha3 0.10.8", + "thiserror", +] + [[package]] name = "colorchoice" version = "1.0.2" @@ -1961,7 +2240,7 @@ dependencies = [ "crossterm_winapi", "libc", "mio 0.8.11", - "parking_lot", + "parking_lot 0.12.3", "signal-hook", "signal-hook-mio", "winapi", @@ -1988,7 +2267,7 @@ version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ef2b4b23cddf68b89b8f8069890e8c270d54e2d5fe1b143820234805e4cb17ef" dependencies = [ - "generic-array", + "generic-array 0.14.7", "rand_core 0.6.4", "subtle", "zeroize", @@ -2000,7 +2279,7 @@ version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76" dependencies = [ - "generic-array", + "generic-array 0.14.7", "rand_core 0.6.4", "subtle", "zeroize", @@ -2012,7 +2291,7 @@ version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" dependencies = [ - "generic-array", + "generic-array 0.14.7", "rand_core 0.6.4", "typenum", ] @@ -2023,7 +2302,7 @@ version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b584a330336237c1eecd3e94266efb216c56ed91225d634cb2991c5f3fd1aeab" dependencies = [ - "generic-array", + "generic-array 0.14.7", "subtle", ] @@ -2153,7 +2432,7 @@ dependencies = [ "hashbrown 0.14.5", "lock_api", "once_cell", - "parking_lot_core", + "parking_lot_core 0.9.10", ] [[package]] @@ -2167,7 +2446,7 @@ dependencies = [ "hashbrown 0.14.5", "lock_api", "once_cell", - "parking_lot_core", + "parking_lot_core 0.9.10", ] [[package]] @@ -2203,7 +2482,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bef552e6f588e446098f6ba40d89ac146c8c7b64aade83c051ee00bb5d2bc18d" dependencies = [ "serde", - "uuid", + "uuid 1.10.0", ] [[package]] @@ -2248,6 +2527,17 
@@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "derive_more" +version = "0.99.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f33878137e4dafd7fa914ad4e259e18a4e8e532b9617a2d0150262bf53abfce" +dependencies = [ + "proc-macro2 1.0.86", + "quote 1.0.37", + "syn 2.0.76", +] + [[package]] name = "derive_more" version = "1.0.0-beta.6" @@ -2275,13 +2565,22 @@ version = "0.1.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8" +[[package]] +name = "digest" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3d0c8c8752312f9713efd397ff63acb9f85585afbf179282e720e7704954dd5" +dependencies = [ + "generic-array 0.12.4", +] + [[package]] name = "digest" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066" dependencies = [ - "generic-array", + "generic-array 0.14.7", ] [[package]] @@ -2296,6 +2595,12 @@ dependencies = [ "subtle", ] +[[package]] +name = "dotenv" +version = "0.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77c90badedccf4105eca100756a0b1289e191f6fcbdadd3cee1d2f614f97da8f" + [[package]] name = "dotenvy" version = "0.15.7" @@ -2410,7 +2715,7 @@ dependencies = [ "der 0.6.1", "digest 0.10.7", "ff 0.12.1", - "generic-array", + "generic-array 0.14.7", "group 0.12.1", "pkcs8 0.9.0", "rand_core 0.6.4", @@ -2429,7 +2734,7 @@ dependencies = [ "crypto-bigint 0.5.5", "digest 0.10.7", "ff 0.13.0", - "generic-array", + "generic-array 0.14.7", "group 0.13.0", "pem-rfc7468", "pkcs8 0.10.2", @@ -2542,15 +2847,37 @@ dependencies = [ ] [[package]] -name = "ethabi" -version = "18.0.0" +name = "eth-keystore" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"7413c5f74cc903ea37386a8965a936cbeb334bd270862fdece542c1b2dcbc898" +checksum = "1fda3bf123be441da5260717e0661c25a2fd9cb2b2c1d20bf2e05580047158ab" dependencies = [ - "ethereum-types", + "aes", + "ctr", + "digest 0.10.7", "hex", - "once_cell", - "regex", + "hmac", + "pbkdf2", + "rand 0.8.5", + "scrypt", + "serde", + "serde_json", + "sha2 0.10.8", + "sha3 0.10.8", + "thiserror", + "uuid 0.8.2", +] + +[[package]] +name = "ethabi" +version = "18.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7413c5f74cc903ea37386a8965a936cbeb334bd270862fdece542c1b2dcbc898" +dependencies = [ + "ethereum-types", + "hex", + "once_cell", + "regex", "serde", "serde_json", "sha3 0.10.8", @@ -2566,8 +2893,10 @@ checksum = "c22d4b5885b6aa2fe5e8b9329fb8d232bf739e434e6b87347c63bdd00c120f60" dependencies = [ "crunchy", "fixed-hash", + "impl-codec", "impl-rlp", "impl-serde", + "scale-info", "tiny-keccak 2.0.2", ] @@ -2579,12 +2908,227 @@ checksum = "02d215cbf040552efcbe99a38372fe80ab9d00268e20012b79fcd0f073edd8ee" dependencies = [ "ethbloom", "fixed-hash", + "impl-codec", "impl-rlp", "impl-serde", "primitive-types", + "scale-info", "uint", ] +[[package]] +name = "ethers" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11f26f9d8d80da18ca72aca51804c65eb2153093af3bec74fd5ce32aa0c1f665" +dependencies = [ + "ethers-addressbook", + "ethers-contract", + "ethers-core", + "ethers-etherscan", + "ethers-middleware", + "ethers-providers", + "ethers-signers", +] + +[[package]] +name = "ethers-addressbook" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe4be54dd2260945d784e06ccdeb5ad573e8f1541838cee13a1ab885485eaa0b" +dependencies = [ + "ethers-core", + "once_cell", + "serde", + "serde_json", +] + +[[package]] +name = "ethers-contract" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"e9c3c3e119a89f0a9a1e539e7faecea815f74ddcf7c90d0b00d1f524db2fdc9c" +dependencies = [ + "ethers-contract-abigen", + "ethers-contract-derive", + "ethers-core", + "ethers-providers", + "futures-util", + "hex", + "once_cell", + "pin-project", + "serde", + "serde_json", + "thiserror", +] + +[[package]] +name = "ethers-contract-abigen" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d4e5ad46aede34901f71afdb7bb555710ed9613d88d644245c657dc371aa228" +dependencies = [ + "Inflector", + "cfg-if 1.0.0", + "dunce", + "ethers-core", + "eyre", + "getrandom", + "hex", + "proc-macro2 1.0.86", + "quote 1.0.37", + "regex", + "reqwest 0.11.27", + "serde", + "serde_json", + "syn 1.0.109", + "toml 0.5.11", + "url", + "walkdir", +] + +[[package]] +name = "ethers-contract-derive" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f192e8e4cf2b038318aae01e94e7644e0659a76219e94bcd3203df744341d61f" +dependencies = [ + "ethers-contract-abigen", + "ethers-core", + "hex", + "proc-macro2 1.0.86", + "quote 1.0.37", + "serde_json", + "syn 1.0.109", +] + +[[package]] +name = "ethers-core" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ade3e9c97727343984e1ceada4fdab11142d2ee3472d2c67027d56b1251d4f15" +dependencies = [ + "arrayvec 0.7.6", + "bytes", + "cargo_metadata 0.15.4", + "chrono", + "convert_case", + "elliptic-curve 0.12.3", + "ethabi", + "generic-array 0.14.7", + "hex", + "k256 0.11.6", + "once_cell", + "open-fastrlp", + "proc-macro2 1.0.86", + "rand 0.8.5", + "rlp", + "rlp-derive", + "serde", + "serde_json", + "strum 0.24.1", + "syn 1.0.109", + "thiserror", + "tiny-keccak 2.0.2", + "unicode-xid 0.2.5", +] + +[[package]] +name = "ethers-etherscan" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9713f525348e5dde025d09b0a4217429f8074e8ff22c886263cc191e87d8216" +dependencies = [ + "ethers-core", + 
"getrandom", + "reqwest 0.11.27", + "semver 1.0.23", + "serde", + "serde-aux", + "serde_json", + "thiserror", + "tracing", +] + +[[package]] +name = "ethers-middleware" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e71df7391b0a9a51208ffb5c7f2d068900e99d6b3128d3a4849d138f194778b7" +dependencies = [ + "async-trait", + "auto_impl 0.5.0", + "ethers-contract", + "ethers-core", + "ethers-etherscan", + "ethers-providers", + "ethers-signers", + "futures-locks", + "futures-util", + "instant", + "reqwest 0.11.27", + "serde", + "serde_json", + "thiserror", + "tokio", + "tracing", + "tracing-futures", + "url", +] + +[[package]] +name = "ethers-providers" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1a9e0597aa6b2fdc810ff58bc95e4eeaa2c219b3e615ed025106ecb027407d8" +dependencies = [ + "async-trait", + "auto_impl 1.2.0", + "base64 0.13.1", + "ethers-core", + "futures-core", + "futures-timer", + "futures-util", + "getrandom", + "hashers", + "hex", + "http 0.2.12", + "once_cell", + "parking_lot 0.11.2", + "pin-project", + "reqwest 0.11.27", + "serde", + "serde_json", + "thiserror", + "tokio", + "tracing", + "tracing-futures", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "wasm-timer", + "web-sys", + "ws_stream_wasm", +] + +[[package]] +name = "ethers-signers" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f41ced186867f64773db2e55ffdd92959e094072a1d09a5e5e831d443204f98" +dependencies = [ + "async-trait", + "coins-bip32", + "coins-bip39", + "elliptic-curve 0.12.3", + "eth-keystore", + "ethers-core", + "hex", + "rand 0.8.5", + "sha2 0.10.8", + "thiserror", +] + [[package]] name = "event-listener" version = "5.3.1" @@ -2606,6 +3150,12 @@ dependencies = [ "once_cell", ] +[[package]] +name = "fake-simd" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"e88a8acf291dafb59c2d96e8f59828f3838bb1a70398823ade51a84de6a6deed" + [[package]] name = "fastrand" version = "2.1.1" @@ -2619,7 +3169,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "139834ddba373bbdd213dffe02c8d110508dcf1726c2be27e8d1f7d7e1856418" dependencies = [ "arrayvec 0.7.6", - "auto_impl", + "auto_impl 1.2.0", "bytes", ] @@ -2821,6 +3371,39 @@ dependencies = [ "tiny-keccak 1.5.0", ] +[[package]] +name = "franklin-crypto" +version = "0.30.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09876c8a2d5e706a8669f69d7db592ca9c48257cbfc4917fe32daf419a8ea78e" +dependencies = [ + "arr_macro", + "bit-vec", + "blake2 0.9.2", + "blake2-rfc_bellman_edition", + "blake2s_simd", + "boojum 0.30.9", + "byteorder", + "derivative", + "digest 0.9.0", + "hex", + "indexmap 1.9.3", + "itertools 0.10.5", + "lazy_static", + "num-bigint 0.4.6", + "num-derive 0.2.5", + "num-integer", + "num-traits", + "rand 0.4.6", + "serde", + "sha2 0.9.9", + "sha3 0.9.1", + "smallvec", + "splitmut", + "tiny-keccak 1.5.0", + "zksync_bellman", +] + [[package]] name = "fs_extra" version = "1.3.0" @@ -2896,7 +3479,7 @@ checksum = "1d930c203dd0b6ff06e0201a4a2fe9149b43c684fd4420555b26d21b1a02956f" dependencies = [ "futures-core", "lock_api", - "parking_lot", + "parking_lot 0.12.3", ] [[package]] @@ -2905,6 +3488,16 @@ version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1" +[[package]] +name = "futures-locks" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "45ec6fe3675af967e67c5536c0b9d44e34e6c52f86bedc4ea49c5317b8e94d06" +dependencies = [ + "futures-channel", + "futures-task", +] + [[package]] name = "futures-macro" version = "0.3.30" @@ -2935,7 +3528,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"f288b0a4f20f9a56b5d1da57e2227c661b7b16168e2f72365f57b63326e29b24" dependencies = [ "gloo-timers", - "send_wrapper", + "send_wrapper 0.4.0", ] [[package]] @@ -2975,6 +3568,15 @@ dependencies = [ "byteorder", ] +[[package]] +name = "generic-array" +version = "0.12.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ffdf9f34f1447443d37393cc6c2b8313aebddcd96906caf34e54c68d8e57d7bd" +dependencies = [ + "typenum", +] + [[package]] name = "generic-array" version = "0.14.7" @@ -3029,7 +3631,7 @@ version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f0d8a4362ccb29cb0b265253fb0a2728f592895ee6854fd9bc13f2ffda266ff1" dependencies = [ - "opaque-debug", + "opaque-debug 0.3.1", "polyval", ] @@ -3177,7 +3779,7 @@ dependencies = [ "futures-timer", "no-std-compat", "nonzero_ext", - "parking_lot", + "parking_lot 0.12.3", "quanta", "rand 0.8.5", "smallvec", @@ -3217,7 +3819,7 @@ dependencies = [ "futures-sink", "futures-util", "http 0.2.12", - "indexmap 2.4.0", + "indexmap 2.6.0", "slab", "tokio", "tokio-util", @@ -3236,7 +3838,7 @@ dependencies = [ "futures-core", "futures-sink", "http 1.1.0", - "indexmap 2.4.0", + "indexmap 2.6.0", "slab", "tokio", "tokio-util", @@ -3286,6 +3888,21 @@ dependencies = [ "allocator-api2", ] +[[package]] +name = "hashbrown" +version = "0.15.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" + +[[package]] +name = "hashers" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2bca93b15ea5a746f220e56587f71e73c6165eab783df9e26590069953e3c30" +dependencies = [ + "fxhash", +] + [[package]] name = "hashlink" version = "0.9.1" @@ -3304,6 +3921,12 @@ dependencies = [ "unicode-segmentation", ] +[[package]] +name = "heck" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" + [[package]] name = "heck" version = "0.5.0" @@ -3497,6 +4120,20 @@ dependencies = [ "want", ] +[[package]] +name = "hyper-rustls" +version = "0.24.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" +dependencies = [ + "futures-util", + "http 0.2.12", + "hyper 0.14.30", + "rustls 0.21.12", + "tokio", + "tokio-rustls 0.24.1", +] + [[package]] name = "hyper-rustls" version = "0.27.2" @@ -3508,10 +4145,10 @@ dependencies = [ "hyper 1.4.1", "hyper-util", "log", - "rustls", + "rustls 0.23.12", "rustls-pki-types", "tokio", - "tokio-rustls", + "tokio-rustls 0.26.0", "tower-service", ] @@ -3572,7 +4209,7 @@ dependencies = [ "pin-project-lite", "socket2", "tokio", - "tower", + "tower 0.4.13", "tower-service", "tracing", ] @@ -3692,12 +4329,12 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.4.0" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93ead53efc7ea8ed3cfb0c79fc8023fbb782a5432b52830b6518941cebe6505c" +checksum = "707907fe3c25f5424cce2cb7e1cbcafee6bdbe735ca90ef77c29e84591e5b9da" dependencies = [ "equivalent", - "hashbrown 0.14.5", + "hashbrown 0.15.2", ] [[package]] @@ -3706,7 +4343,7 @@ version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a0c10553d664a4d0bcff9f4215d0aac67a639cc68ef660840afe309b807bc9f5" dependencies = [ - "generic-array", + "generic-array 0.14.7", ] [[package]] @@ -3739,6 +4376,18 @@ dependencies = [ "similar", ] +[[package]] +name = "instant" +version = "0.1.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e0242819d153cba4b4b05a5a8f2a7e9bbf97b6055b2a002b395c96b5ff3c0222" +dependencies = [ + "cfg-if 1.0.0", + "js-sys", + "wasm-bindgen", + "web-sys", +] + [[package]] name = "ipnet" version = "2.9.0" @@ -3879,13 +4528,13 @@ dependencies = [ "http 
1.1.0", "jsonrpsee-core 0.23.2", "pin-project", - "rustls", + "rustls 0.23.12", "rustls-pki-types", "rustls-platform-verifier", "soketto", "thiserror", "tokio", - "tokio-rustls", + "tokio-rustls 0.26.0", "tokio-util", "tracing", "url", @@ -3902,13 +4551,13 @@ dependencies = [ "http 1.1.0", "jsonrpsee-core 0.24.3", "pin-project", - "rustls", + "rustls 0.23.12", "rustls-pki-types", "rustls-platform-verifier", "soketto", "thiserror", "tokio", - "tokio-rustls", + "tokio-rustls 0.26.0", "tokio-util", "tracing", "url", @@ -3930,7 +4579,7 @@ dependencies = [ "http-body 1.0.1", "http-body-util", "jsonrpsee-types 0.23.2", - "parking_lot", + "parking_lot 0.12.3", "pin-project", "rand 0.8.5", "rustc-hash 1.1.0", @@ -3977,17 +4626,17 @@ dependencies = [ "base64 0.22.1", "http-body 1.0.1", "hyper 1.4.1", - "hyper-rustls", + "hyper-rustls 0.27.2", "hyper-util", "jsonrpsee-core 0.23.2", "jsonrpsee-types 0.23.2", - "rustls", + "rustls 0.23.12", "rustls-platform-verifier", "serde", "serde_json", "thiserror", "tokio", - "tower", + "tower 0.4.13", "tracing", "url", ] @@ -4002,17 +4651,17 @@ dependencies = [ "base64 0.22.1", "http-body 1.0.1", "hyper 1.4.1", - "hyper-rustls", + "hyper-rustls 0.27.2", "hyper-util", "jsonrpsee-core 0.24.3", "jsonrpsee-types 0.24.3", - "rustls", + "rustls 0.23.12", "rustls-platform-verifier", "serde", "serde_json", "thiserror", "tokio", - "tower", + "tower 0.4.13", "tracing", "url", ] @@ -4067,7 +4716,7 @@ dependencies = [ "tokio", "tokio-stream", "tokio-util", - "tower", + "tower 0.4.13", "tracing", ] @@ -4158,6 +4807,7 @@ dependencies = [ "ecdsa 0.14.8", "elliptic-curve 0.12.3", "sha2 0.10.8", + "sha3 0.10.8", ] [[package]] @@ -4228,7 +4878,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4" dependencies = [ "cfg-if 1.0.0", - "windows-targets 0.48.5", + "windows-targets 0.52.6", ] [[package]] @@ -4243,7 +4893,7 @@ version = "0.2.9" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "55cca1eb2bc1fd29f099f3daaab7effd01e1a54b7c577d0ed082521034d912e8" dependencies = [ - "bs58", + "bs58 0.5.1", "hkdf", "multihash", "quick-protobuf", @@ -4665,6 +5315,21 @@ version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "defc4c55412d89136f966bbb339008b474350e5e6e78d2714439c386b3137a03" +[[package]] +name = "musig2" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1eaa74fd6a0747bd589b36abce26cd4cd1cd2abedb2fcba26c4eed694dd85487" +dependencies = [ + "base16ct 0.2.0", + "hmac", + "once_cell", + "secp", + "secp256k1 0.30.0", + "sha2 0.10.8", + "subtle", +] + [[package]] name = "native-tls" version = "0.2.12" @@ -4969,12 +5634,43 @@ version = "11.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b410bbe7e14ab526a0e86877eb47c6996a2bd7746f027ba551028c925390e4e9" +[[package]] +name = "opaque-debug" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2839e79665f131bdb5782e51f2c6c9599c133c6098982a54c794358bf432529c" + [[package]] name = "opaque-debug" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381" +[[package]] +name = "open-fastrlp" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "786393f80485445794f6043fd3138854dd109cc6c4bd1a6383db304c9ce9b9ce" +dependencies = [ + "arrayvec 0.7.6", + "auto_impl 1.2.0", + "bytes", + "ethereum-types", + "open-fastrlp-derive", +] + +[[package]] +name = "open-fastrlp-derive" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "003b2be5c6c53c1cfeb0a238b8a1c3915cd410feb684457a36c10038f764bb1c" +dependencies = [ + "bytes", + "proc-macro2 1.0.86", + "quote 1.0.37", + "syn 1.0.109", +] + [[package]] name = 
"openssl" version = "0.10.66" @@ -5181,7 +5877,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "306800abfa29c7f16596b5970a588435e3d5b3149683d00c12b699cc19f895ee" dependencies = [ "arrayvec 0.7.6", - "bitvec", + "bitvec 1.0.1", "byte-slice-cast", "impl-trait-for-tuples", "parity-scale-codec-derive", @@ -5206,6 +5902,17 @@ version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bb813b8af86854136c6922af0598d719255ecb2179515e6e7730d468f05c9cae" +[[package]] +name = "parking_lot" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" +dependencies = [ + "instant", + "lock_api", + "parking_lot_core 0.8.6", +] + [[package]] name = "parking_lot" version = "0.12.3" @@ -5213,7 +5920,21 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" dependencies = [ "lock_api", - "parking_lot_core", + "parking_lot_core 0.9.10", +] + +[[package]] +name = "parking_lot_core" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60a2cfe6f0ad2bfc16aefa463b497d5c7a5ecd44a23efa72aa342d90177356dc" +dependencies = [ + "cfg-if 1.0.0", + "instant", + "libc", + "redox_syscall 0.2.16", + "smallvec", + "winapi", ] [[package]] @@ -5229,12 +5950,35 @@ dependencies = [ "windows-targets 0.52.6", ] +[[package]] +name = "password-hash" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7676374caaee8a325c9e7a2ae557f216c5563a171d6997b0ef8a65af35147700" +dependencies = [ + "base64ct", + "rand_core 0.6.4", + "subtle", +] + [[package]] name = "paste" version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" +[[package]] +name = "pbkdf2" +version = 
"0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83a0692ec44e4cf1ef28ca317f14f8f07da2d95ec3fa01f86e4467b725e60917" +dependencies = [ + "digest 0.10.7", + "hmac", + "password-hash", + "sha2 0.10.8", +] + [[package]] name = "peeking_take_while" version = "0.1.2" @@ -5318,7 +6062,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db" dependencies = [ "fixedbitset", - "indexmap 2.4.0", + "indexmap 2.6.0", +] + +[[package]] +name = "pharos" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e9567389417feee6ce15dd6527a8a1ecac205ef62c2932bcf3d9f6fc5b78b414" +dependencies = [ + "futures 0.3.30", + "rustc_version 0.4.0", ] [[package]] @@ -5425,7 +6179,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8159bd90725d2df49889a078b54f4f79e87f1f8a8444194cdca81d38f5393abf" dependencies = [ "cpufeatures", - "opaque-debug", + "opaque-debug 0.3.1", "universal-hash", ] @@ -5437,7 +6191,7 @@ checksum = "9d1fe60d06143b2430aa532c94cfe9e29783047f06c0d7fd359a9a51b729fa25" dependencies = [ "cfg-if 1.0.0", "cpufeatures", - "opaque-debug", + "opaque-debug 0.3.1", "universal-hash", ] @@ -5521,6 +6275,7 @@ dependencies = [ "impl-codec", "impl-rlp", "impl-serde", + "scale-info", "uint", ] @@ -5599,7 +6354,7 @@ checksum = "504ee9ff529add891127c4827eb481bd69dc0ebc72e9a682e187db4caa60c3ca" dependencies = [ "dtoa", "itoa", - "parking_lot", + "parking_lot 0.12.3", "prometheus-client-derive-encode", ] @@ -5829,6 +6584,12 @@ dependencies = [ "proc-macro2 1.0.86", ] +[[package]] +name = "radium" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "def50a86306165861203e7f84ecffbbdfdea79f0e51039b33de1e952358c47ac" + [[package]] name = "radium" version = "0.7.0" @@ -5949,6 +6710,15 @@ dependencies = [ "rand_core 0.3.1", ] +[[package]] +name = 
"redox_syscall" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" +dependencies = [ + "bitflags 1.3.2", +] + [[package]] name = "redox_syscall" version = "0.4.1" @@ -6035,6 +6805,7 @@ dependencies = [ "http 0.2.12", "http-body 0.4.6", "hyper 0.14.30", + "hyper-rustls 0.24.2", "hyper-tls 0.5.0", "ipnet", "js-sys", @@ -6044,6 +6815,7 @@ dependencies = [ "once_cell", "percent-encoding", "pin-project-lite", + "rustls 0.21.12", "rustls-pemfile 1.0.4", "serde", "serde_json", @@ -6052,11 +6824,13 @@ dependencies = [ "system-configuration 0.5.1", "tokio", "tokio-native-tls", + "tokio-rustls 0.24.1", "tower-service", "url", "wasm-bindgen", "wasm-bindgen-futures", "web-sys", + "webpki-roots 0.25.4", "winreg", ] @@ -6077,7 +6851,7 @@ dependencies = [ "http-body 1.0.1", "http-body-util", "hyper 1.4.1", - "hyper-rustls", + "hyper-rustls 0.27.2", "hyper-tls 0.6.0", "hyper-util", "ipnet", @@ -6132,7 +6906,31 @@ dependencies = [ "arrayvec 0.7.6", "blake2 0.10.6", "byteorder", - "franklin-crypto", + "franklin-crypto 0.1.0", + "num-bigint 0.3.3", + "num-integer", + "num-iter", + "num-traits", + "rand 0.4.6", + "serde", + "sha3 0.9.1", + "smallvec", +] + +[[package]] +name = "rescue_poseidon" +version = "0.30.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "def7c91dcd919a62ca93bff4d67f710f8ae099e2639458fc5e3862f5812491a3" +dependencies = [ + "addchain", + "arrayvec 0.7.6", + "blake2 0.10.6", + "byteorder", + "derivative", + "franklin-crypto 0.30.9", + "lazy_static", + "log", "num-bigint 0.3.3", "num-integer", "num-iter", @@ -6141,6 +6939,7 @@ dependencies = [ "serde", "sha3 0.9.1", "smallvec", + "typemap_rev", ] [[package]] @@ -6179,13 +6978,22 @@ dependencies = [ "windows-sys 0.52.0", ] +[[package]] +name = "ripemd" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"bd124222d17ad93a644ed9d011a40f4fb64aa54275c08cc216524a9ea82fb09f" +dependencies = [ + "digest 0.10.7", +] + [[package]] name = "rkyv" version = "0.7.45" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9008cd6385b9e161d8229e1f6549dd23c3d022f132a2ea37ac3a10ac4935779b" dependencies = [ - "bitvec", + "bitvec 1.0.1", "bytecheck", "bytes", "hashbrown 0.12.3", @@ -6194,7 +7002,7 @@ dependencies = [ "rkyv_derive", "seahash", "tinyvec", - "uuid", + "uuid 1.10.0", ] [[package]] @@ -6218,6 +7026,17 @@ dependencies = [ "rustc-hex", ] +[[package]] +name = "rlp-derive" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e33d7b2abe0c340d8797fe2907d3f20d3b5ea5908683618bfe80df7f621f672a" +dependencies = [ + "proc-macro2 1.0.86", + "quote 1.0.37", + "syn 1.0.109", +] + [[package]] name = "rocksdb" version = "0.21.0" @@ -6355,6 +7174,18 @@ dependencies = [ "windows-sys 0.52.0", ] +[[package]] +name = "rustls" +version = "0.21.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e" +dependencies = [ + "log", + "ring", + "rustls-webpki 0.101.7", + "sct", +] + [[package]] name = "rustls" version = "0.23.12" @@ -6366,7 +7197,7 @@ dependencies = [ "once_cell", "ring", "rustls-pki-types", - "rustls-webpki", + "rustls-webpki 0.102.7", "subtle", "zeroize", ] @@ -6420,13 +7251,13 @@ dependencies = [ "jni", "log", "once_cell", - "rustls", + "rustls 0.23.12", "rustls-native-certs", "rustls-platform-verifier-android", - "rustls-webpki", + "rustls-webpki 0.102.7", "security-framework", "security-framework-sys", - "webpki-roots", + "webpki-roots 0.26.3", "winapi", ] @@ -6436,6 +7267,16 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f87165f0995f63a9fbeea62b64d10b4d9d8e78ec6d7d51fb2125fda7bb36788f" +[[package]] +name = "rustls-webpki" +version = "0.101.7" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" +dependencies = [ + "ring", + "untrusted", +] + [[package]] name = "rustls-webpki" version = "0.102.7" @@ -6460,6 +7301,15 @@ version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" +[[package]] +name = "salsa20" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97a22f5af31f73a954c10289c93e8a50cc23d971e80ee446f1f6f7137a088213" +dependencies = [ + "cipher", +] + [[package]] name = "same-file" version = "1.0.6" @@ -6469,6 +7319,30 @@ dependencies = [ "winapi-util", ] +[[package]] +name = "scale-info" +version = "2.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eca070c12893629e2cc820a9761bedf6ce1dcddc9852984d1dc734b8bd9bd024" +dependencies = [ + "cfg-if 1.0.0", + "derive_more 0.99.18", + "parity-scale-codec", + "scale-info-derive", +] + +[[package]] +name = "scale-info-derive" +version = "2.11.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c6630024bf739e2179b91fb424b28898baf819414262c5d376677dbff1fe7ebf" +dependencies = [ + "proc-macro-crate 3.2.0", + "proc-macro2 1.0.86", + "quote 1.0.37", + "syn 2.0.76", +] + [[package]] name = "schannel" version = "0.1.23" @@ -6484,6 +7358,28 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" +[[package]] +name = "scrypt" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f9e24d2b632954ded8ab2ef9fea0a0c769ea56ea98bddbafbad22caeeadf45d" +dependencies = [ + "hmac", + "pbkdf2", + "salsa20", + "sha2 0.10.8", +] + +[[package]] +name = "sct" +version = "0.7.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" +dependencies = [ + "ring", + "untrusted", +] + [[package]] name = "seahash" version = "4.1.0" @@ -6498,7 +7394,7 @@ checksum = "3be24c1842290c45df0a7bf069e0c268a747ad05a192f2fd7dcfdbc1cba40928" dependencies = [ "base16ct 0.1.1", "der 0.6.1", - "generic-array", + "generic-array 0.14.7", "pkcs8 0.9.0", "subtle", "zeroize", @@ -6512,12 +7408,24 @@ checksum = "d3e97a565f76233a6003f9f5c54be1d9c5bdfa3eccfb189469f11ec4901c47dc" dependencies = [ "base16ct 0.2.0", "der 0.7.9", - "generic-array", + "generic-array 0.14.7", "pkcs8 0.10.2", "subtle", "zeroize", ] +[[package]] +name = "secp" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85ed54b1141d8cec428d8a4abf01282755ba4e4c8a621dd23fa2e0ed761814c2" +dependencies = [ + "base16ct 0.2.0", + "once_cell", + "secp256k1 0.30.0", + "subtle", +] + [[package]] name = "secp256k1" version = "0.27.0" @@ -6539,6 +7447,17 @@ dependencies = [ "serde", ] +[[package]] +name = "secp256k1" +version = "0.30.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b50c5943d326858130af85e049f2661ba3c78b26589b8ab98e65e80ae44a1252" +dependencies = [ + "bitcoin_hashes", + "rand 0.8.5", + "secp256k1-sys 0.10.0", +] + [[package]] name = "secp256k1-sys" version = "0.8.1" @@ -6623,6 +7542,12 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f638d531eccd6e23b980caf34876660d38e265409d8e99b397ab71eb3612fad0" +[[package]] +name = "send_wrapper" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd0b0ec5f1c1ca621c432a25813d8d60c88abe6d3e08a3eb9cf37d97a0fe3d73" + [[package]] name = "sentry" version = "0.31.8" @@ -6728,7 +7653,7 @@ dependencies = [ "thiserror", "time", "url", - "uuid", + "uuid 1.10.0", ] [[package]] @@ -6746,6 +7671,16 @@ dependencies = [ 
"serde_derive", ] +[[package]] +name = "serde-aux" +version = "4.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d2e8bfba469d06512e11e3311d4d051a4a387a5b42d010404fecf3200321c95" +dependencies = [ + "serde", + "serde_json", +] + [[package]] name = "serde-value" version = "0.7.0" @@ -6778,9 +7713,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.127" +version = "1.0.133" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8043c06d9f82bd7271361ed64f415fe5e12a77fdb52e573e7f06a516dea329ad" +checksum = "c7fceb2473b9166b2294ef05efcb65a3db80803f0b03ef86a5fc88a2b85ee377" dependencies = [ "itoa", "memchr", @@ -6860,7 +7795,7 @@ version = "0.9.34+deprecated" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47" dependencies = [ - "indexmap 2.4.0", + "indexmap 2.6.0", "itoa", "ryu", "serde", @@ -6878,6 +7813,18 @@ dependencies = [ "digest 0.10.7", ] +[[package]] +name = "sha2" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a256f46ea78a0c0d9ff00077504903ac881a1dafdc20da66545699e7776b3e69" +dependencies = [ + "block-buffer 0.7.3", + "digest 0.8.1", + "fake-simd", + "opaque-debug 0.2.3", +] + [[package]] name = "sha2" version = "0.9.9" @@ -6888,7 +7835,7 @@ dependencies = [ "cfg-if 1.0.0", "cpufeatures", "digest 0.9.0", - "opaque-debug", + "opaque-debug 0.3.1", ] [[package]] @@ -6922,7 +7869,7 @@ dependencies = [ "block-buffer 0.9.0", "digest 0.9.0", "keccak", - "opaque-debug", + "opaque-debug 0.3.1", ] [[package]] @@ -7051,7 +7998,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "16d23b015676c90a0f01c197bfdc786c20342c73a0afdda9025adb0bc42940a8" dependencies = [ "bytecount", - "cargo_metadata", + "cargo_metadata 0.14.2", "error-chain", "glob", "pulldown-cmark", @@ -7097,6 +8044,18 @@ dependencies = [ "zksync_vlog", ] +[[package]] 
+name = "snark_wrapper" +version = "0.30.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "374c01372ffa80be5ae488fb54cf58ade53fd5279aa5a0340a44fc1da96d71a1" +dependencies = [ + "derivative", + "rand 0.4.6", + "rescue_poseidon 0.30.9", + "serde", +] + [[package]] name = "snow" version = "0.9.6" @@ -7220,7 +8179,7 @@ dependencies = [ "hashbrown 0.14.5", "hashlink", "hex", - "indexmap 2.4.0", + "indexmap 2.6.0", "ipnetwork", "log", "memchr", @@ -7301,7 +8260,7 @@ dependencies = [ "futures-core", "futures-io", "futures-util", - "generic-array", + "generic-array 0.14.7", "hex", "hkdf", "hmac", @@ -7457,13 +8416,35 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "strum" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "063e6045c0e62079840579a7e47a355ae92f60eb74daaf156fb1e84ba164e63f" +dependencies = [ + "strum_macros 0.24.3", +] + [[package]] name = "strum" version = "0.26.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8fec0f0aef304996cf250b31b5a10dee7980c85da9d759361292b8bca5a18f06" dependencies = [ - "strum_macros", + "strum_macros 0.26.4", +] + +[[package]] +name = "strum_macros" +version = "0.24.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e385be0d24f186b4ce2f9982191e7101bb737312ad61c1f2f984f34bcf85d59" +dependencies = [ + "heck 0.4.1", + "proc-macro2 1.0.86", + "quote 1.0.37", + "rustversion", + "syn 1.0.109", ] [[package]] @@ -7856,20 +8837,20 @@ dependencies = [ "pin-project-lite", "thiserror", "tokio", - "tokio-rustls", + "tokio-rustls 0.26.0", ] [[package]] name = "tokio" -version = "1.39.3" +version = "1.41.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9babc99b9923bfa4804bd74722ff02c0381021eafa4db9949217e3be8e84fff5" +checksum = "22cfb5bee7a6a52939ca9224d6ac897bb669134078daa8735560897f69de4d33" dependencies = [ "backtrace", "bytes", "libc", "mio 1.0.2", - 
"parking_lot", + "parking_lot 0.12.3", "pin-project-lite", "signal-hook-registry", "socket2", @@ -7900,19 +8881,29 @@ dependencies = [ [[package]] name = "tokio-rustls" -version = "0.26.0" +version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c7bc40d0e5a97695bb96e27995cd3a08538541b0a846f65bba7a359f36700d4" +checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" dependencies = [ - "rustls", - "rustls-pki-types", + "rustls 0.21.12", "tokio", ] [[package]] -name = "tokio-stream" -version = "0.1.15" -source = "registry+https://github.com/rust-lang/crates.io-index" +name = "tokio-rustls" +version = "0.26.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c7bc40d0e5a97695bb96e27995cd3a08538541b0a846f65bba7a359f36700d4" +dependencies = [ + "rustls 0.23.12", + "rustls-pki-types", + "tokio", +] + +[[package]] +name = "tokio-stream" +version = "0.1.15" +source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "267ac89e0bec6e691e5813911606935d77c476ff49024f98abcea3e7b15e37af" dependencies = [ "futures-core", @@ -7935,6 +8926,15 @@ dependencies = [ "tokio", ] +[[package]] +name = "toml" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4f7f0dd8d50a853a531c426359045b1998f04219d88799810762cd4ad314234" +dependencies = [ + "serde", +] + [[package]] name = "toml" version = "0.8.19" @@ -7962,7 +8962,7 @@ version = "0.19.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" dependencies = [ - "indexmap 2.4.0", + "indexmap 2.6.0", "toml_datetime", "winnow 0.5.40", ] @@ -7973,7 +8973,7 @@ version = "0.22.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "583c44c02ad26b0c3f3066fe629275e50627026c51ac2e595cca4c230ce1ce1d" dependencies = [ - "indexmap 2.4.0", + "indexmap 2.6.0", "serde", 
"serde_spanned", "toml_datetime", @@ -7988,7 +8988,7 @@ checksum = "c6f6ba989e4b2c58ae83d862d3a3e27690b6e3ae630d0deb59f3697f32aa88ad" dependencies = [ "async-stream", "async-trait", - "axum", + "axum 0.7.9", "base64 0.22.1", "bytes", "h2 0.4.6", @@ -8004,7 +9004,7 @@ dependencies = [ "socket2", "tokio", "tokio-stream", - "tower", + "tower 0.4.13", "tower-layer", "tower-service", "tracing", @@ -8030,6 +9030,22 @@ dependencies = [ "tracing", ] +[[package]] +name = "tower" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9" +dependencies = [ + "futures-core", + "futures-util", + "pin-project-lite", + "sync_wrapper 1.0.1", + "tokio", + "tower-layer", + "tower-service", + "tracing", +] + [[package]] name = "tower-http" version = "0.5.2" @@ -8092,6 +9108,16 @@ dependencies = [ "valuable", ] +[[package]] +name = "tracing-futures" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97d095ae15e245a057c8e8451bab9b3ee1e1f68e9ba2b4fbc18d0ac5237835f2" +dependencies = [ + "pin-project", + "tracing", +] + [[package]] name = "tracing-log" version = "0.2.0" @@ -8176,9 +9202,15 @@ dependencies = [ "serde_derive", "serde_json", "termcolor", - "toml", + "toml 0.8.19", ] +[[package]] +name = "typemap_rev" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74b08b0c1257381af16a5c3605254d529d3e7e109f3c62befc5d168968192998" + [[package]] name = "typenum" version = "1.17.0" @@ -8365,12 +9397,23 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" +[[package]] +name = "uuid" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc5cf98d8186244414c848017f0e2676b3fcb46807f6668a97dfe67359a3c4b7" +dependencies = [ + "getrandom", + "serde", +] 
+ [[package]] name = "uuid" version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "81dfa00651efa65069b0b6b651f4aaa31ba9e3c3ce0137aaad053604ee7e0314" dependencies = [ + "getrandom", "serde", ] @@ -8420,10 +9463,12 @@ dependencies = [ "bincode", "bitcoin", "bitcoincore-rpc", + "futures 0.3.30", "hex", "inquire", "lazy_static", "mockall", + "musig2", "rand 0.8.5", "reqwest 0.12.7", "secp256k1 0.29.0", @@ -8432,15 +9477,259 @@ dependencies = [ "thiserror", "tokio", "tracing", - "tracing-subscriber", - "zksync_basic_types", + "tracing-subscriber", + "zksync_basic_types", + "zksync_config", + "zksync_object_store", + "zksync_types", +] + +[[package]] +name = "via_btc_sender" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-trait", + "bincode", + "bitcoin", + "chrono", + "hex", + "thiserror", + "tokio", + "tracing", + "via_btc_client", + "via_da_dispatcher", + "zksync_config", + "zksync_contracts", + "zksync_dal", + "zksync_l1_contract_interface", + "zksync_node_test_utils", + "zksync_object_store", + "zksync_types", +] + +[[package]] +name = "via_btc_watch" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-trait", + "sqlx", + "thiserror", + "tokio", + "tracing", + "via_btc_client", + "vise", + "zksync_config", + "zksync_dal", + "zksync_shared_metrics", + "zksync_types", +] + +[[package]] +name = "via_da_client" +version = "0.1.0" +dependencies = [ + "anyhow", + "byteorder", + "hex", + "rand 0.8.5", + "zksync_types", + "zksync_utils", +] + +[[package]] +name = "via_da_clients" +version = "0.1.0" +dependencies = [ + "anyhow", + "assert_matches", + "async-trait", + "celestia-rpc", + "celestia-types", + "hex", + "pretty_assertions", + "serde", + "tokio", + "zksync_config", + "zksync_da_client", + "zksync_env_config", + "zksync_node_framework", + "zksync_object_store", + "zksync_types", +] + +[[package]] +name = "via_da_dispatcher" +version = "0.1.0" +dependencies = [ + "anyhow", + "bincode", + "chrono", + "futures 
0.3.30", + "rand 0.8.5", + "tokio", + "tracing", + "vise", + "zksync_config", + "zksync_da_client", + "zksync_dal", + "zksync_l1_contract_interface", + "zksync_object_store", + "zksync_prover_interface", + "zksync_types", + "zksync_utils", +] + +[[package]] +name = "via_fee_model" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-trait", + "tokio", + "tracing", + "via_btc_client", + "vise", + "zksync_config", + "zksync_dal", + "zksync_node_fee_model", + "zksync_types", + "zksync_utils", +] + +[[package]] +name = "via_musig2" +version = "0.1.0" +dependencies = [ + "anyhow", + "axum 0.6.20", + "base64 0.21.7", + "bitcoin", + "bitcoincore-rpc", + "hex", + "hyper 0.14.30", + "musig2", + "rand 0.8.5", + "reqwest 0.12.7", + "secp256k1 0.30.0", + "serde", + "serde_json", + "tokio", + "tracing", + "tracing-subscriber", + "uuid 1.10.0", + "via_btc_client", +] + +[[package]] +name = "via_server" +version = "0.1.0" +dependencies = [ + "anyhow", + "clap 4.5.16", + "tikv-jemallocator", + "tokio", + "tracing", + "via_da_clients", + "zksync_config", + "zksync_core_leftovers", + "zksync_env_config", + "zksync_metadata_calculator", + "zksync_node_api_server", + "zksync_node_framework", + "zksync_protobuf_config", + "zksync_storage", + "zksync_types", + "zksync_utils", + "zksync_vlog", +] + +[[package]] +name = "via_state_keeper" +version = "0.1.0" +dependencies = [ + "anyhow", + "assert_matches", + "async-trait", + "futures 0.3.30", + "hex", + "itertools 0.10.5", + "once_cell", + "tempfile", + "test-casing", + "thiserror", + "tokio", + "tracing", + "via_btc_client", + "via_fee_model", + "vise", + "zksync_config", + "zksync_contracts", + "zksync_dal", + "zksync_mempool", + "zksync_multivm", + "zksync_node_genesis", + "zksync_node_test_utils", + "zksync_protobuf", + "zksync_shared_metrics", + "zksync_state", + "zksync_storage", + "zksync_system_constants", + "zksync_test_account", + "zksync_types", + "zksync_utils", + "zksync_vm_utils", +] + +[[package]] +name = 
"via_verification" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-trait", + "circuit_definitions", + "clap 4.5.16", + "ethers", + "hex", + "once_cell", + "primitive-types", + "reqwest 0.12.7", + "serde", + "serde_json", + "sha3 0.10.8", + "thiserror", + "tokio", + "tracing", + "tracing-subscriber", + "zksync_types", +] + +[[package]] +name = "via_verifier" +version = "0.1.0" +dependencies = [ + "anyhow", + "clap 4.5.16", + "tikv-jemallocator", + "tokio", + "tracing", + "via_da_clients", "zksync_config", - "zksync_object_store", + "zksync_core_leftovers", + "zksync_env_config", + "zksync_metadata_calculator", + "zksync_node_api_server", + "zksync_node_framework", + "zksync_protobuf_config", + "zksync_storage", "zksync_types", + "zksync_utils", + "zksync_vlog", ] [[package]] -name = "via_btc_sender" +name = "via_verifier_btc_sender" version = "0.1.0" dependencies = [ "anyhow", @@ -8454,9 +9743,9 @@ dependencies = [ "tracing", "via_btc_client", "via_da_dispatcher", + "via_verifier_dal", "zksync_config", "zksync_contracts", - "zksync_dal", "zksync_l1_contract_interface", "zksync_node_test_utils", "zksync_object_store", @@ -8464,136 +9753,115 @@ dependencies = [ ] [[package]] -name = "via_btc_watch" +name = "via_verifier_btc_watch" version = "0.1.0" dependencies = [ "anyhow", "async-trait", + "sqlx", "thiserror", "tokio", "tracing", "via_btc_client", + "via_verifier_dal", "vise", - "zksync_dal", + "zksync_config", "zksync_shared_metrics", "zksync_types", ] [[package]] -name = "via_da_clients" +name = "via_verifier_dal" version = "0.1.0" dependencies = [ "anyhow", - "assert_matches", - "async-trait", - "celestia-rpc", - "celestia-types", - "hex", - "pretty_assertions", - "serde", + "bitcoin", + "rand 0.8.5", + "sqlx", + "strum 0.26.3", + "thiserror", "tokio", - "zksync_config", - "zksync_da_client", - "zksync_env_config", - "zksync_node_framework", - "zksync_object_store", + "zksync_basic_types", + "zksync_db_connection", "zksync_types", ] [[package]] -name = 
"via_da_dispatcher" +name = "via_withdrawal_client" version = "0.1.0" dependencies = [ "anyhow", - "bincode", - "chrono", - "futures 0.3.30", - "rand 0.8.5", + "bitcoin", + "dotenv", + "hex", "tokio", "tracing", - "vise", + "tracing-subscriber", + "via_btc_client", + "via_da_client", + "via_da_clients", + "zksync_basic_types", "zksync_config", "zksync_da_client", "zksync_dal", - "zksync_l1_contract_interface", - "zksync_object_store", - "zksync_prover_interface", "zksync_types", "zksync_utils", ] [[package]] -name = "via_fee_model" +name = "via_withdrawal_service" version = "0.1.0" dependencies = [ "anyhow", - "async-trait", + "axum 0.7.9", + "base64 0.21.7", + "bitcoin", + "chrono", + "hex", + "musig2", + "reqwest 0.12.7", + "secp256k1 0.30.0", + "serde", + "serde_json", + "sha2 0.10.8", + "thiserror", "tokio", + "tower 0.4.13", + "tower-http", "tracing", + "uuid 1.10.0", + "via_btc_client", + "via_musig2", + "via_verifier_dal", + "via_withdrawal_client", "vise", "zksync_config", - "zksync_node_fee_model", - "zksync_types", - "zksync_utils", -] - -[[package]] -name = "via_server" -version = "0.1.0" -dependencies = [ - "anyhow", - "clap 4.5.16", - "tikv-jemallocator", - "tokio", - "tracing", - "via_da_clients", - "zksync_config", - "zksync_core_leftovers", - "zksync_env_config", - "zksync_metadata_calculator", - "zksync_node_api_server", - "zksync_node_framework", - "zksync_protobuf_config", - "zksync_storage", "zksync_types", - "zksync_utils", - "zksync_vlog", ] [[package]] -name = "via_state_keeper" +name = "via_zk_verifier" version = "0.1.0" dependencies = [ "anyhow", - "assert_matches", "async-trait", - "futures 0.3.30", - "hex", - "itertools 0.10.5", - "once_cell", - "tempfile", - "test-casing", + "bincode", + "serde", + "sqlx", "thiserror", "tokio", "tracing", - "via_fee_model", + "via_btc_client", + "via_da_client", + "via_verification", + "via_verifier_dal", "vise", "zksync_config", - "zksync_contracts", + "zksync_da_client", "zksync_dal", - 
"zksync_mempool", - "zksync_multivm", - "zksync_node_genesis", - "zksync_node_test_utils", - "zksync_protobuf", + "zksync_prover_interface", "zksync_shared_metrics", - "zksync_state", - "zksync_storage", - "zksync_system_constants", - "zksync_test_account", "zksync_types", - "zksync_utils", - "zksync_vm_utils", ] [[package]] @@ -8780,6 +10048,21 @@ dependencies = [ "web-sys", ] +[[package]] +name = "wasm-timer" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be0ecb0db480561e9a7642b5d3e4187c128914e58aa84330b9493e3eb68c5e7f" +dependencies = [ + "futures 0.3.30", + "js-sys", + "parking_lot 0.11.2", + "pin-utils", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + [[package]] name = "web-sys" version = "0.3.70" @@ -8800,6 +10083,12 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "webpki-roots" +version = "0.25.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f20c57d8d7db6d3b86154206ae5d8fba62dd39573114de97c2cb0578251f8e1" + [[package]] name = "webpki-roots" version = "0.26.3" @@ -8853,7 +10142,7 @@ version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" dependencies = [ - "windows-sys 0.48.0", + "windows-sys 0.59.0", ] [[package]] @@ -9077,6 +10366,25 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "ws_stream_wasm" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7999f5f4217fe3818726b66257a4475f71e74ffd190776ad053fa159e50737f5" +dependencies = [ + "async_io_stream", + "futures 0.3.30", + "js-sys", + "log", + "pharos", + "rustc_version 0.4.0", + "send_wrapper 0.6.0", + "thiserror", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + [[package]] name = "wyz" version = "0.5.1" @@ -9214,6 +10522,20 @@ dependencies = [ "zk_evm_abstractions 0.150.4", ] +[[package]] +name = "zk_evm" +version = 
"0.150.16" +source = "git+https://github.com/matter-labs/zksync-protocol.git?tag=v0.150.16#fb09cb3bd949af68572d98886905f55e9ae77eaf" +dependencies = [ + "anyhow", + "lazy_static", + "num", + "serde", + "serde_json", + "static_assertions", + "zk_evm_abstractions 0.150.16", +] + [[package]] name = "zk_evm_abstractions" version = "0.140.0" @@ -9253,6 +10575,18 @@ dependencies = [ "zkevm_opcode_defs 0.150.4", ] +[[package]] +name = "zk_evm_abstractions" +version = "0.150.16" +source = "git+https://github.com/matter-labs/zksync-protocol.git?tag=v0.150.16#fb09cb3bd949af68572d98886905f55e9ae77eaf" +dependencies = [ + "anyhow", + "num_enum 0.6.1", + "serde", + "static_assertions", + "zkevm_opcode_defs 0.150.16", +] + [[package]] name = "zkevm_circuits" version = "0.140.2" @@ -9261,7 +10595,7 @@ checksum = "8beed4cc1ab1f9d99a694506d18705e10059534b30742832be49637c4775e1f8" dependencies = [ "arrayvec 0.7.6", "bincode", - "boojum", + "boojum 0.30.9", "cs_derive", "derivative", "hex", @@ -9283,7 +10617,7 @@ checksum = "20f1a64d256cc5f5c58d19cf976cb45973df54e4e3010ca4a3e6fafe9f06075e" dependencies = [ "arrayvec 0.7.6", "bincode", - "boojum", + "boojum 0.30.9", "cs_derive", "derivative", "hex", @@ -9304,7 +10638,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "abdfaa95dfe0878fda219dd17a6cc8c28711e2067785910c0e06d3ffdca78629" dependencies = [ "arrayvec 0.7.6", - "boojum", + "boojum 0.2.2", "cs_derive", "derivative", "hex", @@ -9317,6 +10651,25 @@ dependencies = [ "zkevm_opcode_defs 0.150.4", ] +[[package]] +name = "zkevm_circuits" +version = "0.150.16" +source = "git+https://github.com/matter-labs/zksync-protocol.git?tag=v0.150.16#fb09cb3bd949af68572d98886905f55e9ae77eaf" +dependencies = [ + "arrayvec 0.7.6", + "boojum 0.30.9", + "derivative", + "hex", + "itertools 0.10.5", + "rand 0.4.6", + "rand 0.8.5", + "seq-macro", + "serde", + "smallvec", + "zkevm_opcode_defs 0.150.16", + "zksync_cs_derive", +] + [[package]] name = "zkevm_opcode_defs" version 
= "0.131.0" @@ -9376,6 +10729,22 @@ dependencies = [ "sha3 0.10.8", ] +[[package]] +name = "zkevm_opcode_defs" +version = "0.150.16" +source = "git+https://github.com/matter-labs/zksync-protocol.git?tag=v0.150.16#fb09cb3bd949af68572d98886905f55e9ae77eaf" +dependencies = [ + "bitflags 2.6.0", + "blake2 0.10.6", + "ethereum-types", + "k256 0.13.3", + "lazy_static", + "p256", + "serde", + "sha2 0.10.8", + "sha3 0.10.8", +] + [[package]] name = "zksync_base_token_adjuster" version = "0.1.0" @@ -9411,12 +10780,35 @@ dependencies = [ "serde", "serde_json", "serde_with", - "strum", + "strum 0.26.3", "thiserror", "tiny-keccak 2.0.2", "url", ] +[[package]] +name = "zksync_bellman" +version = "0.30.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38fc1a740dfba0dd5e90a2689060910966c9c5ca837520428962ffe972223a61" +dependencies = [ + "arrayvec 0.7.6", + "bit-vec", + "blake2s_simd", + "byteorder", + "cfg-if 1.0.0", + "crossbeam 0.8.4", + "futures 0.3.30", + "hex", + "lazy_static", + "num_cpus", + "rand 0.4.6", + "serde", + "smallvec", + "tiny-keccak 1.5.0", + "zksync_pairing", +] + [[package]] name = "zksync_block_reverter" version = "0.1.0" @@ -9615,7 +11007,7 @@ dependencies = [ "thiserror", "tls-listener", "tokio", - "tokio-rustls", + "tokio-rustls 0.26.0", "tracing", "vise", "zksync_concurrency", @@ -9712,7 +11104,7 @@ name = "zksync_contract_verification_server" version = "0.1.0" dependencies = [ "anyhow", - "axum", + "axum 0.7.9", "serde", "serde_json", "tokio", @@ -9817,6 +11209,18 @@ dependencies = [ "zksync_utils", ] +[[package]] +name = "zksync_cs_derive" +version = "0.30.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1326884175159e4b8138feb900d359f382e78a26344434fc9713b4cfb4f601bf" +dependencies = [ + "proc-macro-error", + "proc-macro2 1.0.86", + "quote 1.0.37", + "syn 1.0.109", +] + [[package]] name = "zksync_da_client" version = "0.1.0" @@ -9863,7 +11267,7 @@ dependencies = [ "serde", "serde_json", 
"sqlx", - "strum", + "strum 0.26.3", "thiserror", "tokio", "tracing", @@ -10091,7 +11495,7 @@ name = "zksync_external_proof_integration_api" version = "0.1.0" dependencies = [ "anyhow", - "axum", + "axum 0.7.9", "bincode", "tokio", "tracing", @@ -10103,6 +11507,34 @@ dependencies = [ "zksync_prover_interface", ] +[[package]] +name = "zksync_ff" +version = "0.30.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2c1b1fc4f6d81f26b4e17506306153f355b50cd7aa3f324344a531aaf1cbcdd" +dependencies = [ + "byteorder", + "hex", + "rand 0.4.6", + "serde", + "zksync_ff_derive", +] + +[[package]] +name = "zksync_ff_derive" +version = "0.30.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24682c5be0f3983dc85832d7b0c7901b5668494417c7fcb21613cbee72df067c" +dependencies = [ + "num-bigint 0.4.6", + "num-integer", + "num-traits", + "proc-macro2 1.0.86", + "quote 1.0.37", + "serde", + "syn 1.0.109", +] + [[package]] name = "zksync_health_check" version = "0.1.0" @@ -10140,7 +11572,7 @@ version = "0.150.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9949f48ea1a9f9a0e73242d4d1e87e681095181827486b3fcc2cf93e5aa03280" dependencies = [ - "boojum", + "boojum 0.2.2", "derivative", "hex", "once_cell", @@ -10225,7 +11657,7 @@ dependencies = [ "anyhow", "assert_matches", "async-trait", - "axum", + "axum 0.7.9", "futures 0.3.30", "itertools 0.10.5", "once_cell", @@ -10305,7 +11737,7 @@ dependencies = [ "anyhow", "assert_matches", "async-trait", - "axum", + "axum 0.7.9", "chrono", "futures 0.3.30", "governor", @@ -10318,12 +11750,12 @@ dependencies = [ "rand 0.8.5", "serde", "serde_json", - "strum", + "strum 0.26.3", "test-casing", "thiserror", "thread_local", "tokio", - "tower", + "tower 0.4.13", "tower-http", "tracing", "vise", @@ -10452,6 +11884,12 @@ dependencies = [ "via_da_dispatcher", "via_fee_model", "via_state_keeper", + "via_verifier_btc_sender", + "via_verifier_btc_watch", + 
"via_verifier_dal", + "via_withdrawal_client", + "via_withdrawal_service", + "via_zk_verifier", "zksync_base_token_adjuster", "zksync_block_reverter", "zksync_circuit_breaker", @@ -10625,17 +12063,30 @@ dependencies = [ "zksync_types", ] +[[package]] +name = "zksync_pairing" +version = "0.30.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "835ac4c2544a9810c6c2cde05c536c5a8876e92e3c9a10bdff99ef784d404813" +dependencies = [ + "byteorder", + "cfg-if 1.0.0", + "rand 0.4.6", + "serde", + "zksync_ff", +] + [[package]] name = "zksync_proof_data_handler" version = "0.1.0" dependencies = [ "anyhow", - "axum", + "axum 0.7.9", "chrono", "hyper 1.4.1", "serde_json", "tokio", - "tower", + "tower 0.4.13", "tracing", "vise", "zksync_basic_types", @@ -10711,7 +12162,7 @@ name = "zksync_prover_dal" version = "0.1.0" dependencies = [ "sqlx", - "strum", + "strum 0.26.3", "zksync_basic_types", "zksync_db_connection", ] @@ -10726,7 +12177,7 @@ dependencies = [ "serde", "serde_json", "serde_with", - "strum", + "strum 0.26.3", "tokio", "zksync_multivm", "zksync_object_store", @@ -10838,11 +12289,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6bac71750012656b207e8cdb67415823318909077d8c8e235111f0d2feeeeeda" dependencies = [ "ethereum-types", - "franklin-crypto", + "franklin-crypto 0.1.0", "handlebars", "hex", "paste", - "rescue_poseidon", + "rescue_poseidon 0.4.1", "serde", "serde_derive", "serde_json", @@ -11022,7 +12473,7 @@ dependencies = [ "bitcoin", "blake2 0.10.6", "chrono", - "derive_more", + "derive_more 1.0.0-beta.6", "hex", "itertools 0.10.5", "num", @@ -11033,7 +12484,7 @@ dependencies = [ "secp256k1 0.27.0", "serde", "serde_json", - "strum", + "strum 0.26.3", "thiserror", "tokio", "tracing", @@ -11169,7 +12620,7 @@ dependencies = [ "pin-project-lite", "rand 0.8.5", "rlp", - "rustls", + "rustls 0.23.12", "serde", "serde_json", "test-casing", diff --git a/Cargo.toml b/Cargo.toml index ffc0077d5..b2ff737b8 
100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -91,7 +91,19 @@ members = [ "core/node/via_btc_sender", "core/node/via_fee_model", "core/node/via_state_keeper", + "core/lib/via_da_clients", + # VIA Verifier + "via_verifier/bin/verifier_server", + "via_verifier/lib/via_withdrawal_client", + "via_verifier/lib/via_verification", + "via_verifier/lib/via_musig2", + "via_verifier/lib/verifier_dal", + "via_verifier/node/withdrawal_service", + "via_verifier/node/via_zk_verifier", + "via_verifier/node/via_btc_watch", + "via_verifier/node/via_btc_sender", + "via_verifier/lib/via_da_client", ] resolver = "2" @@ -321,3 +333,15 @@ via_da_dispatcher = { version = "0.1.0", path = "core/node/via_da_dispatcher" } via_btc_sender = { version = "0.1.0", path = "core/node/via_btc_sender" } via_fee_model = { version = "0.1.0", path = "core/node/via_fee_model" } via_state_keeper = { version = "0.1.0", path = "core/node/via_state_keeper" } + + +# VIA Verifier +via_withdrawal_client = { version = "0.1.0", path = "via_verifier/lib/via_withdrawal_client" } +via_verification = { version = "0.1.0", path = "via_verifier/lib/via_verification" } +via_musig2 = { version = "0.1.0", path = "via_verifier/lib/via_musig2" } +via_verifier_dal = { version = "0.1.0", path = "via_verifier/lib/verifier_dal" } +via_da_client = { version = "0.1.0", path = "via_verifier/lib/via_da_client" } +via_withdrawal_service = { version = "0.1.0", path = "via_verifier/node/withdrawal_service" } +via_zk_verifier = { version = "0.1.0", path = "via_verifier/node/via_zk_verifier" } +via_verifier_btc_watch = { version = "0.1.0", path = "via_verifier/node/via_btc_watch" } +via_verifier_btc_sender = { version = "0.1.0", path = "via_verifier/node/via_btc_sender" } diff --git a/Makefile b/Makefile index de12e5278..d1198c0a8 100644 --- a/Makefile +++ b/Makefile @@ -5,6 +5,29 @@ RESET = \033[0m # CLI tool CLI_TOOL = via + +CMD := $(firstword $(MAKECMDGOALS)) +VIA_ENV ?= via +DIFF ?= 0 +MODE ?= sequencer + +# Select the via env +ifeq 
($(CMD), via-verifier) + VIA_ENV := via_verifier + DIFF := 1 + MODE := verifier +else ifeq ($(CMD), via-restart) + VIA_ENV := via +else ifeq ($(CMD), via-restart-verifier) + VIA_ENV := via_verifier +else ifeq ($(CMD), via-coordinator) + VIA_ENV := via_coordinator + DIFF := 2 + MODE := coordinator +else ifeq ($(CMD), via-restart-coordinator) + VIA_ENV := via_coordinator +endif + # Default target: Show help message .PHONY: help help: @@ -38,13 +61,37 @@ help: # Default target: Redirect to help .DEFAULT_GOAL := help +# Restart the sequence +.PHONY: via-restart +via-restart: env-soft server + # Run the basic setup workflow in sequence .PHONY: via -via: env config init transactions celestia bootstrap server-genesis server +via: base transactions celestia bootstrap server-genesis server # Run the full setup workflow in sequence .PHONY: all -all: env config init transactions celestia btc-explorer bootstrap server-genesis server +all: base transactions celestia btc-explorer bootstrap server-genesis server + +# Run the basic setup workflow in verifier +.PHONY: via-verifier +via-verifier: base celestia verifier + +# Restart the verifier +.PHONY: via-restart-verifier +via-restart-verifier: env-soft verifier + +# Run the basic setup workflow for the coordinator +.PHONY: via-coordinator +via-coordinator: base celestia verifier + +# Restart the coordinator +.PHONY: via-restart-coordinator +via-restart-coordinator: env-soft verifier + +# Run minimal required setup +.PHONY: base +base: env config init # Run 'via env via' .PHONY: env @@ -52,7 +99,15 @@ env: @echo "------------------------------------------------------------------------------------" @echo "$(YELLOW)Setting the environment...$(RESET)" @echo "------------------------------------------------------------------------------------" - @$(CLI_TOOL) env via + @$(CLI_TOOL) env ${VIA_ENV} + +# Run 'via env via --soft' +.PHONY: env-soft +env-soft: + @echo 
"------------------------------------------------------------------------------------" + @echo "$(YELLOW)Setting the environment...$(RESET)" + @echo "------------------------------------------------------------------------------------" + @$(CLI_TOOL) env ${VIA_ENV} --soft # Run 'via config compile' .PHONY: config @@ -60,7 +115,7 @@ config: @echo "------------------------------------------------------------------------------------" @echo "$(YELLOW)Creating environment configuration file...$(RESET)" @echo "------------------------------------------------------------------------------------" - @$(CLI_TOOL) config compile + @$(CLI_TOOL) config compile ${VIA_ENV} ${DIFF} # Run 'via init' .PHONY: init @@ -68,7 +123,7 @@ init: @echo "------------------------------------------------------------------------------------" @echo "$(YELLOW)Initializing the project...$(RESET)" @echo "------------------------------------------------------------------------------------" - @$(CLI_TOOL) init + @$(CLI_TOOL) init --mode ${MODE} # Run 'via transactions' .PHONY: transactions @@ -118,6 +173,14 @@ server: @echo "------------------------------------------------------------------------------------" @$(CLI_TOOL) server +# Run 'via verifier' +.PHONY: verifier +verifier: + @echo "------------------------------------------------------------------------------------" + @echo "$(YELLOW)Running the verifier/coordinator software...$(RESET)" + @echo "------------------------------------------------------------------------------------" + @$(CLI_TOOL) verifier + # Run 'via clean' .PHONY: clean clean: diff --git a/celestia-keys/keys/keyring-test/02122b5b52d9dd736578baeeb056e4516e92a565.address b/celestia-keys/keys/keyring-test/02122b5b52d9dd736578baeeb056e4516e92a565.address new file mode 100644 index 000000000..97fe3f4b6 --- /dev/null +++ b/celestia-keys/keys/keyring-test/02122b5b52d9dd736578baeeb056e4516e92a565.address @@ -0,0 +1 @@ 
+eyJhbGciOiJQQkVTMi1IUzI1NitBMTI4S1ciLCJjcmVhdGVkIjoiMjAyNS0wMS0xNSAxNDoyMjo1MC41NDMzNTkwMDEgKzAwMDAgVVRDIG09KzAuMDU4MzU0MzM0IiwiZW5jIjoiQTI1NkdDTSIsInAyYyI6ODE5MiwicDJzIjoiQ1h1eFoxa1gxcTRJM1pWYyJ9.uNjZsbL9Hv8otbgUlksmWNCQFvLR6AOmas9qmny0B2TP5MJeFRWk7g.wyZbmsDqzNCv1H5M.CUcs61H8OmnLsY-jxmel8zhCNSXTKJsQJIrn7JBbK1XnJgpVgQ-KMkeQVp5T69GSL3KUfe2MjI2kCVeKcDytDsGy0ateEonhdO08Pg91X77phFVkBNFn1VtnCHaceyFC4zwf-zZ30gh8N4QKSuLQNdjI07BFs2q1swW-eCUF3TFryVxixyOYGpx_Juy2p_bLoyni1A4eip7I1BBvNRJ8fxYo9qqRB4mZwXIJIcxk8-3tiA.gjMUyxcmYXgIz5jnJ9gRMA \ No newline at end of file diff --git a/celestia-keys/keys/keyring-test/via.info b/celestia-keys/keys/keyring-test/via.info new file mode 100644 index 000000000..19426573a --- /dev/null +++ b/celestia-keys/keys/keyring-test/via.info @@ -0,0 +1 @@ +eyJhbGciOiJQQkVTMi1IUzI1NitBMTI4S1ciLCJjcmVhdGVkIjoiMjAyNS0wMS0xNSAxNDoyMjo1MC41NDExNTQ1MDEgKzAwMDAgVVRDIG09KzAuMDU2MTQ5ODM0IiwiZW5jIjoiQTI1NkdDTSIsInAyYyI6ODE5MiwicDJzIjoiM2M2ZE5hZGtfZ2tlYTZpeCJ9.vZYYaf-uL3peWfxb6Hn4CZesAbXSgLeYXwMaeN_0h9gFuzr4XBALXg.qo8y1jMkqcJGbhDc.8KRkaeKDNlahLgJd3mvsjLODIlcJAeMh8ZBvIVVWcdY-O-GhR_HfXFyYquWGC0pjLuJaL49WoERrRWUjU61bGpsqMS0aGDI4kp8mZUFefs7ZH0knqfHbacpqgU-5VIbvvPQ4qGlrtSZfiCMNN_h53leFzy8W3QGU1icA-J63kjl_GvVagQ02eqoX9HnzlYlWl4nUNprnK2qvaxZ-OfDCPCDpd3ol0mqnKlAuR_dOJjgJdcwpYEK7k93qUbjRdjTOJP-j_GG8OnP-TuMb-btNhi08jcVmTdfJFW3MK1XBnpDMqVz0tTm5Wk0HWC_5rhPAyamPkFWegvTMDhuq7GfAYICxIdi5Rj5A1024zQZItTCgwezPgWpYRiqVD4TcesFbUHOkDaQI5kUyozOB3CtdlBWDePhM2vnoZMRP8ODHeXg-7rDwocNdqGu0.K8T_WIUoinYUDSewZpFGMQ \ No newline at end of file diff --git a/contracts b/contracts index f5148ca1e..2fe62059d 160000 --- a/contracts +++ b/contracts @@ -1 +1 @@ -Subproject commit f5148ca1e8d7124c468295123e245426fba3c71d +Subproject commit 2fe62059d9f9ee4270e0e5ac4e1fcb8b046e8b7d diff --git a/core/bin/external_node/src/node_builder.rs b/core/bin/external_node/src/node_builder.rs index c30cc1a43..5ec8446da 100644 --- a/core/bin/external_node/src/node_builder.rs +++ 
b/core/bin/external_node/src/node_builder.rs @@ -117,6 +117,7 @@ impl ExternalNodeBuilder { server_url: Some(self.config.postgres.database_url()), server_replica_url: Some(self.config.postgres.database_url()), prover_url: None, + verifier_url: None, }; let pools_layer = PoolsLayerBuilder::empty(config, secrets) .with_master(true) diff --git a/core/bin/via_server/src/config.rs b/core/bin/via_server/src/config.rs index 60a948842..a2784ece5 100644 --- a/core/bin/via_server/src/config.rs +++ b/core/bin/via_server/src/config.rs @@ -7,11 +7,11 @@ use zksync_config::{ fri_prover_group::FriProverGroupConfig, house_keeper::HouseKeeperConfig, BasicWitnessInputProducerConfig, FriProofCompressorConfig, FriProverConfig, - FriWitnessGeneratorConfig, ObservabilityConfig, PrometheusConfig, + FriWitnessGeneratorConfig, ObservabilityConfig, PrometheusConfig, ProofDataHandlerConfig, ProtectiveReadsWriterConfig, }, - ApiConfig, DADispatcherConfig, DBConfig, ObjectStoreConfig, PostgresConfig, ViaBtcSenderConfig, - ViaBtcWatchConfig, ViaCelestiaConfig, + ApiConfig, DADispatcherConfig, DBConfig, EthConfig, GasAdjusterConfig, ObjectStoreConfig, + PostgresConfig, ViaBtcSenderConfig, ViaBtcWatchConfig, ViaCelestiaConfig, }; use zksync_core_leftovers::temp_config_store::{decode_yaml_repr, TempConfigStore}; use zksync_env_config::FromEnv; @@ -60,12 +60,12 @@ pub(crate) fn load_env_config() -> anyhow::Result { fri_witness_vector_generator: None, fri_witness_generator_config: FriWitnessGeneratorConfig::from_env().ok(), prometheus_config: PrometheusConfig::from_env().ok(), - proof_data_handler_config: None, + proof_data_handler_config: ProofDataHandlerConfig::from_env().ok(), api_config: ApiConfig::from_env().ok(), db_config: DBConfig::from_env().ok(), - eth_sender_config: None, + eth_sender_config: EthConfig::from_env().ok(), eth_watch_config: None, - gas_adjuster_config: None, + gas_adjuster_config: GasAdjusterConfig::from_env().ok(), observability: ObservabilityConfig::from_env().ok(), 
snapshot_creator: None, da_dispatcher_config: DADispatcherConfig::from_env().ok(), diff --git a/core/bin/via_server/src/node_builder.rs b/core/bin/via_server/src/node_builder.rs index 231016e0b..8aadcc5a2 100644 --- a/core/bin/via_server/src/node_builder.rs +++ b/core/bin/via_server/src/node_builder.rs @@ -14,6 +14,7 @@ use zksync_node_framework::{ circuit_breaker_checker::CircuitBreakerCheckerLayer, commitment_generator::CommitmentGeneratorLayer, healtcheck_server::HealthCheckLayer, + house_keeper::HouseKeeperLayer, logs_bloom_backfill::LogsBloomBackfillLayer, metadata_calculator::MetadataCalculatorLayer, node_storage_init::{ @@ -23,6 +24,7 @@ use zksync_node_framework::{ pools_layer::PoolsLayerBuilder, postgres_metrics::PostgresMetricsLayer, prometheus_exporter::PrometheusExporterLayer, + proof_data_handler::ProofDataHandlerLayer, query_eth_client::QueryEthClientLayer, sigint::SigintHandlerLayer, via_btc_sender::{ @@ -30,6 +32,7 @@ use zksync_node_framework::{ }, via_btc_watch::BtcWatchLayer, via_da_dispatcher::DataAvailabilityDispatcherLayer, + via_gas_adjuster::ViaGasAdjusterLayer, via_l1_gas::ViaL1GasLayer, via_state_keeper::{ main_batch_executor::MainBatchExecutorLayer, mempool_io::MempoolIOLayer, @@ -190,6 +193,16 @@ impl ViaNodeBuilder { Ok(self) } + fn add_gas_adjuster_layer(mut self) -> anyhow::Result { + let gas_adjuster_config = try_load_config!(self.configs.eth) + .gas_adjuster + .context("Via gas adjuster")?; + let btc_sender_config = try_load_config!(self.configs.via_btc_sender_config); + let gas_adjuster_layer = ViaGasAdjusterLayer::new(gas_adjuster_config, btc_sender_config); + self.node.add_layer(gas_adjuster_layer); + Ok(self) + } + fn add_l1_gas_layer(mut self) -> anyhow::Result { let state_keeper_config = try_load_config!(self.configs.state_keeper_config); let l1_gas_layer = ViaL1GasLayer::new(state_keeper_config); @@ -367,6 +380,24 @@ impl ViaNodeBuilder { Ok(self) } + fn add_house_keeper_layer(mut self) -> anyhow::Result { + let 
house_keeper_config = try_load_config!(self.configs.house_keeper_config); + let fri_prover_config = try_load_config!(self.configs.prover_config); + let fri_witness_generator_config = try_load_config!(self.configs.witness_generator_config); + let fri_prover_group_config = try_load_config!(self.configs.prover_group_config); + let fri_proof_compressor_config = try_load_config!(self.configs.proof_compressor_config); + + self.node.add_layer(HouseKeeperLayer::new( + house_keeper_config, + fri_prover_config, + fri_witness_generator_config, + fri_prover_group_config, + fri_proof_compressor_config, + )); + + Ok(self) + } + fn add_commitment_generator_layer(mut self) -> anyhow::Result { self.node.add_layer(CommitmentGeneratorLayer::new( self.genesis_config.l1_batch_commit_data_generator_mode, @@ -398,6 +429,14 @@ impl ViaNodeBuilder { Ok(self) } + fn add_proof_data_handler_layer(mut self) -> anyhow::Result { + self.node.add_layer(ProofDataHandlerLayer::new( + try_load_config!(self.configs.proof_data_handler_config), + self.genesis_config.l1_batch_commit_data_generator_mode, + )); + Ok(self) + } + /// Builds the node with the genesis initialization task only. pub fn only_genesis(mut self) -> anyhow::Result { self = self @@ -422,6 +461,7 @@ impl ViaNodeBuilder { // VIA layers .add_btc_watcher_layer()? .add_btc_sender_layer()? + .add_gas_adjuster_layer()? .add_l1_gas_layer()? .add_tx_sender_layer()? .add_api_caches_layer()? @@ -436,6 +476,8 @@ impl ViaNodeBuilder { .add_commitment_generator_layer()? .add_via_celestia_da_client_layer()? .add_da_dispatcher_layer()? + .add_proof_data_handler_layer()? + .add_house_keeper_layer()? 
.node .build()) } diff --git a/core/lib/basic_types/src/protocol_version.rs b/core/lib/basic_types/src/protocol_version.rs index 265c06987..312e8e8af 100644 --- a/core/lib/basic_types/src/protocol_version.rs +++ b/core/lib/basic_types/src/protocol_version.rs @@ -68,15 +68,16 @@ pub enum ProtocolVersionId { Version23, Version24, Version25, + Version26, } impl ProtocolVersionId { pub const fn latest() -> Self { - Self::Version24 + Self::Version26 } pub const fn next() -> Self { - Self::Version25 + Self::Version26 } pub fn try_from_packed_semver(packed_semver: U256) -> Result { @@ -120,6 +121,7 @@ impl ProtocolVersionId { ProtocolVersionId::Version23 => VmVersion::Vm1_5_0SmallBootloaderMemory, ProtocolVersionId::Version24 => VmVersion::Vm1_5_0IncreasedBootloaderMemory, ProtocolVersionId::Version25 => VmVersion::Vm1_5_0IncreasedBootloaderMemory, + ProtocolVersionId::Version26 => VmVersion::VmBitcoin1_0_0, } } @@ -270,6 +272,7 @@ impl From for VmVersion { ProtocolVersionId::Version23 => VmVersion::Vm1_5_0SmallBootloaderMemory, ProtocolVersionId::Version24 => VmVersion::Vm1_5_0IncreasedBootloaderMemory, ProtocolVersionId::Version25 => VmVersion::Vm1_5_0IncreasedBootloaderMemory, + ProtocolVersionId::Version26 => VmVersion::VmBitcoin1_0_0, } } } diff --git a/core/lib/basic_types/src/vm.rs b/core/lib/basic_types/src/vm.rs index c178c853b..2a0a33a35 100644 --- a/core/lib/basic_types/src/vm.rs +++ b/core/lib/basic_types/src/vm.rs @@ -16,12 +16,13 @@ pub enum VmVersion { Vm1_4_2, Vm1_5_0SmallBootloaderMemory, Vm1_5_0IncreasedBootloaderMemory, + VmBitcoin1_0_0, } impl VmVersion { /// Returns the latest supported VM version. 
pub const fn latest() -> VmVersion { - Self::Vm1_5_0IncreasedBootloaderMemory + Self::VmBitcoin1_0_0 } } diff --git a/core/lib/config/src/configs/mod.rs b/core/lib/config/src/configs/mod.rs index f69b9392c..8834bc1bf 100644 --- a/core/lib/config/src/configs/mod.rs +++ b/core/lib/config/src/configs/mod.rs @@ -29,9 +29,10 @@ pub use self::{ snapshots_creator::SnapshotsCreatorConfig, utils::PrometheusConfig, via_btc_sender::ViaBtcSenderConfig, - via_btc_watch::ViaBtcWatchConfig, + via_btc_watch::{ActorRole, ViaBtcWatchConfig}, via_celestia::ViaCelestiaConfig, via_general::ViaGeneralConfig, + via_verifier::ViaVerifierConfig, vm_runner::{BasicWitnessInputProducerConfig, ProtectiveReadsWriterConfig}, }; @@ -72,6 +73,7 @@ pub mod via_btc_sender; pub mod via_btc_watch; pub mod via_celestia; pub mod via_general; +pub mod via_verifier; pub mod vm_runner; pub mod wallets; diff --git a/core/lib/config/src/configs/secrets.rs b/core/lib/config/src/configs/secrets.rs index 71197f5d9..1cbdb0d86 100644 --- a/core/lib/config/src/configs/secrets.rs +++ b/core/lib/config/src/configs/secrets.rs @@ -8,6 +8,7 @@ pub struct DatabaseSecrets { pub server_url: Option, pub prover_url: Option, pub server_replica_url: Option, + pub verifier_url: Option, } #[derive(Debug, Clone, PartialEq)] @@ -41,4 +42,11 @@ impl DatabaseSecrets { pub fn prover_url(&self) -> anyhow::Result { self.prover_url.clone().context("Prover DB URL is absent") } + + /// Returns a copy of the verifier database URL as a `Result` to simplify error propagation. 
+ pub fn verifier_url(&self) -> anyhow::Result { + self.verifier_url + .clone() + .context("Verifier DB URL is absent") + } } diff --git a/core/lib/config/src/configs/via_btc_watch.rs b/core/lib/config/src/configs/via_btc_watch.rs index 0113f3cef..1fc40b355 100644 --- a/core/lib/config/src/configs/via_btc_watch.rs +++ b/core/lib/config/src/configs/via_btc_watch.rs @@ -2,7 +2,7 @@ use std::time::Duration; use serde::{Deserialize, Serialize}; -#[derive(Debug, Deserialize, Serialize, Clone, PartialEq)] +#[derive(Debug, Deserialize, Serialize, Clone, PartialEq, Copy)] pub enum ActorRole { Sequencer, Verifier, diff --git a/core/lib/config/src/configs/via_general.rs b/core/lib/config/src/configs/via_general.rs index 48680fad5..5b54d20eb 100644 --- a/core/lib/config/src/configs/via_general.rs +++ b/core/lib/config/src/configs/via_general.rs @@ -17,7 +17,7 @@ use crate::{ }, ApiConfig, ContractVerifierConfig, DBConfig, EthConfig, ExternalProofIntegrationApiConfig, ObjectStoreConfig, PostgresConfig, SnapshotsCreatorConfig, ViaBtcSenderConfig, - ViaBtcWatchConfig, ViaCelestiaConfig, + ViaBtcWatchConfig, ViaCelestiaConfig, ViaVerifierConfig, }; #[derive(Debug, Clone, PartialEq)] @@ -59,6 +59,7 @@ pub struct ViaGeneralConfig { pub via_btc_sender_config: Option, pub via_btc_watch_config: Option, pub via_celestia_config: Option, + pub via_verifier_config: Option, } impl From for ViaGeneralConfig { @@ -101,6 +102,7 @@ impl From for ViaGeneralConfig { via_btc_sender_config: None, via_btc_watch_config: None, via_celestia_config: None, + via_verifier_config: None, } } } diff --git a/core/lib/config/src/configs/via_verifier.rs b/core/lib/config/src/configs/via_verifier.rs new file mode 100644 index 000000000..636729aa7 --- /dev/null +++ b/core/lib/config/src/configs/via_verifier.rs @@ -0,0 +1,56 @@ +use std::{ + net::{IpAddr, Ipv4Addr, SocketAddr}, + time::Duration, +}; + +use serde::{Deserialize, Serialize}; + +#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] +pub enum 
VerifierMode { + VERIFIER = 0, + COORDINATOR = 1, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct ViaVerifierConfig { + /// Interval between polling db for verification requests (in ms). + pub poll_interval: u64, + /// Coordinator server port. + pub port: u16, + /// Coordinator server url. + pub url: String, + /// The signer private key. + pub private_key: String, + /// The verifiers public keys. + pub verifiers_pub_keys_str: Vec, + /// The bridge address. + pub bridge_address_str: String, + /// The minimum required signers. + pub required_signers: usize, + /// The role. + pub verifier_mode: VerifierMode, +} + +impl ViaVerifierConfig { + pub fn polling_interval(&self) -> Duration { + Duration::from_millis(self.poll_interval) + } + pub fn bind_addr(&self) -> SocketAddr { + SocketAddr::new(IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0)), self.port) + } +} + +impl ViaVerifierConfig { + pub fn for_tests() -> Self { + Self { + private_key: "private".to_string(), + poll_interval: 1000, + port: 0, + url: "".to_string(), + verifiers_pub_keys_str: Vec::new(), + bridge_address_str: "".to_string(), + required_signers: 2, + verifier_mode: VerifierMode::VERIFIER, + } + } +} diff --git a/core/lib/config/src/lib.rs b/core/lib/config/src/lib.rs index 8964a722e..6eed3401d 100644 --- a/core/lib/config/src/lib.rs +++ b/core/lib/config/src/lib.rs @@ -1,10 +1,10 @@ #![allow(clippy::upper_case_acronyms, clippy::derive_partial_eq_without_eq)] pub use crate::configs::{ - ApiConfig, BaseTokenAdjusterConfig, ContractVerifierConfig, ContractsConfig, + ActorRole, ApiConfig, BaseTokenAdjusterConfig, ContractVerifierConfig, ContractsConfig, DADispatcherConfig, DBConfig, EthConfig, EthWatchConfig, ExternalProofIntegrationApiConfig, GasAdjusterConfig, GenesisConfig, ObjectStoreConfig, PostgresConfig, SnapshotsCreatorConfig, - ViaBtcSenderConfig, ViaBtcWatchConfig, ViaCelestiaConfig, ViaGeneralConfig, + ViaBtcSenderConfig, ViaBtcWatchConfig, ViaCelestiaConfig, ViaGeneralConfig, 
ViaVerifierConfig, }; pub mod configs; diff --git a/core/lib/config/src/testonly.rs b/core/lib/config/src/testonly.rs index 2ec91f5be..5a33c4a74 100644 --- a/core/lib/config/src/testonly.rs +++ b/core/lib/config/src/testonly.rs @@ -854,6 +854,7 @@ impl Distribution for EncodeDist { server_url: Some(format!("localhost:{}", rng.gen::()).parse().unwrap()), server_replica_url: Some(format!("localhost:{}", rng.gen::()).parse().unwrap()), prover_url: Some(format!("localhost:{}", rng.gen::()).parse().unwrap()), + verifier_url: Some(format!("localhost:{}", rng.gen::()).parse().unwrap()), } } } diff --git a/core/lib/contracts/src/lib.rs b/core/lib/contracts/src/lib.rs index a7ef0e5b2..3298c4a90 100644 --- a/core/lib/contracts/src/lib.rs +++ b/core/lib/contracts/src/lib.rs @@ -399,6 +399,13 @@ impl BaseSystemContracts { BaseSystemContracts::load_with_bootloader(bootloader_bytecode) } + pub fn playground_bitcoin_1_0_0() -> Self { + let bootloader_bytecode = read_zbin_bytecode( + "etc/multivm_bootloaders/vm_bitcoin/playground_batch.yul/playground_batch.yul.zbin", + ); + BaseSystemContracts::load_with_bootloader(bootloader_bytecode) + } + pub fn estimate_gas_pre_virtual_blocks() -> Self { let bootloader_bytecode = read_zbin_bytecode( "etc/multivm_bootloaders/vm_1_3_2/fee_estimate.yul/fee_estimate.yul.zbin", @@ -462,6 +469,13 @@ impl BaseSystemContracts { BaseSystemContracts::load_with_bootloader(bootloader_bytecode) } + pub fn estimate_gas_bitcoin_1_0_0() -> Self { + let bootloader_bytecode = read_zbin_bytecode( + "etc/multivm_bootloaders/vm_bitcoin/fee_estimate.yul/fee_estimate.yul.zbin", + ); + BaseSystemContracts::load_with_bootloader(bootloader_bytecode) + } + pub fn hashes(&self) -> BaseSystemContractsHashes { BaseSystemContractsHashes { bootloader: self.bootloader.hash, diff --git a/core/lib/dal/.sqlx/query-2108acea7492e6704849a80b866e380dd27b78499c7cef2a267147c7328263fb.json b/core/lib/dal/.sqlx/query-2108acea7492e6704849a80b866e380dd27b78499c7cef2a267147c7328263fb.json 
new file mode 100644 index 000000000..666dd19ff --- /dev/null +++ b/core/lib/dal/.sqlx/query-2108acea7492e6704849a80b866e380dd27b78499c7cef2a267147c7328263fb.json @@ -0,0 +1,40 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n l1_batch_number,\n blob_id,\n inclusion_data,\n sent_at\n FROM\n via_data_availability\n WHERE\n inclusion_data IS NOT NULL\n AND is_proof = FALSE\n AND l1_batch_number = $1\n LIMIT\n 1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "l1_batch_number", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "blob_id", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "inclusion_data", + "type_info": "Bytea" + }, + { + "ordinal": 3, + "name": "sent_at", + "type_info": "Timestamp" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + false, + false, + true, + false + ] + }, + "hash": "2108acea7492e6704849a80b866e380dd27b78499c7cef2a267147c7328263fb" +} diff --git a/core/lib/dal/.sqlx/query-32a1a262d383bab472b28dedbde93c3a0a389b0c96da70be09697d71874f49f4.json b/core/lib/dal/.sqlx/query-32a1a262d383bab472b28dedbde93c3a0a389b0c96da70be09697d71874f49f4.json new file mode 100644 index 000000000..1a6e3e10c --- /dev/null +++ b/core/lib/dal/.sqlx/query-32a1a262d383bab472b28dedbde93c3a0a389b0c96da70be09697d71874f49f4.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE via_votable_transactions\n SET\n withdrawal_tx_id = $1\n WHERE\n is_finalized = TRUE\n AND is_verified = TRUE\n AND withdrawal_tx_id IS NULL\n AND l1_batch_number = $2\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Bytea", + "Int8" + ] + }, + "nullable": [] + }, + "hash": "32a1a262d383bab472b28dedbde93c3a0a389b0c96da70be09697d71874f49f4" +} diff --git a/core/lib/dal/.sqlx/query-42d4dac4050c296b9f1804dcf6b0073159cb97bf34ebf2740e3f40b863515cce.json b/core/lib/dal/.sqlx/query-42d4dac4050c296b9f1804dcf6b0073159cb97bf34ebf2740e3f40b863515cce.json new file mode 100644 index 000000000..2dedade5f 
--- /dev/null +++ b/core/lib/dal/.sqlx/query-42d4dac4050c296b9f1804dcf6b0073159cb97bf34ebf2740e3f40b863515cce.json @@ -0,0 +1,29 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n COUNT(*) FILTER (\n WHERE\n vote = TRUE\n ) AS ok_votes,\n COUNT(*) AS total_votes\n FROM\n via_votes\n WHERE\n l1_batch_number = $1\n AND tx_id = $2\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "ok_votes", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "total_votes", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Int8", + "Bytea" + ] + }, + "nullable": [ + null, + null + ] + }, + "hash": "42d4dac4050c296b9f1804dcf6b0073159cb97bf34ebf2740e3f40b863515cce" +} diff --git a/core/lib/dal/.sqlx/query-6851a0c44cca95d6f0f21dddc056e1dcef9dd65f5751547525cc9b0839836da5.json b/core/lib/dal/.sqlx/query-6851a0c44cca95d6f0f21dddc056e1dcef9dd65f5751547525cc9b0839836da5.json new file mode 100644 index 000000000..6e898fb47 --- /dev/null +++ b/core/lib/dal/.sqlx/query-6851a0c44cca95d6f0f21dddc056e1dcef9dd65f5751547525cc9b0839836da5.json @@ -0,0 +1,26 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n l1_batch_number,\n tx_id\n FROM\n via_votable_transactions\n WHERE\n is_verified = FALSE\n ORDER BY\n l1_batch_number ASC\n LIMIT\n 1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "l1_batch_number", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "tx_id", + "type_info": "Bytea" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + false, + false + ] + }, + "hash": "6851a0c44cca95d6f0f21dddc056e1dcef9dd65f5751547525cc9b0839836da5" +} diff --git a/core/lib/dal/.sqlx/query-81fbd3f476db005b3260f6600aea3cdd314652a88cd6009a9fca9358b6aa4e59.json b/core/lib/dal/.sqlx/query-81fbd3f476db005b3260f6600aea3cdd314652a88cd6009a9fca9358b6aa4e59.json new file mode 100644 index 000000000..414285372 --- /dev/null +++ b/core/lib/dal/.sqlx/query-81fbd3f476db005b3260f6600aea3cdd314652a88cd6009a9fca9358b6aa4e59.json @@ -0,0 +1,20 @@ +{ + 
"db_name": "PostgreSQL", + "query": "\n SELECT\n MIN(l1_batch_number) as \"l1_batch_number\"\n FROM via_votable_transactions\n WHERE\n is_finalized = FALSE \n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "l1_batch_number", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + null + ] + }, + "hash": "81fbd3f476db005b3260f6600aea3cdd314652a88cd6009a9fca9358b6aa4e59" +} diff --git a/core/lib/dal/.sqlx/query-8958c9555cb62efc73eccc2215d7b9c39534c83e3738db90e7111345df0281a2.json b/core/lib/dal/.sqlx/query-8958c9555cb62efc73eccc2215d7b9c39534c83e3738db90e7111345df0281a2.json new file mode 100644 index 000000000..4ed034e04 --- /dev/null +++ b/core/lib/dal/.sqlx/query-8958c9555cb62efc73eccc2215d7b9c39534c83e3738db90e7111345df0281a2.json @@ -0,0 +1,22 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n withdrawal_tx_id\n FROM via_votable_transactions\n WHERE\n l1_batch_number = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "withdrawal_tx_id", + "type_info": "Bytea" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + true + ] + }, + "hash": "8958c9555cb62efc73eccc2215d7b9c39534c83e3738db90e7111345df0281a2" +} diff --git a/core/lib/dal/.sqlx/query-939ac2a27e3844db1e4606e2338a2da09357ece059755139e698acbc050a7673.json b/core/lib/dal/.sqlx/query-939ac2a27e3844db1e4606e2338a2da09357ece059755139e698acbc050a7673.json deleted file mode 100644 index 27c16afa8..000000000 --- a/core/lib/dal/.sqlx/query-939ac2a27e3844db1e4606e2338a2da09357ece059755139e698acbc050a7673.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT\n COUNT(id) AS COUNT\n FROM\n via_btc_inscriptions_request_history\n WHERE\n inscription_request_id = $1\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "count", - "type_info": "Int8" - } - ], - "parameters": { - "Left": [ - "Int8" - ] - }, - "nullable": [ - null - ] - }, - "hash": 
"939ac2a27e3844db1e4606e2338a2da09357ece059755139e698acbc050a7673" -} diff --git a/core/lib/dal/.sqlx/query-9c5faf8349565e8eedb040d4a72d8690e72af5ae37d0059be1fa43a432ec1375.json b/core/lib/dal/.sqlx/query-9c5faf8349565e8eedb040d4a72d8690e72af5ae37d0059be1fa43a432ec1375.json new file mode 100644 index 000000000..963590678 --- /dev/null +++ b/core/lib/dal/.sqlx/query-9c5faf8349565e8eedb040d4a72d8690e72af5ae37d0059be1fa43a432ec1375.json @@ -0,0 +1,28 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n l1_batch_status,\n tx_id\n FROM\n via_votable_transactions\n WHERE\n l1_batch_number = $1\n AND is_verified = TRUE\n LIMIT\n 1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "l1_batch_status", + "type_info": "Bool" + }, + { + "ordinal": 1, + "name": "tx_id", + "type_info": "Bytea" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + false, + false + ] + }, + "hash": "9c5faf8349565e8eedb040d4a72d8690e72af5ae37d0059be1fa43a432ec1375" +} diff --git a/core/lib/dal/.sqlx/query-aaed24343bdc5ea2e69e03d8e735f7f7dc59a40e03457c442d14fa5c4d37182b.json b/core/lib/dal/.sqlx/query-aaed24343bdc5ea2e69e03d8e735f7f7dc59a40e03457c442d14fa5c4d37182b.json new file mode 100644 index 000000000..3f4dc4c6e --- /dev/null +++ b/core/lib/dal/.sqlx/query-aaed24343bdc5ea2e69e03d8e735f7f7dc59a40e03457c442d14fa5c4d37182b.json @@ -0,0 +1,18 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO\n via_votable_transactions (l1_batch_number, tx_id, da_identifier, blob_id, proof_tx_id)\n VALUES\n ($1, $2, $3, $4, $5)\n ON CONFLICT (l1_batch_number, tx_id) DO NOTHING\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8", + "Bytea", + "Varchar", + "Varchar", + "Varchar" + ] + }, + "nullable": [] + }, + "hash": "aaed24343bdc5ea2e69e03d8e735f7f7dc59a40e03457c442d14fa5c4d37182b" +} diff --git a/core/lib/dal/.sqlx/query-ad6d50d69c52bacd18be392d583f6d91830eac4268a83112781a6d9c521c5be9.json 
b/core/lib/dal/.sqlx/query-ad6d50d69c52bacd18be392d583f6d91830eac4268a83112781a6d9c521c5be9.json new file mode 100644 index 000000000..5a7d4a2de --- /dev/null +++ b/core/lib/dal/.sqlx/query-ad6d50d69c52bacd18be392d583f6d91830eac4268a83112781a6d9c521c5be9.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE via_votable_transactions\n SET\n is_finalized = TRUE,\n updated_at = NOW()\n WHERE\n l1_batch_number = $1\n AND tx_id = $2\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8", + "Bytea" + ] + }, + "nullable": [] + }, + "hash": "ad6d50d69c52bacd18be392d583f6d91830eac4268a83112781a6d9c521c5be9" +} diff --git a/core/lib/dal/.sqlx/query-c00edea696d125effe23b50c656970ac92a60580e047d5a43b77e56ab5f3d2b4.json b/core/lib/dal/.sqlx/query-c00edea696d125effe23b50c656970ac92a60580e047d5a43b77e56ab5f3d2b4.json new file mode 100644 index 000000000..7fff300d2 --- /dev/null +++ b/core/lib/dal/.sqlx/query-c00edea696d125effe23b50c656970ac92a60580e047d5a43b77e56ab5f3d2b4.json @@ -0,0 +1,23 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n is_finalized\n FROM\n via_votable_transactions\n WHERE\n l1_batch_number = $1\n AND tx_id = $2\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "is_finalized", + "type_info": "Bool" + } + ], + "parameters": { + "Left": [ + "Int8", + "Bytea" + ] + }, + "nullable": [ + false + ] + }, + "hash": "c00edea696d125effe23b50c656970ac92a60580e047d5a43b77e56ab5f3d2b4" +} diff --git a/core/lib/dal/.sqlx/query-d574f696c92de6af8a597071dad510b32864e18832c7f6d4bd54c14783f54cca.json b/core/lib/dal/.sqlx/query-d574f696c92de6af8a597071dad510b32864e18832c7f6d4bd54c14783f54cca.json new file mode 100644 index 000000000..9133c893a --- /dev/null +++ b/core/lib/dal/.sqlx/query-d574f696c92de6af8a597071dad510b32864e18832c7f6d4bd54c14783f54cca.json @@ -0,0 +1,17 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO\n via_votes (l1_batch_number, tx_id, verifier_address, vote)\n VALUES\n ($1, $2, $3, 
$4)\n ON CONFLICT (l1_batch_number, tx_id, verifier_address) DO NOTHING\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8", + "Bytea", + "Text", + "Bool" + ] + }, + "nullable": [] + }, + "hash": "d574f696c92de6af8a597071dad510b32864e18832c7f6d4bd54c14783f54cca" +} diff --git a/core/lib/dal/.sqlx/query-ea244d902835a929b3871ba74a5a074871ce49f2c81be801c233032ec1d046ba.json b/core/lib/dal/.sqlx/query-ea244d902835a929b3871ba74a5a074871ce49f2c81be801c233032ec1d046ba.json new file mode 100644 index 000000000..3e41688ab --- /dev/null +++ b/core/lib/dal/.sqlx/query-ea244d902835a929b3871ba74a5a074871ce49f2c81be801c233032ec1d046ba.json @@ -0,0 +1,32 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n l1_batch_number,\n blob_id,\n proof_tx_id\n FROM\n via_votable_transactions\n WHERE\n is_finalized = TRUE\n AND is_verified = TRUE\n AND withdrawal_tx_id IS NULL\n ORDER BY\n l1_batch_number ASC\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "l1_batch_number", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "blob_id", + "type_info": "Varchar" + }, + { + "ordinal": 2, + "name": "proof_tx_id", + "type_info": "Varchar" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + false, + false, + false + ] + }, + "hash": "ea244d902835a929b3871ba74a5a074871ce49f2c81be801c233032ec1d046ba" +} diff --git a/core/lib/dal/.sqlx/query-eb4413d3d75a4a014f741d6f23486ffccdb4ea2a1495525fe76be9c1e77580c9.json b/core/lib/dal/.sqlx/query-eb4413d3d75a4a014f741d6f23486ffccdb4ea2a1495525fe76be9c1e77580c9.json new file mode 100644 index 000000000..c1fda0d3d --- /dev/null +++ b/core/lib/dal/.sqlx/query-eb4413d3d75a4a014f741d6f23486ffccdb4ea2a1495525fe76be9c1e77580c9.json @@ -0,0 +1,20 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n MAX(l1_batch_number) AS max_batch_number\n FROM\n via_votable_transactions\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "max_batch_number", + "type_info": "Int8" + } + ], + 
"parameters": { + "Left": [] + }, + "nullable": [ + null + ] + }, + "hash": "eb4413d3d75a4a014f741d6f23486ffccdb4ea2a1495525fe76be9c1e77580c9" +} diff --git a/core/lib/dal/.sqlx/query-f9bb84f6a995fd6590f96491066703fbb5f734284c380b316be30fd38aac8675.json b/core/lib/dal/.sqlx/query-f9bb84f6a995fd6590f96491066703fbb5f734284c380b316be30fd38aac8675.json new file mode 100644 index 000000000..1f2be9fbe --- /dev/null +++ b/core/lib/dal/.sqlx/query-f9bb84f6a995fd6590f96491066703fbb5f734284c380b316be30fd38aac8675.json @@ -0,0 +1,16 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE via_votable_transactions\n SET\n is_verified = TRUE,\n l1_batch_status = $3,\n updated_at = NOW()\n WHERE\n l1_batch_number = $1\n AND tx_id = $2\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8", + "Bytea", + "Bool" + ] + }, + "nullable": [] + }, + "hash": "f9bb84f6a995fd6590f96491066703fbb5f734284c380b316be30fd38aac8675" +} diff --git a/core/lib/dal/migrations/20241209150000_create_via_votes.down.sql b/core/lib/dal/migrations/20241209150000_create_via_votes.down.sql new file mode 100644 index 000000000..9bce83ce9 --- /dev/null +++ b/core/lib/dal/migrations/20241209150000_create_via_votes.down.sql @@ -0,0 +1,2 @@ +DROP TABLE IF EXISTS via_votes; +DROP TABLE IF EXISTS via_votable_transactions; diff --git a/core/lib/dal/migrations/20241209150000_create_via_votes.up.sql b/core/lib/dal/migrations/20241209150000_create_via_votes.up.sql new file mode 100644 index 000000000..1fdfa1484 --- /dev/null +++ b/core/lib/dal/migrations/20241209150000_create_via_votes.up.sql @@ -0,0 +1,25 @@ +CREATE TABLE IF NOT EXISTS via_votable_transactions ( + l1_batch_number BIGINT UNIQUE NOT NULL, + tx_id BYTEA, + da_identifier VARCHAR NOT NULL, + blob_id VARCHAR NOT NULL, + proof_tx_id VARCHAR NOT NULL, + withdrawal_tx_id BYTEA, + is_finalized BOOLEAN NOT NULL DEFAULT FALSE, + is_verified BOOLEAN NOT NULL DEFAULT FALSE, + l1_batch_status BOOLEAN NOT NULL DEFAULT FALSE, + created_at TIMESTAMP 
NOT NULL DEFAULT NOW(), + updated_at TIMESTAMP NOT NULL DEFAULT NOW(), + PRIMARY KEY (l1_batch_number, tx_id) +); + +CREATE TABLE IF NOT EXISTS via_votes ( + l1_batch_number BIGINT NOT NULL, + tx_id BYTEA NOT NULL, + verifier_address TEXT NOT NULL, + vote BOOLEAN NOT NULL, + created_at TIMESTAMP NOT NULL DEFAULT NOW(), + PRIMARY KEY (l1_batch_number, tx_id, verifier_address), + FOREIGN KEY (l1_batch_number, tx_id) REFERENCES via_votable_transactions (l1_batch_number, tx_id) ON DELETE CASCADE +); + diff --git a/core/lib/dal/src/btc_sender_dal.rs b/core/lib/dal/src/btc_sender_dal.rs index 9a37b387f..d22af890f 100644 --- a/core/lib/dal/src/btc_sender_dal.rs +++ b/core/lib/dal/src/btc_sender_dal.rs @@ -1,10 +1,7 @@ use anyhow::Context; use bitcoin::hash_types::Txid; use zksync_db_connection::connection::Connection; -use zksync_types::{ - btc_inscription_operations::ViaBtcInscriptionRequestType, - btc_sender::{ViaBtcInscriptionRequest, ViaBtcInscriptionRequestHistory}, -}; +use zksync_types::btc_sender::{ViaBtcInscriptionRequest, ViaBtcInscriptionRequestHistory}; use crate::{ models::storage_btc_inscription_request::{ @@ -21,7 +18,7 @@ pub struct ViaBtcSenderDal<'a, 'c> { impl ViaBtcSenderDal<'_, '_> { pub async fn via_save_btc_inscriptions_request( &mut self, - inscription_request_type: ViaBtcInscriptionRequestType, + inscription_request_type: String, inscription_message: Vec, predicted_fee: u64, ) -> sqlx::Result { @@ -35,7 +32,7 @@ impl ViaBtcSenderDal<'_, '_> { RETURNING * "#, - inscription_request_type.to_string(), + inscription_request_type, inscription_message, predicted_fee as i64, ) @@ -174,28 +171,6 @@ impl ViaBtcSenderDal<'_, '_> { Ok(inscription_request_history.map(ViaBtcInscriptionRequestHistory::from)) } - pub async fn get_total_inscription_request_history( - &mut self, - inscription_request_id: i64, - ) -> sqlx::Result { - let total = sqlx::query!( - r#" - SELECT - COUNT(id) AS COUNT - FROM - via_btc_inscriptions_request_history - WHERE - 
inscription_request_id = $1 - "#, - inscription_request_id - ) - .fetch_one(self.storage.conn()) - .await?; - - // Return the count or 0 if no records were found - Ok(total.count.unwrap_or(0)) - } - pub async fn confirm_inscription( &mut self, inscriptions_request_id: i64, diff --git a/core/lib/dal/src/lib.rs b/core/lib/dal/src/lib.rs index 5fe900c0c..7d914fa53 100644 --- a/core/lib/dal/src/lib.rs +++ b/core/lib/dal/src/lib.rs @@ -27,7 +27,7 @@ use crate::{ tokens_web3_dal::TokensWeb3Dal, transactions_dal::TransactionsDal, transactions_web3_dal::TransactionsWeb3Dal, via_blocks_dal::ViaBlocksDal, via_data_availability_dal::ViaDataAvailabilityDal, via_transactions_dal::ViaTransactionsDal, - vm_runner_dal::VmRunnerDal, + via_votes_dal::ViaVotesDal, vm_runner_dal::VmRunnerDal, }; pub mod base_token_dal; @@ -66,6 +66,7 @@ pub mod transactions_web3_dal; pub mod via_blocks_dal; pub mod via_data_availability_dal; pub mod via_transactions_dal; +pub mod via_votes_dal; pub mod vm_runner_dal; #[cfg(test)] @@ -86,6 +87,8 @@ where fn via_transactions_dal(&mut self) -> ViaTransactionsDal<'_, 'a>; + fn via_votes_dal(&mut self) -> ViaVotesDal<'_, 'a>; + fn transactions_web3_dal(&mut self) -> TransactionsWeb3Dal<'_, 'a>; fn tee_verifier_input_producer_dal(&mut self) -> TeeVerifierInputProducerDal<'_, 'a>; @@ -165,6 +168,10 @@ impl<'a> CoreDal<'a> for Connection<'a, Core> { ViaTransactionsDal { storage: self } } + fn via_votes_dal(&mut self) -> ViaVotesDal<'_, 'a> { + ViaVotesDal { storage: self } + } + fn transactions_web3_dal(&mut self) -> TransactionsWeb3Dal<'_, 'a> { TransactionsWeb3Dal { storage: self } } diff --git a/core/lib/dal/src/models/storage_btc_block.rs b/core/lib/dal/src/models/storage_btc_block.rs index 5726e7e3f..525e06384 100644 --- a/core/lib/dal/src/models/storage_btc_block.rs +++ b/core/lib/dal/src/models/storage_btc_block.rs @@ -21,7 +21,7 @@ impl From for ViaBtcL1BlockDetails { hash: details.hash, commit_tx_id: 
Txid::from_str(&details.commit_tx_id.clone().unwrap_or_default()) .unwrap_or(Txid::all_zeros()), - reveal_tx_id: Txid::from_str(&details.commit_tx_id.clone().unwrap_or_default()) + reveal_tx_id: Txid::from_str(&details.reveal_tx_id.clone().unwrap_or_default()) .unwrap_or(Txid::all_zeros()), blob_id: details.blob_id.unwrap_or_default(), } diff --git a/core/lib/dal/src/models/storage_btc_inscription_request.rs b/core/lib/dal/src/models/storage_btc_inscription_request.rs index e083c9870..baf0d71f3 100644 --- a/core/lib/dal/src/models/storage_btc_inscription_request.rs +++ b/core/lib/dal/src/models/storage_btc_inscription_request.rs @@ -2,10 +2,7 @@ use std::str::FromStr; use bitcoin::Txid; use sqlx::types::chrono::NaiveDateTime; -use zksync_types::{ - btc_inscription_operations::ViaBtcInscriptionRequestType, - btc_sender::{ViaBtcInscriptionRequest, ViaBtcInscriptionRequestHistory}, -}; +use zksync_types::btc_sender::{ViaBtcInscriptionRequest, ViaBtcInscriptionRequestHistory}; #[derive(Debug, Clone)] pub struct ViaStorageBtcInscriptionRequest { @@ -37,7 +34,7 @@ impl From for ViaBtcInscriptionRequest { fn from(req: ViaStorageBtcInscriptionRequest) -> ViaBtcInscriptionRequest { ViaBtcInscriptionRequest { id: req.id, - request_type: ViaBtcInscriptionRequestType::from_str(&req.request_type).unwrap(), + request_type: req.request_type, inscription_message: req.inscription_message, confirmed_inscriptions_request_history_id: req .confirmed_inscriptions_request_history_id, diff --git a/core/lib/dal/src/via_data_availability_dal.rs b/core/lib/dal/src/via_data_availability_dal.rs index c297ca18a..f806981b1 100644 --- a/core/lib/dal/src/via_data_availability_dal.rs +++ b/core/lib/dal/src/via_data_availability_dal.rs @@ -453,4 +453,35 @@ impl ViaDataAvailabilityDal<'_, '_> { }) .collect()) } + + /// Returns the data availability blob of the block. 
+ pub async fn get_da_blob( + &mut self, + l1_batch_number: L1BatchNumber, + ) -> DalResult> { + let result = sqlx::query_as!( + StorageDABlob, + r#" + SELECT + l1_batch_number, + blob_id, + inclusion_data, + sent_at + FROM + via_data_availability + WHERE + inclusion_data IS NOT NULL + AND is_proof = FALSE + AND l1_batch_number = $1 + LIMIT + 1 + "#, + i64::from(l1_batch_number.0) + ) + .instrument("get_da_blob") + .fetch_optional(self.storage) + .await?; + + Ok(result.map(DataAvailabilityBlob::from)) + } } diff --git a/core/lib/dal/src/via_votes_dal.rs b/core/lib/dal/src/via_votes_dal.rs new file mode 100644 index 000000000..2c8f1cdc0 --- /dev/null +++ b/core/lib/dal/src/via_votes_dal.rs @@ -0,0 +1,360 @@ +use zksync_db_connection::{connection::Connection, error::DalResult, instrument::InstrumentExt}; +use zksync_types::H256; + +use crate::Core; + +pub struct ViaVotesDal<'c, 'a> { + pub(crate) storage: &'c mut Connection<'a, Core>, +} + +impl ViaVotesDal<'_, '_> { + /// Inserts a new row in `via_votable_transactions`. + /// Notice we haven’t changed this since the PK is still (l1_batch_number, tx_id). + pub async fn insert_votable_transaction( + &mut self, + l1_batch_number: u32, + tx_id: H256, + da_identifier: String, + blob_id: String, + proof_tx_id: String, + ) -> DalResult<()> { + sqlx::query!( + r#" + INSERT INTO + via_votable_transactions (l1_batch_number, tx_id, da_identifier, blob_id, proof_tx_id) + VALUES + ($1, $2, $3, $4, $5) + ON CONFLICT (l1_batch_number, tx_id) DO NOTHING + "#, + i64::from(l1_batch_number), + tx_id.as_bytes(), + da_identifier, + blob_id, + proof_tx_id + ) + .instrument("insert_votable_transaction") + .fetch_optional(self.storage) + .await?; + + Ok(()) + } + + /// Inserts a new vote row in `via_votes`. + /// Now requires `l1_batch_number` as part of the primary key / FK. 
+ pub async fn insert_vote( + &mut self, + l1_batch_number: u32, + tx_id: H256, + verifier_address: &str, + vote: bool, + ) -> DalResult<()> { + sqlx::query!( + r#" + INSERT INTO + via_votes (l1_batch_number, tx_id, verifier_address, vote) + VALUES + ($1, $2, $3, $4) + ON CONFLICT (l1_batch_number, tx_id, verifier_address) DO NOTHING + "#, + l1_batch_number as i32, + tx_id.as_bytes(), + verifier_address, + vote + ) + .instrument("insert_vote") + .fetch_optional(self.storage) + .await?; + + Ok(()) + } + + /// Returns (ok_votes, total_votes) for the given `(l1_batch_number, tx_id)`. + /// Must also filter on `l1_batch_number`. + pub async fn get_vote_count( + &mut self, + l1_batch_number: u32, + tx_id: H256, + ) -> DalResult<(i64, i64)> { + let row = sqlx::query!( + r#" + SELECT + COUNT(*) FILTER ( + WHERE + vote = TRUE + ) AS ok_votes, + COUNT(*) AS total_votes + FROM + via_votes + WHERE + l1_batch_number = $1 + AND tx_id = $2 + "#, + l1_batch_number as i32, + tx_id.as_bytes() + ) + .instrument("get_vote_count") + .fetch_one(self.storage) + .await?; + + let ok_votes = row.ok_votes.unwrap_or(0); + let total_votes = row.total_votes.unwrap_or(0); + Ok((ok_votes, total_votes)) + } + + /// Marks the transaction as finalized if #ok_votes / #total_votes >= threshold. + /// Must use `(l1_batch_number, tx_id)` in both vote counting and the UPDATE statement. 
+ pub async fn finalize_transaction_if_needed( + &mut self, + l1_batch_number: u32, + tx_id: H256, + threshold: f64, + number_of_verifiers: usize, + ) -> DalResult { + let row = sqlx::query!( + r#" + SELECT + is_finalized + FROM + via_votable_transactions + WHERE + l1_batch_number = $1 + AND tx_id = $2 + "#, + i64::from(l1_batch_number), + tx_id.as_bytes() + ) + .instrument("check_if_already_finalized") + .fetch_one(self.storage) + .await?; + + if row.is_finalized { + return Ok(false); + } + + let (ok_votes, _total_votes) = self.get_vote_count(l1_batch_number, tx_id).await?; + let is_threshold_reached = (ok_votes as f64) / (number_of_verifiers as f64) >= threshold; + + if is_threshold_reached { + sqlx::query!( + r#" + UPDATE via_votable_transactions + SET + is_finalized = TRUE, + updated_at = NOW() + WHERE + l1_batch_number = $1 + AND tx_id = $2 + "#, + i64::from(l1_batch_number), + tx_id.as_bytes() + ) + .instrument("finalize_transaction_if_needed") + .execute(self.storage) + .await?; + } + + Ok(is_threshold_reached) + } + + pub async fn get_last_inserted_block(&mut self) -> DalResult> { + let row = sqlx::query!( + r#" + SELECT + MAX(l1_batch_number) AS max_batch_number + FROM + via_votable_transactions + "# + ) + .instrument("get_last_inserted_block") + .fetch_one(self.storage) + .await?; + + Ok(row.max_batch_number.map(|n| n as u32)) + } + + pub async fn verify_votable_transaction( + &mut self, + l1_batch_number: u32, + tx_id: H256, + l1_batch_status: bool, + ) -> DalResult<()> { + sqlx::query!( + r#" + UPDATE via_votable_transactions + SET + is_verified = TRUE, + l1_batch_status = $3, + updated_at = NOW() + WHERE + l1_batch_number = $1 + AND tx_id = $2 + "#, + i64::from(l1_batch_number), + tx_id.as_bytes(), + l1_batch_status + ) + .instrument("verify_transaction") + .execute(self.storage) + .await?; + Ok(()) + } + + pub async fn get_first_non_finalized_block(&mut self) -> DalResult> { + let l1_block_number = sqlx::query_scalar!( + r#" + SELECT + 
MIN(l1_batch_number) as "l1_batch_number" + FROM via_votable_transactions + WHERE + is_finalized = FALSE + "#, + ) + .instrument("get_last_block_finilized") + .fetch_optional(self.storage) + .await? + .flatten(); + + Ok(l1_block_number) + } + + pub async fn get_verifier_vote_status( + &mut self, + block_number: i64, + ) -> DalResult)>> { + let row = sqlx::query!( + r#" + SELECT + l1_batch_status, + tx_id + FROM + via_votable_transactions + WHERE + l1_batch_number = $1 + AND is_verified = TRUE + LIMIT + 1 + "#, + block_number + ) + .instrument("get_verifier_vote_status") + .fetch_optional(self.storage) + .await?; + + let result = row.map(|r| { + let l1_batch_status = r.l1_batch_status; + let tx_id = r.tx_id; + (l1_batch_status, tx_id) + }); + + Ok(result) + } + + /// Retrieve the first not executed block. (Similar to `get_first_not_finilized_block`, just with `is_verified = FALSE`). + pub async fn get_first_not_verified_block(&mut self) -> DalResult)>> { + let row = sqlx::query!( + r#" + SELECT + l1_batch_number, + tx_id + FROM + via_votable_transactions + WHERE + is_verified = FALSE + ORDER BY + l1_batch_number ASC + LIMIT + 1 + "#, + ) + .instrument("get_first_not_executed_block") + .fetch_optional(self.storage) + .await?; + + let result = row.map(|r| { + let l1_batch_number = r.l1_batch_number; + let tx_id = r.tx_id; + (l1_batch_number, tx_id) + }); + + Ok(result) + } + pub async fn get_finalized_blocks_and_non_processed_withdrawals( + &mut self, + ) -> DalResult> { + let rows = sqlx::query!( + r#" + SELECT + l1_batch_number, + blob_id, + proof_tx_id + FROM + via_votable_transactions + WHERE + is_finalized = TRUE + AND is_verified = TRUE + AND withdrawal_tx_id IS NULL + ORDER BY + l1_batch_number ASC + "#, + ) + .instrument("get_finalized_blocks_and_non_processed_withdrawals") + .fetch_all(self.storage) + .await?; + + // Map the rows into a Vec<(l1_batch_number, blob_id, proof_tx_id)> + let result: Vec<(i64, String, String)> = rows + .into_iter() + .map(|r| 
(r.l1_batch_number, r.blob_id, r.proof_tx_id)) + .collect(); + + Ok(result) + } + + pub async fn mark_vote_transaction_as_processed_withdrawals( + &mut self, + tx_id: H256, + l1_batch_number: i64, + ) -> DalResult<()> { + sqlx::query!( + r#" + UPDATE via_votable_transactions + SET + withdrawal_tx_id = $1 + WHERE + is_finalized = TRUE + AND is_verified = TRUE + AND withdrawal_tx_id IS NULL + AND l1_batch_number = $2 + "#, + tx_id.as_bytes(), + l1_batch_number + ) + .instrument("mark_vote_transaction_as_processed_withdrawals") + .execute(self.storage) + .await?; + + Ok(()) + } + + pub async fn get_vote_transaction_withdrawal_tx( + &mut self, + l1_batch_number: i64, + ) -> DalResult>> { + let withdrawal_tx_id = sqlx::query_scalar!( + r#" + SELECT + withdrawal_tx_id + FROM via_votable_transactions + WHERE + l1_batch_number = $1 + "#, + l1_batch_number + ) + .instrument("get_vote_transaction_withdrawal_tx") + .fetch_optional(self.storage) + .await? + .flatten(); + + Ok(withdrawal_tx_id) + } +} diff --git a/core/lib/env_config/src/database.rs b/core/lib/env_config/src/database.rs index c067c96de..aa0d1f325 100644 --- a/core/lib/env_config/src/database.rs +++ b/core/lib/env_config/src/database.rs @@ -45,11 +45,17 @@ impl FromEnv for DatabaseSecrets { .map(|s| s.parse()) .transpose()? .or_else(|| server_url.clone()); + let verifier_url = env::var("DATABASE_VERIFIER_URL") + .ok() + .map(|s| s.parse()) + .transpose()? 
+ .or_else(|| server_url.clone()); Ok(Self { server_url, prover_url, server_replica_url, + verifier_url, }) } } @@ -217,6 +223,7 @@ mod tests { DATABASE_URL=postgres://postgres:notsecurepassword@localhost/zksync_local DATABASE_REPLICA_URL=postgres://postgres:notsecurepassword@localhost/zksync_replica_local DATABASE_PROVER_URL=postgres://postgres:notsecurepassword@localhost/zksync_prover_local + DATABASE_VERIFIER_URL=postgres://postgres:notsecurepassword@localhost/via_verifier_local "#; lock.set_env(config); @@ -239,5 +246,11 @@ mod tests { .parse() .unwrap() ); + assert_eq!( + postgres_config.verifier_url().unwrap(), + "postgres://postgres:notsecurepassword@localhost/via_verifier_local" + .parse() + .unwrap() + ); } } diff --git a/core/lib/env_config/src/lib.rs b/core/lib/env_config/src/lib.rs index 1402fbb30..d91b87d9d 100644 --- a/core/lib/env_config/src/lib.rs +++ b/core/lib/env_config/src/lib.rs @@ -23,6 +23,8 @@ mod utils; mod via_btc_sender; mod via_celestia; +mod via_verifier; + mod base_token_adjuster; mod da_dispatcher; mod external_price_api_client; diff --git a/core/lib/env_config/src/via_verifier.rs b/core/lib/env_config/src/via_verifier.rs new file mode 100644 index 000000000..6ecfd3e75 --- /dev/null +++ b/core/lib/env_config/src/via_verifier.rs @@ -0,0 +1,9 @@ +use zksync_config::ViaVerifierConfig; + +use crate::{envy_load, FromEnv}; + +impl FromEnv for ViaVerifierConfig { + fn from_env() -> anyhow::Result { + envy_load("via_verifier", "VIA_VERIFIER_") + } +} diff --git a/core/lib/multivm/src/utils/mod.rs b/core/lib/multivm/src/utils/mod.rs index 5d8fba7a2..d46dbe385 100644 --- a/core/lib/multivm/src/utils/mod.rs +++ b/core/lib/multivm/src/utils/mod.rs @@ -58,6 +58,11 @@ pub fn derive_base_fee_and_gas_per_pubdata( batch_fee_input.into_pubdata_independent(), ) } + VmVersion::VmBitcoin1_0_0 => { + crate::vm_latest::utils::fee::derive_base_fee_and_gas_per_pubdata( + batch_fee_input.into_pubdata_independent(), + ) + } } } @@ -84,6 +89,7 @@ pub fn 
get_batch_base_fee(l1_batch_env: &L1BatchEnv, vm_version: VmVersion) -> u VmVersion::Vm1_5_0SmallBootloaderMemory | VmVersion::Vm1_5_0IncreasedBootloaderMemory => { crate::vm_latest::utils::fee::get_batch_base_fee(l1_batch_env) } + VmVersion::VmBitcoin1_0_0 => crate::vm_latest::utils::fee::get_batch_base_fee(l1_batch_env), } } @@ -212,6 +218,9 @@ pub fn derive_overhead( VmVersion::Vm1_5_0SmallBootloaderMemory | VmVersion::Vm1_5_0IncreasedBootloaderMemory => { crate::vm_latest::utils::overhead::derive_overhead(encoded_len) } + VmVersion::VmBitcoin1_0_0 => { + crate::vm_latest::utils::overhead::derive_overhead(encoded_len) + } } } @@ -245,6 +254,9 @@ pub fn get_bootloader_encoding_space(version: VmVersion) -> u32 { crate::vm_latest::MultiVMSubversion::IncreasedBootloaderMemory, ) } + VmVersion::VmBitcoin1_0_0 => crate::vm_latest::constants::get_bootloader_tx_encoding_space( + crate::vm_latest::MultiVMSubversion::IncreasedBootloaderMemory, + ), } } @@ -267,6 +279,7 @@ pub fn get_bootloader_max_txs_in_batch(version: VmVersion) -> usize { VmVersion::Vm1_5_0SmallBootloaderMemory | VmVersion::Vm1_5_0IncreasedBootloaderMemory => { crate::vm_latest::constants::MAX_TXS_IN_BATCH } + VmVersion::VmBitcoin1_0_0 => crate::vm_latest::constants::MAX_TXS_IN_BATCH, } } @@ -290,6 +303,7 @@ pub fn gas_bootloader_batch_tip_overhead(version: VmVersion) -> u32 { VmVersion::Vm1_5_0SmallBootloaderMemory | VmVersion::Vm1_5_0IncreasedBootloaderMemory => { crate::vm_latest::constants::BOOTLOADER_BATCH_TIP_OVERHEAD } + VmVersion::VmBitcoin1_0_0 => crate::vm_latest::constants::BOOTLOADER_BATCH_TIP_OVERHEAD, } } @@ -313,6 +327,9 @@ pub fn circuit_statistics_bootloader_batch_tip_overhead(version: VmVersion) -> u VmVersion::Vm1_5_0SmallBootloaderMemory | VmVersion::Vm1_5_0IncreasedBootloaderMemory => { crate::vm_latest::constants::BOOTLOADER_BATCH_TIP_CIRCUIT_STATISTICS_OVERHEAD as usize } + VmVersion::VmBitcoin1_0_0 => { + 
crate::vm_latest::constants::BOOTLOADER_BATCH_TIP_CIRCUIT_STATISTICS_OVERHEAD as usize + } } } @@ -336,6 +353,9 @@ pub fn execution_metrics_bootloader_batch_tip_overhead(version: VmVersion) -> us VmVersion::Vm1_5_0SmallBootloaderMemory | VmVersion::Vm1_5_0IncreasedBootloaderMemory => { crate::vm_latest::constants::BOOTLOADER_BATCH_TIP_METRICS_SIZE_OVERHEAD as usize } + VmVersion::VmBitcoin1_0_0 => { + crate::vm_latest::constants::BOOTLOADER_BATCH_TIP_METRICS_SIZE_OVERHEAD as usize + } } } @@ -360,6 +380,7 @@ pub fn get_max_gas_per_pubdata_byte(version: VmVersion) -> u64 { VmVersion::Vm1_5_0SmallBootloaderMemory | VmVersion::Vm1_5_0IncreasedBootloaderMemory => { crate::vm_latest::constants::MAX_GAS_PER_PUBDATA_BYTE } + VmVersion::VmBitcoin1_0_0 => crate::vm_latest::constants::MAX_GAS_PER_PUBDATA_BYTE, } } @@ -393,6 +414,9 @@ pub fn get_used_bootloader_memory_bytes(version: VmVersion) -> usize { crate::vm_latest::MultiVMSubversion::IncreasedBootloaderMemory, ) } + VmVersion::VmBitcoin1_0_0 => crate::vm_latest::constants::get_used_bootloader_memory_bytes( + crate::vm_latest::MultiVMSubversion::IncreasedBootloaderMemory, + ), } } @@ -426,6 +450,9 @@ pub fn get_used_bootloader_memory_words(version: VmVersion) -> usize { crate::vm_latest::MultiVMSubversion::IncreasedBootloaderMemory, ) } + VmVersion::VmBitcoin1_0_0 => crate::vm_latest::constants::get_used_bootloader_memory_bytes( + crate::vm_latest::MultiVMSubversion::IncreasedBootloaderMemory, + ), } } @@ -450,6 +477,7 @@ pub fn get_max_batch_gas_limit(version: VmVersion) -> u64 { VmVersion::Vm1_5_0SmallBootloaderMemory | VmVersion::Vm1_5_0IncreasedBootloaderMemory => { crate::vm_latest::constants::BATCH_GAS_LIMIT } + VmVersion::VmBitcoin1_0_0 => crate::vm_latest::constants::BATCH_GAS_LIMIT, } } @@ -476,6 +504,7 @@ pub fn get_eth_call_gas_limit(version: VmVersion) -> u64 { VmVersion::Vm1_5_0SmallBootloaderMemory | VmVersion::Vm1_5_0IncreasedBootloaderMemory => { crate::vm_latest::constants::ETH_CALL_GAS_LIMIT } + 
VmVersion::VmBitcoin1_0_0 => crate::vm_latest::constants::ETH_CALL_GAS_LIMIT, } } @@ -499,6 +528,7 @@ pub fn get_max_batch_base_layer_circuits(version: VmVersion) -> usize { VmVersion::Vm1_5_0SmallBootloaderMemory | VmVersion::Vm1_5_0IncreasedBootloaderMemory => { crate::vm_latest::constants::MAX_BASE_LAYER_CIRCUITS } + VmVersion::VmBitcoin1_0_0 => crate::vm_latest::constants::MAX_BASE_LAYER_CIRCUITS, } } diff --git a/core/lib/multivm/src/versions/vm_latest/vm.rs b/core/lib/multivm/src/versions/vm_latest/vm.rs index 1c85133e1..5d9528913 100644 --- a/core/lib/multivm/src/versions/vm_latest/vm.rs +++ b/core/lib/multivm/src/versions/vm_latest/vm.rs @@ -52,6 +52,7 @@ impl TryFrom for MultiVMSubversion { match value { VmVersion::Vm1_5_0SmallBootloaderMemory => Ok(Self::SmallBootloaderMemory), VmVersion::Vm1_5_0IncreasedBootloaderMemory => Ok(Self::IncreasedBootloaderMemory), + VmVersion::VmBitcoin1_0_0 => Ok(Self::IncreasedBootloaderMemory), _ => Err(VmVersionIsNotVm150Error), } } diff --git a/core/lib/multivm/src/vm_instance.rs b/core/lib/multivm/src/vm_instance.rs index 0e4cefd3c..4cd1d65e1 100644 --- a/core/lib/multivm/src/vm_instance.rs +++ b/core/lib/multivm/src/vm_instance.rs @@ -245,6 +245,15 @@ impl VmInstance { ); VmInstance::Vm1_5_0(vm) } + VmVersion::VmBitcoin1_0_0 => { + let vm = crate::vm_latest::Vm::new_with_subversion( + l1_batch_env, + system_env, + storage_view, + crate::vm_latest::MultiVMSubversion::IncreasedBootloaderMemory, + ); + VmInstance::Vm1_5_0(vm) + } } } diff --git a/core/lib/protobuf_config/src/proto/config/secrets.proto b/core/lib/protobuf_config/src/proto/config/secrets.proto index b711d81d5..704afbe19 100644 --- a/core/lib/protobuf_config/src/proto/config/secrets.proto +++ b/core/lib/protobuf_config/src/proto/config/secrets.proto @@ -7,6 +7,7 @@ message DatabaseSecrets { optional string server_url = 1; // optional optional string server_replica_url = 2; // optional optional string prover_url = 3; // optional + optional string verifier_url 
= 4; // optional } message L1Secrets { diff --git a/core/lib/protobuf_config/src/secrets.rs b/core/lib/protobuf_config/src/secrets.rs index 7d10bef88..a6add9632 100644 --- a/core/lib/protobuf_config/src/secrets.rs +++ b/core/lib/protobuf_config/src/secrets.rs @@ -53,10 +53,17 @@ impl ProtoRepr for proto::DatabaseSecrets { .map(str::parse::) .transpose() .context("prover_url")?; + let verifier_url = self + .verifier_url + .as_deref() + .map(str::parse::) + .transpose() + .context("verifier_url")?; Ok(Self::Type { server_url, prover_url, server_replica_url, + verifier_url, }) } @@ -68,6 +75,10 @@ impl ProtoRepr for proto::DatabaseSecrets { .as_ref() .map(|a| a.expose_str().to_string()), prover_url: this.prover_url.as_ref().map(|a| a.expose_str().to_string()), + verifier_url: this + .verifier_url + .as_ref() + .map(|a| a.expose_str().to_string()), } } } diff --git a/core/lib/types/src/btc.rs b/core/lib/types/src/btc.rs deleted file mode 100644 index 0519ecba6..000000000 --- a/core/lib/types/src/btc.rs +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/core/lib/types/src/btc_sender.rs b/core/lib/types/src/btc_sender.rs index 99b2ab0e2..6dbd4cfa7 100644 --- a/core/lib/types/src/btc_sender.rs +++ b/core/lib/types/src/btc_sender.rs @@ -1,12 +1,10 @@ use bitcoin::Txid; use chrono::NaiveDateTime; -use crate::btc_inscription_operations::ViaBtcInscriptionRequestType; - #[derive(Clone)] pub struct ViaBtcInscriptionRequest { pub id: i64, - pub request_type: ViaBtcInscriptionRequestType, + pub request_type: String, pub inscription_message: Option>, pub predicted_fee: Option, pub confirmed_inscriptions_request_history_id: Option, diff --git a/core/lib/types/src/lib.rs b/core/lib/types/src/lib.rs index 0055adb10..4ca03aa0d 100644 --- a/core/lib/types/src/lib.rs +++ b/core/lib/types/src/lib.rs @@ -61,6 +61,7 @@ pub mod helpers; pub mod proto; pub mod transaction_request; pub mod utils; +pub mod via_verifier_btc_inscription_operations; /// Denotes the first byte 
of the special ZKsync's EIP-712-signed transaction. pub const EIP_712_TX_TYPE: u8 = 0x71; diff --git a/core/lib/types/src/via_verifier_btc_inscription_operations.rs b/core/lib/types/src/via_verifier_btc_inscription_operations.rs new file mode 100644 index 000000000..909a109d8 --- /dev/null +++ b/core/lib/types/src/via_verifier_btc_inscription_operations.rs @@ -0,0 +1,45 @@ +use std::{fmt, str::FromStr}; + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum ViaVerifierBtcInscriptionRequestType { + VoteOnchain, +} + +impl ViaVerifierBtcInscriptionRequestType { + pub fn as_str(self) -> &'static str { + match self { + Self::VoteOnchain => "VoteOnchain", + } + } +} + +impl fmt::Display for ViaVerifierBtcInscriptionRequestType { + fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { + formatter.write_str(self.as_str()) + } +} + +impl From for ViaVerifierBtcInscriptionRequestType { + fn from(s: String) -> Self { + match s.as_str() { + "VoteOnchain" => ViaVerifierBtcInscriptionRequestType::VoteOnchain, + _ => panic!( + "Unexpected value for ViaVerifierBtcInscriptionRequestType: {}", + s + ), + } + } +} + +impl FromStr for ViaVerifierBtcInscriptionRequestType { + type Err = &'static str; + + fn from_str(s: &str) -> Result { + match s { + "VoteOnchain" => Ok(Self::VoteOnchain), + _ => Err( + "Incorrect aggregated action type; expected one of `VoteOnchain`, `CommitProofOnchain`", + ), + } + } +} diff --git a/core/lib/via_btc_client/Cargo.toml b/core/lib/via_btc_client/Cargo.toml index 71ea11391..0c6a7c272 100644 --- a/core/lib/via_btc_client/Cargo.toml +++ b/core/lib/via_btc_client/Cargo.toml @@ -21,6 +21,7 @@ thiserror.workspace = true async-trait.workspace = true lazy_static.workspace = true tokio.workspace = true +futures.workspace = true bitcoin = { version = "0.32.2", features = ["serde"] } bitcoincore-rpc = "0.19.0" rand.workspace = true @@ -34,6 +35,8 @@ serde.workspace = true tracing.workspace = true tracing-subscriber.workspace = true bincode 
= "1.3" +musig2 = "0.2.0" + [dev-dependencies] mockall = "0.13.0" @@ -62,3 +65,11 @@ path = "examples/bootstrap.rs" [[example]] name = "verify_batch" path = "examples/verify_batch.rs" + +[[example]] +name = "fee_history" +path = "examples/fee_history.rs" + +[[example]] +name = "deposit_opreturn" +path = "examples/deposit_opreturn.rs" diff --git a/core/lib/via_btc_client/DEV.md b/core/lib/via_btc_client/DEV.md index 87a707dde..cdb285c8d 100644 --- a/core/lib/via_btc_client/DEV.md +++ b/core/lib/via_btc_client/DEV.md @@ -96,7 +96,7 @@ Votable : No | OP_PUSHBYTES_32 b"verifier_5_p2wpkh_address" | | OP_PUSHBYTES_32 b"verifier_6_p2wpkh_address" | | OP_PUSHBYTES_32 b"verifier_7_p2wpkh_address" | -| OP_PUSHBYTES_32 b"bridge_p2wpkh_mpc_address" | +| OP_PUSHBYTES_32 b"bridge_musig2_address" | | OP_PUSHBYTES_32 b"Str('bootloader_hash')" | | OP_PUSHBYTES_32 b"Str('abstract_account_hash')" | | OP_ENDIF | @@ -144,7 +144,7 @@ Votable: No (4) L1BatchDAReference -Votable: Yes +Votable: No Sender Validation: only valid sequencer |----------------------------------------------------------| | Schnorr Signature | diff --git a/core/lib/via_btc_client/examples/bootstrap.rs b/core/lib/via_btc_client/examples/bootstrap.rs index d88d88f4b..9ee8eba0b 100644 --- a/core/lib/via_btc_client/examples/bootstrap.rs +++ b/core/lib/via_btc_client/examples/bootstrap.rs @@ -10,8 +10,8 @@ use tracing::info; use via_btc_client::{ inscriber::Inscriber, types::{ - BitcoinAddress, BitcoinNetwork, InscriptionConfig, InscriptionMessage, NodeAuth, - ProposeSequencerInput, SystemBootstrappingInput, ValidatorAttestationInput, Vote, + BitcoinAddress, BitcoinNetwork, InscriptionMessage, NodeAuth, ProposeSequencerInput, + SystemBootstrappingInput, ValidatorAttestationInput, Vote, }, }; use zksync_basic_types::H256; @@ -49,9 +49,13 @@ async fn main() -> Result<()> { let rpc_password = args[4].clone(); // Regtest verifier keys + // pubkey: 03d8e2443ef58aa80fb6256bf3b94d2ecf9117f19cb17661ec60ad35fd84ff4a8b let 
verifier_1_private_key = "cRaUbRSn8P8cXUcg6cMZ7oTZ1wbDjktYTsbdGw62tuqqD9ttQWMm".to_string(); + // pubkey: 02043f839b8ecd9ffd79f26ec7d05750555cd0d1e0777cfc84a29b7e38e6324662 let verifier_2_private_key = "cQ4UHjdsGWFMcQ8zXcaSr7m4Kxq9x7g9EKqguTaFH7fA34mZAnqW".to_string(); - let verifier_3_private_key = "cS9UbUKKepDjthBFPBDBe5vGVjNXXygCN75kPWmNKk7HTPV8p6he".to_string(); + // pubkey: 03cf1b1c7ad2952a99e6e2d12d52437f41f867c30eceef1bf88f402296424d6eb8 + let _verifier_3_private_key = + "cS9UbUKKepDjthBFPBDBe5vGVjNXXygCN75kPWmNKk7HTPV8p6he".to_string(); let sequencer_p2wpkh_address = "bcrt1qx2lk0unukm80qmepjp49hwf9z6xnz0s73k9j56" .parse::>()?; @@ -59,9 +63,11 @@ async fn main() -> Result<()> { .parse::>()?; let verifier_2_p2wpkh_address = "bcrt1qk8mkhrmgtq24nylzyzejznfzws6d98g4kmuuh4" .parse::>()?; - let verifier_3_p2wpkh_address = "bcrt1q23lgaa90s85jvtl6dsrkvn0g949cwjkwuyzwdm" + let _verifier_3_p2wpkh_address = "bcrt1q23lgaa90s85jvtl6dsrkvn0g949cwjkwuyzwdm" .parse::>()?; - let bridge_p2wpkh_mpc_address = "bcrt1qdrzjq2mwlhrnhan94em5sl032zd95m73ud8ddw" + + // cargo run --example key_generation_setup coordinator 03d8e2443ef58aa80fb6256bf3b94d2ecf9117f19cb17661ec60ad35fd84ff4a8b 02043f839b8ecd9ffd79f26ec7d05750555cd0d1e0777cfc84a29b7e38e6324662 + let bridge_musig2_address = "bcrt1p3s7m76wp5seprjy4gdxuxrr8pjgd47q5s8lu9vefxmp0my2p4t9qh6s8kq" .parse::>()?; let mut verifier_inscribers: Vec = vec![ @@ -81,14 +87,14 @@ async fn main() -> Result<()> { network, ) .await?, - create_inscriber( - &verifier_3_private_key, - &rpc_url, - &rpc_username, - &rpc_password, - network, - ) - .await?, + // create_inscriber( + // &verifier_3_private_key, + // &rpc_url, + // &rpc_username, + // &rpc_password, + // network, + // ) + // .await?, ]; // Bootstrapping message @@ -97,17 +103,14 @@ async fn main() -> Result<()> { verifier_p2wpkh_addresses: vec![ verifier_1_p2wpkh_address, verifier_2_p2wpkh_address, - verifier_3_p2wpkh_address, + // verifier_3_p2wpkh_address, ], - bridge_p2wpkh_mpc_address, 
+ bridge_musig2_address, bootloader_hash: H256::zero(), abstract_account_hash: H256::random(), }; let bootstrap_info = verifier_inscribers[0] - .inscribe( - InscriptionMessage::SystemBootstrapping(input), - InscriptionConfig::default(), - ) + .inscribe(InscriptionMessage::SystemBootstrapping(input)) .await?; info!( "Bootstrapping tx sent: {:?}", @@ -121,10 +124,7 @@ async fn main() -> Result<()> { sequencer_new_p2wpkh_address: sequencer_p2wpkh_address, }; let propose_info = verifier_inscribers[1] - .inscribe( - InscriptionMessage::ProposeSequencer(input), - InscriptionConfig::default(), - ) + .inscribe(InscriptionMessage::ProposeSequencer(input)) .await?; info!( "Propose sequencer tx sent: {:?}", @@ -143,10 +143,7 @@ async fn main() -> Result<()> { for (i, inscriber) in verifier_inscribers.iter_mut().enumerate() { let validator_info = inscriber - .inscribe( - InscriptionMessage::ValidatorAttestation(input.clone()), - InscriptionConfig::default(), - ) + .inscribe(InscriptionMessage::ValidatorAttestation(input.clone())) .await?; info!( "Validator {} attestation tx sent: {:?}", diff --git a/core/lib/via_btc_client/examples/deposit.rs b/core/lib/via_btc_client/examples/deposit.rs index 4be2b10eb..82a74544b 100644 --- a/core/lib/via_btc_client/examples/deposit.rs +++ b/core/lib/via_btc_client/examples/deposit.rs @@ -11,8 +11,8 @@ use tracing::info; use via_btc_client::{ inscriber::Inscriber, types::{ - BitcoinAddress, BitcoinNetwork, InscriberContext, InscriptionConfig, InscriptionMessage, - L1ToL2MessageInput, NodeAuth, Recipient, + BitcoinAddress, BitcoinNetwork, InscriberContext, InscriptionMessage, L1ToL2MessageInput, + NodeAuth, Recipient, }, }; use zksync_types::Address as EVMAddress; @@ -51,7 +51,7 @@ async fn main() -> Result<()> { let rpc_username = args[6].clone(); let rpc_password = args[7].clone(); - let bridge_p2wpkh_mpc_address = "bcrt1qdrzjq2mwlhrnhan94em5sl032zd95m73ud8ddw" + let bridge_musig2_address = 
"bcrt1p3s7m76wp5seprjy4gdxuxrr8pjgd47q5s8lu9vefxmp0my2p4t9qh6s8kq" .parse::>()? .require_network(network)?; @@ -85,9 +85,8 @@ async fn main() -> Result<()> { let deposit_info = inscriber .inscribe_with_recipient( InscriptionMessage::L1ToL2Message(input), - InscriptionConfig::default(), Some(Recipient { - address: bridge_p2wpkh_mpc_address, + address: bridge_musig2_address, amount: Amount::from_btc(amount)?, }), ) diff --git a/core/lib/via_btc_client/examples/deposit_opreturn.rs b/core/lib/via_btc_client/examples/deposit_opreturn.rs new file mode 100644 index 000000000..23c42a450 --- /dev/null +++ b/core/lib/via_btc_client/examples/deposit_opreturn.rs @@ -0,0 +1,158 @@ +use std::{env, str::FromStr}; + +use anyhow::Result; +use bitcoin::{ + absolute, + address::NetworkUnchecked, + consensus::encode::serialize_hex, + secp256k1::{Message, Secp256k1}, + sighash::{EcdsaSighashType, SighashCache}, + transaction, Address, Amount, CompressedPublicKey, PrivateKey, ScriptBuf, Sequence, + Transaction, TxIn, TxOut, Witness, +}; +use tracing::info; +use via_btc_client::{ + client::BitcoinClient, + traits::BitcoinOps, + types::{BitcoinAddress, NodeAuth}, +}; +use zksync_types::Address as EVMAddress; + +#[tokio::main] +async fn main() -> Result<()> { + tracing_subscriber::fmt() + .with_max_level(tracing::Level::INFO) + .init(); + + let secp = Secp256k1::new(); + + let args: Vec = env::args().collect(); + let amount = Amount::from_btc(args[1].parse::()?)?; + let fees = Amount::from_btc(0.0001)?; + let receiver_l2_address = EVMAddress::from_str(&args[2])?; + info!( + "Depositing {} BTC to receiver L2 address {}", + amount, receiver_l2_address + ); + + let depositor_private_key = args[3].clone(); + info!( + "Depositor L1 private key: {}...{}", + &depositor_private_key[..4], + &depositor_private_key[depositor_private_key.len() - 4..] 
+ ); + + let network: bitcoin::Network = args[4].parse().expect("Invalid network value"); + let rpc_url = args[5].clone(); + let rpc_username = args[6].clone(); + let rpc_password = args[7].clone(); + + let private_key = + PrivateKey::from_wif(&depositor_private_key).map_err(|e| anyhow::anyhow!(e.to_string()))?; + let pk = private_key.inner.public_key(&secp); + let compressed_pk = CompressedPublicKey::from_private_key(&secp, &private_key) + .map_err(|e| anyhow::anyhow!(e.to_string()))?; + let address = Address::p2wpkh(&compressed_pk, network); + + let bridge_musig2_address = "bcrt1p3s7m76wp5seprjy4gdxuxrr8pjgd47q5s8lu9vefxmp0my2p4t9qh6s8kq" + .parse::>()? + .require_network(network)?; + + let client = BitcoinClient::new( + &rpc_url, + network, + NodeAuth::UserPass(rpc_username, rpc_password), + )?; + + // Fetch UTXOs available at our address. + let all_utxos = client.fetch_utxos(&address).await?; + + // Select only the UTXOs needed to cover the total amount (amount + fees) + let total_needed = amount + fees; + let mut selected_utxos = Vec::new(); + let mut input_amount = Amount::from_sat(0); + for (outpoint, txout) in all_utxos.into_iter() { + selected_utxos.push((outpoint, txout)); + input_amount += selected_utxos.last().unwrap().1.value; + if input_amount >= total_needed { + break; + } + } + + if input_amount < total_needed { + return Err(anyhow::anyhow!("Insufficient funds")); + } + + // Create transaction inputs from the selected UTXOs. + let tx_inputs: Vec = selected_utxos + .iter() + .map(|(outpoint, _)| TxIn { + previous_output: *outpoint, + script_sig: ScriptBuf::new(), + sequence: Sequence::ENABLE_RBF_NO_LOCKTIME, + witness: Witness::new(), + }) + .collect(); + + // Create transaction outputs. + let mut outputs = Vec::new(); + // Output to bridge address. + outputs.push(TxOut { + value: amount, + script_pubkey: bridge_musig2_address.script_pubkey(), + }); + // OP_RETURN output with L2 address. 
+ outputs.push(TxOut { + value: Amount::from_sat(0), + script_pubkey: ScriptBuf::new_op_return(receiver_l2_address.to_fixed_bytes()), + }); + // Change output (if any). + let change_amount = input_amount - total_needed; + if change_amount > Amount::from_sat(0) { + outputs.push(TxOut { + value: change_amount, + script_pubkey: address.script_pubkey(), + }); + } + + let mut tx = Transaction { + version: transaction::Version::TWO, + lock_time: absolute::LockTime::ZERO, + input: tx_inputs, + output: outputs, + }; + + let sighash_type = EcdsaSighashType::All; + let mut cache = SighashCache::new(&mut tx); + for (i, (_, utxo)) in selected_utxos.iter().enumerate() { + let sighash = cache + .p2wpkh_signature_hash(i, &utxo.script_pubkey, utxo.value, sighash_type) + .map_err(|e| anyhow::anyhow!(e.to_string()))?; + + let msg = Message::from(sighash); + let signature = secp.sign_ecdsa(&msg, &private_key.inner); + + // Create a Bitcoin ECDSA signature with sighash type + let signature = bitcoin::ecdsa::Signature { + signature, + sighash_type, + }; + + // Set the witness using p2wpkh helper + cache + .witness_mut(i) + .ok_or_else(|| anyhow::anyhow!("Failed to get witness")) + .map(|witness| *witness = Witness::p2wpkh(&signature, &pk))?; + } + + let tx = cache.into_transaction(); + // -------------------------------- + + // Broadcast transaction + let tx_hex = serialize_hex(&tx); + let txid = client.broadcast_signed_transaction(&tx_hex).await?; + + info!("Transaction broadcasted with txid: {}", txid); + + Ok(()) +} diff --git a/core/lib/via_btc_client/examples/fee_history.rs b/core/lib/via_btc_client/examples/fee_history.rs new file mode 100644 index 000000000..fbbc6acdd --- /dev/null +++ b/core/lib/via_btc_client/examples/fee_history.rs @@ -0,0 +1,49 @@ +use std::{env, str::FromStr}; + +use anyhow::{Context, Result}; +use tracing::info; +use via_btc_client::{ + inscriber::Inscriber, + types::{BitcoinNetwork, NodeAuth}, +}; + +const RPC_URL: &str = "http://0.0.0.0:18443"; +const 
RPC_USERNAME: &str = "rpcuser"; +const RPC_PASSWORD: &str = "rpcpassword"; +const NETWORK: BitcoinNetwork = BitcoinNetwork::Regtest; +const PK: &str = "cRaUbRSn8P8cXUcg6cMZ7oTZ1wbDjktYTsbdGw62tuqqD9ttQWMm"; + +#[tokio::main] +async fn main() -> Result<()> { + tracing_subscriber::fmt() + .with_max_level(tracing::Level::INFO) + .init(); + + let args: Vec = env::args().collect(); + let number_blocks = usize::from_str(&args[1].to_string())?; + + let inscriber = Inscriber::new( + RPC_URL, + NETWORK, + NodeAuth::UserPass(RPC_USERNAME.to_string(), RPC_PASSWORD.to_string()), + PK, + None, + ) + .await + .context("Failed to Inscriber")?; + + let client = inscriber.get_client().await; + + let to_block = client.fetch_block_height().await? as usize; + let from_block = to_block - number_blocks; + info!( + "Fetch blocks fee history from block {} to {}", + from_block, to_block + ); + + let fee_history = client.get_fee_history(from_block, to_block).await?; + + info!("Fee history {:?}", fee_history); + + Ok(()) +} diff --git a/core/lib/via_btc_client/examples/inscriber.rs b/core/lib/via_btc_client/examples/inscriber.rs index e147541e1..647ac1b71 100644 --- a/core/lib/via_btc_client/examples/inscriber.rs +++ b/core/lib/via_btc_client/examples/inscriber.rs @@ -1,7 +1,7 @@ use anyhow::{Context, Result}; use via_btc_client::{ inscriber::Inscriber, - types::{self as inscribe_types, BitcoinNetwork, InscriptionConfig, NodeAuth}, + types::{self as inscribe_types, BitcoinNetwork, NodeAuth}, }; #[tokio::main] @@ -40,10 +40,9 @@ async fn main() -> Result<()> { }; let inscribe_info = inscriber_instance - .inscribe( - inscribe_types::InscriptionMessage::L1BatchDAReference(l1_da_batch_ref), - InscriptionConfig::default(), - ) + .inscribe(inscribe_types::InscriptionMessage::L1BatchDAReference( + l1_da_batch_ref, + )) .await .context("Failed to inscribe L1BatchDAReference")?; @@ -58,10 +57,9 @@ async fn main() -> Result<()> { }; let _da_proof_ref_reveal_txid = inscriber_instance - .inscribe( - 
inscribe_types::InscriptionMessage::ProofDAReference(l1_da_proof_ref), - InscriptionConfig::default(), - ) + .inscribe(inscribe_types::InscriptionMessage::ProofDAReference( + l1_da_proof_ref, + )) .await .context("Failed to inscribe ProofDAReference")?; diff --git a/core/lib/via_btc_client/examples/verify_batch.rs b/core/lib/via_btc_client/examples/verify_batch.rs index 422863a89..fb984ca91 100644 --- a/core/lib/via_btc_client/examples/verify_batch.rs +++ b/core/lib/via_btc_client/examples/verify_batch.rs @@ -5,10 +5,7 @@ use bitcoin::Txid; use tracing::info; use via_btc_client::{ inscriber::Inscriber, - types::{ - BitcoinNetwork, InscriptionConfig, InscriptionMessage, NodeAuth, ValidatorAttestationInput, - Vote, - }, + types::{BitcoinNetwork, InscriptionMessage, NodeAuth, ValidatorAttestationInput, Vote}, }; const RPC_URL: &str = "http://0.0.0.0:18443"; @@ -63,10 +60,7 @@ async fn main() -> Result<()> { for (i, inscriber) in verifier_inscribers.iter_mut().enumerate() { let validator_info = inscriber - .inscribe( - InscriptionMessage::ValidatorAttestation(input.clone()), - InscriptionConfig::default(), - ) + .inscribe(InscriptionMessage::ValidatorAttestation(input.clone())) .await?; info!( "Validator {} attestation tx sent: {:?}", diff --git a/core/lib/via_btc_client/src/client/mod.rs b/core/lib/via_btc_client/src/client/mod.rs index f4d512eef..b6bc24e5f 100644 --- a/core/lib/via_btc_client/src/client/mod.rs +++ b/core/lib/via_btc_client/src/client/mod.rs @@ -2,7 +2,8 @@ use std::sync::Arc; use async_trait::async_trait; use bitcoin::{Address, Block, BlockHash, Network, OutPoint, Transaction, TxOut, Txid}; -use bitcoincore_rpc::json::EstimateMode; +use bitcoincore_rpc::json::{EstimateMode, GetBlockStatsResult}; +use futures::future::join_all; use tracing::{debug, error, instrument}; mod rpc_client; @@ -156,6 +157,42 @@ impl BitcoinOps for BitcoinClient { debug!("Fetching block by hash"); self.rpc.get_block_by_hash(block_hash).await } + + #[instrument(skip(self), 
target = "bitcoin_client")] + async fn get_block_stats(&self, height: u64) -> BitcoinClientResult { + debug!("Fetching block by hash"); + self.rpc.get_block_stats(height).await + } + + /// Retrieve the "fee_history" for the Bitcoin blockchain between provided blocks 'from_block_height' and 'to_block_height'. + #[instrument(skip(self), target = "bitcoin_client")] + async fn get_fee_history( + &self, + from_block_height: usize, + to_block_height: usize, + ) -> BitcoinClientResult> { + debug!("Fetching blocks fee history"); + + let mut fetch_blocks_futures = Vec::new(); + for block_height in from_block_height..to_block_height { + fetch_blocks_futures.push(self.get_block_stats(block_height as u64)); + } + + let blocks = join_all(fetch_blocks_futures).await; + let mut fee_history: Vec = Vec::new(); + + for block_result in blocks { + match block_result { + Ok(block) => { + fee_history.push(std::cmp::max(block.min_fee_rate.to_sat(), 1)); + } + Err(err) => { + return BitcoinClientResult::Err(err.clone()); + } + } + } + Ok(fee_history) + } } impl Clone for BitcoinClient { @@ -198,6 +235,7 @@ mod tests { async fn get_raw_transaction_info(&self, txid: &Txid) -> BitcoinClientResult; async fn estimate_smart_fee(&self, conf_target: u16, estimate_mode: Option) -> BitcoinClientResult; async fn get_blockchain_info(&self) -> BitcoinRpcResult; + async fn get_block_stats(&self, height: u64) -> BitcoinClientResult; } } diff --git a/core/lib/via_btc_client/src/client/rpc_client.rs b/core/lib/via_btc_client/src/client/rpc_client.rs index 517cb7f7a..8f9ee7d9f 100644 --- a/core/lib/via_btc_client/src/client/rpc_client.rs +++ b/core/lib/via_btc_client/src/client/rpc_client.rs @@ -4,7 +4,9 @@ use async_trait::async_trait; use bitcoin::{Address, Block, BlockHash, OutPoint, Transaction, Txid}; use bitcoincore_rpc::{ bitcoincore_rpc_json::EstimateMode, - json::{EstimateSmartFeeResult, GetBlockchainInfoResult, ScanTxOutRequest}, + json::{ + EstimateSmartFeeResult, GetBlockStatsResult, 
GetBlockchainInfoResult, ScanTxOutRequest, + }, Client, RpcApi, }; use tracing::{debug, instrument}; @@ -221,6 +223,15 @@ impl BitcoinRpc for BitcoinRpcClient { }) .await } + + #[instrument(skip(self), target = "bitcoin_client::rpc_client")] + async fn get_block_stats(&self, height: u64) -> BitcoinRpcResult { + Self::retry_rpc(|| { + debug!("Getting block stats"); + self.client.get_block_stats(height).map_err(|e| e.into()) + }) + .await + } } impl Clone for BitcoinRpcClient { diff --git a/core/lib/via_btc_client/src/indexer/mod.rs b/core/lib/via_btc_client/src/indexer/mod.rs index 10cbf6d66..f05cf418f 100644 --- a/core/lib/via_btc_client/src/indexer/mod.rs +++ b/core/lib/via_btc_client/src/indexer/mod.rs @@ -1,12 +1,12 @@ use std::{collections::HashMap, sync::Arc}; -use bitcoin::{Address, BlockHash, Network, Txid}; +use bitcoin::{Address, Amount, BlockHash, Network, Transaction as BitcoinTransaction, Txid}; use bitcoincore_rpc::Auth; use tracing::{debug, error, info, instrument, warn}; mod parser; -pub use parser::get_eth_address; -use parser::MessageParser; +pub use parser::{get_eth_address, MessageParser}; +use zksync_basic_types::L1BatchNumber; use zksync_types::H256; use crate::{ @@ -94,7 +94,7 @@ impl BitcoinInscriptionIndexer { for txid in bootstrap_txids { debug!("Processing bootstrap transaction: {}", txid); let tx = client.get_transaction(&txid).await?; - let messages = parser.parse_transaction(&tx, 0); + let messages = parser.parse_system_transaction(&tx, 0); for message in messages { Self::process_bootstrap_message(&mut bootstrap_state, message, txid, network); @@ -139,21 +139,109 @@ impl BitcoinInscriptionIndexer { } let block = self.client.fetch_block(block_height as u128).await?; + // TODO: check block header is belong to a valid chain of blocks (reorg detection and management) + // TODO: deal with malicious sequencer, verifiers from being able to make trouble by sending invalid messages / valid messages with invalid data - let messages: Vec<_> = 
block - .txdata - .iter() - .flat_map(|tx| self.parser.parse_transaction(tx, block_height)) - // TODO: Implement message validation - // .filter(|message| self.is_valid_message(message)) - .collect(); + let mut valid_messages = Vec::new(); + + let (system_tx, bridge_tx) = self.extract_important_transactions(&block.txdata); + + if let Some(system_tx) = system_tx { + let parsed_messages: Vec<_> = system_tx + .iter() + .flat_map(|tx| self.parser.parse_system_transaction(tx, block_height)) + .collect(); + + let messages: Vec<_> = parsed_messages + .into_iter() + .filter(|message| self.is_valid_system_message(message)) + .collect(); + + valid_messages.extend(messages); + } + + if let Some(bridge_tx) = bridge_tx { + let parsed_messages: Vec<_> = bridge_tx + .iter() + .flat_map(|tx| self.parser.parse_bridge_transaction(tx, block_height)) + .collect(); + + let messages: Vec<_> = parsed_messages + .into_iter() + .filter(|message| self.is_valid_bridge_message(message)) + .collect(); + + valid_messages.extend(messages); + } debug!( "Processed {} valid messages in block {}", - messages.len(), + valid_messages.len(), block_height ); - Ok(messages) + Ok(valid_messages) + } + + fn extract_important_transactions( + &self, + transactions: &[BitcoinTransaction], + ) -> ( + Option>, + Option>, + ) { + // We only care about the transactions that sequencer, verifiers are sending and the bridge is receiving + let system_txs: Vec = transactions + .iter() + .filter(|tx| { + tx.input.iter().any(|input| { + if let Some(btc_address) = self.parser.parse_p2wpkh(&input.witness) { + btc_address == self.sequencer_address + || self.verifier_addresses.contains(&btc_address) + } else { + false + } + }) + }) + .cloned() + .collect(); + + let bridge_txs: Vec = transactions + .iter() + .filter(|tx| { + // Check if bridge address is in outputs (deposit destination) + tx.output.iter().any(|output| { + let script_pubkey = &output.script_pubkey; + script_pubkey == &self.bridge_address.script_pubkey() + }) 
&& !tx.output.iter().any(|output| { + if output.script_pubkey.is_op_return() { + // Extract OP_RETURN data + if let Some(op_return_data) = output.script_pubkey.as_bytes().get(2..) { + // Return true if it starts with withdrawal prefix (which will be negated) + op_return_data.starts_with(b"VIA_PROTOCOL:WITHDRAWAL") + } else { + false + } + } else { + false // Not an OP_RETURN output + } + }) + }) + .cloned() + .collect(); + + let system_txs = if !system_txs.is_empty() { + Some(system_txs) + } else { + None + }; + + let bridge_txs = if !bridge_txs.is_empty() { + Some(bridge_txs) + } else { + None + }; + + (system_txs, bridge_txs) } #[instrument(skip(self), target = "bitcoin_indexer")] @@ -184,6 +272,35 @@ impl BitcoinInscriptionIndexer { self.starting_block_number, ) } + + pub async fn get_l1_batch_number( + &mut self, + msg: &FullInscriptionMessage, + ) -> Option { + match msg { + FullInscriptionMessage::ProofDAReference(proof_msg) => self + .get_l1_batch_number_from_proof_tx_id(&proof_msg.input.l1_batch_reveal_txid) + .await + .ok(), + FullInscriptionMessage::ValidatorAttestation(va_msg) => self + .get_l1_batch_number_from_validation_tx_id(&va_msg.input.reference_txid) + .await + .ok(), + _ => None, + } + } + + pub fn get_number_of_verifiers(&self) -> usize { + self.verifier_addresses.len() + } + + pub async fn parse_transaction( + &mut self, + tx: &Txid, + ) -> BitcoinIndexerResult> { + let tx = self.client.get_transaction(tx).await?; + Ok(self.parser.parse_system_transaction(&tx, 0)) + } } impl BitcoinInscriptionIndexer { @@ -224,31 +341,40 @@ impl BitcoinInscriptionIndexer { } #[instrument(skip(self, message), target = "bitcoin_indexer")] - fn is_valid_message(&self, message: &FullInscriptionMessage) -> bool { + fn is_valid_system_message(&self, message: &FullInscriptionMessage) -> bool { match message { - FullInscriptionMessage::ProposeSequencer(m) => { - self.verifier_addresses.contains(&m.common.p2wpkh_address) - } - 
FullInscriptionMessage::ValidatorAttestation(m) => { - self.verifier_addresses.contains(&m.common.p2wpkh_address) - } - FullInscriptionMessage::L1BatchDAReference(m) => { - m.common.p2wpkh_address == self.sequencer_address - } - FullInscriptionMessage::ProofDAReference(m) => { - m.common.p2wpkh_address == self.sequencer_address - } - FullInscriptionMessage::L1ToL2Message(m) => { - // TODO: also check sender address - let is_valid = - m.amount > bitcoin::Amount::ZERO && self.is_valid_l1_to_l2_transfer(m); - debug!("L1ToL2Message validity: {}", is_valid); - is_valid - } + FullInscriptionMessage::ProposeSequencer(m) => m + .common + .p2wpkh_address + .as_ref() + .map_or(false, |addr| self.verifier_addresses.contains(addr)), + FullInscriptionMessage::ValidatorAttestation(m) => m + .common + .p2wpkh_address + .as_ref() + .map_or(false, |addr| self.verifier_addresses.contains(addr)), + FullInscriptionMessage::L1BatchDAReference(m) => m + .common + .p2wpkh_address + .as_ref() + .map_or(false, |addr| addr == &self.sequencer_address), + FullInscriptionMessage::ProofDAReference(m) => m + .common + .p2wpkh_address + .as_ref() + .map_or(false, |addr| addr == &self.sequencer_address), FullInscriptionMessage::SystemBootstrapping(_) => { debug!("SystemBootstrapping message is always valid"); true } + _ => false, + } + } + + fn is_valid_bridge_message(&self, message: &FullInscriptionMessage) -> bool { + match message { + FullInscriptionMessage::L1ToL2Message(m) => self.is_valid_l1_to_l2_transfer(m), + _ => false, } } @@ -264,7 +390,7 @@ impl BitcoinInscriptionIndexer { debug!("Processing SystemBootstrapping message"); // convert the verifier addresses to the correct network - // scince the bootstrap message should run on the bootstrapping phase of sequencer + // since the bootstrap message should run on the bootstrapping phase of sequencer // i consume it's ok to using unwrap let verifier_addresses = sb .input @@ -277,7 +403,7 @@ impl BitcoinInscriptionIndexer { let bridge_address 
= sb .input - .bridge_p2wpkh_mpc_address + .bridge_musig2_address .require_network(network) .unwrap(); state.bridge_address = Some(bridge_address); @@ -287,7 +413,12 @@ impl BitcoinInscriptionIndexer { } FullInscriptionMessage::ProposeSequencer(ps) => { debug!("Processing ProposeSequencer message"); - if state.verifier_addresses.contains(&ps.common.p2wpkh_address) { + let p2wpkh_address = ps + .common + .p2wpkh_address + .as_ref() + .expect("ProposeSequencer message must have a p2wpkh address"); + if state.verifier_addresses.contains(p2wpkh_address) { let sequencer_address = ps .input .sequencer_new_p2wpkh_address @@ -298,14 +429,19 @@ impl BitcoinInscriptionIndexer { } } FullInscriptionMessage::ValidatorAttestation(va) => { - if state.verifier_addresses.contains(&va.common.p2wpkh_address) + let p2wpkh_address = va + .common + .p2wpkh_address + .as_ref() + .expect("ValidatorAttestation message must have a p2wpkh address"); + if state.verifier_addresses.contains(p2wpkh_address) && state.proposed_sequencer.is_some() { if let Some(proposed_txid) = state.proposed_sequencer_txid { if va.input.reference_txid == proposed_txid { state .sequencer_votes - .insert(va.common.p2wpkh_address, va.input.attestation); + .insert(p2wpkh_address.clone(), va.input.attestation); } } } @@ -318,12 +454,60 @@ impl BitcoinInscriptionIndexer { #[instrument(skip(self, message), target = "bitcoin_indexer")] fn is_valid_l1_to_l2_transfer(&self, message: &L1ToL2Message) -> bool { - let is_valid = message + let is_valid_receiver = message .tx_outputs .iter() .any(|output| output.script_pubkey == self.bridge_address.script_pubkey()); - debug!("L1ToL2Message transfer validity: {}", is_valid); - is_valid + debug!("L1ToL2Message transfer validity: {}", is_valid_receiver); + + let total_bridge_amount = message + .tx_outputs + .iter() + .filter(|output| output.script_pubkey == self.bridge_address.script_pubkey()) + .map(|output| output.value) + .sum::(); + + let is_valid_amount = message.amount == 
total_bridge_amount; + debug!( + "Amount validation: message amount = {}, total bridge outputs = {}", + message.amount, total_bridge_amount + ); + + is_valid_receiver && is_valid_amount + } + + async fn get_l1_batch_number_from_proof_tx_id( + &mut self, + txid: &Txid, + ) -> anyhow::Result { + let a = self.client.get_transaction(txid).await?; + let b = self.parser.parse_system_transaction(&a, 0); + let msg = b + .first() + .ok_or_else(|| anyhow::anyhow!("No message found"))?; + + match msg { + FullInscriptionMessage::L1BatchDAReference(da_msg) => Ok(da_msg.input.l1_batch_index), + _ => Err(anyhow::anyhow!("Invalid message type")), + } + } + + async fn get_l1_batch_number_from_validation_tx_id( + &mut self, + txid: &Txid, + ) -> anyhow::Result { + let a = self.client.get_transaction(txid).await?; + let b = self.parser.parse_system_transaction(&a, 0); + let msg = b + .first() + .ok_or_else(|| anyhow::anyhow!("No message found"))?; + + match msg { + FullInscriptionMessage::ProofDAReference(da_msg) => Ok(self + .get_l1_batch_number_from_proof_tx_id(&da_msg.input.l1_batch_reveal_txid) + .await?), + _ => Err(anyhow::anyhow!("Invalid message type")), + } } } @@ -336,6 +520,7 @@ mod tests { block::Header, hashes::Hash, Amount, Block, OutPoint, ScriptBuf, Transaction, TxMerkleNode, TxOut, }; + use bitcoincore_rpc::json::GetBlockStatsResult; use mockall::{mock, predicate::*}; use super::*; @@ -355,6 +540,12 @@ mod tests { async fn fetch_block_height(&self) -> BitcoinClientResult; async fn get_fee_rate(&self, conf_target: u16) -> BitcoinClientResult; fn get_network(&self) -> Network; + async fn get_block_stats(&self, height: u64) -> BitcoinClientResult; + async fn get_fee_history( + &self, + from_block_height: usize, + to_block_height: usize, + ) -> BitcoinClientResult>; } } @@ -371,7 +562,7 @@ mod tests { encoded_public_key: bitcoin::script::PushBytesBuf::from([0u8; 32]), block_height: 0, tx_id: Txid::all_zeros(), - p2wpkh_address: get_test_addr(), + p2wpkh_address: 
Some(get_test_addr()), } } @@ -470,7 +661,7 @@ mod tests { .to_owned(), }, }); - assert!(!indexer.is_valid_message(&propose_sequencer)); + assert!(!indexer.is_valid_system_message(&propose_sequencer)); let validator_attestation = FullInscriptionMessage::ValidatorAttestation(types::ValidatorAttestation { @@ -480,7 +671,7 @@ mod tests { attestation: Vote::Ok, }, }); - assert!(!indexer.is_valid_message(&validator_attestation)); + assert!(!indexer.is_valid_system_message(&validator_attestation)); let l1_batch_da_reference = FullInscriptionMessage::L1BatchDAReference(types::L1BatchDAReference { @@ -489,7 +680,7 @@ mod tests { encoded_public_key: bitcoin::script::PushBytesBuf::from([0u8; 32]), block_height: 0, tx_id: Txid::all_zeros(), - p2wpkh_address: get_test_addr(), + p2wpkh_address: Some(get_test_addr()), }, input: types::L1BatchDAReferenceInput { l1_batch_hash: zksync_basic_types::H256::zero(), @@ -499,7 +690,7 @@ mod tests { }, }); // We didn't vote for the sequencer yet, so this message is invalid - assert!(indexer.is_valid_message(&l1_batch_da_reference)); + assert!(indexer.is_valid_system_message(&l1_batch_da_reference)); let l1_to_l2_message = FullInscriptionMessage::L1ToL2Message(L1ToL2Message { common: get_test_common_fields(), @@ -514,24 +705,20 @@ mod tests { script_pubkey: indexer.bridge_address.script_pubkey(), }], }); - assert!(indexer.is_valid_message(&l1_to_l2_message)); + assert!(indexer.is_valid_bridge_message(&l1_to_l2_message)); let system_bootstrapping = FullInscriptionMessage::SystemBootstrapping(types::SystemBootstrapping { common: get_test_common_fields(), input: types::SystemBootstrappingInput { start_block_height: 0, - bridge_p2wpkh_mpc_address: indexer - .bridge_address - .clone() - .as_unchecked() - .to_owned(), + bridge_musig2_address: indexer.bridge_address.clone().as_unchecked().to_owned(), verifier_p2wpkh_addresses: vec![], bootloader_hash: H256::zero(), abstract_account_hash: H256::zero(), }, }); - 
assert!(indexer.is_valid_message(&system_bootstrapping)); + assert!(indexer.is_valid_system_message(&system_bootstrapping)); } #[tokio::test] diff --git a/core/lib/via_btc_client/src/indexer/parser.rs b/core/lib/via_btc_client/src/indexer/parser.rs index fc43776d1..833634eab 100644 --- a/core/lib/via_btc_client/src/indexer/parser.rs +++ b/core/lib/via_btc_client/src/indexer/parser.rs @@ -3,7 +3,7 @@ use bitcoin::{ hashes::Hash, script::{Instruction, PushBytesBuf}, taproot::{ControlBlock, Signature as TaprootSignature}, - Address, Amount, CompressedPublicKey, Network, ScriptBuf, Transaction, Txid, Witness, + Address, Amount, CompressedPublicKey, Network, ScriptBuf, Transaction, TxOut, Txid, Witness, }; use tracing::{debug, instrument, warn}; use zksync_basic_types::H256; @@ -12,10 +12,10 @@ use zksync_types::{Address as EVMAddress, L1BatchNumber}; use crate::{ types, types::{ - CommonFields, FullInscriptionMessage as Message, L1BatchDAReference, - L1BatchDAReferenceInput, L1ToL2Message, L1ToL2MessageInput, ProofDAReference, - ProofDAReferenceInput, ProposeSequencer, ProposeSequencerInput, SystemBootstrapping, - SystemBootstrappingInput, ValidatorAttestation, ValidatorAttestationInput, Vote, + CommonFields, FullInscriptionMessage, L1BatchDAReference, L1BatchDAReferenceInput, + L1ToL2Message, L1ToL2MessageInput, ProofDAReference, ProofDAReferenceInput, + ProposeSequencer, ProposeSequencerInput, SystemBootstrapping, SystemBootstrappingInput, + ValidatorAttestation, ValidatorAttestationInput, Vote, }, }; @@ -43,7 +43,11 @@ impl MessageParser { } #[instrument(skip(self, tx), target = "bitcoin_indexer::parser")] - pub fn parse_transaction(&mut self, tx: &Transaction, block_height: u32) -> Vec { + pub fn parse_system_transaction( + &mut self, + tx: &Transaction, + block_height: u32, + ) -> Vec { // parsing btc address let mut sender_addresses: Option
= None; for input in tx.input.iter() { @@ -58,7 +62,9 @@ impl MessageParser { // parsing messages tx.input .iter() - .filter_map(|input| self.parse_input(input, tx, block_height, address.clone())) + .filter_map(|input| { + self.parse_system_input(input, tx, block_height, address.clone()) + }) .collect() } None => { @@ -67,14 +73,50 @@ impl MessageParser { } } + #[instrument(skip(self, tx), target = "bitcoin_indexer::parser")] + pub fn parse_bridge_transaction( + &mut self, + tx: &Transaction, + block_height: u32, + ) -> Vec { + let mut messages = Vec::new(); + + // Find bridge output and amount first + let bridge_output = match tx.output.iter().find(|output| { + if let Some(bridge_addr) = &self.bridge_address { + output.script_pubkey == bridge_addr.script_pubkey() + } else { + false + } + }) { + Some(output) => output, + None => return messages, // Not a bridge transaction + }; + + // Try to parse as inscription-based deposit first + if let Some(inscription_message) = self.parse_inscription_deposit(tx, block_height) { + messages.push(inscription_message); + return messages; + } + + // If not an inscription, try to parse as OP_RETURN based deposit + if let Some(op_return_message) = + self.parse_op_return_deposit(tx, block_height, bridge_output) + { + messages.push(op_return_message); + } + + messages + } + #[instrument(skip(self, input, tx), target = "bitcoin_indexer::parser")] - fn parse_input( + fn parse_system_input( &mut self, input: &bitcoin::TxIn, tx: &Transaction, block_height: u32, address: Address, - ) -> Option { + ) -> Option { let witness = &input.witness; if witness.len() < MIN_WITNESS_LENGTH { return None; @@ -111,14 +153,14 @@ impl MessageParser { encoded_public_key: PushBytesBuf::from(public_key.serialize()), block_height, tx_id: tx.compute_ntxid().into(), - p2wpkh_address: address, + p2wpkh_address: Some(address), }; - self.parse_message(tx, &instructions[via_index..], &common_fields) + self.parse_system_message(tx, &instructions[via_index..], 
&common_fields) } #[instrument(skip(self), target = "bitcoin_indexer::parser")] - fn parse_p2wpkh(&mut self, witness: &Witness) -> Option
{ + pub fn parse_p2wpkh(&self, witness: &Witness) -> Option
{ if witness.len() == 2 { let public_key = bitcoin::PublicKey::from_slice(&witness[1]).ok()?; let cm_pk = CompressedPublicKey::try_from(public_key).ok()?; @@ -133,12 +175,12 @@ impl MessageParser { skip(self, tx, instructions, common_fields), target = "bitcoin_indexer::parser" )] - fn parse_message( + fn parse_system_message( &mut self, tx: &Transaction, instructions: &[Instruction], common_fields: &CommonFields, - ) -> Option { + ) -> Option { let message_type = instructions.get(1)?; match message_type { @@ -177,7 +219,7 @@ impl MessageParser { self.parse_l1_to_l2_message(tx, instructions, common_fields) } Instruction::PushBytes(bytes) => { - warn!("Unknown message type"); + warn!("Unknown message type for system transaction parser"); warn!( "first instruction: {:?}", String::from_utf8(bytes.as_bytes().to_vec()) @@ -200,7 +242,7 @@ impl MessageParser { &mut self, instructions: &[Instruction], common_fields: &CommonFields, - ) -> Option { + ) -> Option { if instructions.len() < MIN_SYSTEM_BOOTSTRAPPING_INSTRUCTIONS { warn!("Insufficient instructions for system bootstrapping"); return None; @@ -275,16 +317,18 @@ impl MessageParser { debug!("Parsed abstract account hash"); - Some(Message::SystemBootstrapping(SystemBootstrapping { - common: common_fields.clone(), - input: SystemBootstrappingInput { - start_block_height, - bridge_p2wpkh_mpc_address: network_unchecked_bridge_address, - verifier_p2wpkh_addresses: network_unchecked_verifier_addresses, - bootloader_hash, - abstract_account_hash, + Some(FullInscriptionMessage::SystemBootstrapping( + SystemBootstrapping { + common: common_fields.clone(), + input: SystemBootstrappingInput { + start_block_height, + bridge_musig2_address: network_unchecked_bridge_address, + verifier_p2wpkh_addresses: network_unchecked_verifier_addresses, + bootloader_hash, + abstract_account_hash, + }, }, - })) + )) } #[instrument( @@ -295,7 +339,7 @@ impl MessageParser { &self, instructions: &[Instruction], common_fields: &CommonFields, - ) -> 
Option { + ) -> Option { if instructions.len() < MIN_PROPOSE_SEQUENCER_INSTRUCTIONS { warn!("Insufficient instructions for propose sequencer"); return None; @@ -316,7 +360,7 @@ impl MessageParser { debug!("Parsed sequencer address"); - Some(Message::ProposeSequencer(ProposeSequencer { + Some(FullInscriptionMessage::ProposeSequencer(ProposeSequencer { common: common_fields.clone(), input: ProposeSequencerInput { sequencer_new_p2wpkh_address: sequencer_address.as_unchecked().clone(), @@ -332,7 +376,7 @@ impl MessageParser { &self, instructions: &[Instruction], common_fields: &CommonFields, - ) -> Option { + ) -> Option { if instructions.len() < MIN_VALIDATOR_ATTESTATION_INSTRUCTIONS { warn!("Insufficient instructions for validator attestation"); return None; @@ -373,13 +417,15 @@ impl MessageParser { debug!("Parsed attestation: {:?}", attestation); - Some(Message::ValidatorAttestation(ValidatorAttestation { - common: common_fields.clone(), - input: ValidatorAttestationInput { - reference_txid, - attestation, + Some(FullInscriptionMessage::ValidatorAttestation( + ValidatorAttestation { + common: common_fields.clone(), + input: ValidatorAttestationInput { + reference_txid, + attestation, + }, }, - })) + )) } #[instrument( @@ -390,7 +436,7 @@ impl MessageParser { &self, instructions: &[Instruction], common_fields: &CommonFields, - ) -> Option { + ) -> Option { if instructions.len() < MIN_L1_BATCH_DA_REFERENCE_INSTRUCTIONS { warn!("Insufficient instructions for L1 batch DA reference"); return None; @@ -419,15 +465,17 @@ impl MessageParser { .to_string(); debug!("Parsed blob ID: {}", blob_id); - Some(Message::L1BatchDAReference(L1BatchDAReference { - common: common_fields.clone(), - input: L1BatchDAReferenceInput { - l1_batch_hash, - l1_batch_index, - da_identifier, - blob_id, + Some(FullInscriptionMessage::L1BatchDAReference( + L1BatchDAReference { + common: common_fields.clone(), + input: L1BatchDAReferenceInput { + l1_batch_hash, + l1_batch_index, + da_identifier, + 
blob_id, + }, }, - })) + )) } #[instrument( @@ -438,7 +486,7 @@ impl MessageParser { &self, instructions: &[Instruction], common_fields: &CommonFields, - ) -> Option { + ) -> Option { if instructions.len() < MIN_PROOF_DA_REFERENCE_INSTRUCTIONS { warn!("Insufficient instructions for proof DA reference"); return None; @@ -466,7 +514,7 @@ impl MessageParser { .to_string(); debug!("Parsed blob ID: {}", blob_id); - Some(Message::ProofDAReference(ProofDAReference { + Some(FullInscriptionMessage::ProofDAReference(ProofDAReference { common: common_fields.clone(), input: ProofDAReferenceInput { l1_batch_reveal_txid, @@ -485,7 +533,7 @@ impl MessageParser { tx: &Transaction, instructions: &[Instruction], common_fields: &CommonFields, - ) -> Option { + ) -> Option { if instructions.len() < MIN_L1_TO_L2_MESSAGE_INSTRUCTIONS { warn!("Insufficient instructions for L1 to L2 message"); return None; @@ -507,7 +555,7 @@ impl MessageParser { .iter() .find(|output| { if let Some(address) = self.bridge_address.as_ref() { - output.script_pubkey.is_p2wpkh() + output.script_pubkey.is_p2tr() && output.script_pubkey == address.script_pubkey() } else { tracing::error!("Bridge address not found"); @@ -518,7 +566,7 @@ impl MessageParser { .unwrap_or(Amount::ZERO); debug!("Parsed amount: {}", amount); - Some(Message::L1ToL2Message(L1ToL2Message { + Some(FullInscriptionMessage::L1ToL2Message(L1ToL2Message { common: common_fields.clone(), amount, input: L1ToL2MessageInput { @@ -529,6 +577,95 @@ impl MessageParser { tx_outputs: tx.output.clone(), })) } + + fn parse_inscription_deposit( + &self, + tx: &Transaction, + block_height: u32, + ) -> Option { + // Try to find any witness data that contains a valid inscription + for input in tx.input.iter() { + let witness = &input.witness; + if witness.len() < MIN_WITNESS_LENGTH { + continue; + } + + // Parse signature and control block + let signature = TaprootSignature::from_slice(&witness[0]).ok()?; + let script = 
ScriptBuf::from_bytes(witness[1].to_vec()); + let control_block = ControlBlock::decode(&witness[2]).ok()?; + + let instructions: Vec<_> = script.instructions().filter_map(Result::ok).collect(); + let via_index = find_via_inscription_protocol(&instructions)?; + + // Try to parse p2wpkh address if possible, but make it optional + let p2wpkh_address = self.parse_p2wpkh(witness); + + let common_fields = CommonFields { + schnorr_signature: signature, + encoded_public_key: PushBytesBuf::from(control_block.internal_key.serialize()), + block_height, + tx_id: tx.compute_ntxid().into(), + p2wpkh_address, + }; + + // Parse L1ToL2Message from instructions + return self.parse_l1_to_l2_message(tx, &instructions[via_index..], &common_fields); + } + + None + } + + fn parse_op_return_deposit( + &self, + tx: &Transaction, + block_height: u32, + bridge_output: &TxOut, + ) -> Option { + // Find OP_RETURN output + let op_return_output = tx + .output + .iter() + .find(|output| output.script_pubkey.is_op_return())?; + + // Parse OP_RETURN data + let op_return_data = op_return_output.script_pubkey.as_bytes(); + if op_return_data.len() < 2 { + return None; + } + + // Parse receiver address from OP_RETURN data + + let receiver_l2_address = EVMAddress::from_slice(&op_return_data[2..22]); + + let input = L1ToL2MessageInput { + receiver_l2_address, + l2_contract_address: EVMAddress::zero(), + call_data: vec![], + }; + + // Try to parse p2wpkh address from the first input if possible + let p2wpkh_address = tx + .input + .first() + .and_then(|input| self.parse_p2wpkh(&input.witness)); + + // Create common fields with empty signature for OP_RETURN + let common_fields = CommonFields { + schnorr_signature: TaprootSignature::from_slice(&[0; 64]).ok()?, + encoded_public_key: PushBytesBuf::new(), + block_height, + tx_id: tx.compute_ntxid().into(), + p2wpkh_address, + }; + + Some(FullInscriptionMessage::L1ToL2Message(L1ToL2Message { + common: common_fields, + amount: bridge_output.value, + input, + 
tx_outputs: tx.output.clone(), + })) + } } #[instrument(skip(instructions), target = "bitcoin_indexer::parser")] @@ -579,7 +716,7 @@ mod tests { let mut parser = MessageParser::new(network); let tx = setup_test_transaction(); - let messages = parser.parse_transaction(&tx, 0); + let messages = parser.parse_system_transaction(&tx, 0); assert_eq!(messages.len(), 1); } @@ -590,8 +727,8 @@ mod tests { let mut parser = MessageParser::new(network); let tx = setup_test_transaction(); - if let Some(Message::SystemBootstrapping(bootstrapping)) = - parser.parse_transaction(&tx, 0).pop() + if let Some(FullInscriptionMessage::SystemBootstrapping(bootstrapping)) = + parser.parse_system_transaction(&tx, 0).pop() { assert_eq!(bootstrapping.input.start_block_height, 10); assert_eq!(bootstrapping.input.verifier_p2wpkh_addresses.len(), 1); diff --git a/core/lib/via_btc_client/src/inscriber/internal_type.rs b/core/lib/via_btc_client/src/inscriber/internal_type.rs index 6ea40ad63..d79778435 100644 --- a/core/lib/via_btc_client/src/inscriber/internal_type.rs +++ b/core/lib/via_btc_client/src/inscriber/internal_type.rs @@ -13,7 +13,7 @@ pub struct CommitTxOutputRes { pub commit_tx_change_output: TxOut, pub commit_tx_tapscript_output: TxOut, pub commit_tx_fee_rate: u64, - pub _commit_tx_fee: Amount, + pub commit_tx_fee: Amount, } #[derive(Debug)] diff --git a/core/lib/via_btc_client/src/inscriber/mod.rs b/core/lib/via_btc_client/src/inscriber/mod.rs index 473cb8fab..7b0cb7baa 100644 --- a/core/lib/via_btc_client/src/inscriber/mod.rs +++ b/core/lib/via_btc_client/src/inscriber/mod.rs @@ -27,7 +27,7 @@ use crate::{ }, signer::KeyManager, traits::{BitcoinOps, BitcoinSigner}, - types::{BitcoinNetwork, InscriberContext, InscriptionConfig, InscriptionMessage, Recipient}, + types::{BitcoinNetwork, InscriberContext, InscriptionMessage, Recipient}, }; mod fee; @@ -45,6 +45,10 @@ const REVEAL_TX_FEE_INPUT_INDEX: u32 = 0; const REVEAL_TX_TAPSCRIPT_REVEAL_INDEX: u32 = 1; const 
FEE_RATE_INCREASE_PER_PENDING_TX: u64 = 5; // percentage +/// The fee percentage amount reduced from the commit transaction and added to the reveal transaction. +const FEE_RATE_DECREASE_COMMIT_TX: u64 = 20; +/// The additional fee percentage added to the base reveal transaction fee serves as an incentive for the minter to include commit and reveal transactions. +const FEE_RATE_INCENTIVE: u64 = 5; const COMMIT_TX_P2TR_OUTPUT_COUNT: u32 = 1; const COMMIT_TX_P2WPKH_OUTPUT_COUNT: u32 = 1; @@ -97,7 +101,6 @@ impl Inscriber { pub async fn prepare_inscribe( &mut self, input: &InscriptionMessage, - config: InscriptionConfig, recipient: Option, ) -> Result { self.sync_context_with_blockchain().await?; @@ -114,7 +117,6 @@ impl Inscriber { .prepare_commit_tx_output( &commit_tx_input_info, inscription_data.script_pubkey.clone(), - config, ) .await?; @@ -127,7 +129,12 @@ impl Inscriber { )?; let reveal_tx_output_info = self - .prepare_reveal_tx_output(&reveal_tx_input_info, &inscription_data, recipient) + .prepare_reveal_tx_output( + &reveal_tx_input_info, + &inscription_data, + recipient, + commit_tx_output_info.commit_tx_fee, + ) .await?; let final_reveal_tx = self.sign_reveal_tx( @@ -149,13 +156,12 @@ impl Inscriber { pub async fn inscribe_with_recipient( &mut self, input: InscriptionMessage, - config: InscriptionConfig, recipient: Option, ) -> Result { info!("Starting inscription process"); let inscriber_info = self - .prepare_inscribe(&input, config, recipient) + .prepare_inscribe(&input, recipient) .await .context("Error prepare inscriber infos")?; @@ -172,12 +178,8 @@ impl Inscriber { } #[instrument(skip(self, input), target = "bitcoin_inscriber")] - pub async fn inscribe( - &mut self, - input: InscriptionMessage, - config: InscriptionConfig, - ) -> Result { - self.inscribe_with_recipient(input, config, None).await + pub async fn inscribe(&mut self, input: InscriptionMessage) -> Result { + self.inscribe_with_recipient(input, None).await } #[instrument(skip(self), target 
= "bitcoin_inscriber")] @@ -336,7 +338,6 @@ impl Inscriber { &self, tx_input_data: &CommitTxInputRes, inscription_pubkey: ScriptBuf, - config: InscriptionConfig, ) -> Result { debug!("Preparing commit transaction output"); let inscription_commitment_output = TxOut { @@ -344,16 +345,9 @@ impl Inscriber { script_pubkey: inscription_pubkey, }; - let mut fee_rate = self.get_fee_rate().await?; - let pending_tx_in_context = self.context.fifo_queue.len(); - - // increase fee rate based on pending transactions in context - - let increase_factor = (FEE_RATE_INCREASE_PER_PENDING_TX * config.fee_multiplier) - * pending_tx_in_context as u64; - fee_rate += fee_rate * increase_factor / 100; + let fee_rate = self.get_fee_rate().await?; - let fee_amount = InscriberFeeCalculator::estimate_fee( + let mut fee_amount = InscriberFeeCalculator::estimate_fee( tx_input_data.inputs_count, COMMIT_TX_P2TR_INPUT_COUNT, COMMIT_TX_P2WPKH_OUTPUT_COUNT, @@ -361,6 +355,8 @@ impl Inscriber { vec![], fee_rate, )?; + let fee_amount_before_decrease = fee_amount; + fee_amount -= (fee_amount * FEE_RATE_DECREASE_COMMIT_TX) / 100; let commit_tx_change_output_value = tx_input_data .unlocked_value @@ -384,7 +380,7 @@ impl Inscriber { commit_tx_change_output, commit_tx_tapscript_output: inscription_commitment_output, commit_tx_fee_rate: fee_rate, - _commit_tx_fee: fee_amount, + commit_tx_fee: fee_amount_before_decrease, }; Ok(res) @@ -395,7 +391,7 @@ impl Inscriber { debug!("Getting fee rate"); let res = self.client.get_fee_rate(FEE_RATE_CONF_TARGET).await?; debug!("Fee rate obtained: {}", res); - Ok(res) + Ok(std::cmp::max(res, 1)) } #[instrument(skip(self, input, output), target = "bitcoin_inscriber")] @@ -558,19 +554,40 @@ impl Inscriber { tx_input_data: &RevealTxInputRes, inscription_data: &InscriptionData, recipient: Option, + commit_tx_fee: Amount, ) -> Result { debug!("Preparing reveal transaction output"); let fee_rate = self.get_fee_rate().await?; + let pending_tx_in_context = 
self.context.fifo_queue.len(); + + let mut reveal_tx_p2wpkh_output_count = REVEAL_TX_P2WPKH_OUTPUT_COUNT; + let mut reveal_tx_p2tr_output_count = REVEAL_TX_P2TR_OUTPUT_COUNT; + + if let Some(r) = &recipient { + if r.address.script_pubkey().is_p2tr() { + reveal_tx_p2tr_output_count += 1; + } else { + reveal_tx_p2wpkh_output_count += 1; + }; + } - let fee_amount = InscriberFeeCalculator::estimate_fee( + let mut fee_amount = InscriberFeeCalculator::estimate_fee( REVEAL_TX_P2WPKH_INPUT_COUNT, REVEAL_TX_P2TR_INPUT_COUNT, - REVEAL_TX_P2WPKH_OUTPUT_COUNT + recipient.as_ref().map_or(0, |_| 1), - REVEAL_TX_P2TR_OUTPUT_COUNT, + reveal_tx_p2wpkh_output_count, + reveal_tx_p2tr_output_count, vec![inscription_data.script_size], fee_rate, )?; + let txs_stuck_factor = FEE_RATE_INCREASE_PER_PENDING_TX * pending_tx_in_context as u64; + + let increase_factor = txs_stuck_factor + FEE_RATE_INCENTIVE; + + fee_amount += (fee_amount * increase_factor) / 100; + // Add the fee amount removed from the commit tx to reveal + fee_amount += (commit_tx_fee * FEE_RATE_DECREASE_COMMIT_TX) / 100; + let recipient_amount = recipient.as_ref().map_or(Amount::ZERO, |r| r.amount); let reveal_change_amount = tx_input_data @@ -808,6 +825,7 @@ mod tests { Block, BlockHash, CompressedPublicKey, OutPoint, PrivateKey, ScriptBuf, Transaction, TxOut, Txid, }; + use bitcoincore_rpc::json::GetBlockStatsResult; use mockall::{mock, predicate::*}; use super::*; @@ -829,6 +847,12 @@ mod tests { async fn fetch_block_height(&self) -> BitcoinClientResult; async fn get_fee_rate(&self, conf_target: u16) -> BitcoinClientResult; fn get_network(&self) -> BitcoinNetwork; + async fn get_block_stats(&self, height: u64) -> BitcoinClientResult; + async fn get_fee_history( + &self, + from_block_height: usize, + to_block_height: usize, + ) -> BitcoinClientResult>; } } @@ -940,10 +964,7 @@ mod tests { let inscribe_message = InscriptionMessage::L1BatchDAReference(l1_da_batch_ref); - let res = inscriber - .inscribe(inscribe_message, 
InscriptionConfig::default()) - .await - .unwrap(); + let res = inscriber.inscribe(inscribe_message).await.unwrap(); assert_ne!(res.final_commit_tx.txid, Txid::all_zeros()); assert_ne!(res.final_reveal_tx.txid, Txid::all_zeros()); diff --git a/core/lib/via_btc_client/src/inscriber/script_builder.rs b/core/lib/via_btc_client/src/inscriber/script_builder.rs index b00ab0b6e..7418df5ea 100644 --- a/core/lib/via_btc_client/src/inscriber/script_builder.rs +++ b/core/lib/via_btc_client/src/inscriber/script_builder.rs @@ -232,7 +232,7 @@ impl InscriptionData { } let bridge_address = input - .bridge_p2wpkh_mpc_address + .bridge_musig2_address .clone() .require_network(network)?; let bridge_address_encoded = Self::encode_push_bytes(bridge_address.to_string().as_bytes()); diff --git a/core/lib/via_btc_client/src/inscriber/test_utils.rs b/core/lib/via_btc_client/src/inscriber/test_utils.rs index fa5509b1c..abc03f8d3 100644 --- a/core/lib/via_btc_client/src/inscriber/test_utils.rs +++ b/core/lib/via_btc_client/src/inscriber/test_utils.rs @@ -8,9 +8,10 @@ use bitcoin::{ ecdsa::Signature as ECDSASignature, schnorr::Signature as SchnorrSignature, All, Keypair, Message, PublicKey, Secp256k1, }, - Address, Block, BlockHash, CompressedPublicKey, Network, OutPoint, PrivateKey, ScriptBuf, - Transaction, TxOut, Txid, + Address, Amount, Block, BlockHash, CompressedPublicKey, Network, OutPoint, PrivateKey, + ScriptBuf, Transaction, TxOut, Txid, }; +use bitcoincore_rpc::json::{FeeRatePercentiles, GetBlockStatsResult}; use super::Inscriber; use crate::{ @@ -27,6 +28,7 @@ pub struct MockBitcoinOpsConfig { pub tx_confirmation: bool, pub transaction: Option, pub block: Option, + pub fee_history: Vec, } impl MockBitcoinOpsConfig { @@ -37,9 +39,13 @@ impl MockBitcoinOpsConfig { pub fn set_tx_confirmation(&mut self, tx_confirmation: bool) { self.tx_confirmation = tx_confirmation; } + + pub fn set_fee_history(&mut self, fees: Vec) { + self.fee_history = fees; + } } -#[derive(Debug, Default)] 
+#[derive(Debug, Default, Clone)] pub struct MockBitcoinOps { pub balance: u128, pub utxos: Vec<(OutPoint, TxOut)>, @@ -48,6 +54,7 @@ pub struct MockBitcoinOps { pub tx_confirmation: bool, pub transaction: Option, pub block: Option, + pub fee_history: Vec, } impl MockBitcoinOps { @@ -60,6 +67,7 @@ impl MockBitcoinOps { tx_confirmation: config.tx_confirmation, transaction: config.transaction, block: config.block, + fee_history: config.fee_history, } } } @@ -78,7 +86,16 @@ impl BitcoinOps for MockBitcoinOps { } async fn fetch_utxos(&self, _address: &Address) -> BitcoinClientResult> { - BitcoinClientResult::Ok(self.utxos.clone()) + BitcoinClientResult::Ok(vec![( + OutPoint { + txid: Txid::all_zeros(), + vout: 0, + }, + TxOut { + value: Amount::from_btc(1.0).unwrap(), + script_pubkey: _address.script_pubkey(), + }, + )]) } async fn check_tx_confirmation( @@ -112,6 +129,50 @@ impl BitcoinOps for MockBitcoinOps { async fn fetch_block_by_hash(&self, _block_hash: &BlockHash) -> BitcoinClientResult { BitcoinClientResult::Ok(self.block.clone().expect("No block found")) } + + async fn get_fee_history(&self, _: usize, _: usize) -> BitcoinClientResult> { + BitcoinClientResult::Ok(self.fee_history.clone()) + } + + async fn get_block_stats(&self, height: u64) -> BitcoinClientResult { + BitcoinClientResult::Ok(GetBlockStatsResult { + avg_fee: Amount::ZERO, + avg_fee_rate: Amount::ZERO, + avg_tx_size: 0, + block_hash: BlockHash::all_zeros(), + fee_rate_percentiles: FeeRatePercentiles { + fr_10th: Amount::ZERO, + fr_25th: Amount::ZERO, + fr_50th: Amount::ZERO, + fr_75th: Amount::ZERO, + fr_90th: Amount::ZERO, + }, + height, + ins: 0, + max_fee: Amount::ZERO, + max_fee_rate: Amount::ZERO, + max_tx_size: 0, + median_fee: Amount::ZERO, + median_time: 0, + median_tx_size: 0, + min_fee: Amount::ZERO, + min_fee_rate: Amount::ZERO, + min_tx_size: 0, + outs: 0, + subsidy: Amount::ZERO, + sw_total_size: 0, + sw_total_weight: 0, + sw_txs: 0, + time: 0, + total_out: Amount::ZERO, + total_size: 
0, + total_weight: 0, + total_fee: Amount::ZERO, + txs: 0, + utxo_increase: 0, + utxo_size_inc: 0, + }) + } } #[derive(Debug, Clone)] diff --git a/core/lib/via_btc_client/src/lib.rs b/core/lib/via_btc_client/src/lib.rs index 44969c42f..263cab453 100644 --- a/core/lib/via_btc_client/src/lib.rs +++ b/core/lib/via_btc_client/src/lib.rs @@ -1,10 +1,11 @@ pub mod traits; pub mod types; -pub(crate) mod client; +pub mod client; pub mod indexer; pub mod inscriber; #[cfg(feature = "regtest")] pub mod regtest; pub(crate) mod signer; -pub(crate) mod utils; +pub mod utils; +pub mod withdrawal_builder; diff --git a/core/lib/via_btc_client/src/traits.rs b/core/lib/via_btc_client/src/traits.rs index e2b38f2f8..35ee9542c 100644 --- a/core/lib/via_btc_client/src/traits.rs +++ b/core/lib/via_btc_client/src/traits.rs @@ -6,7 +6,7 @@ use bitcoin::{ secp256k1::{All, Secp256k1}, Address, Block, BlockHash, Network, OutPoint, ScriptBuf, Transaction, TxOut, Txid, }; -use bitcoincore_rpc::bitcoincore_rpc_json::GetBlockchainInfoResult; +use bitcoincore_rpc::{bitcoincore_rpc_json::GetBlockchainInfoResult, json::GetBlockStatsResult}; use secp256k1::{ ecdsa::Signature as ECDSASignature, schnorr::Signature as SchnorrSignature, Message, PublicKey, }; @@ -37,6 +37,12 @@ pub trait BitcoinOps: Send + Sync { async fn get_transaction(&self, txid: &Txid) -> BitcoinClientResult; async fn fetch_block_by_hash(&self, block_hash: &BlockHash) -> BitcoinClientResult; + async fn get_block_stats(&self, height: u64) -> BitcoinClientResult; + async fn get_fee_history( + &self, + from_block_height: usize, + to_block_height: usize, + ) -> BitcoinClientResult>; } impl std::fmt::Debug for dyn BitcoinOps + 'static { @@ -61,6 +67,7 @@ pub trait BitcoinRpc: Send + Sync { async fn get_block_by_hash(&self, block_hash: &BlockHash) -> BitcoinRpcResult; async fn get_best_block_hash(&self) -> BitcoinRpcResult; + async fn get_block_stats(&self, height: u64) -> BitcoinRpcResult; async fn get_raw_transaction_info( &self, txid: 
&Txid, diff --git a/core/lib/via_btc_client/src/types.rs b/core/lib/via_btc_client/src/types.rs index ff92e5544..f60ef0a6b 100644 --- a/core/lib/via_btc_client/src/types.rs +++ b/core/lib/via_btc_client/src/types.rs @@ -2,8 +2,8 @@ use std::collections::VecDeque; use bincode::{deserialize, serialize}; use bitcoin::{ - address::NetworkUnchecked, script::PushBytesBuf, taproot::Signature as TaprootSignature, - Amount, TxIn, TxOut, Txid, + address::NetworkUnchecked, hashes::FromSliceError, script::PushBytesBuf, + taproot::Signature as TaprootSignature, Amount, TxIn, TxOut, Txid, }; pub use bitcoin::{Address as BitcoinAddress, Network as BitcoinNetwork, Txid as BitcoinTxid}; pub use bitcoincore_rpc::Auth as NodeAuth; @@ -67,14 +67,14 @@ pub struct CommonFields { pub encoded_public_key: PushBytesBuf, pub block_height: u32, pub tx_id: Txid, - pub p2wpkh_address: BitcoinAddress, + pub p2wpkh_address: Option, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct SystemBootstrappingInput { pub start_block_height: u32, pub verifier_p2wpkh_addresses: Vec>, - pub bridge_p2wpkh_mpc_address: BitcoinAddress, + pub bridge_musig2_address: BitcoinAddress, pub bootloader_hash: H256, pub abstract_account_hash: H256, } @@ -134,17 +134,6 @@ impl Serializable for InscriptionMessage { } } -#[derive(Debug)] -pub struct InscriptionConfig { - pub fee_multiplier: u64, -} - -impl Default for InscriptionConfig { - fn default() -> Self { - InscriptionConfig { fee_multiplier: 1 } - } -} - #[derive(Debug)] pub struct Recipient { pub address: BitcoinAddress, @@ -246,7 +235,7 @@ pub struct InscriberOutput { pub is_broadcasted: bool, } -#[derive(Debug, Error)] +#[derive(Debug, Error, Clone)] pub enum BitcoinError { #[error("RPC error: {0}")] Rpc(String), @@ -323,6 +312,8 @@ pub enum IndexerError { InvalidBlockHeight(u32), #[error("Bitcoin client error: {0}")] BitcoinClientError(#[from] BitcoinError), + #[error("Tx_id parsing error: {0}")] + TxIdParsingError(#[from] FromSliceError), } 
pub type BitcoinIndexerResult = std::result::Result; diff --git a/core/lib/via_btc_client/src/utils.rs b/core/lib/via_btc_client/src/utils.rs index 719f7bf98..31f76fe6c 100644 --- a/core/lib/via_btc_client/src/utils.rs +++ b/core/lib/via_btc_client/src/utils.rs @@ -1,5 +1,8 @@ +use bitcoin::{hashes::Hash, Txid}; use tokio::time::Duration; +use crate::types; + pub(crate) async fn with_retry( f: F, max_retries: u8, @@ -28,3 +31,8 @@ where } } } + +pub fn bytes_to_txid(bytes: &[u8]) -> Result { + let txid = Txid::from_slice(bytes).map_err(types::IndexerError::TxIdParsingError)?; + Ok(txid) +} diff --git a/core/lib/via_btc_client/src/withdrawal_builder/mod.rs b/core/lib/via_btc_client/src/withdrawal_builder/mod.rs new file mode 100644 index 000000000..ef61d68fd --- /dev/null +++ b/core/lib/via_btc_client/src/withdrawal_builder/mod.rs @@ -0,0 +1,432 @@ +// Withdrawal Builder Service +// This service has main method, that receives a list of bitcoin address and amount to withdraw and also L1batch proofDaReference reveal transaction id +// and then it will use client to get available utxo, and then perform utxo selection based on the total amount of the withdrawal +// and now we know the number of input and output we can estimate the fee and perform final utxo selection +// create a unsigned transaction and return it to the caller +use std::{collections::HashMap, sync::Arc}; + +use anyhow::Result; +use bincode::{deserialize, serialize}; +use bitcoin::{ + absolute, hashes::Hash, script::PushBytesBuf, transaction, Address, Amount, OutPoint, + ScriptBuf, Sequence, Transaction, TxIn, TxOut, Txid, Witness, +}; +use serde::{Deserialize, Serialize}; +use tracing::{debug, info, instrument}; + +use crate::{ + client::BitcoinClient, + traits::{BitcoinOps, Serializable}, + types::BitcoinNetwork, +}; + +#[derive(Debug)] +pub struct WithdrawalBuilder { + client: Arc, + bridge_address: Address, +} + +#[derive(Debug)] +pub struct WithdrawalRequest { + pub address: Address, + pub amount: 
Amount, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct UnsignedWithdrawalTx { + pub tx: Transaction, + pub txid: Txid, + pub utxos: Vec<(OutPoint, TxOut)>, + pub change_amount: Amount, +} + +impl Serializable for UnsignedWithdrawalTx { + fn to_bytes(&self) -> Vec { + serialize(self).expect("error serialize the UnsignedWithdrawalTx") + } + + fn from_bytes(bytes: &[u8]) -> Self + where + Self: Sized, + { + deserialize(bytes).expect("error deserialize the UnsignedWithdrawalTx") + } +} + +const OP_RETURN_PREFIX: &[u8] = b"VIA_PROTOCOL:WITHDRAWAL:"; + +impl WithdrawalBuilder { + #[instrument(skip(rpc_url, auth), target = "bitcoin_withdrawal")] + pub async fn new( + rpc_url: &str, + network: BitcoinNetwork, + auth: bitcoincore_rpc::Auth, + bridge_address: Address, + ) -> Result { + info!("Creating new WithdrawalBuilder"); + let client = Arc::new(BitcoinClient::new(rpc_url, network, auth)?); + + Ok(Self { + client, + bridge_address, + }) + } + + #[instrument(skip(self, withdrawals, proof_txid), target = "bitcoin_withdrawal")] + pub async fn create_unsigned_withdrawal_tx( + &self, + withdrawals: Vec, + proof_txid: Txid, + ) -> Result { + debug!("Creating unsigned withdrawal transaction"); + + // Group withdrawals by address and sum amounts + let mut grouped_withdrawals: HashMap = HashMap::new(); + for w in withdrawals { + *grouped_withdrawals.entry(w.address).or_insert(Amount::ZERO) = grouped_withdrawals + .get(&w.address) + .unwrap_or(&Amount::ZERO) + .checked_add(w.amount) + .ok_or_else(|| anyhow::anyhow!("Withdrawal amount overflow when grouping"))?; + } + + // Calculate total withdrawal amount from grouped withdrawals + let total_withdrawal_amount: Amount = grouped_withdrawals + .values() + .try_fold(Amount::ZERO, |acc, amount| acc.checked_add(*amount)) + .ok_or_else(|| anyhow::anyhow!("Withdrawal amount overflow"))?; + + // Get available UTXOs first to estimate number of inputs + let available_utxos = self.get_available_utxos().await?; + + // Get 
fee rate + let fee_rate = std::cmp::max(self.client.get_fee_rate(1).await?, 1); + + // Estimate initial fee with approximate input count + // We'll estimate high initially to avoid underestimating + let estimated_input_count = + self.estimate_input_count(&available_utxos, total_withdrawal_amount)?; + let initial_fee = self.estimate_fee( + estimated_input_count, + grouped_withdrawals.len() as u32 + 2, // +1 for OP_RETURN, +1 for potential change + fee_rate, + )?; + + // Calculate total amount needed including estimated fee + let total_needed = total_withdrawal_amount + .checked_add(initial_fee) + .ok_or_else(|| anyhow::anyhow!("Total amount overflow"))?; + + // Select UTXOs for the total amount including fee + let selected_utxos = self.select_utxos(&available_utxos, total_needed).await?; + + // Calculate total input amount + let total_input_amount: Amount = selected_utxos + .iter() + .try_fold(Amount::ZERO, |acc, (_, txout)| acc.checked_add(txout.value)) + .ok_or_else(|| anyhow::anyhow!("Input amount overflow"))?; + + // Create OP_RETURN output with proof txid + let op_return_data = WithdrawalBuilder::create_op_return_script(proof_txid)?; + let op_return_output = TxOut { + value: Amount::ZERO, + script_pubkey: op_return_data, + }; + + // Calculate actual fee with real input count + let actual_fee = self.estimate_fee( + selected_utxos.len() as u32, + grouped_withdrawals.len() as u32 + 1, // +1 for OP_RETURN output + fee_rate, + )?; + + // Verify we have enough funds with actual fee + let total_needed = total_withdrawal_amount + .checked_add(actual_fee) + .ok_or_else(|| anyhow::anyhow!("Total amount overflow"))?; + + if total_input_amount < total_needed { + return Err(anyhow::anyhow!( + "Insufficient funds: have {}, need {}", + total_input_amount, + total_needed + )); + } + + // Create inputs + let inputs: Vec = selected_utxos + .iter() + .map(|(outpoint, _)| TxIn { + previous_output: *outpoint, + script_sig: ScriptBuf::default(), + sequence: 
Sequence::ENABLE_RBF_NO_LOCKTIME, + witness: Witness::default(), + }) + .collect(); + + // Create outputs for grouped withdrawals + let mut outputs: Vec = grouped_withdrawals + .into_iter() + .map(|(address, amount)| TxOut { + value: amount, + script_pubkey: address.script_pubkey(), + }) + .collect(); + + // Add OP_RETURN output + outputs.push(op_return_output); + + // Add change output if needed + let change_amount = total_input_amount + .checked_sub(total_needed) + .ok_or_else(|| anyhow::anyhow!("Change amount calculation overflow"))?; + + if change_amount.to_sat() > 0 { + outputs.push(TxOut { + value: change_amount, + script_pubkey: self.bridge_address.script_pubkey(), + }); + } + + // Create unsigned transaction + let unsigned_tx = Transaction { + version: transaction::Version::TWO, + lock_time: absolute::LockTime::ZERO, + input: inputs, + output: outputs, + }; + + let txid = unsigned_tx.compute_txid(); + + debug!("Unsigned withdrawal transaction created successfully"); + + Ok(UnsignedWithdrawalTx { + tx: unsigned_tx, + txid, + utxos: selected_utxos, + change_amount, + }) + } + + #[instrument(skip(self), target = "bitcoin_withdrawal")] + async fn get_available_utxos(&self) -> Result> { + let utxos = self.client.fetch_utxos(&self.bridge_address).await?; + Ok(utxos) + } + + #[instrument(skip(self, utxos), target = "bitcoin_withdrawal")] + async fn select_utxos( + &self, + utxos: &[(OutPoint, TxOut)], + target_amount: Amount, + ) -> Result> { + // Simple implementation - could be improved with better UTXO selection algorithm + let mut selected = Vec::new(); + let mut total = Amount::ZERO; + + for utxo in utxos { + selected.push(utxo.clone()); + total = total + .checked_add(utxo.1.value) + .ok_or_else(|| anyhow::anyhow!("Amount overflow during UTXO selection"))?; + + if total >= target_amount { + break; + } + } + + if total < target_amount { + return Err(anyhow::anyhow!( + "Insufficient funds: have {}, need {}", + total, + target_amount + )); + } + + Ok(selected) + 
} + + #[instrument(skip(self), target = "bitcoin_withdrawal")] + fn estimate_fee(&self, input_count: u32, output_count: u32, fee_rate: u64) -> Result { + // Estimate transaction size + let base_size = 10_u64; // version + locktime + let input_size = 148_u64 * u64::from(input_count); // approximate size per input + let output_size = 34_u64 * u64::from(output_count); // approximate size per output + + let total_size = base_size + input_size + output_size; + let fee = fee_rate * total_size; + + Ok(Amount::from_sat(fee)) + } + + // Helper function to create OP_RETURN script + pub fn create_op_return_script(proof_txid: Txid) -> Result { + let mut data = Vec::with_capacity(OP_RETURN_PREFIX.len() + 32); + data.extend_from_slice(OP_RETURN_PREFIX); + data.extend_from_slice(&proof_txid.as_raw_hash().to_byte_array()); + + let mut encoded_data = PushBytesBuf::with_capacity(data.len()); + encoded_data.extend_from_slice(&data).ok(); + + Ok(ScriptBuf::new_op_return(encoded_data)) + } + + #[instrument(skip(self, utxos), target = "bitcoin_withdrawal")] + fn estimate_input_count( + &self, + utxos: &[(OutPoint, TxOut)], + target_amount: Amount, + ) -> Result { + let mut count: u32 = 0; + let mut total = Amount::ZERO; + + for utxo in utxos { + count += 1; + total = total + .checked_add(utxo.1.value) + .ok_or_else(|| anyhow::anyhow!("Amount overflow during input count estimation"))?; + + if total >= target_amount { + break; + } + } + // Add one more to our estimate to be safe + Ok(count.saturating_add(1)) + } +} + +#[cfg(test)] +mod tests { + use std::str::FromStr; + + use async_trait::async_trait; + use bitcoin::Network; + use mockall::{mock, predicate::*}; + + use super::*; + use crate::types::BitcoinError; + + mock! 
{ + BitcoinOpsService {} + #[async_trait] + impl BitcoinOps for BitcoinOpsService { + async fn fetch_utxos(&self, _address: &Address) -> Result, BitcoinError> { + // Mock implementation + let txid = Txid::from_str( + "4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b" + ).unwrap(); + let outpoint = OutPoint::new(txid, 0); + let txout = TxOut { + value: Amount::from_btc(1.0).unwrap(), + script_pubkey: ScriptBuf::new(), + }; + Ok(vec![(outpoint, txout)]) + } + + async fn get_fee_rate(&self, _target_blocks: u16) -> Result { + Ok(2) + } + + async fn broadcast_signed_transaction(&self, _tx_hex: &str) -> Result { + Ok(Txid::from_str("4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b").unwrap()) + } + + async fn check_tx_confirmation(&self, _txid: &Txid, _min_confirmations: u32) -> Result { + Ok(true) + } + + async fn fetch_block_height(&self) -> Result { + Ok(100000) + } + + async fn get_balance(&self, _address: &Address) -> Result { + Ok(100000000) // 1 BTC in sats + } + fn get_network(&self) -> bitcoin::Network { + Network::Regtest + } + + async fn fetch_block(&self, _height: u128) -> Result { + Ok(bitcoin::Block::default()) + } + + async fn get_transaction(&self, _txid: &Txid) -> Result { + Ok(Transaction::default()) + } + + async fn fetch_block_by_hash(&self, _hash: &bitcoin::BlockHash) -> Result { + Ok(bitcoin::Block::default()) + } + + async fn get_block_stats(&self, _height: u64) -> Result { + todo!() + } + + async fn get_fee_history(&self, _start: usize, _end: usize) -> Result, BitcoinError> { + Ok(vec![1]) + } + } + } + + #[tokio::test] + async fn test_withdrawal_builder() -> Result<()> { + let network = Network::Regtest; + let bridge_address = + Address::from_str("bcrt1pxqkh0g270lucjafgngmwv7vtgc8mk9j5y4j8fnrxm77yunuh398qfv8tqp")? 
+ .require_network(network)?; + + // Create mock and set expectations + let mut mock_ops = MockBitcoinOpsService::new(); + mock_ops.expect_fetch_utxos().returning(|_| { + let txid = + Txid::from_str("4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b") + .unwrap(); + let outpoint = OutPoint::new(txid, 0); + let txout = TxOut { + value: Amount::from_btc(1.0).unwrap(), + script_pubkey: ScriptBuf::new(), + }; + Ok(vec![(outpoint, txout)]) + }); + + mock_ops.expect_get_fee_rate().returning(|_| Ok(2)); + + // Use mock client + let builder = WithdrawalBuilder { + client: Arc::new(mock_ops), + bridge_address, + }; + + let withdrawal_address = "bcrt1pv6dtdf0vrrj6ntas926v8vw9u0j3mga29vmfnxh39zfxya83p89qz9ze3l"; + let withdrawal_amount = Amount::from_btc(0.1)?; + + let withdrawals = vec![WithdrawalRequest { + address: Address::from_str(withdrawal_address)?.require_network(network)?, + amount: withdrawal_amount, + }]; + + let proof_txid = + Txid::from_str("4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b")?; + + let withdrawal_tx = builder + .create_unsigned_withdrawal_tx(withdrawals, proof_txid) + .await?; + assert!(!withdrawal_tx.utxos.is_empty()); + + // Verify OP_RETURN output + let op_return_output = withdrawal_tx + .tx + .output + .iter() + .find(|output| output.script_pubkey.is_op_return()) + .expect("OP_RETURN output not found"); + + assert!(op_return_output + .script_pubkey + .as_bytes() + .windows(OP_RETURN_PREFIX.len()) + .any(|window| window == OP_RETURN_PREFIX)); + + Ok(()) + } +} diff --git a/core/node/api_server/src/tx_sender/mod.rs b/core/node/api_server/src/tx_sender/mod.rs index c6f652da0..88aa9ae0b 100644 --- a/core/node/api_server/src/tx_sender/mod.rs +++ b/core/node/api_server/src/tx_sender/mod.rs @@ -107,6 +107,8 @@ pub struct MultiVMBaseSystemContracts { pub(crate) vm_1_5_0_small_memory: BaseSystemContracts, /// Contracts to be used after the 1.5.0 upgrade pub(crate) vm_1_5_0_increased_memory: BaseSystemContracts, + /// 
Contracts to be used after the bitcoin upgrade + pub(crate) vm_bitcoin: BaseSystemContracts, } impl MultiVMBaseSystemContracts { @@ -138,6 +140,7 @@ impl MultiVMBaseSystemContracts { ProtocolVersionId::Version24 | ProtocolVersionId::Version25 => { self.vm_1_5_0_increased_memory } + ProtocolVersionId::Version26 => self.vm_bitcoin, } } } @@ -181,6 +184,7 @@ impl ApiContracts { vm_1_5_0_small_memory: BaseSystemContracts::estimate_gas_1_5_0_small_memory(), vm_1_5_0_increased_memory: BaseSystemContracts::estimate_gas_post_1_5_0_increased_memory(), + vm_bitcoin: BaseSystemContracts::estimate_gas_bitcoin_1_0_0(), }, eth_call: MultiVMBaseSystemContracts { pre_virtual_blocks: BaseSystemContracts::playground_pre_virtual_blocks(), @@ -194,6 +198,7 @@ impl ApiContracts { vm_1_5_0_small_memory: BaseSystemContracts::playground_1_5_0_small_memory(), vm_1_5_0_increased_memory: BaseSystemContracts::playground_post_1_5_0_increased_memory(), + vm_bitcoin: BaseSystemContracts::playground_bitcoin_1_0_0(), }, } } diff --git a/core/node/commitment_generator/src/utils.rs b/core/node/commitment_generator/src/utils.rs index 86643b6b5..90cd477ab 100644 --- a/core/node/commitment_generator/src/utils.rs +++ b/core/node/commitment_generator/src/utils.rs @@ -76,6 +76,14 @@ impl CommitmentComputer for RealCommitmentComputer { .collect(), ), )), + VmVersion::VmBitcoin1_0_0 => Ok(H256( + circuit_sequencer_api_1_5_0::commitments::events_queue_commitment_fixed( + &events_queue + .iter() + .map(|x| to_log_query_1_5_0(*x)) + .collect(), + ), + )), _ => anyhow::bail!("Unsupported protocol version: {protocol_version:?}"), } } @@ -111,6 +119,11 @@ impl CommitmentComputer for RealCommitmentComputer { &full_bootloader_memory, ), )), + VmVersion::VmBitcoin1_0_0 => Ok(H256( + circuit_sequencer_api_1_5_0::commitments::initial_heap_content_commitment_fixed( + &full_bootloader_memory, + ), + )), _ => unreachable!(), } } diff --git a/core/node/node_framework/Cargo.toml b/core/node/node_framework/Cargo.toml index 
be49135cf..655b61ce9 100644 --- a/core/node/node_framework/Cargo.toml +++ b/core/node/node_framework/Cargo.toml @@ -63,6 +63,12 @@ via_btc_sender.workspace = true via_fee_model.workspace = true via_da_dispatcher.workspace = true via_state_keeper.workspace = true +via_withdrawal_service.workspace = true +via_withdrawal_client.workspace = true +via_verifier_dal.workspace = true +via_zk_verifier.workspace = true +via_verifier_btc_watch.workspace = true +via_verifier_btc_sender.workspace = true pin-project-lite.workspace = true tracing.workspace = true thiserror.workspace = true diff --git a/core/node/node_framework/src/implementations/layers/mod.rs b/core/node/node_framework/src/implementations/layers/mod.rs index 980dff48c..9c2116ab9 100644 --- a/core/node/node_framework/src/implementations/layers/mod.rs +++ b/core/node/node_framework/src/implementations/layers/mod.rs @@ -38,7 +38,12 @@ pub mod validate_chain_ids; pub mod via_btc_sender; pub mod via_btc_watch; pub mod via_da_dispatcher; +pub mod via_gas_adjuster; pub mod via_l1_gas; pub mod via_state_keeper; +pub mod via_verifier_btc_watch; +// TODO: TMP in sequencer +pub mod via_verifier; +pub mod via_zk_verification; pub mod vm_runner; pub mod web3_api; diff --git a/core/node/node_framework/src/implementations/layers/pools_layer.rs b/core/node/node_framework/src/implementations/layers/pools_layer.rs index 734f6f0cc..7ed306965 100644 --- a/core/node/node_framework/src/implementations/layers/pools_layer.rs +++ b/core/node/node_framework/src/implementations/layers/pools_layer.rs @@ -2,7 +2,9 @@ use zksync_config::configs::{DatabaseSecrets, PostgresConfig}; use zksync_dal::{ConnectionPool, Core}; use crate::{ - implementations::resources::pools::{MasterPool, PoolResource, ProverPool, ReplicaPool}, + implementations::resources::pools::{ + MasterPool, PoolResource, ProverPool, ReplicaPool, VerifierPool, + }, wiring_layer::{WiringError, WiringLayer}, IntoContext, }; @@ -14,6 +16,7 @@ pub struct PoolsLayerBuilder { 
with_master: bool, with_replica: bool, with_prover: bool, + with_verifier: bool, secrets: DatabaseSecrets, } @@ -26,6 +29,7 @@ impl PoolsLayerBuilder { with_master: false, with_replica: false, with_prover: false, + with_verifier: false, secrets: database_secrets, } } @@ -48,6 +52,12 @@ impl PoolsLayerBuilder { self } + /// Allows to enable the verifier pool. + pub fn with_verifier(mut self, with_verifier: bool) -> Self { + self.with_verifier = with_verifier; + self + } + /// Builds the [`PoolsLayer`] with the provided configuration. pub fn build(self) -> PoolsLayer { PoolsLayer { @@ -56,6 +66,7 @@ impl PoolsLayerBuilder { with_master: self.with_master, with_replica: self.with_replica, with_prover: self.with_prover, + with_verifier: self.with_verifier, } } } @@ -75,6 +86,7 @@ pub struct PoolsLayer { with_master: bool, with_replica: bool, with_prover: bool, + with_verifier: bool, } #[derive(Debug, IntoContext)] @@ -83,6 +95,7 @@ pub struct Output { pub master_pool: Option>, pub replica_pool: Option>, pub prover_pool: Option>, + pub verifier_pool: Option>, } #[async_trait::async_trait] @@ -95,7 +108,7 @@ impl WiringLayer for PoolsLayer { } async fn wire(self, _input: Self::Input) -> Result { - if !self.with_master && !self.with_replica && !self.with_prover { + if !self.with_master && !self.with_replica && !self.with_prover && !self.with_verifier { return Err(WiringError::Configuration( "At least one pool should be enabled".to_string(), )); @@ -148,10 +161,22 @@ impl WiringLayer for PoolsLayer { None }; + let verifier_pool = if self.with_verifier { + Some(PoolResource::::new( + self.secrets.verifier_url()?, + self.config.max_connections()?, + None, + None, + )) + } else { + None + }; + Ok(Output { master_pool, replica_pool, prover_pool, + verifier_pool, }) } } diff --git a/core/node/node_framework/src/implementations/layers/via_btc_sender/mod.rs b/core/node/node_framework/src/implementations/layers/via_btc_sender/mod.rs index 6f0dc526e..3ca69bc78 100644 --- 
a/core/node/node_framework/src/implementations/layers/via_btc_sender/mod.rs +++ b/core/node/node_framework/src/implementations/layers/via_btc_sender/mod.rs @@ -1,2 +1,4 @@ pub mod aggregator; pub mod manager; +pub mod vote; +pub mod vote_manager; diff --git a/core/node/node_framework/src/implementations/layers/via_btc_sender/vote.rs b/core/node/node_framework/src/implementations/layers/via_btc_sender/vote.rs new file mode 100644 index 000000000..fe027b9cd --- /dev/null +++ b/core/node/node_framework/src/implementations/layers/via_btc_sender/vote.rs @@ -0,0 +1,69 @@ +use via_verifier_btc_sender::btc_vote_inscription::ViaVoteInscription; +use zksync_config::ViaBtcSenderConfig; + +use crate::{ + implementations::resources::pools::{PoolResource, VerifierPool}, + service::StopReceiver, + task::{Task, TaskId}, + wiring_layer::{WiringError, WiringLayer}, + FromContext, IntoContext, +}; + +/// Wiring layer for btc_sender vote inscription +/// +/// Responsible for initialization and running of [`ViaVoteInscription`], that create `Vote` inscription requests +#[derive(Debug)] +pub struct ViaBtcVoteInscriptionLayer { + config: ViaBtcSenderConfig, +} + +#[derive(Debug, FromContext)] +#[context(crate = crate)] +pub struct Input { + pub master_pool: PoolResource, +} + +#[derive(Debug, IntoContext)] +#[context(crate = crate)] +pub struct Output { + #[context(task)] + pub via_vote_inscription: ViaVoteInscription, +} + +impl ViaBtcVoteInscriptionLayer { + pub fn new(config: ViaBtcSenderConfig) -> Self { + Self { config } + } +} + +#[async_trait::async_trait] +impl WiringLayer for ViaBtcVoteInscriptionLayer { + type Input = Input; + type Output = Output; + + fn layer_name(&self) -> &'static str { + "via_btc_verifier_vote_inscription_layer" + } + + async fn wire(self, input: Self::Input) -> Result { + // Get resources. 
+ let master_pool = input.master_pool.get().await.unwrap(); + + let via_vote_inscription = ViaVoteInscription::new(master_pool, self.config).await?; + + Ok(Output { + via_vote_inscription, + }) + } +} + +#[async_trait::async_trait] +impl Task for ViaVoteInscription { + fn id(&self) -> TaskId { + "via_vote_inscription".into() + } + + async fn run(self: Box, stop_receiver: StopReceiver) -> anyhow::Result<()> { + (*self).run(stop_receiver.0).await + } +} diff --git a/core/node/node_framework/src/implementations/layers/via_btc_sender/vote_manager.rs b/core/node/node_framework/src/implementations/layers/via_btc_sender/vote_manager.rs new file mode 100644 index 000000000..0bd587046 --- /dev/null +++ b/core/node/node_framework/src/implementations/layers/via_btc_sender/vote_manager.rs @@ -0,0 +1,100 @@ +use anyhow::Context; +use via_btc_client::{inscriber::Inscriber, types::NodeAuth}; +use via_btc_watch::BitcoinNetwork; +use via_verifier_btc_sender::btc_inscription_manager::ViaBtcInscriptionManager; +use zksync_config::ViaBtcSenderConfig; + +use crate::{ + implementations::resources::pools::{PoolResource, VerifierPool}, + service::StopReceiver, + task::{Task, TaskId}, + wiring_layer::{WiringError, WiringLayer}, + FromContext, IntoContext, +}; + +/// Wiring layer for btc_sender to manage `inscriptions_requests` +/// +/// Responsible for initialization and running [`ViaBtcInscriptionTxManager`] component. The layer is responsible +/// to process inscription requests and create btc transactions. 
+/// +/// ## Requests resources +/// +/// - `PoolResource` +/// +/// ## Adds tasks +/// +/// - `ViaBtcInscriptionManager` +#[derive(Debug)] +pub struct ViaInscriptionManagerLayer { + pub config: ViaBtcSenderConfig, +} + +#[derive(Debug, FromContext)] +#[context(crate = crate)] +pub struct Input { + pub master_pool: PoolResource, +} + +#[derive(Debug, IntoContext)] +#[context(crate = crate)] +pub struct Output { + #[context(task)] + pub via_btc_inscription_manager: ViaBtcInscriptionManager, +} + +impl ViaInscriptionManagerLayer { + pub fn new(config: ViaBtcSenderConfig) -> Self { + Self { config } + } +} + +#[async_trait::async_trait] +impl WiringLayer for ViaInscriptionManagerLayer { + type Input = Input; + type Output = Output; + + fn layer_name(&self) -> &'static str { + "via_btc_inscription_manager_layer" + } + + async fn wire(self, input: Self::Input) -> Result { + // Get resources. + let master_pool = input.master_pool.get().await.unwrap(); + + let network = BitcoinNetwork::from_core_arg(self.config.network()) + .map_err(|_| WiringError::Configuration("Wrong network in config".to_string()))?; + + let inscriber = Inscriber::new( + self.config.rpc_url(), + network, + NodeAuth::UserPass( + self.config.rpc_user().to_string(), + self.config.rpc_password().to_string(), + ), + self.config.private_key(), + None, + ) + .await + .context("Init inscriber")?; + + let via_btc_inscription_manager = + ViaBtcInscriptionManager::new(inscriber, master_pool, self.config) + .await + .unwrap(); + + Ok(Output { + via_btc_inscription_manager, + }) + } +} + +#[async_trait::async_trait] +impl Task for ViaBtcInscriptionManager { + fn id(&self) -> TaskId { + "via_btc_inscription_manager".into() + } + + async fn run(self: Box, stop_receiver: StopReceiver) -> anyhow::Result<()> { + (*self).run(stop_receiver.0).await + } +} diff --git a/core/node/node_framework/src/implementations/layers/via_btc_watch.rs b/core/node/node_framework/src/implementations/layers/via_btc_watch.rs index 
532fa6573..da141de3d 100644 --- a/core/node/node_framework/src/implementations/layers/via_btc_watch.rs +++ b/core/node/node_framework/src/implementations/layers/via_btc_watch.rs @@ -89,6 +89,7 @@ impl WiringLayer for BtcWatchLayer { main_pool, self.btc_watch_config.poll_interval(), btc_blocks_lag, + self.btc_watch_config.actor_role(), ) .await?; diff --git a/core/node/node_framework/src/implementations/layers/via_gas_adjuster.rs b/core/node/node_framework/src/implementations/layers/via_gas_adjuster.rs new file mode 100644 index 000000000..a35cbb9eb --- /dev/null +++ b/core/node/node_framework/src/implementations/layers/via_gas_adjuster.rs @@ -0,0 +1,109 @@ +use std::sync::Arc; + +use anyhow::Context; +use via_btc_client::{inscriber::Inscriber, types::NodeAuth}; +use via_btc_watch::BitcoinNetwork; +use via_fee_model::ViaGasAdjuster; +use zksync_config::{GasAdjusterConfig, ViaBtcSenderConfig}; + +use crate::{ + implementations::resources::via_gas_adjuster::ViaGasAdjusterResource, + service::StopReceiver, + task::{Task, TaskId}, + wiring_layer::{WiringError, WiringLayer}, + FromContext, IntoContext, +}; + +/// Wiring layer for sequencer L1 gas interfaces. +/// Adds several resources that depend on L1 gas price. +#[derive(Debug)] +pub struct ViaGasAdjusterLayer { + gas_adjuster_config: GasAdjusterConfig, + config: ViaBtcSenderConfig, +} + +#[derive(Debug, FromContext)] +#[context(crate = crate)] +pub struct Input {} + +#[derive(Debug, IntoContext)] +#[context(crate = crate)] +pub struct Output { + pub gas_adjuster: ViaGasAdjusterResource, + /// Only runs if someone uses the resources listed above. 
+ #[context(task)] + pub gas_adjuster_task: ViaGasAdjusterTask, +} + +impl ViaGasAdjusterLayer { + pub fn new(gas_adjuster_config: GasAdjusterConfig, config: ViaBtcSenderConfig) -> Self { + Self { + gas_adjuster_config, + config, + } + } +} + +#[async_trait::async_trait] +impl WiringLayer for ViaGasAdjusterLayer { + type Input = Input; + type Output = Output; + + fn layer_name(&self) -> &'static str { + "via_gas_adjuster_layer" + } + + async fn wire(self, _: Self::Input) -> Result { + let network = BitcoinNetwork::from_core_arg(self.config.network()) + .map_err(|_| WiringError::Configuration("Wrong network in config".to_string()))?; + + let inscriber = Inscriber::new( + self.config.rpc_url(), + network, + NodeAuth::UserPass( + self.config.rpc_user().to_string(), + self.config.rpc_password().to_string(), + ), + self.config.private_key(), + None, + ) + .await + .context("Init inscriber")?; + + let adjuster = ViaGasAdjuster::new(self.gas_adjuster_config, inscriber) + .await + .context("GasAdjuster::new()")?; + let gas_adjuster = Arc::new(adjuster); + + Ok(Output { + gas_adjuster: gas_adjuster.clone().into(), + gas_adjuster_task: ViaGasAdjusterTask { gas_adjuster }, + }) + } +} + +#[derive(Debug)] +pub struct ViaGasAdjusterTask { + gas_adjuster: Arc, +} + +#[async_trait::async_trait] +impl Task for ViaGasAdjusterTask { + fn id(&self) -> TaskId { + "via_gas_adjuster".into() + } + + async fn run(self: Box, mut stop_receiver: StopReceiver) -> anyhow::Result<()> { + // Gas adjuster layer is added to provide a resource for anyone to use, but it comes with + // a support task. If nobody has used the resource, we don't need to run the support task. 
+ if Arc::strong_count(&self.gas_adjuster) == 1 { + tracing::info!( + "Via gas adjuster is not used by any other task, not running the support task" + ); + stop_receiver.0.changed().await?; + return Ok(()); + } + + self.gas_adjuster.run(stop_receiver.0).await + } +} diff --git a/core/node/node_framework/src/implementations/layers/via_l1_gas.rs b/core/node/node_framework/src/implementations/layers/via_l1_gas.rs index 914c083bd..22f878989 100644 --- a/core/node/node_framework/src/implementations/layers/via_l1_gas.rs +++ b/core/node/node_framework/src/implementations/layers/via_l1_gas.rs @@ -2,10 +2,15 @@ use std::sync::Arc; use via_fee_model::{ViaApiFeeInputProvider, ViaMainNodeFeeInputProvider}; use zksync_config::configs::chain::StateKeeperConfig; +use zksync_node_framework_derive::FromContext; use zksync_types::fee_model::FeeModelConfig; use crate::{ - implementations::resources::fee_input::{ApiFeeInputResource, SequencerFeeInputResource}, + implementations::resources::{ + fee_input::{ApiFeeInputResource, SequencerFeeInputResource}, + pools::{MasterPool, PoolResource}, + via_gas_adjuster::ViaGasAdjusterResource, + }, wiring_layer::{WiringError, WiringLayer}, IntoContext, }; @@ -17,6 +22,13 @@ pub struct ViaL1GasLayer { state_keeper_config: StateKeeperConfig, } +#[derive(Debug, FromContext)] +#[context(crate = crate)] +pub struct Input { + pub master_pool: PoolResource, + pub gas_adjuster: ViaGasAdjusterResource, +} + #[derive(Debug, IntoContext)] #[context(crate = crate)] pub struct Output { @@ -34,20 +46,25 @@ impl ViaL1GasLayer { #[async_trait::async_trait] impl WiringLayer for ViaL1GasLayer { - type Input = (); + type Input = Input; type Output = Output; fn layer_name(&self) -> &'static str { "via_l1_gas_layer" } - async fn wire(self, _input: Self::Input) -> Result { + async fn wire(self, input: Self::Input) -> Result { let main_fee_input_provider = Arc::new(ViaMainNodeFeeInputProvider::new( + input.gas_adjuster.0.clone(), 
FeeModelConfig::from_state_keeper_config(&self.state_keeper_config), )?); - let api_fee_input_provider = - Arc::new(ViaApiFeeInputProvider::new(main_fee_input_provider.clone())); + let main_pool = input.master_pool.get().await?; + + let api_fee_input_provider = Arc::new(ViaApiFeeInputProvider::new( + main_fee_input_provider.clone(), + main_pool, + )); Ok(Output { sequencer_fee_input: main_fee_input_provider.into(), diff --git a/core/node/node_framework/src/implementations/layers/via_verifier/coordinator_api.rs b/core/node/node_framework/src/implementations/layers/via_verifier/coordinator_api.rs new file mode 100644 index 000000000..0044b6700 --- /dev/null +++ b/core/node/node_framework/src/implementations/layers/via_verifier/coordinator_api.rs @@ -0,0 +1,110 @@ +use std::str::FromStr; + +use anyhow::Context; +use via_btc_client::{ + types::{BitcoinAddress, NodeAuth}, + withdrawal_builder::WithdrawalBuilder, +}; +use via_btc_watch::BitcoinNetwork; +use via_verifier_dal::{ConnectionPool, Verifier}; +use via_withdrawal_client::client::WithdrawalClient; +use zksync_config::{ViaBtcSenderConfig, ViaVerifierConfig}; + +use crate::{ + implementations::resources::{ + da_client::DAClientResource, + pools::{PoolResource, VerifierPool}, + }, + service::StopReceiver, + task::{Task, TaskId}, + wiring_layer::{WiringError, WiringLayer}, + FromContext, IntoContext, +}; + +/// Wiring layer for coordinator api +#[derive(Debug)] +pub struct ViaCoordinatorApiLayer { + pub config: ViaVerifierConfig, + pub btc_sender_config: ViaBtcSenderConfig, +} + +#[derive(Debug, FromContext)] +#[context(crate = crate)] +pub struct Input { + pub master_pool: PoolResource, + pub client: DAClientResource, +} + +#[derive(Debug, IntoContext)] +#[context(crate = crate)] +pub struct Output { + #[context(task)] + pub via_coordinator_api_task: ViaCoordinatorApiTask, +} + +#[async_trait::async_trait] +impl WiringLayer for ViaCoordinatorApiLayer { + type Input = Input; + type Output = Output; + + fn 
layer_name(&self) -> &'static str { + "via_coordinator_api_layer" + } + + async fn wire(self, input: Self::Input) -> Result { + let master_pool = input.master_pool.get().await?; + let auth = NodeAuth::UserPass( + self.btc_sender_config.rpc_user().to_string(), + self.btc_sender_config.rpc_password().to_string(), + ); + let network = BitcoinNetwork::from_str(self.btc_sender_config.network()).unwrap(); + let bridge_address = BitcoinAddress::from_str(self.config.bridge_address_str.as_str()) + .context("Error parse bridge address")? + .assume_checked(); + + let withdrawal_builder = WithdrawalBuilder::new( + self.btc_sender_config.rpc_url(), + network, + auth, + bridge_address, + ) + .await?; + + let withdrawal_client = WithdrawalClient::new(input.client.0, network); + let via_coordinator_api_task = ViaCoordinatorApiTask { + master_pool, + config: self.config, + withdrawal_builder, + withdrawal_client, + }; + Ok(Output { + via_coordinator_api_task, + }) + } +} + +#[derive(Debug)] +pub struct ViaCoordinatorApiTask { + master_pool: ConnectionPool, + config: ViaVerifierConfig, + withdrawal_builder: WithdrawalBuilder, + withdrawal_client: WithdrawalClient, +} + +#[async_trait::async_trait] +impl Task for ViaCoordinatorApiTask { + fn id(&self) -> TaskId { + "via_coordinator_api".into() + } + + async fn run(self: Box, stop_receiver: StopReceiver) -> anyhow::Result<()> { + via_withdrawal_service::coordinator::api::start_coordinator_server( + self.config, + self.master_pool, + self.withdrawal_builder, + self.withdrawal_client, + stop_receiver.0, + ) + .await + } +} diff --git a/core/node/node_framework/src/implementations/layers/via_verifier/mod.rs b/core/node/node_framework/src/implementations/layers/via_verifier/mod.rs new file mode 100644 index 000000000..18a507a8f --- /dev/null +++ b/core/node/node_framework/src/implementations/layers/via_verifier/mod.rs @@ -0,0 +1,2 @@ +pub mod coordinator_api; +pub mod verifier; diff --git 
a/core/node/node_framework/src/implementations/layers/via_verifier/verifier.rs b/core/node/node_framework/src/implementations/layers/via_verifier/verifier.rs new file mode 100644 index 000000000..f9dc3ef08 --- /dev/null +++ b/core/node/node_framework/src/implementations/layers/via_verifier/verifier.rs @@ -0,0 +1,86 @@ +use std::{str::FromStr, sync::Arc}; + +use anyhow::Context; +use via_btc_client::{client::BitcoinClient, types::NodeAuth}; +use via_btc_watch::BitcoinNetwork; +use via_withdrawal_client::client::WithdrawalClient; +use via_withdrawal_service::verifier::ViaWithdrawalVerifier; +use zksync_config::{ViaBtcSenderConfig, ViaVerifierConfig}; + +use crate::{ + implementations::resources::{ + da_client::DAClientResource, + pools::{PoolResource, VerifierPool}, + }, + service::StopReceiver, + task::{Task, TaskId}, + wiring_layer::{WiringError, WiringLayer}, + FromContext, IntoContext, +}; + +/// Wiring layer for verifier task +#[derive(Debug)] +pub struct ViaWithdrawalVerifierLayer { + pub config: ViaVerifierConfig, + pub btc_sender_config: ViaBtcSenderConfig, +} + +#[derive(Debug, FromContext)] +#[context(crate = crate)] +pub struct Input { + pub master_pool: PoolResource, + pub client: DAClientResource, +} + +#[derive(IntoContext)] +#[context(crate = crate)] +pub struct Output { + #[context(task)] + pub via_withdrawal_verifier_task: ViaWithdrawalVerifier, +} + +#[async_trait::async_trait] +impl WiringLayer for ViaWithdrawalVerifierLayer { + type Input = Input; + type Output = Output; + + fn layer_name(&self) -> &'static str { + "via_withdrawal_verifier_layer" + } + + async fn wire(self, input: Self::Input) -> Result { + let master_pool = input.master_pool.get().await?; + let auth = NodeAuth::UserPass( + self.btc_sender_config.rpc_user().to_string(), + self.btc_sender_config.rpc_password().to_string(), + ); + let network = BitcoinNetwork::from_str(self.btc_sender_config.network()).unwrap(); + + let btc_client = Arc::new( + 
BitcoinClient::new(self.btc_sender_config.rpc_url(), network, auth) + .context("Error to init the btc client for verifier task")?, + ); + + let withdrawal_client = WithdrawalClient::new(input.client.0, network); + + let via_withdrawal_verifier_task = + ViaWithdrawalVerifier::new(master_pool, btc_client, withdrawal_client, self.config) + .await + .context("Error to init the via withdrawal verifier")?; + + Ok(Output { + via_withdrawal_verifier_task, + }) + } +} + +#[async_trait::async_trait] +impl Task for ViaWithdrawalVerifier { + fn id(&self) -> TaskId { + "via_withdrawal_verifier".into() + } + + async fn run(self: Box, stop_receiver: StopReceiver) -> anyhow::Result<()> { + (*self).run(stop_receiver.0).await + } +} diff --git a/core/node/node_framework/src/implementations/layers/via_verifier_btc_watch.rs b/core/node/node_framework/src/implementations/layers/via_verifier_btc_watch.rs new file mode 100644 index 000000000..6f4a50462 --- /dev/null +++ b/core/node/node_framework/src/implementations/layers/via_verifier_btc_watch.rs @@ -0,0 +1,113 @@ +use via_btc_client::{indexer::BitcoinInscriptionIndexer, types::NodeAuth}; +use via_btc_watch::BitcoinNetwork; +use via_verifier_btc_watch::VerifierBtcWatch; +use zksync_config::ViaBtcWatchConfig; + +use crate::{ + implementations::resources::{ + pools::{PoolResource, VerifierPool}, + via_btc_indexer::BtcIndexerResource, + }, + service::StopReceiver, + task::{Task, TaskId}, + wiring_layer::{WiringError, WiringLayer}, + FromContext, IntoContext, +}; + +/// Wiring layer for bitcoin watcher +/// +/// Responsible for initializing and running of [`VerifierBtcWatch`] component, that polls the Bitcoin node for the relevant events. 
+#[derive(Debug)] +pub struct VerifierBtcWatchLayer { + // TODO: divide into multiple configs + btc_watch_config: ViaBtcWatchConfig, +} + +#[derive(Debug, FromContext)] +#[context(crate = crate)] +pub struct Input { + pub master_pool: PoolResource, +} + +#[derive(Debug, IntoContext)] +#[context(crate = crate)] +pub struct Output { + pub btc_indexer_resource: BtcIndexerResource, + #[context(task)] + pub btc_watch: VerifierBtcWatch, +} + +impl VerifierBtcWatchLayer { + pub fn new(btc_watch_config: ViaBtcWatchConfig) -> Self { + Self { btc_watch_config } + } +} + +#[async_trait::async_trait] +impl WiringLayer for VerifierBtcWatchLayer { + type Input = Input; + type Output = Output; + + fn layer_name(&self) -> &'static str { + "verifier_btc_watch_layer" + } + + async fn wire(self, input: Self::Input) -> Result { + let main_pool = input.master_pool.get().await?; + let network = BitcoinNetwork::from_core_arg(self.btc_watch_config.network()) + .map_err(|_| WiringError::Configuration("Wrong network in config".to_string()))?; + let node_auth = NodeAuth::UserPass( + self.btc_watch_config.rpc_user().to_string(), + self.btc_watch_config.rpc_password().to_string(), + ); + let bootstrap_txids = self + .btc_watch_config + .bootstrap_txids() + .iter() + .map(|txid| { + txid.parse() + .map_err(|_| WiringError::Configuration("Wrong txid in config".to_string())) + }) + .collect::, _>>()?; + let btc_blocks_lag = self.btc_watch_config.btc_blocks_lag(); + + let indexer = BtcIndexerResource::from( + BitcoinInscriptionIndexer::new( + self.btc_watch_config.rpc_url(), + network, + node_auth.clone(), + bootstrap_txids.clone(), + ) + .await + .map_err(|e| WiringError::Internal(e.into()))?, + ); + let btc_watch = VerifierBtcWatch::new( + self.btc_watch_config.rpc_url(), + network, + node_auth, + self.btc_watch_config.confirmations_for_btc_msg, + bootstrap_txids, + main_pool, + self.btc_watch_config.poll_interval(), + btc_blocks_lag, + self.btc_watch_config.actor_role(), + ) + .await?; + + 
Ok(Output { + btc_indexer_resource: indexer, + btc_watch, + }) + } +} + +#[async_trait::async_trait] +impl Task for VerifierBtcWatch { + fn id(&self) -> TaskId { + "verifier_btc_watch".into() + } + + async fn run(self: Box, stop_receiver: StopReceiver) -> anyhow::Result<()> { + (*self).run(stop_receiver.0).await + } +} diff --git a/core/node/node_framework/src/implementations/layers/via_zk_verification.rs b/core/node/node_framework/src/implementations/layers/via_zk_verification.rs new file mode 100644 index 000000000..ab3f0d2a1 --- /dev/null +++ b/core/node/node_framework/src/implementations/layers/via_zk_verification.rs @@ -0,0 +1,100 @@ +use async_trait::async_trait; +use via_btc_client::types::{BitcoinNetwork, NodeAuth}; +use via_zk_verifier::ViaVerifier; +use zksync_config::{ViaBtcWatchConfig, ViaVerifierConfig}; + +use crate::{ + implementations::resources::{ + da_client::DAClientResource, + pools::{PoolResource, VerifierPool}, + }, + service::StopReceiver, + task::{Task, TaskId}, + wiring_layer::{WiringError, WiringLayer}, + FromContext, IntoContext, +}; + +#[derive(Debug)] +pub struct ViaBtcProofVerificationLayer { + pub config: ViaVerifierConfig, + pub btc_watcher_config: ViaBtcWatchConfig, +} + +#[derive(Debug, FromContext)] +#[context(crate = crate)] +pub struct ProofVerificationInput { + pub master_pool: PoolResource, + pub da_client: DAClientResource, +} + +#[derive(Debug, IntoContext)] +#[context(crate = crate)] +pub struct ProofVerificationOutput { + #[context(task)] + pub via_proof_verification: ViaVerifier, +} + +impl ViaBtcProofVerificationLayer { + pub fn new(config: ViaVerifierConfig, btc_watcher_config: ViaBtcWatchConfig) -> Self { + Self { + config, + btc_watcher_config, + } + } +} + +#[async_trait] +impl WiringLayer for ViaBtcProofVerificationLayer { + type Input = ProofVerificationInput; + type Output = ProofVerificationOutput; + + fn layer_name(&self) -> &'static str { + "via_btc_proof_verification_layer" + } + + async fn wire(self, input: 
Self::Input) -> Result { + let main_pool = input.master_pool.get().await?; + let network = BitcoinNetwork::from_core_arg(self.btc_watcher_config.network()) + .map_err(|_| WiringError::Configuration("Wrong network in config".to_string()))?; + let node_auth = NodeAuth::UserPass( + self.btc_watcher_config.rpc_user().to_string(), + self.btc_watcher_config.rpc_password().to_string(), + ); + let bootstrap_txids = self + .btc_watcher_config + .bootstrap_txids() + .iter() + .map(|txid| { + txid.parse() + .map_err(|_| WiringError::Configuration("Wrong txid in config".to_string())) + }) + .collect::, _>>()?; + + let via_proof_verification = ViaVerifier::new( + self.btc_watcher_config.rpc_url(), + network, + node_auth, + bootstrap_txids, + main_pool, + input.da_client.0, + self.config.clone(), + ) + .await + .map_err(WiringError::internal)?; + + Ok(ProofVerificationOutput { + via_proof_verification, + }) + } +} + +#[async_trait::async_trait] +impl Task for ViaVerifier { + fn id(&self) -> TaskId { + "via_proof_verification".into() + } + + async fn run(self: Box, stop_receiver: StopReceiver) -> anyhow::Result<()> { + (*self).run(stop_receiver.0).await + } +} diff --git a/core/node/node_framework/src/implementations/resources/mod.rs b/core/node/node_framework/src/implementations/resources/mod.rs index 374c470d2..e65731499 100644 --- a/core/node/node_framework/src/implementations/resources/mod.rs +++ b/core/node/node_framework/src/implementations/resources/mod.rs @@ -15,5 +15,6 @@ pub mod reverter; pub mod state_keeper; pub mod sync_state; pub mod via_btc_indexer; +pub mod via_gas_adjuster; pub mod via_state_keeper; pub mod web3_api; diff --git a/core/node/node_framework/src/implementations/resources/pools.rs b/core/node/node_framework/src/implementations/resources/pools.rs index 8355bb1bd..0e6176f68 100644 --- a/core/node/node_framework/src/implementations/resources/pools.rs +++ b/core/node/node_framework/src/implementations/resources/pools.rs @@ -7,6 +7,7 @@ use std::{ }; use 
tokio::sync::Mutex; +use via_verifier_dal::Verifier; use zksync_dal::{ConnectionPool, Core}; use zksync_db_connection::connection_pool::ConnectionPoolBuilder; use zksync_prover_dal::Prover; @@ -113,6 +114,10 @@ pub struct ReplicaPool {} #[non_exhaustive] pub struct ProverPool {} +#[derive(Debug, Clone)] +#[non_exhaustive] +pub struct VerifierPool {} + pub trait PoolKind: Clone + Sync + Send + 'static { type DbMarker: zksync_db_connection::connection::DbMarker; @@ -142,3 +147,11 @@ impl PoolKind for ProverPool { "prover" } } + +impl PoolKind for VerifierPool { + type DbMarker = Verifier; + + fn kind_str() -> &'static str { + "verifier" + } +} diff --git a/core/node/node_framework/src/implementations/resources/via_gas_adjuster.rs b/core/node/node_framework/src/implementations/resources/via_gas_adjuster.rs new file mode 100644 index 000000000..5d9ea43d7 --- /dev/null +++ b/core/node/node_framework/src/implementations/resources/via_gas_adjuster.rs @@ -0,0 +1,21 @@ +use std::sync::Arc; + +use via_fee_model::ViaGasAdjuster; + +use crate::resource::Resource; + +/// A resource that provides [`GasAdjuster`] to the service. 
+#[derive(Debug, Clone)] +pub struct ViaGasAdjusterResource(pub Arc); + +impl Resource for ViaGasAdjusterResource { + fn name() -> String { + "common/via_gas_adjuster".into() + } +} + +impl From> for ViaGasAdjusterResource { + fn from(gas_adjuster: Arc) -> Self { + Self(gas_adjuster) + } +} diff --git a/core/node/via_btc_sender/src/aggregator.rs b/core/node/via_btc_sender/src/aggregator.rs index aa2270f27..e992a890b 100644 --- a/core/node/via_btc_sender/src/aggregator.rs +++ b/core/node/via_btc_sender/src/aggregator.rs @@ -81,14 +81,15 @@ impl ViaAggregator { ) .await?; - tracing::debug!( - "Found {} l1 batches ready for commit", - ready_for_commit_l1_batches.len() - ); + if !ready_for_commit_l1_batches.is_empty() { + tracing::debug!( + "Found {} l1 batches ready for commit", + ready_for_commit_l1_batches.len() + ); + } validate_l1_batch_sequence(&ready_for_commit_l1_batches); - tracing::debug!("Extracting ready subrange"); if let Some(l1_batches) = extract_ready_subrange( &mut self.commit_l1_block_criteria, ready_for_commit_l1_batches, diff --git a/core/node/via_btc_sender/src/btc_inscription_aggregator.rs b/core/node/via_btc_sender/src/btc_inscription_aggregator.rs index 9c5ab99ec..af3199fa4 100644 --- a/core/node/via_btc_sender/src/btc_inscription_aggregator.rs +++ b/core/node/via_btc_sender/src/btc_inscription_aggregator.rs @@ -1,16 +1,10 @@ -use std::str::FromStr; - use anyhow::{Context, Result}; use tokio::sync::watch; -use via_btc_client::{ - inscriber::Inscriber, - traits::Serializable, - types::{InscriptionConfig, InscriptionMessage}, -}; +use via_btc_client::{inscriber::Inscriber, traits::Serializable, types::InscriptionMessage}; use zksync_config::ViaBtcSenderConfig; use zksync_contracts::BaseSystemContractsHashes; use zksync_dal::{Connection, ConnectionPool, Core, CoreDal}; -use zksync_types::{ProtocolVersionId, H256}; +use zksync_types::ProtocolVersionId; use crate::aggregator::ViaAggregator; @@ -70,12 +64,12 @@ impl ViaBtcInscriptionAggregator { &mut 
self, storage: &mut Connection<'_, Core>, ) -> Result<(), anyhow::Error> { - let base_system_contracts_hashes = BaseSystemContractsHashes { - bootloader: self.get_bootloader_code_hash().await?, - default_aa: self.get_aa_code_hash().await?, - }; let protocol_version_id = self.get_protocol_version_id().await?; + let base_system_contracts_hashes = self + .load_base_system_contracts(storage, protocol_version_id) + .await?; + if let Some(operation) = self .aggregator .get_next_ready_operation(storage, base_system_contracts_hashes, protocol_version_id) @@ -94,17 +88,17 @@ impl ViaBtcInscriptionAggregator { // Estimate the tx fee to execute the inscription request. let inscribe_info = self .inscriber - .prepare_inscribe(&inscription_message, InscriptionConfig::default(), None) + .prepare_inscribe(&inscription_message, None) .await .context("Via get inscriber info")?; let prediction_fee = inscribe_info.reveal_tx_output_info._reveal_fee - + inscribe_info.commit_tx_output_info._commit_tx_fee; + + inscribe_info.commit_tx_output_info.commit_tx_fee; let inscription_request = transaction .btc_sender_dal() .via_save_btc_inscriptions_request( - operation.get_inscription_request_type(), + operation.get_inscription_request_type().to_string(), InscriptionMessage::to_bytes(&inscription_message), prediction_fee.to_sat(), ) @@ -126,19 +120,28 @@ impl ViaBtcInscriptionAggregator { Ok(()) } - // Todo: call indexer to fetch the data - async fn get_bootloader_code_hash(&self) -> anyhow::Result { - let hex_str = "010008e742608b21bf7eb23c1a9d0602047e3618b464c9b59c0fba3b3d7ab66e"; - Ok(H256::from_str(hex_str).unwrap()) - } - - // Todo: call indexer to fetch the data - async fn get_aa_code_hash(&self) -> anyhow::Result { - let hex_str = "01000563374c277a2c1e34659a2a1e87371bb6d852ce142022d497bfb50b9e32"; - Ok(H256::from_str(hex_str).unwrap()) + async fn load_base_system_contracts( + &self, + storage: &mut Connection<'_, Core>, + protocol_version: ProtocolVersionId, + ) -> anyhow::Result { + let 
base_system_contracts = storage + .protocol_versions_dal() + .load_base_system_contracts_by_version_id(protocol_version as u16) + .await + .context("failed loading base system contracts")?; + if let Some(contracts) = base_system_contracts { + return Ok(BaseSystemContractsHashes { + bootloader: contracts.bootloader.hash, + default_aa: contracts.default_aa.hash, + }); + } + anyhow::bail!( + "Failed to load the base system contracts for version {}", + protocol_version + ) } - // Todo: call indexer to fetch the data async fn get_protocol_version_id(&self) -> anyhow::Result { Ok(ProtocolVersionId::latest()) } diff --git a/core/node/via_btc_sender/src/btc_inscription_manager.rs b/core/node/via_btc_sender/src/btc_inscription_manager.rs index eb46dd7a1..5d1cac3ca 100644 --- a/core/node/via_btc_sender/src/btc_inscription_manager.rs +++ b/core/node/via_btc_sender/src/btc_inscription_manager.rs @@ -1,11 +1,7 @@ use anyhow::{Context, Result}; use bincode::serialize; use tokio::sync::watch; -use via_btc_client::{ - inscriber::Inscriber, - traits::Serializable, - types::{InscriptionConfig, InscriptionMessage}, -}; +use via_btc_client::{inscriber::Inscriber, traits::Serializable, types::InscriptionMessage}; use zksync_config::ViaBtcSenderConfig; use zksync_dal::{Connection, ConnectionPool, Core, CoreDal}; use zksync_types::btc_sender::ViaBtcInscriptionRequest; @@ -45,9 +41,7 @@ impl ViaBtcInscriptionManager { let mut storage = pool.connection_tagged("via_btc_sender").await?; match self.loop_iteration(&mut storage).await { - Ok(()) => { - tracing::info!("Inscription manager task finished"); - } + Ok(()) => {} Err(err) => { tracing::error!("Failed to process btc_sender_inscription_manager: {err}"); } @@ -112,23 +106,10 @@ impl ViaBtcInscriptionManager { { continue; } - - let number_inscription_request_history = storage - .btc_sender_dal() - .get_total_inscription_request_history(inscription.id) - .await?; - - let config = InscriptionConfig { - fee_multiplier: 
number_inscription_request_history as u64 + 1, - }; - - tracing::info!( - "Inscription {reveal_tx} stuck for more than {BLOCK_RESEND} block, retry sending the inscription.", + tracing::warn!( + "Inscription {reveal_tx} stuck for more than {BLOCK_RESEND} block.", reveal_tx = last_inscription_history.reveal_tx_id ); - - self.send_inscription_tx(storage, &inscription, config) - .await?; } } } @@ -168,8 +149,7 @@ impl ViaBtcInscriptionManager { .await?; for inscription in list_new_inscription_request { - self.send_inscription_tx(storage, &inscription, InscriptionConfig::default()) - .await?; + self.send_inscription_tx(storage, &inscription).await?; } } Ok(()) @@ -179,7 +159,6 @@ impl ViaBtcInscriptionManager { &mut self, storage: &mut Connection<'_, Core>, tx: &ViaBtcInscriptionRequest, - config: InscriptionConfig, ) -> anyhow::Result<()> { let sent_at_block = self .inscriber @@ -194,7 +173,7 @@ impl ViaBtcInscriptionManager { let inscribe_info = self .inscriber - .inscribe(input, config) + .inscribe(input) .await .context("Sent inscription tx")?; @@ -204,7 +183,7 @@ impl ViaBtcInscriptionManager { serialize(&inscribe_info.final_reveal_tx.tx).context("Serilize the reveal tx")?; let actual_fees = inscribe_info.reveal_tx_output_info._reveal_fee - + inscribe_info.commit_tx_output_info._commit_tx_fee; + + inscribe_info.commit_tx_output_info.commit_tx_fee; tracing::info!( "New inscription created {commit_tx} {reveal_tx}", diff --git a/core/node/via_btc_sender/src/tests/aggregator_test.rs b/core/node/via_btc_sender/src/tests/aggregator_test.rs index 96d53d98d..a0d713106 100644 --- a/core/node/via_btc_sender/src/tests/aggregator_test.rs +++ b/core/node/via_btc_sender/src/tests/aggregator_test.rs @@ -4,14 +4,15 @@ mod tests { use via_btc_client::{traits::Serializable, types::InscriptionMessage}; use zksync_contracts::BaseSystemContractsHashes; use zksync_dal::{ConnectionPool, Core, CoreDal}; - use zksync_node_test_utils::{create_l1_batch, 
l1_batch_metadata_to_commitment_artifacts}; + use zksync_node_test_utils::l1_batch_metadata_to_commitment_artifacts; use zksync_types::{ btc_block::ViaBtcL1BlockDetails, btc_inscription_operations::ViaBtcInscriptionRequestType, ProtocolVersionId, }; use crate::tests::utils::{ - default_l1_batch_metadata, generate_random_bytes, get_btc_sender_config, ViaAggregatorTest, + create_l1_batch, default_l1_batch_metadata, generate_random_bytes, get_btc_sender_config, + ViaAggregatorTest, }; // Get the current operation (commitBatch or commitProof) to execute when there is no batches. Should return 'None' diff --git a/core/node/via_btc_sender/src/tests/btc_inscription_aggregator_test.rs b/core/node/via_btc_sender/src/tests/btc_inscription_aggregator_test.rs index bc70ce1b8..ab59d2533 100644 --- a/core/node/via_btc_sender/src/tests/btc_inscription_aggregator_test.rs +++ b/core/node/via_btc_sender/src/tests/btc_inscription_aggregator_test.rs @@ -2,19 +2,21 @@ mod tests { use std::str::FromStr; + use chrono::Utc; use tokio::{sync::watch, time}; use zksync_config::ViaBtcSenderConfig; use zksync_contracts::BaseSystemContractsHashes; use zksync_dal::{ConnectionPool, Core, CoreDal}; - use zksync_node_test_utils::{create_l1_batch, l1_batch_metadata_to_commitment_artifacts}; + use zksync_node_test_utils::l1_batch_metadata_to_commitment_artifacts; use zksync_types::{ block::L1BatchHeader, btc_inscription_operations::ViaBtcInscriptionRequestType, btc_sender::ViaBtcInscriptionRequest, ProtocolVersionId, H256, }; use crate::tests::utils::{ - default_l1_batch_metadata, get_btc_sender_config, get_inscription_aggregator_mock, - ViaAggregatorTest, + create_l1_batch, default_l1_batch_metadata, get_btc_sender_config, + get_inscription_aggregator_mock, ViaAggregatorTest, BOOTLOADER_CODE_HASH_TEST, + DEFAULT_AA_CODE_HASH_TEST, }; #[tokio::test] @@ -55,6 +57,19 @@ mod tests { l1_batch_metadata_to_commitment_artifacts(&default_l1_batch_metadata()), ) .await; + let sent_at = Utc::now().naive_utc(); 
+ + let _ = aggregator_test + .storage + .via_data_availability_dal() + .insert_l1_batch_da(header.number, "blob_id", sent_at) + .await; + + let _ = aggregator_test + .storage + .via_data_availability_dal() + .insert_proof_da(header.number, "blob_id", sent_at) + .await; } // ----------------------------------------- EXECTION 1 ----------------------------------------- @@ -96,7 +111,7 @@ mod tests { assert_eq!(inscription_request_list.len(), 1); assert_eq!( inscription_request_list[0].request_type, - ViaBtcInscriptionRequestType::CommitProofOnchain + ViaBtcInscriptionRequestType::CommitProofOnchain.to_string() ); // We confirm that the proof inscription was processed. @@ -223,11 +238,10 @@ mod tests { } pub fn via_create_l1_batch(number: u32) -> L1BatchHeader { - let hex_str = "0000000000000000000000000000000000000000000000000000000000000000"; let mut header = create_l1_batch(number); header.base_system_contracts_hashes = BaseSystemContractsHashes { - bootloader: H256::from_str(hex_str).unwrap(), - default_aa: H256::from_str(hex_str).unwrap(), + bootloader: H256::from_str(BOOTLOADER_CODE_HASH_TEST).unwrap(), + default_aa: H256::from_str(DEFAULT_AA_CODE_HASH_TEST).unwrap(), }; header.protocol_version = Some(ProtocolVersionId::latest()); diff --git a/core/node/via_btc_sender/src/tests/btc_inscription_manager_test.rs b/core/node/via_btc_sender/src/tests/btc_inscription_manager_test.rs index 95ef5880b..b77f8d8cd 100644 --- a/core/node/via_btc_sender/src/tests/btc_inscription_manager_test.rs +++ b/core/node/via_btc_sender/src/tests/btc_inscription_manager_test.rs @@ -2,20 +2,22 @@ mod tests { use std::str::FromStr; + use chrono::Utc; use tokio::{sync::watch, time}; use via_btc_client::inscriber::test_utils::MockBitcoinOpsConfig; use zksync_config::ViaBtcSenderConfig; use zksync_contracts::BaseSystemContractsHashes; use zksync_dal::{ConnectionPool, Core, CoreDal}; - use zksync_node_test_utils::{create_l1_batch, l1_batch_metadata_to_commitment_artifacts}; + use 
zksync_node_test_utils::l1_batch_metadata_to_commitment_artifacts; use zksync_types::{ block::L1BatchHeader, btc_inscription_operations::ViaBtcInscriptionRequestType, ProtocolVersionId, H256, }; use crate::tests::utils::{ - default_l1_batch_metadata, get_btc_sender_config, get_inscription_aggregator_mock, - get_inscription_manager_mock, ViaAggregatorTest, + create_l1_batch, default_l1_batch_metadata, get_btc_sender_config, + get_inscription_aggregator_mock, get_inscription_manager_mock, ViaAggregatorTest, + BOOTLOADER_CODE_HASH_TEST, DEFAULT_AA_CODE_HASH_TEST, }; #[tokio::test] @@ -26,13 +28,14 @@ mod tests { } #[tokio::test] - async fn test_btc_inscription_manager_run_one_inscription_request_with_retry() { + async fn test_btc_inscription_manager_run_one_inscription_request() { let pool = ConnectionPool::::test_pool().await; let config = get_btc_sender_config(1, 1); let mut mock_btc_ops_config = MockBitcoinOpsConfig::default(); mock_btc_ops_config.set_block_height(1); - let number_of_batches = 1; + let number_of_batches: u32 = 1; + let mut protocol_version: Option = None; let mut base_system_contracts_hashes: Option = None; let mut l1_headers = vec![]; @@ -64,116 +67,19 @@ mod tests { l1_batch_metadata_to_commitment_artifacts(&default_l1_batch_metadata()), ) .await; - } - - run_aggregator(pool.clone(), config.clone()).await; - run_manager(pool.clone(), config.clone(), mock_btc_ops_config.clone()).await; - - let inflight_inscriptions_before = aggregator_test - .storage - .btc_sender_dal() - .get_inflight_inscriptions() - .await - .unwrap(); - - assert!(!inflight_inscriptions_before.is_empty()); - - let last_inscription_history_before = aggregator_test - .storage - .btc_sender_dal() - .get_last_inscription_request_history(inflight_inscriptions_before[0].id) - .await - .unwrap(); - - assert!(last_inscription_history_before.is_some()); - - // Simulate the transaction is stuck for 10 blocks - mock_btc_ops_config.set_block_height(10); - - // THis hould create a new 
inscription_history - run_manager(pool.clone(), config.clone(), mock_btc_ops_config.clone()).await; - - let last_inscription_history_after = aggregator_test - .storage - .btc_sender_dal() - .get_last_inscription_request_history(inflight_inscriptions_before[0].id) - .await - .unwrap(); - - assert!(last_inscription_history_after.is_some()); - - assert_ne!( - last_inscription_history_after.unwrap().id, - last_inscription_history_before.unwrap().id - ); - - // Simulate the transaction was processed in next block - mock_btc_ops_config.set_block_height(11); - mock_btc_ops_config.set_tx_confirmation(true); - run_manager(pool.clone(), config.clone(), mock_btc_ops_config.clone()).await; + let sent_at = Utc::now().naive_utc(); - let inflight_inscriptions_after = aggregator_test - .storage - .btc_sender_dal() - .get_inflight_inscriptions() - .await - .unwrap(); - - assert!(inflight_inscriptions_after.is_empty()); - - let last_inscription_history_after = aggregator_test - .storage - .btc_sender_dal() - .get_last_inscription_request_history(inflight_inscriptions_before[0].id) - .await - .unwrap(); - - assert!(last_inscription_history_after - .unwrap() - .confirmed_at - .is_some()); - } - - #[tokio::test] - async fn test_btc_inscription_manager_run_one_inscription_request() { - let pool = ConnectionPool::::test_pool().await; - let config = get_btc_sender_config(1, 1); - let mut mock_btc_ops_config = MockBitcoinOpsConfig::default(); - mock_btc_ops_config.set_block_height(1); - - let number_of_batches: u32 = 1; - - let mut protocol_version: Option = None; - let mut base_system_contracts_hashes: Option = None; - let mut l1_headers = vec![]; - - for batch_number in 1..number_of_batches + 1 { - let header: L1BatchHeader = via_create_l1_batch(batch_number); - l1_headers.push(header.clone()); - - if protocol_version.is_none() { - protocol_version = header.protocol_version; - } - if base_system_contracts_hashes.is_none() { - base_system_contracts_hashes = 
Some(header.base_system_contracts_hashes); - } - } - - let mut aggregator_test = ViaAggregatorTest::new( - protocol_version.unwrap(), - base_system_contracts_hashes.unwrap(), - pool.clone(), - Some(config.clone()), - ) - .await; + let _ = aggregator_test + .storage + .via_data_availability_dal() + .insert_l1_batch_da(header.number, "blob_id", sent_at) + .await; - for header in l1_headers { - aggregator_test - .insert_l1_batch( - header.clone(), - l1_batch_metadata_to_commitment_artifacts(&default_l1_batch_metadata()), - ) + let _ = aggregator_test + .storage + .via_data_availability_dal() + .insert_proof_da(header.number, "blob_id", sent_at) .await; } @@ -203,7 +109,7 @@ mod tests { .is_none()); assert_eq!( inflight_inscriptions[0].request_type, - ViaBtcInscriptionRequestType::CommitL1BatchOnchain + ViaBtcInscriptionRequestType::CommitL1BatchOnchain.to_string() ); // Start the manager @@ -246,7 +152,7 @@ mod tests { .is_none()); assert_eq!( inflight_inscriptions[0].request_type, - ViaBtcInscriptionRequestType::CommitProofOnchain + ViaBtcInscriptionRequestType::CommitProofOnchain.to_string() ); } @@ -314,11 +220,10 @@ mod tests { } pub fn via_create_l1_batch(number: u32) -> L1BatchHeader { - let hex_str = "0000000000000000000000000000000000000000000000000000000000000000"; let mut header = create_l1_batch(number); header.base_system_contracts_hashes = BaseSystemContractsHashes { - bootloader: H256::from_str(hex_str).unwrap(), - default_aa: H256::from_str(hex_str).unwrap(), + bootloader: H256::from_str(BOOTLOADER_CODE_HASH_TEST).unwrap(), + default_aa: H256::from_str(DEFAULT_AA_CODE_HASH_TEST).unwrap(), }; header.protocol_version = Some(ProtocolVersionId::latest()); diff --git a/core/node/via_btc_sender/src/tests/utils.rs b/core/node/via_btc_sender/src/tests/utils.rs index cd66a9920..32eeaf281 100644 --- a/core/node/via_btc_sender/src/tests/utils.rs +++ b/core/node/via_btc_sender/src/tests/utils.rs @@ -19,8 +19,9 @@ use zksync_types::{ btc_block::ViaBtcL1BlockDetails, 
btc_inscription_operations::ViaBtcInscriptionRequestType, commitment::{L1BatchCommitmentArtifacts, L1BatchMetaParameters, L1BatchMetadata}, + l2_to_l1_log::{L2ToL1Log, UserL2ToL1Log}, protocol_version::{L1VerifierConfig, ProtocolSemanticVersion}, - L1BatchNumber, ProtocolVersion, ProtocolVersionId, H256, + L1BatchNumber, ProtocolVersion, ProtocolVersionId, H160, H256, }; use crate::{ @@ -29,6 +30,11 @@ use crate::{ btc_inscription_manager::ViaBtcInscriptionManager, }; +pub const BOOTLOADER_CODE_HASH_TEST: &str = + "010008e74e40a94b1c6e6eb5a1dfbbdbd9eb9e0ec90fd358d29e8c07c30d8491"; +pub const DEFAULT_AA_CODE_HASH_TEST: &str = + "01000563426437b886b132bf5bcf9b0d98c3648f02a6e362893db4345078d09f"; + pub fn generate_random_bytes(length: usize) -> Vec { let mut bytes: Vec = vec![]; for _ in 0..length { @@ -38,8 +44,34 @@ pub fn generate_random_bytes(length: usize) -> Vec { bytes } +/// Creates an L1 batch header with the specified number and deterministic contents. +pub fn create_l1_batch(number: u32) -> L1BatchHeader { + let mut header = L1BatchHeader::new( + L1BatchNumber(number), + number.into(), + BaseSystemContractsHashes { + bootloader: H256::from_str(BOOTLOADER_CODE_HASH_TEST).unwrap(), + default_aa: H256::from_str(DEFAULT_AA_CODE_HASH_TEST).unwrap(), + }, + ProtocolVersionId::latest(), + ); + header.l1_tx_count = 3; + header.l2_tx_count = 5; + header.l2_to_l1_logs.push(UserL2ToL1Log(L2ToL1Log { + shard_id: 0, + is_service: false, + tx_number_in_block: 2, + sender: H160::random(), + key: H256::repeat_byte(3), + value: H256::zero(), + })); + header.l2_to_l1_messages.push(vec![22; 22]); + header.l2_to_l1_messages.push(vec![33; 33]); + + header +} + pub fn default_l1_batch_metadata() -> L1BatchMetadata { - let hex_str = "0000000000000000000000000000000000000000000000000000000000000000"; L1BatchMetadata { root_hash: H256::default(), rollup_last_leaf_index: 0, @@ -49,8 +81,8 @@ pub fn default_l1_batch_metadata() -> L1BatchMetadata { l2_l1_merkle_root: H256::default(), 
block_meta_params: L1BatchMetaParameters { zkporter_is_available: false, - bootloader_code_hash: H256::from_str(hex_str).unwrap(), - default_aa_code_hash: H256::from_str(hex_str).unwrap(), + bootloader_code_hash: H256::from_str(BOOTLOADER_CODE_HASH_TEST).unwrap(), + default_aa_code_hash: H256::from_str(DEFAULT_AA_CODE_HASH_TEST).unwrap(), protocol_version: Some(ProtocolVersionId::latest()), }, aux_data_hash: H256::default(), @@ -239,7 +271,7 @@ impl ViaAggregatorTest { .storage .btc_sender_dal() .via_save_btc_inscriptions_request( - ViaBtcInscriptionRequestType::CommitL1BatchOnchain, + ViaBtcInscriptionRequestType::CommitL1BatchOnchain.to_string(), InscriptionMessage::to_bytes(&inscription_message), 0, ) @@ -271,6 +303,13 @@ impl ViaAggregatorTest { ) .await .unwrap(); + let sent_at = Utc::now().naive_utc(); + + let _ = self + .storage + .via_data_availability_dal() + .insert_proof_da(batch.number, "blob_id", sent_at) + .await; (inscription.id, inscription_request_history_id as i64) } diff --git a/core/node/via_btc_watch/Cargo.toml b/core/node/via_btc_watch/Cargo.toml index 333ab9faa..72ebcf9ac 100644 --- a/core/node/via_btc_watch/Cargo.toml +++ b/core/node/via_btc_watch/Cargo.toml @@ -16,11 +16,13 @@ via_btc_client.workspace = true zksync_shared_metrics.workspace = true zksync_dal.workspace = true zksync_types.workspace = true +zksync_config.workspace = true tokio.workspace = true anyhow.workspace = true thiserror.workspace = true async-trait.workspace = true tracing.workspace = true +sqlx.workspace = true [dev-dependencies] diff --git a/core/node/via_btc_watch/src/lib.rs b/core/node/via_btc_watch/src/lib.rs index faaaaa8bb..0bc7ac266 100644 --- a/core/node/via_btc_watch/src/lib.rs +++ b/core/node/via_btc_watch/src/lib.rs @@ -11,14 +11,18 @@ use via_btc_client::{ indexer::BitcoinInscriptionIndexer, types::{BitcoinAddress, BitcoinTxid, NodeAuth}, }; +use zksync_config::ActorRole; use zksync_dal::{Connection, ConnectionPool, Core, CoreDal}; use 
zksync_types::PriorityOpId; use self::{ - message_processors::{L1ToL2MessageProcessor, MessageProcessor, MessageProcessorError}, - metrics::METRICS, + message_processors::{ + L1ToL2MessageProcessor, MessageProcessor, MessageProcessorError, VotableMessageProcessor, + }, + metrics::{ErrorType, METRICS}, }; -use crate::metrics::ErrorType; + +const DEFAULT_VOTING_THRESHOLD: f64 = 0.5; #[derive(Debug)] struct BtcWatchState { @@ -49,6 +53,7 @@ impl BtcWatch { pool: ConnectionPool, poll_interval: Duration, btc_blocks_lag: u32, + actor_role: &ActorRole, ) -> anyhow::Result { let indexer = BitcoinInscriptionIndexer::new(rpc_url, network, node_auth, bootstrap_txids).await?; @@ -57,12 +62,16 @@ impl BtcWatch { tracing::info!("initialized state: {state:?}"); drop(storage); - // TODO: add other message processors if needed - let message_processors: Vec> = - vec![Box::new(L1ToL2MessageProcessor::new( + assert_eq!(actor_role, &ActorRole::Sequencer); + + // Only build message processors that match the actor role: + let message_processors: Vec> = vec![ + Box::new(L1ToL2MessageProcessor::new( state.bridge_address.clone(), state.next_expected_priority_id, - ))]; + )), + Box::new(VotableMessageProcessor::new(DEFAULT_VOTING_THRESHOLD)), + ]; let confirmations_for_btc_msg = confirmations_for_btc_msg.unwrap_or(0); @@ -173,9 +182,11 @@ impl BtcWatch { .await .map_err(|e| MessageProcessorError::Internal(e.into()))?; - // temporary use only one processor to avoid cloning - if let Some(processor) = self.message_processors.first_mut() { - processor.process_messages(storage, messages).await?; + for processor in self.message_processors.iter_mut() { + processor + .process_messages(storage, messages.clone(), &mut self.indexer) + .await + .map_err(|e| MessageProcessorError::Internal(e.into()))?; } self.last_processed_bitcoin_block = to_block; diff --git a/core/node/via_btc_watch/src/message_processors/l1_to_l2.rs b/core/node/via_btc_watch/src/message_processors/l1_to_l2.rs index 
9e47b3b93..504aca18c 100644 --- a/core/node/via_btc_watch/src/message_processors/l1_to_l2.rs +++ b/core/node/via_btc_watch/src/message_processors/l1_to_l2.rs @@ -1,4 +1,7 @@ -use via_btc_client::types::{BitcoinAddress, FullInscriptionMessage, L1ToL2Message}; +use via_btc_client::{ + indexer::BitcoinInscriptionIndexer, + types::{BitcoinAddress, FullInscriptionMessage, L1ToL2Message}, +}; use zksync_dal::{Connection, Core, CoreDal}; use zksync_types::{ abi::L2CanonicalTransaction, @@ -33,6 +36,7 @@ impl MessageProcessor for L1ToL2MessageProcessor { &mut self, storage: &mut Connection<'_, Core>, msgs: Vec, + _: &mut BitcoinInscriptionIndexer, ) -> Result<(), MessageProcessorError> { let mut priority_ops = Vec::new(); for msg in msgs { @@ -96,9 +100,9 @@ impl L1ToL2MessageProcessor { let eth_address_l2 = msg.input.receiver_l2_address; let calldata = msg.input.call_data.clone(); - let value = U256::from(amount); - let mantissa = U256::from(10_000_000_000u64); // scale down the cost Eth 18 decimals - BTC 8 decimals - let max_fee_per_gas = U256::from(100_000_000_000u64) / mantissa; + let mantissa = U256::from(10_000_000_000u64); // Eth 18 decimals - BTC 8 decimals + let value = U256::from(amount) * mantissa; + let max_fee_per_gas = U256::from(100_000_000u64); let gas_limit = U256::from(1_000_000u64); let gas_per_pubdata_limit = U256::from(800u64); diff --git a/core/node/via_btc_watch/src/message_processors/mod.rs b/core/node/via_btc_watch/src/message_processors/mod.rs index f6b4eaefc..a8ad2979e 100644 --- a/core/node/via_btc_watch/src/message_processors/mod.rs +++ b/core/node/via_btc_watch/src/message_processors/mod.rs @@ -1,8 +1,14 @@ pub(crate) use l1_to_l2::L1ToL2MessageProcessor; -use via_btc_client::types::FullInscriptionMessage; +use via_btc_client::{ + indexer::BitcoinInscriptionIndexer, + types::{BitcoinTxid, FullInscriptionMessage}, +}; +pub(crate) use votable::VotableMessageProcessor; use zksync_dal::{Connection, Core}; +use zksync_types::H256; mod l1_to_l2; 
+mod votable; #[derive(Debug, thiserror::Error)] pub(super) enum MessageProcessorError { @@ -18,5 +24,12 @@ pub(super) trait MessageProcessor: 'static + std::fmt::Debug + Send + Sync { &mut self, storage: &mut Connection<'_, Core>, msgs: Vec, + indexer: &mut BitcoinInscriptionIndexer, ) -> Result<(), MessageProcessorError>; } + +pub(crate) fn convert_txid_to_h256(txid: BitcoinTxid) -> H256 { + let mut tx_id_bytes = txid.as_raw_hash()[..].to_vec(); + tx_id_bytes.reverse(); + H256::from_slice(&tx_id_bytes) +} diff --git a/core/node/via_btc_watch/src/message_processors/votable.rs b/core/node/via_btc_watch/src/message_processors/votable.rs new file mode 100644 index 000000000..9a3a7dc8a --- /dev/null +++ b/core/node/via_btc_watch/src/message_processors/votable.rs @@ -0,0 +1,166 @@ +use sqlx::types::chrono::{DateTime, Utc}; +use via_btc_client::{indexer::BitcoinInscriptionIndexer, types::FullInscriptionMessage}; +use zksync_dal::{Connection, Core, CoreDal}; +use zksync_types::aggregated_operations::AggregatedActionType; + +use super::{convert_txid_to_h256, MessageProcessor, MessageProcessorError}; + +#[derive(Debug)] +pub struct VotableMessageProcessor { + threshold: f64, +} + +impl VotableMessageProcessor { + pub fn new(threshold: f64) -> Self { + Self { threshold } + } +} + +#[async_trait::async_trait] +impl MessageProcessor for VotableMessageProcessor { + async fn process_messages( + &mut self, + storage: &mut Connection<'_, Core>, + msgs: Vec, + indexer: &mut BitcoinInscriptionIndexer, + ) -> Result<(), MessageProcessorError> { + // Get the current timestamp + let dt = Utc::now(); + let naive_utc = dt.naive_utc(); + let offset = *dt.offset(); + let dt = DateTime::::from_naive_utc_and_offset(naive_utc, offset); + + for msg in msgs { + match msg { + ref f @ FullInscriptionMessage::ProofDAReference(ref proof_msg) => { + if let Some(l1_batch_number) = indexer.get_l1_batch_number(f).await { + let mut votes_dal = storage.via_votes_dal(); + + let last_inserted_block = 
votes_dal + .get_last_inserted_block() + .await + .map_err(|e| MessageProcessorError::DatabaseError(e.to_string()))? + .unwrap_or(0); + + if l1_batch_number.0 != last_inserted_block + 1 { + tracing::warn!( + "Skipping ProofDAReference message with l1_batch_number: {:?}. Last inserted block: {:?}", + l1_batch_number, last_inserted_block + ); + continue; + } + + let tx_id = convert_txid_to_h256(proof_msg.common.tx_id); + let batch_tx_id = + convert_txid_to_h256(proof_msg.input.l1_batch_reveal_txid); + + votes_dal + .insert_votable_transaction( + l1_batch_number.0, + tx_id, + proof_msg.input.da_identifier.clone(), + proof_msg.input.blob_id.clone(), + proof_msg.input.l1_batch_reveal_txid.to_string(), + ) + .await + .map_err(|e| MessageProcessorError::DatabaseError(e.to_string()))?; + + let mut eth_sender_dal = storage.eth_sender_dal(); + + eth_sender_dal + .insert_bogus_confirmed_eth_tx( + l1_batch_number, + AggregatedActionType::Commit, + batch_tx_id, + dt, + ) + .await?; + + eth_sender_dal + .insert_bogus_confirmed_eth_tx( + l1_batch_number, + AggregatedActionType::PublishProofOnchain, + tx_id, + dt, + ) + .await?; + } else { + tracing::warn!( + "L1BatchNumber not found for ProofDAReference message : {:?}", + proof_msg + ); + } + } + ref f @ FullInscriptionMessage::ValidatorAttestation(ref attestation_msg) => { + if let Some(l1_batch_number) = indexer.get_l1_batch_number(f).await { + let mut votes_dal = storage.via_votes_dal(); + + let reference_txid = + convert_txid_to_h256(attestation_msg.input.reference_txid); + let tx_id = convert_txid_to_h256(attestation_msg.common.tx_id); + + // Vote = true if attestation_msg.input.attestation == Vote::Ok + let is_ok = matches!( + attestation_msg.input.attestation, + via_btc_client::types::Vote::Ok + ); + + let p2wpkh_address = attestation_msg + .common + .p2wpkh_address + .as_ref() + .expect("ValidatorAttestation message must have a p2wpkh address"); + votes_dal + .insert_vote( + l1_batch_number.0, + reference_txid, + 
&p2wpkh_address.to_string(), + is_ok, + ) + .await + .map_err(|e| MessageProcessorError::DatabaseError(e.to_string()))?; + + // Check finalization + if votes_dal + .finalize_transaction_if_needed( + l1_batch_number.0, + reference_txid, + self.threshold, + indexer.get_number_of_verifiers(), + ) + .await + .map_err(|e| MessageProcessorError::DatabaseError(e.to_string()))? + { + let mut eth_sender_dal = storage.eth_sender_dal(); + + tracing::info!( + "Finalizing transaction with tx_id: {:?} and block number: {:?}", + tx_id, + l1_batch_number + ); + eth_sender_dal + .insert_bogus_confirmed_eth_tx( + l1_batch_number, + AggregatedActionType::Execute, + tx_id, + dt, + ) + .await?; + } + } + } + // bootstrapping phase is already covered + FullInscriptionMessage::ProposeSequencer(_) + | FullInscriptionMessage::SystemBootstrapping(_) => { + // do nothing + } + // Non-votable messages like L1BatchDAReference or L1ToL2Message are ignored by this processor + FullInscriptionMessage::L1ToL2Message(_) + | FullInscriptionMessage::L1BatchDAReference(_) => { + // do nothing + } + } + } + Ok(()) + } +} diff --git a/core/node/via_da_dispatcher/src/da_dispatcher.rs b/core/node/via_da_dispatcher/src/da_dispatcher.rs index be2dd2bbc..4ef437058 100644 --- a/core/node/via_da_dispatcher/src/da_dispatcher.rs +++ b/core/node/via_da_dispatcher/src/da_dispatcher.rs @@ -10,7 +10,7 @@ use zksync_da_client::{ DataAvailabilityClient, }; use zksync_dal::{ConnectionPool, Core, CoreDal}; -use zksync_l1_contract_interface::{i_executor::methods::ProveBatches, Tokenize}; +use zksync_l1_contract_interface::i_executor::methods::ProveBatches; use zksync_object_store::{ObjectStore, ObjectStoreError}; use zksync_prover_interface::outputs::L1BatchProofForL1; use zksync_types::{protocol_version::ProtocolSemanticVersion, L1BatchNumber}; @@ -212,7 +212,7 @@ impl ViaDataAvailabilityDispatcher { for proof in proofs { // fetch the proof from object store - let proof_data = match 
self.load_real_proof_operation(proof.l1_batch_number).await { + let final_proof = match self.load_real_proof_operation(proof.l1_batch_number).await { Some(proof) => proof, None => { tracing::error!("Failed to load proof for batch {}", proof.l1_batch_number.0); @@ -220,16 +220,6 @@ impl ViaDataAvailabilityDispatcher { } }; - let serelize_proof = proof_data.into_tokens(); - // iterate over tokens and convert them to bytes - let mut proof_bytes = Vec::new(); - for token in serelize_proof { - proof_bytes.extend(token.into_bytes()); - } - - // concatenate all bytes - let final_proof = proof_bytes.into_iter().flatten().collect::>(); - let dispatch_latency = METRICS.proof_dispatch_latency.start(); let dispatch_response = retry(self.config.max_retries(), proof.l1_batch_number, || { @@ -273,10 +263,7 @@ impl ViaDataAvailabilityDispatcher { } /// Loads a real proof operation for a given L1 batch number. - async fn load_real_proof_operation( - &self, - batch_to_prove: L1BatchNumber, - ) -> Option { + async fn load_real_proof_operation(&self, batch_to_prove: L1BatchNumber) -> Option> { let mut storage = self.pool.connection_tagged("da_dispatcher").await.ok()?; let previous_proven_batch_number = @@ -403,12 +390,14 @@ impl ViaDataAvailabilityDispatcher { } }; - Some(ProveBatches { + let res = ProveBatches { prev_l1_batch: previous_proven_batch_metadata, l1_batches: vec![metadata_for_batch_being_proved], proofs: vec![proof], should_verify: true, - }) + }; + + serialize_prove_batches(&res) } async fn prepare_dummy_proof_operation( @@ -479,41 +468,9 @@ impl ViaDataAvailabilityDispatcher { should_verify: false, }; - let prev_l1_batch_bytes = bincode::serialize(&res.prev_l1_batch) - .map_err(|e| { - tracing::error!("Failed to serialize prev_l1_batch: {}", e); - None::> - }) - .ok()?; - let l1_batches_bytes = bincode::serialize(&res.l1_batches) - .map_err(|e| { - tracing::error!("Failed to serialize l1_batches: {}", e); - None::> - }) - .ok()?; - let proofs_bytes = 
bincode::serialize(&res.proofs) - .map_err(|e| { - tracing::error!("Failed to serialize proofs: {}", e); - None::> - }) - .ok()?; - let should_verify = bincode::serialize(&res.should_verify) - .map_err(|e| { - tracing::error!("Failed to serialize should_verify: {}", e); - None::> - }) - .ok()?; - - let final_proof = [ - prev_l1_batch_bytes, - l1_batches_bytes, - proofs_bytes, - should_verify, - ] - .concat(); - - Some(final_proof) + serialize_prove_batches(&res) } + /// Loads wrapped FRI proofs for a given L1 batch number and allowed protocol versions. pub async fn load_wrapped_fri_proofs_for_range( &self, @@ -522,7 +479,9 @@ impl ViaDataAvailabilityDispatcher { ) -> Option { for version in allowed_versions { match self.blob_store.get((l1_batch_number, *version)).await { - Ok(proof) => return Some(proof), + Ok(proof) => { + return Some(proof); + } Err(ObjectStoreError::KeyNotFound(_)) => continue, // Proof is not ready yet. Err(err) => { tracing::error!( @@ -544,8 +503,15 @@ impl ViaDataAvailabilityDispatcher { .get_by_encoded_key(format!("l1_batch_proof_{}.bin", l1_batch_number)) .await { - Ok(proof) => return Some(proof), - Err(ObjectStoreError::KeyNotFound(_)) => (), + Ok(proof) => { + return Some(proof); + } + Err(ObjectStoreError::KeyNotFound(_)) => { + tracing::error!( + "KeyNotFound for loading proof for batch {}", + l1_batch_number.0 + ); + } Err(err) => { tracing::error!( "Failed to load proof for batch {} from deprecated naming: {}", @@ -686,6 +652,43 @@ impl ViaDataAvailabilityDispatcher { } } +fn serialize_prove_batches(prove_batches: &ProveBatches) -> Option> { + let prev_l1_batch_bytes = bincode::serialize(&prove_batches.prev_l1_batch) + .map_err(|e| { + tracing::error!("Failed to serialize prev_l1_batch: {}", e); + None::> + }) + .ok()?; + let l1_batches_bytes = bincode::serialize(&prove_batches.l1_batches) + .map_err(|e| { + tracing::error!("Failed to serialize l1_batches: {}", e); + None::> + }) + .ok()?; + let proofs_bytes = 
bincode::serialize(&prove_batches.proofs) + .map_err(|e| { + tracing::error!("Failed to serialize proofs: {}", e); + None::> + }) + .ok()?; + let should_verify = bincode::serialize(&prove_batches.should_verify) + .map_err(|e| { + tracing::error!("Failed to serialize should_verify: {}", e); + None::> + }) + .ok()?; + + Some( + [ + prev_l1_batch_bytes, + l1_batches_bytes, + proofs_bytes, + should_verify, + ] + .concat(), + ) +} + async fn retry( max_retries: u16, batch_number: L1BatchNumber, diff --git a/core/node/via_fee_model/Cargo.toml b/core/node/via_fee_model/Cargo.toml index 8ee41ed97..9ae96d73c 100644 --- a/core/node/via_fee_model/Cargo.toml +++ b/core/node/via_fee_model/Cargo.toml @@ -14,8 +14,10 @@ categories.workspace = true vise.workspace = true zksync_types.workspace = true zksync_config.workspace = true +zksync_dal.workspace = true zksync_utils.workspace = true zksync_node_fee_model.workspace = true +via_btc_client.workspace = true tokio = { workspace = true, features = ["time"] } anyhow.workspace = true diff --git a/core/node/via_fee_model/src/l1_gas_price/gas_adjuster/mod.rs b/core/node/via_fee_model/src/l1_gas_price/gas_adjuster/mod.rs new file mode 100644 index 000000000..af9582c15 --- /dev/null +++ b/core/node/via_fee_model/src/l1_gas_price/gas_adjuster/mod.rs @@ -0,0 +1,212 @@ +//! This module determines the fees to pay in txs containing blocks submitted to the L1. + +use std::{ + collections::VecDeque, + sync::{Arc, RwLock}, +}; + +use tokio::sync::watch; +use via_btc_client::inscriber::Inscriber; +use zksync_config::GasAdjusterConfig; + +/// This component keeps track of the median `base_fee` from the last `max_base_fee_samples` blocks +/// and of the median `blob_base_fee` from the last `max_blob_base_fee_sample` blocks. +/// It is used to adjust the base_fee of transactions sent to L1. 
+#[derive(Debug)] +pub struct ViaGasAdjuster { + pub(super) base_fee_statistics: GasStatistics, + pub(super) config: GasAdjusterConfig, + pub(super) inscriber: Inscriber, +} + +impl ViaGasAdjuster { + pub async fn new(config: GasAdjusterConfig, inscriber: Inscriber) -> anyhow::Result { + // Subtracting 1 from the "latest" block number to prevent errors in case + // the info about the latest block is not yet present on the node. + // This sometimes happens on Infura. + let current_block = inscriber + .get_client() + .await + .fetch_block_height() + .await? + .saturating_sub(1) as usize; + + let fee_history = inscriber + .get_client() + .await + .get_fee_history( + current_block as usize - config.max_base_fee_samples, + current_block, + ) + .await?; + + let base_fee_statistics = + GasStatistics::new(config.max_base_fee_samples, current_block, fee_history); + + Ok(Self { + base_fee_statistics, + config, + inscriber, + }) + } + + /// Performs an actualization routine for `GasAdjuster`. + /// This method is intended to be invoked periodically. + pub async fn keep_updated(&self) -> anyhow::Result<()> { + let current_block = self + .inscriber + .get_client() + .await + .fetch_block_height() + .await? 
+ .saturating_sub(1) as usize; + let last_processed_block = self.base_fee_statistics.last_processed_block(); + + if current_block > last_processed_block { + let n_blocks = current_block - last_processed_block; + let fee_history = self + .inscriber + .get_client() + .await + .get_fee_history(current_block - n_blocks, current_block) + .await?; + + self.base_fee_statistics.add_samples(fee_history); + } + Ok(()) + } + + fn bound_gas_price(&self, gas_price: u64) -> u64 { + let max_l1_gas_price = self.config.max_l1_gas_price(); + if gas_price > max_l1_gas_price { + tracing::warn!( + "Effective gas price is too high: {gas_price}, using max allowed: {}", + max_l1_gas_price + ); + return max_l1_gas_price; + } + gas_price + } + + pub async fn run(self: Arc, stop_receiver: watch::Receiver) -> anyhow::Result<()> { + loop { + if *stop_receiver.borrow() { + tracing::info!("Stop signal received, gas_adjuster is shutting down"); + break; + } + + if let Err(err) = self.keep_updated().await { + tracing::warn!("Cannot add the base fee to gas statistics: {}", err); + } + + tokio::time::sleep(self.config.poll_period()).await; + } + Ok(()) + } + + /// Returns the sum of base and priority fee, in wei, not considering time in mempool. + /// Can be used to get an estimate of current gas price. + pub(crate) fn estimate_effective_gas_price(&self) -> u64 { + if let Some(price) = self.config.internal_enforced_l1_gas_price { + return price; + } + + let effective_gas_price = self.get_base_fee() + self.get_priority_fee(); + + let calculated_price = + (self.config.internal_l1_pricing_multiplier * effective_gas_price as f64) as u64; + + // Bound the price if it's too high. 
+ self.bound_gas_price(calculated_price) + } + + // Todo: investigate the DA layer gas cost + pub(crate) fn estimate_effective_pubdata_price(&self) -> u64 { + if let Some(price) = self.config.internal_enforced_pubdata_price { + return price; + } + 0 + } + + fn get_base_fee(&self) -> u64 { + self.base_fee_statistics.median() + } + + fn get_priority_fee(&self) -> u64 { + self.config.default_priority_fee_per_gas + } +} + +/// Helper structure responsible for collecting the data about recent transactions, +/// calculating the median base fee. +#[derive(Debug, Clone, Default)] +pub(super) struct GasStatisticsInner { + samples: VecDeque, + median_cached: T, + max_samples: usize, + last_processed_block: usize, +} + +impl GasStatisticsInner { + fn new(max_samples: usize, block: usize, fee_history: impl IntoIterator) -> Self { + let mut statistics = Self { + max_samples, + samples: VecDeque::with_capacity(max_samples), + median_cached: T::default(), + last_processed_block: 0, + }; + + statistics.add_samples(fee_history); + + Self { + last_processed_block: block, + ..statistics + } + } + + fn median(&self) -> T { + self.median_cached + } + + fn add_samples(&mut self, fees: impl IntoIterator) { + let old_len = self.samples.len(); + self.samples.extend(fees); + let processed_blocks = self.samples.len() - old_len; + self.last_processed_block += processed_blocks; + + let extra = self.samples.len().saturating_sub(self.max_samples); + self.samples.drain(..extra); + + let mut samples: Vec<_> = self.samples.iter().cloned().collect(); + + if !self.samples.is_empty() { + let (_, &mut median, _) = samples.select_nth_unstable(self.samples.len() / 2); + self.median_cached = median; + } + } +} + +#[derive(Debug, Default)] +pub(super) struct GasStatistics(RwLock>); + +impl GasStatistics { + pub fn new(max_samples: usize, block: usize, fee_history: impl IntoIterator) -> Self { + Self(RwLock::new(GasStatisticsInner::new( + max_samples, + block, + fee_history, + ))) + } + + pub fn 
median(&self) -> T { + self.0.read().unwrap().median() + } + + pub fn add_samples(&self, fees: impl IntoIterator) { + self.0.write().unwrap().add_samples(fees) + } + + pub fn last_processed_block(&self) -> usize { + self.0.read().unwrap().last_processed_block + } +} diff --git a/core/node/via_fee_model/src/l1_gas_price/mod.rs b/core/node/via_fee_model/src/l1_gas_price/mod.rs new file mode 100644 index 000000000..2239c1621 --- /dev/null +++ b/core/node/via_fee_model/src/l1_gas_price/mod.rs @@ -0,0 +1 @@ +pub mod gas_adjuster; diff --git a/core/node/via_fee_model/src/lib.rs b/core/node/via_fee_model/src/lib.rs index 8243f951b..b36744036 100644 --- a/core/node/via_fee_model/src/lib.rs +++ b/core/node/via_fee_model/src/lib.rs @@ -1,21 +1,39 @@ use std::{fmt::Debug, sync::Arc}; +use anyhow::Context; use async_trait::async_trait; +pub use l1_gas_price::gas_adjuster::ViaGasAdjuster; +use zksync_dal::{ConnectionPool, Core, CoreDal}; pub use zksync_node_fee_model::BatchFeeModelInputProvider; use zksync_types::fee_model::{ BaseTokenConversionRatio, BatchFeeInput, FeeModelConfig, FeeModelConfigV2, FeeParams, - FeeParamsV2, + FeeParamsV2, PubdataIndependentBatchFeeModelInput, }; +mod l1_gas_price; + +#[async_trait] +pub trait ViaBaseTokenRatioProvider: Debug + Send + Sync + 'static { + fn get_conversion_ratio_by_timestamp( + &self, + from_timestamp: u64, + to_timestamp: u64, + ) -> BaseTokenConversionRatio; +} + #[derive(Debug)] pub struct ViaMainNodeFeeInputProvider { + provider: Arc, fee_model_config: FeeModelConfigV2, } impl ViaMainNodeFeeInputProvider { - pub fn new(config: FeeModelConfig) -> anyhow::Result { + pub fn new(provider: Arc, config: FeeModelConfig) -> anyhow::Result { match config { - FeeModelConfig::V2(fee_model_config) => Ok(Self { fee_model_config }), + FeeModelConfig::V2(fee_model_config) => Ok(Self { + provider, + fee_model_config, + }), FeeModelConfig::V1(_) => Err(anyhow::anyhow!("Via fee model must be inited using V2")), } } @@ -28,18 +46,22 @@ impl 
BatchFeeModelInputProvider for ViaMainNodeFeeInputProvider { _l1_gas_price_scale_factor: f64, _l1_pubdata_price_scale_factor: f64, ) -> anyhow::Result { - Ok(BatchFeeInput::pubdata_independent( - self.fee_model_config.minimal_l2_gas_price, - self.fee_model_config.minimal_l2_gas_price, - self.fee_model_config.max_pubdata_per_batch, - )) + if let FeeParams::V2(params_v2) = self.get_fee_model_params() { + let fee = clip_batch_fee_model_input(compute_batch_fee_model_input( + params_v2, + _l1_gas_price_scale_factor, + _l1_pubdata_price_scale_factor, + )); + return Ok(BatchFeeInput::PubdataIndependent(fee)); + } + Err(anyhow::Error::msg("Via batch fee must be v2")) } fn get_fee_model_params(&self) -> FeeParams { FeeParams::V2(FeeParamsV2::new( self.fee_model_config, - self.fee_model_config.minimal_l2_gas_price, - self.fee_model_config.max_pubdata_per_batch, + self.provider.estimate_effective_gas_price(), + self.provider.estimate_effective_pubdata_price(), BaseTokenConversionRatio::default(), )) } @@ -48,11 +70,18 @@ impl BatchFeeModelInputProvider for ViaMainNodeFeeInputProvider { #[derive(Debug)] pub struct ViaApiFeeInputProvider { inner: Arc, + connection_pool: ConnectionPool, } impl ViaApiFeeInputProvider { - pub fn new(inner: Arc) -> Self { - Self { inner } + pub fn new( + inner: Arc, + connection_pool: ConnectionPool, + ) -> Self { + Self { + inner, + connection_pool, + } } } @@ -60,17 +89,25 @@ impl ViaApiFeeInputProvider { impl BatchFeeModelInputProvider for ViaApiFeeInputProvider { async fn get_batch_fee_input_scaled( &self, - _l1_gas_price_scale_factor: f64, - _l1_pubdata_price_scale_factor: f64, + l1_gas_price_scale_factor: f64, + l1_pubdata_price_scale_factor: f64, ) -> anyhow::Result { - if let FeeParams::V2(params_v2) = self.inner.get_fee_model_params() { - return Ok(BatchFeeInput::pubdata_independent( - params_v2.l1_gas_price(), - params_v2.l1_gas_price(), - params_v2.l1_pubdata_price(), - )); - } - Err(anyhow::Error::msg("Via batch fee must be v2")) + let 
inner_input = self + .inner + .get_batch_fee_input_scaled(l1_gas_price_scale_factor, l1_pubdata_price_scale_factor) + .await + .context("cannot get batch fee input from base provider")?; + let last_l2_block_params = self + .connection_pool + .connection_tagged("via_api_fee_input_provider") + .await? + .blocks_dal() + .get_last_sealed_l2_block_header() + .await?; + + Ok(last_l2_block_params + .map(|header| inner_input.stricter(header.batch_fee_input)) + .unwrap_or(inner_input)) } fn get_fee_model_params(&self) -> FeeParams { @@ -78,6 +115,71 @@ impl BatchFeeModelInputProvider for ViaApiFeeInputProvider { } } +/// Calculates the batch fee input based on the main node parameters. +fn compute_batch_fee_model_input( + params: FeeParamsV2, + l1_gas_price_scale_factor: f64, + l1_pubdata_price_scale_factor: f64, +) -> PubdataIndependentBatchFeeModelInput { + let config = params.config(); + let l1_gas_price = params.l1_gas_price(); + let l1_pubdata_price = params.l1_pubdata_price(); + + // Firstly, we scale the gas price and pubdata price in case it is needed. + let l1_gas_price = (l1_gas_price as f64 * l1_gas_price_scale_factor) as u64; + let l1_pubdata_price = (l1_pubdata_price as f64 * l1_pubdata_price_scale_factor) as u64; + + // Todo: rename "batch_overhead_l1_gas" to "total_inscription_gas_vbyte" + let inscriptions_cost_satoshi = config.batch_overhead_l1_gas * l1_gas_price; + // Scale the inscriptions_cost_satoshi to 18 decimals + let gas_price_satoshi = inscriptions_cost_satoshi * 10_000_000_000 / config.max_gas_per_batch; + // The "minimal_l2_gas_price" calculated from the operational cost to publish and verify block. + let fair_l2_gas_price = gas_price_satoshi + config.minimal_l2_gas_price; + + PubdataIndependentBatchFeeModelInput { + l1_gas_price, + fair_l2_gas_price, + fair_pubdata_price: l1_pubdata_price, + } +} + +/// Bootloader places limitations on fair_l2_gas_price and fair_pubdata_price. 
+/// (MAX_ALLOWED_FAIR_L2_GAS_PRICE and MAX_ALLOWED_FAIR_PUBDATA_PRICE in bootloader code respectively) +/// Server needs to clip these prices in order to allow the chain to continue operation at a loss. The alternative +/// would be to stop accepting the transactions until the conditions improve. +/// TODO (PE-153): to be removed when bootloader limitation is removed +fn clip_batch_fee_model_input( + fee_model: PubdataIndependentBatchFeeModelInput, +) -> PubdataIndependentBatchFeeModelInput { + /// MAX_ALLOWED_FAIR_L2_GAS_PRICE + const MAXIMUM_L2_GAS_PRICE: u64 = 10_000_000_000_000; + /// MAX_ALLOWED_FAIR_PUBDATA_PRICE + const MAXIMUM_PUBDATA_PRICE: u64 = 1_000_000_000_000_000; + PubdataIndependentBatchFeeModelInput { + l1_gas_price: fee_model.l1_gas_price, + fair_l2_gas_price: if fee_model.fair_l2_gas_price < MAXIMUM_L2_GAS_PRICE { + fee_model.fair_l2_gas_price + } else { + tracing::warn!( + "Fair l2 gas price {} exceeds maximum. Limitting to {}", + fee_model.fair_l2_gas_price, + MAXIMUM_L2_GAS_PRICE + ); + MAXIMUM_L2_GAS_PRICE + }, + fair_pubdata_price: if fee_model.fair_pubdata_price < MAXIMUM_PUBDATA_PRICE { + fee_model.fair_pubdata_price + } else { + tracing::warn!( + "Fair pubdata price {} exceeds maximum. Limitting to {}", + fee_model.fair_pubdata_price, + MAXIMUM_PUBDATA_PRICE + ); + MAXIMUM_PUBDATA_PRICE + }, + } +} + +/// Mock [`BatchFeeModelInputProvider`] implementation that returns a constant value. +/// Intended to be used in tests only. 
#[derive(Debug)] diff --git a/core/node/via_state_keeper/Cargo.toml b/core/node/via_state_keeper/Cargo.toml index 0e6066cb5..1930ddf22 100644 --- a/core/node/via_state_keeper/Cargo.toml +++ b/core/node/via_state_keeper/Cargo.toml @@ -46,5 +46,6 @@ assert_matches.workspace = true tempfile.workspace = true test-casing.workspace = true futures.workspace = true +via_btc_client.workspace = true zksync_system_constants.workspace = true diff --git a/core/node/via_state_keeper/src/io/tests/tester.rs b/core/node/via_state_keeper/src/io/tests/tester.rs index 88da7929f..c08e93bc5 100644 --- a/core/node/via_state_keeper/src/io/tests/tester.rs +++ b/core/node/via_state_keeper/src/io/tests/tester.rs @@ -2,8 +2,14 @@ use std::{slice, sync::Arc, time::Duration}; -use via_fee_model::ViaMainNodeFeeInputProvider; -use zksync_config::configs::{chain::StateKeeperConfig, wallets::Wallets}; +use via_btc_client::inscriber::test_utils::{ + get_mock_inscriber_and_conditions, MockBitcoinOpsConfig, +}; +use via_fee_model::{ViaGasAdjuster, ViaMainNodeFeeInputProvider}; +use zksync_config::{ + configs::{chain::StateKeeperConfig, wallets::Wallets}, + GasAdjusterConfig, +}; use zksync_contracts::BaseSystemContracts; use zksync_dal::{ConnectionPool, Core, CoreDal}; use zksync_multivm::{ @@ -44,9 +50,17 @@ impl Tester { } pub(super) async fn create_batch_fee_input_provider(&self) -> ViaMainNodeFeeInputProvider { - ViaMainNodeFeeInputProvider::new(FeeModelConfig::V1(FeeModelConfigV1 { - minimal_l2_gas_price: self.minimal_l2_gas_price(), - })) + let inscriber = get_mock_inscriber_and_conditions(MockBitcoinOpsConfig::default()); + ViaMainNodeFeeInputProvider::new( + Arc::new( + ViaGasAdjuster::new(GasAdjusterConfig::default(), inscriber) + .await + .unwrap(), + ), + FeeModelConfig::V1(FeeModelConfigV1 { + minimal_l2_gas_price: self.minimal_l2_gas_price(), + }), + ) .unwrap() } @@ -59,11 +73,7 @@ impl Tester { &self, pool: ConnectionPool, ) -> (MempoolIO, MempoolGuard) { - let batch_fee_input_provider 
= - ViaMainNodeFeeInputProvider::new(FeeModelConfig::V1(FeeModelConfigV1 { - minimal_l2_gas_price: self.minimal_l2_gas_price(), - })) - .unwrap(); + let batch_fee_input_provider = self.create_batch_fee_input_provider().await; let mempool = MempoolGuard::new(PriorityOpId(0), 100); let config = StateKeeperConfig { diff --git a/docker-compose-via.yml b/docker-compose-via.yml index 38ba99fb4..4e1214024 100644 --- a/docker-compose-via.yml +++ b/docker-compose-via.yml @@ -50,10 +50,18 @@ services: bitcoin-cli $${RPC_ARGS} -rpcwallet=Alice sendtoaddress $${TEST_ADDRESS} 300 echo "Sent 300 BTC to TEST_ADDRESS: $${TEST_ADDRESS}" + echo "TEST_ADDRESS_OP_RETURN: $${TEST_ADDRESS_OP_RETURN}" + bitcoin-cli $${RPC_ARGS} -rpcwallet=Alice sendtoaddress $${TEST_ADDRESS_OP_RETURN} 100 + echo "Sent 100 BTC to TEST_ADDRESS_OP_RETURN: $${TEST_ADDRESS_OP_RETURN}" + echo "VERIFIER_1_ADDRESS: $${VERIFIER_1_ADDRESS}" bitcoin-cli $${RPC_ARGS} -rpcwallet=Alice sendtoaddress $${VERIFIER_1_ADDRESS} 300 echo "Sent 300 BTC to VERIFIER_1_ADDRESS: $${VERIFIER_1_ADDRESS}" + echo "BRIDGE_TEST_ADDRESS: $${BRIDGE_TEST_ADDRESS}" + bitcoin-cli $${RPC_ARGS} -rpcwallet=Alice sendtoaddress $${BRIDGE_TEST_ADDRESS} 300 + echo "Sent 300 BTC to BRIDGE_TEST_ADDRESS: $${BRIDGE_TEST_ADDRESS}" + echo "VERIFIER_2_ADDRESS: $${VERIFIER_2_ADDRESS}" bitcoin-cli $${RPC_ARGS} -rpcwallet=Alice sendtoaddress $${VERIFIER_2_ADDRESS} 300 echo "Sent 300 BTC to VERIFIER_2_ADDRESS: $${VERIFIER_2_ADDRESS}" @@ -89,9 +97,11 @@ services: environment: - BITCOIN_DATA=/home/bitcoin/.bitcoin - TEST_ADDRESS=bcrt1qx2lk0unukm80qmepjp49hwf9z6xnz0s73k9j56 + - TEST_ADDRESS_OP_RETURN=bcrt1qu7z4qrlwl33qqz8duph0k7hv8trvgx8dt8jzfz - VERIFIER_1_ADDRESS=bcrt1qw2mvkvm6alfhe86yf328kgvr7mupdx4vln7kpv - VERIFIER_2_ADDRESS=bcrt1qk8mkhrmgtq24nylzyzejznfzws6d98g4kmuuh4 - VERIFIER_3_ADDRESS=bcrt1q23lgaa90s85jvtl6dsrkvn0g949cwjkwuyzwdm + - BRIDGE_TEST_ADDRESS=bcrt1pcx974cg2w66cqhx67zadf85t8k4sd2wp68l8x8agd3aj4tuegsgsz97amg - RPC_ARGS=-chain=regtest 
-rpcconnect=bitcoind -rpcwait -rpcuser=rpcuser -rpcpassword=rpcpassword - SLEEP_SECONDS=5 @@ -108,18 +118,19 @@ services: - POSTGRES_PASSWORD=notsecurepassword celestia-node: - image: "ghcr.io/celestiaorg/celestia-node:v0.20.2-arabica" + image: "ghcr.io/celestiaorg/celestia-node:v0.20.4-mocha" container_name: celestia-node volumes: - type: bind source: ./volumes/celestia target: /home/celestia - command: celestia light start --core.ip validator-2.celestia-arabica-11.com --p2p.network arabica + - ./celestia-keys/keys:/home/celestia/keys + command: celestia light start --headers.trusted-hash ${VIA_CELESTIA_CLIENT_TRUSTED_BLOCK_HASH} --core.ip full.consensus.mocha-4.celestia-mocha.com --p2p.network mocha --keyring.backend test --keyring.keyname via ports: - '26658:26658' environment: - NODE_TYPE=light - - P2P_NETWORK=arabica + - P2P_NETWORK=mocha volumes: bitcoin_data: diff --git a/docker/server/Dockerfile b/docker/server/Dockerfile index 6e2915986..c5b88591d 100644 --- a/docker/server/Dockerfile +++ b/docker/server/Dockerfile @@ -6,7 +6,7 @@ WORKDIR /usr/src/via COPY . . -RUN apt-get update && apt-get install -y protobuf-compiler && rm -rf /var/lib/apt/lists/* +RUN apt-get update && apt-get install -y protobuf-compiler git && rm -rf /var/lib/apt/lists/* RUN cargo build --release #--features=rocksdb/io-uring <-- investigate what is this FROM debian:bookworm-slim diff --git a/docker/via-verifier/Dockerfile b/docker/via-verifier/Dockerfile new file mode 100644 index 000000000..925004f44 --- /dev/null +++ b/docker/via-verifier/Dockerfile @@ -0,0 +1,27 @@ +# Will work locally only after prior contracts build +# syntax=docker/dockerfile:experimental +FROM matterlabs/zksync-build-base:latest AS builder + +WORKDIR /usr/src/via + +COPY . . 
+ +RUN apt-get update && apt-get install -y protobuf-compiler && rm -rf /var/lib/apt/lists/* +RUN cargo build --release + +FROM debian:bookworm-slim + +RUN apt-get update && apt-get install -y curl libpq5 liburing-dev ca-certificates && \ + rm -rf /var/lib/apt/lists/* +ENV PATH=$PATH:/usr/local/bin + +EXPOSE 6060 + +ARG PROTOCOL_VERSION=26 +COPY --from=builder /usr/src/via/via_verifier/lib/via_verification/keys/protocol_version/${PROTOCOL_VERSION}/scheduler_key.json \ + /keys/protocol_version/${PROTOCOL_VERSION}/scheduler_key.json +ENV VIA_VK_KEY_PATH=/keys + +COPY --from=builder /usr/src/via/target/release/via_verifier /usr/bin + +ENTRYPOINT ["via_verifier"] diff --git a/etc/env/base/chain.toml b/etc/env/base/chain.toml index 5a8117139..265a6d6d8 100644 --- a/etc/env/base/chain.toml +++ b/etc/env/base/chain.toml @@ -53,7 +53,7 @@ reject_tx_at_gas_percentage = 0.95 # The minimal acceptable L2 gas price, i.e. the price that should include the cost of computation/proving as well # as potentially premium for congestion. -minimal_l2_gas_price = 100 +minimal_l2_gas_price = 10000000 # The constant that represents the possibility that a batch can be sealed because of overuse of computation resources. # It has range from 0 to 1. If it is 0, the compute will not depend on the cost for closing the batch. @@ -65,8 +65,8 @@ compute_overhead_part = 0.0 # If it is 1, the pubdata limit per batch will have to cover the entire cost of closing the batch. pubdata_overhead_part = 1.0 -# The constant amount of L1 gas that is used as the overhead for the batch. It includes the price for batch verification, etc. -batch_overhead_l1_gas = 800000 +# The constant L1 gas unit in vbyte required to finalize the L1 Batch. This cost includes the Pubdata, proof and verification inscriptions. +batch_overhead_l1_gas = 5000 # The maximum amount of gas that can be used by the batch. This value is derived from the circuits limitation per batch. 
max_gas_per_batch = 200000000 @@ -90,8 +90,8 @@ fee_model_version = "V2" validation_computational_gas_limit = 300000 save_call_traces = true -bootloader_hash = "0x010008e742608b21bf7eb23c1a9d0602047e3618b464c9b59c0fba3b3d7ab66e" -default_aa_hash = "0x01000563374c277a2c1e34659a2a1e87371bb6d852ce142022d497bfb50b9e32" +bootloader_hash = "0x010008e74e40a94b1c6e6eb5a1dfbbdbd9eb9e0ec90fd358d29e8c07c30d8491" +default_aa_hash = "0x01000563426437b886b132bf5bcf9b0d98c3648f02a6e362893db4345078d09f" protective_reads_persistence_enabled = false diff --git a/etc/env/base/contracts.toml b/etc/env/base/contracts.toml index 5d5d584ab..167feb1bf 100644 --- a/etc/env/base/contracts.toml +++ b/etc/env/base/contracts.toml @@ -26,13 +26,13 @@ RECURSION_NODE_LEVEL_VK_HASH = "0x1186ec268d49f1905f8d9c1e9d39fc33e98c74f91d91a2 RECURSION_LEAF_LEVEL_VK_HASH = "0x101e08b00193e529145ee09823378ef51a3bc8966504064f1f6ba3f1ba863210" RECURSION_CIRCUITS_SET_VKS_HASH = "0x18c1639094f58177409186e8c48d9f577c9410901d2f1d486b3e7d6cf553ae4c" GENESIS_TX_HASH = "0xb99ebfea46cbe05a21cd80fe5597d97b204befc52a16303f579c607dc1ac2e2e" -GENESIS_ROOT = "0x01afc682f1b12cd5bcdacd0e04505ffdd4aafe459e44150c422a86437ae13e4e" -GENESIS_BATCH_COMMITMENT = "0xe3ace05ee94258b46086c437763f8c669c58b2a24f11f8b719cf2efaec9daaed" +GENESIS_ROOT = "0xe411a1602b4ef98bc064de599dab18b940172b2a564e781f52ad2650cbcf72c5" +GENESIS_BATCH_COMMITMENT = "0xfaa108d80b92837c172f156fe5f0f0d85c23c70442a30834f75d6a57a50bb73b" PRIORITY_TX_MAX_GAS_LIMIT = 72000000 DEPLOY_L2_BRIDGE_COUNTERPART_GAS_LIMIT = 10000000 GENESIS_ROLLUP_LEAF_INDEX = "54" -GENESIS_PROTOCOL_VERSION = "24" -GENESIS_PROTOCOL_SEMANTIC_VERSION = "0.24.2" +GENESIS_PROTOCOL_VERSION = "26" +GENESIS_PROTOCOL_SEMANTIC_VERSION = "0.26.0" L1_WETH_BRIDGE_IMPL_ADDR = "0x5E6D086F5eC079ADFF4FB3774CDf3e8D6a34F7E9" L1_WETH_BRIDGE_PROXY_ADDR = "0x5E6D086F5eC079ADFF4FB3774CDf3e8D6a34F7E9" L1_WETH_TOKEN_ADDR = "0x5E6D086F5eC079ADFF4FB3774CDf3e8D6a34F7E9" diff --git a/etc/env/base/eth_sender.toml 
b/etc/env/base/eth_sender.toml index 31fe626c8..819dfceec 100644 --- a/etc/env/base/eth_sender.toml +++ b/etc/env/base/eth_sender.toml @@ -49,10 +49,10 @@ max_acceptable_priority_fee_in_gwei = 100000000000 pubdata_sending_mode = "Blobs" [eth_sender.gas_adjuster] -# Priority fee to be used by GasAdjuster (in wei). -default_priority_fee_per_gas = 1_000_000_000 +# Priority fee to be used by GasAdjuster (in satoshi). +default_priority_fee_per_gas = 1 # Max number of base fees from previous blocks to be used to correctly price transactions. -max_base_fee_samples = 10_000 +max_base_fee_samples = 10 # These two are parameters of the base_fee_per_gas formula in GasAdjuster. # The possible formulas are: # 1. base_fee_median * (A + B * time_in_mempool) diff --git a/etc/env/configs/via.toml b/etc/env/configs/via.toml index 91ba6a400..fbf3053cf 100644 --- a/etc/env/configs/via.toml +++ b/etc/env/configs/via.toml @@ -1,73 +1 @@ -__imports__ = [ "base", "l2-inits/via.init.env" ] - -[via_btc_watch] -confirmations_for_btc_msg = 0 -btc_node_poll_interval = 1000 -rpc_url = "http://0.0.0.0:18443" -rpc_user = "rpcuser" -rpc_password = "rpcpassword" -network = "regtest" -bootstrap_txids = [] -actor_role = "Sequencer" -# Number of blocks that we should wait before processing the new blocks. -# For local regtest we should wait for 0 blocks. -# But for mainnet we should wait for 3 blocks. 
-btc_blocks_lag = 0 - -[via_btc_sender] -poll_interval = 1000 -private_key = "cVZduZu265sWeAqFYygoDEE1FZ7wV9rpW5qdqjRkUehjaUMWLT1R" -max_aggregated_blocks_to_commit = 1 -max_aggregated_proofs_to_commit = 1 -max_txs_in_flight = 1 -rpc_url = "http://0.0.0.0:18443" -rpc_user = "rpcuser" -rpc_password = "rpcpassword" -network = "regtest" -actor_role = "Sequencer" -da_identifier = "da_identifier_celestia" -proof_sending_mode= "SkipEveryProof" -block_confirmations = 0 - -[via_celestia_client] -api_node_url = "http://0.0.0.0:26658" -auth_token = "" -blob_size_limit = 1973786 - -[rust] -log = """\ -warn,\ -zksync_node_framework=info,\ -zksync_node_consensus=info,\ -zksync_consensus_bft=info,\ -zksync_consensus_network=info,\ -zksync_consensus_storage=info,\ -zksync_commitment_generator=info,\ -zksync_core=debug,\ -zksync_dal=info,\ -zksync_db_connection=info,\ -zksync_health_check=debug,\ -zksync_eth_client=info,\ -zksync_state_keeper=info,\ -zksync_node_sync=info,\ -zksync_storage=info,\ -zksync_metadata_calculator=info,\ -zksync_merkle_tree=info,\ -zksync_node_api_server=info,\ -zksync_node_db_pruner=info,\ -zksync_reorg_detector=info,\ -zksync_consistency_checker=info,\ -zksync_state=debug,\ -zksync_utils=debug,\ -zksync_types=info,\ -zksync_web3_decl=debug,\ -loadnext=info,\ -vm=info,\ -zksync_external_node=info,\ -zksync_snapshots_applier=debug,\ -via_btc_watch=debug,\ -via_state_keeper=debug,\ -via_btc_sender=debug,\ -via_da_dispatcher=debug,\ -via_da_clients=debug,\ -""" +__imports__ = ["base", "l2-inits/via.init.env", "configs/via_base.toml"] diff --git a/etc/env/configs/via_base.toml b/etc/env/configs/via_base.toml new file mode 100644 index 000000000..ad23f1bca --- /dev/null +++ b/etc/env/configs/via_base.toml @@ -0,0 +1,73 @@ +[via_btc_watch] +# Number of blocks that we should wait before processing the new blocks. +# For local regtest we should wait for 0 blocks. +# But for mainnet we should wait for 3 blocks. 
+confirmations_for_btc_msg = 0 +btc_node_poll_interval = 2000 +rpc_url = "http://0.0.0.0:18443" +rpc_user = "rpcuser" +rpc_password = "rpcpassword" +network = "regtest" +bootstrap_txids = [] +actor_role = "Sequencer" +# Number of blocks that we should check when restarting the service. +btc_blocks_lag = 1000 + +[via_btc_sender] +poll_interval = 2000 +private_key = "cVZduZu265sWeAqFYygoDEE1FZ7wV9rpW5qdqjRkUehjaUMWLT1R" +max_aggregated_blocks_to_commit = 1 +max_aggregated_proofs_to_commit = 1 +max_txs_in_flight = 1 +rpc_url = "http://0.0.0.0:18443" +rpc_user = "rpcuser" +rpc_password = "rpcpassword" +network = "regtest" +actor_role = "Sequencer" +da_identifier = "da_identifier_celestia" +proof_sending_mode= "SkipEveryProof" +block_confirmations = 0 + +[via_celestia_client] +api_node_url = "http://0.0.0.0:26658" +auth_token = "" +blob_size_limit = 1973786 + +[rust] +log = """\ +warn,\ +zksync_node_framework=info,\ +zksync_node_consensus=info,\ +zksync_consensus_bft=info,\ +zksync_consensus_network=info,\ +zksync_consensus_storage=info,\ +zksync_commitment_generator=info,\ +zksync_core=debug,\ +zksync_dal=info,\ +zksync_db_connection=info,\ +zksync_health_check=debug,\ +zksync_eth_client=info,\ +zksync_state_keeper=info,\ +zksync_node_sync=info,\ +zksync_storage=info,\ +zksync_metadata_calculator=info,\ +zksync_merkle_tree=info,\ +zksync_node_api_server=info,\ +zksync_node_db_pruner=info,\ +zksync_reorg_detector=info,\ +zksync_consistency_checker=info,\ +zksync_state=debug,\ +zksync_utils=debug,\ +zksync_types=info,\ +zksync_web3_decl=debug,\ +loadnext=info,\ +vm=info,\ +zksync_external_node=info,\ +zksync_snapshots_applier=debug,\ +via_btc_watch=debug,\ +via_state_keeper=debug,\ +via_btc_sender=debug,\ +via_da_dispatcher=debug,\ +via_da_clients=debug,\ +via_zk_verifier=info,\ +""" diff --git a/etc/env/configs/via_coordinator.toml b/etc/env/configs/via_coordinator.toml new file mode 100644 index 000000000..5509beea3 --- /dev/null +++ 
b/etc/env/configs/via_coordinator.toml @@ -0,0 +1,35 @@ +__imports__ = [ + "base", + "l2-inits/via_coordinator.init.env", + "configs/via_base.toml", +] + +[via_verifier] +# Interval between polling db for verification requests (in ms). +poll_interval = 10000 +# Coordinator server port. +port = 6060 +# Coordinator server url. +url = "http://0.0.0.0:6060" +# The signer private key. +private_key = "cRaUbRSn8P8cXUcg6cMZ7oTZ1wbDjktYTsbdGw62tuqqD9ttQWMm" +# The verifiers public keys. +verifiers_pub_keys_str = [ + "03d8e2443ef58aa80fb6256bf3b94d2ecf9117f19cb17661ec60ad35fd84ff4a8b", + "02043f839b8ecd9ffd79f26ec7d05750555cd0d1e0777cfc84a29b7e38e6324662", +] +# The bridge address. Run the following cmd: `cargo run --example key_generation_setup coordinator 03d8e2443ef58aa80fb6256bf3b94d2ecf9117f19cb17661ec60ad35fd84ff4a8b 02043f839b8ecd9ffd79f26ec7d05750555cd0d1e0777cfc84a29b7e38e6324662` +# also update the bridge address in the via_btc_client/examples/deposit.rs +bridge_address_str = "bcrt1p3s7m76wp5seprjy4gdxuxrr8pjgd47q5s8lu9vefxmp0my2p4t9qh6s8kq" +# The minimum required signers. +required_signers = 2 +# The verifier_mode can be simple verifier or coordinator. +verifier_mode = "COORDINATOR" + +[via_btc_watch] +actor_role = "Verifier" +# Number of blocks that we should check when restarting the service. +btc_blocks_lag = 1000 + +[via_btc_sender] +private_key = "cRaUbRSn8P8cXUcg6cMZ7oTZ1wbDjktYTsbdGw62tuqqD9ttQWMm" diff --git a/etc/env/configs/via_verifier.toml b/etc/env/configs/via_verifier.toml new file mode 100644 index 000000000..b59d10c47 --- /dev/null +++ b/etc/env/configs/via_verifier.toml @@ -0,0 +1,37 @@ +__imports__ = [ + "base", + "l2-inits/via_verifier.init.env", + "configs/via_base.toml", +] + +[via_verifier] +# Interval between polling db for verification requests (in ms). +poll_interval = 10000 +# Coordinator server port. +port = 6060 +# Coordinator server url. +url = "http://0.0.0.0:6060" +# The signer private key. 
+private_key = "cQ4UHjdsGWFMcQ8zXcaSr7m4Kxq9x7g9EKqguTaFH7fA34mZAnqW" +# The verifiers public keys. +verifiers_pub_keys_str = [ + "03d8e2443ef58aa80fb6256bf3b94d2ecf9117f19cb17661ec60ad35fd84ff4a8b", + "02043f839b8ecd9ffd79f26ec7d05750555cd0d1e0777cfc84a29b7e38e6324662", +] + +# The bridge address. Run the following cmd: `cargo run --example key_generation_setup coordinator 03d8e2443ef58aa80fb6256bf3b94d2ecf9117f19cb17661ec60ad35fd84ff4a8b 02043f839b8ecd9ffd79f26ec7d05750555cd0d1e0777cfc84a29b7e38e6324662` +# also update the bridge address in the via_btc_client/examples/deposit.rs + +bridge_address_str = "bcrt1p3s7m76wp5seprjy4gdxuxrr8pjgd47q5s8lu9vefxmp0my2p4t9qh6s8kq" +# The minimum required signers. +required_signers = 2 +# The verifier_mode can be simple verifier or coordinator. +verifier_mode = "VERIFIER" + +[via_btc_watch] +actor_role = "Verifier" +# Number of blocks that we should check when restarting the service. +btc_blocks_lag = 1000 + +[via_btc_sender] +private_key = "cQ4UHjdsGWFMcQ8zXcaSr7m4Kxq9x7g9EKqguTaFH7fA34mZAnqW" diff --git a/etc/env/file_based/genesis.yaml b/etc/env/file_based/genesis.yaml index c7889f81b..d498cb866 100644 --- a/etc/env/file_based/genesis.yaml +++ b/etc/env/file_based/genesis.yaml @@ -1,11 +1,11 @@ -genesis_root: 0x01afc682f1b12cd5bcdacd0e04505ffdd4aafe459e44150c422a86437ae13e4e +genesis_root: 0xe411a1602b4ef98bc064de599dab18b940172b2a564e781f52ad2650cbcf72c5 genesis_rollup_leaf_index: 54 -genesis_batch_commitment: 0xe3ace05ee94258b46086c437763f8c669c58b2a24f11f8b719cf2efaec9daaed -genesis_protocol_semantic_version: '0.24.1' +genesis_batch_commitment: 0xfaa108d80b92837c172f156fe5f0f0d85c23c70442a30834f75d6a57a50bb73b +genesis_protocol_semantic_version: '0.26.0' # deprecated -genesis_protocol_version: 24 -default_aa_hash: 0x01000563374c277a2c1e34659a2a1e87371bb6d852ce142022d497bfb50b9e32 -bootloader_hash: 0x010008e742608b21bf7eb23c1a9d0602047e3618b464c9b59c0fba3b3d7ab66e +genesis_protocol_version: 26 +default_aa_hash: 
0x01000563426437b886b132bf5bcf9b0d98c3648f02a6e362893db4345078d09f +bootloader_hash: 0x010008e74e40a94b1c6e6eb5a1dfbbdbd9eb9e0ec90fd358d29e8c07c30d8491 l1_chain_id: 9 l2_chain_id: 270 fee_account: '0x0000000000000000000000000000000000000001' diff --git a/etc/multivm_bootloaders/vm_bitcoin/fee_estimate.yul/fee_estimate.yul.zbin b/etc/multivm_bootloaders/vm_bitcoin/fee_estimate.yul/fee_estimate.yul.zbin new file mode 100644 index 000000000..121cf3ba2 Binary files /dev/null and b/etc/multivm_bootloaders/vm_bitcoin/fee_estimate.yul/fee_estimate.yul.zbin differ diff --git a/etc/multivm_bootloaders/vm_bitcoin/gas_test.yul/gas_test.yul.zbin b/etc/multivm_bootloaders/vm_bitcoin/gas_test.yul/gas_test.yul.zbin new file mode 100644 index 000000000..902bff3d7 Binary files /dev/null and b/etc/multivm_bootloaders/vm_bitcoin/gas_test.yul/gas_test.yul.zbin differ diff --git a/etc/multivm_bootloaders/vm_bitcoin/playground_batch.yul/playground_batch.yul.zbin b/etc/multivm_bootloaders/vm_bitcoin/playground_batch.yul/playground_batch.yul.zbin new file mode 100644 index 000000000..e498a18ff Binary files /dev/null and b/etc/multivm_bootloaders/vm_bitcoin/playground_batch.yul/playground_batch.yul.zbin differ diff --git a/etc/multivm_bootloaders/vm_bitcoin/proved_batch.yul/proved_batch.yul.zbin b/etc/multivm_bootloaders/vm_bitcoin/proved_batch.yul/proved_batch.yul.zbin new file mode 100644 index 000000000..20d52844b Binary files /dev/null and b/etc/multivm_bootloaders/vm_bitcoin/proved_batch.yul/proved_batch.yul.zbin differ diff --git a/infrastructure/via/package.json b/infrastructure/via/package.json index 3674fc535..a211bf9ea 100644 --- a/infrastructure/via/package.json +++ b/infrastructure/via/package.json @@ -13,6 +13,8 @@ "devDependencies": { "typescript": "^4.4.4", "@types/node": "^16.11.7", + "zksync-ethers": "^6.13.1", + "ethers": "^6.13.4", "eslint": "^7.32.0", "@typescript-eslint/parser": "^5.0.0", "@typescript-eslint/eslint-plugin": "^5.0.0" diff --git 
a/infrastructure/via/src/bootstrap.ts b/infrastructure/via/src/bootstrap.ts index 584971309..9d5f2b707 100644 --- a/infrastructure/via/src/bootstrap.ts +++ b/infrastructure/via/src/bootstrap.ts @@ -23,7 +23,7 @@ async function updateEnvVariable(envFilePath: string, variableName: string, newV await fs.writeFile(envFilePath, newEnvContent, 'utf-8'); } -async function updateBootstrapTxidsEnv() { +export async function updateBootstrapTxidsEnv() { const txidsFilePath = path.join(process.env.VIA_HOME!, 'txids.via'); const txidsContent = await fs.readFile(txidsFilePath, 'utf8'); @@ -34,13 +34,15 @@ async function updateBootstrapTxidsEnv() { const envFilePath = path.join(process.env.VIA_HOME!, `etc/env/target/${process.env.VIA_ENV}.env`); + console.log(`Updating file ${envFilePath}`); + await updateEnvVariable(envFilePath, 'VIA_BTC_WATCH_BOOTSTRAP_TXIDS', newTxids); console.log(`Updated VIA_BTC_WATCH_BOOTSTRAP_TXIDS with: ${newTxids}`); try { - await fs.unlink(txidsFilePath); - console.log(`Deleted txids.via file.`); + // await fs.unlink(txidsFilePath); + console.log(`NOT Deleted txids.via file.`); } catch (error) { console.error(`Error deleting txids.via file`); } diff --git a/infrastructure/via/src/celestia.ts b/infrastructure/via/src/celestia.ts index 04d7386a3..af21f896c 100644 --- a/infrastructure/via/src/celestia.ts +++ b/infrastructure/via/src/celestia.ts @@ -4,6 +4,7 @@ import * as fs from 'fs/promises'; import * as path from 'path'; import * as dotenv from 'dotenv'; import { exec } from 'child_process'; +import { updateEnvVariable } from './helpers'; // Function to execute a shell command and return it as a Promise function runCommand(command: string): Promise { @@ -23,23 +24,9 @@ function runCommand(command: string): Promise { const get_node_address_command = "docker exec $(docker ps -q -f name=celestia-node) celestia state account-address | jq -r '.result'"; const get_auth_node_command = - 'docker exec $(docker ps -q -f name=celestia-node) celestia light auth 
admin --p2p.network arabica'; + 'docker exec $(docker ps -q -f name=celestia-node) celestia light auth admin --p2p.network mocha'; const restart_celestia_container_command = 'docker restart celestia-node'; -async function updateEnvVariable(envFilePath: string, variableName: string, newValue: string) { - const envFileContent = await fs.readFile(envFilePath, 'utf-8'); - const envConfig = dotenv.parse(envFileContent); - - envConfig[variableName] = newValue; - - let newEnvContent = ''; - for (const key in envConfig) { - newEnvContent += `${key}=${envConfig[key]}\n`; - } - - await fs.writeFile(envFilePath, newEnvContent, 'utf-8'); -} - async function updateEnvironment(auth_token: string) { const envFilePath = path.join(process.env.VIA_HOME!, `etc/env/target/${process.env.VIA_ENV}.env`); @@ -49,31 +36,33 @@ async function updateEnvironment(auth_token: string) { } async function get_celestia_faucet_token(node_address: string) { - const response = await fetch('https://faucet.celestia-arabica-11.com/api/v1/faucet/give_me', { - headers: { - accept: '*/*', - 'accept-language': 'en-US,en;q=0.9', - 'content-type': 'application/json', - priority: 'u=1, i', - 'sec-ch-ua': '"Google Chrome";v="129", "Not=A?Brand";v="8", "Chromium";v="129"', - 'sec-ch-ua-mobile': '?0', - 'sec-ch-ua-platform': '"macOS"', - 'sec-fetch-dest': 'empty', - 'sec-fetch-mode': 'cors', - 'sec-fetch-site': 'same-origin', - Referer: 'https://faucet.celestia-arabica-11.com/', - 'Referrer-Policy': 'strict-origin-when-cross-origin' - }, - body: JSON.stringify({ - address: node_address, - chainId: 'arabica-11' - }), - method: 'POST' - }); - - const data = await response.json(); - console.log('Faucet Response:', data); - return data.token; + // const response = await fetch('https://faucet.celestia-arabica-11.com/api/v1/faucet/give_me', { + // headers: { + // accept: '*/*', + // 'accept-language': 'en-US,en;q=0.9', + // 'content-type': 'application/json', + // priority: 'u=1, i', + // 'sec-ch-ua': '"Google 
Chrome";v="129", "Not=A?Brand";v="8", "Chromium";v="129"', + // 'sec-ch-ua-mobile': '?0', + // 'sec-ch-ua-platform': '"macOS"', + // 'sec-fetch-dest': 'empty', + // 'sec-fetch-mode': 'cors', + // 'sec-fetch-site': 'same-origin', + // Referer: 'https://faucet.celestia-arabica-11.com/', + // 'Referrer-Policy': 'strict-origin-when-cross-origin' + // }, + // body: JSON.stringify({ + // address: node_address, + // chainId: 'arabica-11' + // }), + // method: 'POST' + // }); + + // const data = await response.json(); + // console.log('Faucet Response:', data); + // return data.token; + + console.log('For Mocha faucet,you should send the request in celestia discord channel'); } async function fix_celestia_config() { @@ -118,6 +107,19 @@ async function fix_celestia_config() { return line; }); + // Get the celestia block height where start the node + const envFilePath = path.join(process.env.VIA_HOME!, `etc/env/l2-inits/${process.env.VIA_ENV}.init.env`); + const envs = (await fs.readFile(envFilePath, 'utf-8')).split('\n'); + let height = '1'; + for (let i = 0; i < envs.length; i++) { + if (envs[i].startsWith('VIA_CELESTIA_CLIENT_TRUSTED_BLOCK_HEIGHT')) { + height = envs[i].split('=')[1]; + break; + } + } + + await runCommand(`docker exec celestia-node sed -i 's/ SampleFrom = 1/ SampleFrom = ${height}/' config.toml`); + // Join the updated lines back into a single string const updatedConfigFileContent = updatedLines.join('\n'); @@ -151,7 +153,7 @@ export async function via_celestia() { console.log('Request Sent to Faucet'); await get_celestia_faucet_token(node_address); await get_celestia_faucet_token(node_address); - console.log(`Check your balance at https://arabica.celenium.io/address/${node_address}?tab=transactions`); + console.log(`Check your balance at https://mocha.celenium.io/address/${node_address}?tab=transactions`); } catch (error) { console.error('Error getting faucet token:', error); } diff --git a/infrastructure/via/src/config.ts 
b/infrastructure/via/src/config.ts index ce3d16528..8dd293d4e 100644 --- a/infrastructure/via/src/config.ts +++ b/infrastructure/via/src/config.ts @@ -6,6 +6,7 @@ import * as env from './env'; import path from 'path'; import dotenv from 'dotenv'; import { unpackStringSemVer } from 'utils'; +import { updateEnvVariable } from './helpers'; function loadConfigFile(configPath: string, stack: string[] = []) { if (stack.includes(configPath)) { @@ -125,7 +126,7 @@ export function compileConfig(environment?: string) { console.log(`Configs compiled for ${environment}`); } -export function pushConfig(environment?: string, diff?: string) { +export async function pushConfig(environment?: string, diff?: string) { environment ??= process.env.VIA_ENV!; const l2InitFile = `etc/env/l2-inits/${environment}.init.env`; const difference: number = parseInt(diff ? diff : '0'); @@ -173,6 +174,19 @@ export function pushConfig(environment?: string, diff?: string) { l2InitFile, false ); + + env.modify( + `DATABASE_VERIFIER_URL`, + `postgres://postgres:notsecurepassword@localhost/verifier_${environment}`, + l2InitFile, + false + ); + env.modify( + `TEST_DATABASE_VERIFIER_URL`, + `postgres://postgres:notsecurepassword@localhost/verifier_${environment}_test`, + l2InitFile, + false + ); } env.modify('DATABASE_STATE_KEEPER_DB_PATH', `./db/${environment}/state_keeper`, l2InitFile, false); @@ -180,8 +194,24 @@ export function pushConfig(environment?: string, diff?: string) { env.modify('DATABASE_MERKLE_TREE_BACKUP_PATH', `./db/${environment}/backups`, l2InitFile, false); env.reload(); + await fetchCelestiaTrustedHash(); } +const fetchCelestiaTrustedHash = async () => { + let environment = process.env.VIA_ENV!; + const l2InitFile = `etc/env/l2-inits/${environment}.init.env`; + + const response = await (await fetch('http://celestia-testnet-consensus.itrocket.net:26657/header')).json(); + const { last_block_id, height } = response.result.header; + + const envFilePath1 = path.join(process.env.VIA_HOME!, 
`etc/env/target/${process.env.VIA_ENV}.env`); + + env.modify('VIA_CELESTIA_CLIENT_TRUSTED_BLOCK_HEIGHT', height, l2InitFile, false); + env.modify('VIA_CELESTIA_CLIENT_TRUSTED_BLOCK_HASH', last_block_id.hash, l2InitFile, false); + await updateEnvVariable(envFilePath1, 'VIA_CELESTIA_CLIENT_TRUSTED_BLOCK_HEIGHT', height); + await updateEnvVariable(envFilePath1, 'VIA_CELESTIA_CLIENT_TRUSTED_BLOCK_HASH', last_block_id.hash); +}; + // used to increase chainId for easy deployment of next hyperchain on shared bridge // export function bumpChainId() { // // note we bump in the .toml file directly diff --git a/infrastructure/via/src/database.ts b/infrastructure/via/src/database.ts index a56d20392..e32929dc5 100644 --- a/infrastructure/via/src/database.ts +++ b/infrastructure/via/src/database.ts @@ -10,17 +10,19 @@ export async function reset(opts: DbOpts) { export enum DalPath { CoreDal = 'core/lib/dal', - ProverDal = 'prover/crates/lib/prover_dal' + ProverDal = 'prover/crates/lib/prover_dal', + ViaVerifierDal = 'via_verifier/lib/verifier_dal' } export interface DbOpts { core: boolean; prover: boolean; + verifier: boolean; } function getDals(opts: DbOpts): Map { let dals = new Map(); - if (!opts.prover && !opts.core) { + if (!opts.prover && !opts.core && !opts.verifier) { dals.set(DalPath.CoreDal, process.env.DATABASE_URL!); if (process.env.DATABASE_PROVER_URL) { dals.set(DalPath.ProverDal, process.env.DATABASE_PROVER_URL); @@ -32,6 +34,9 @@ function getDals(opts: DbOpts): Map { if (opts.core) { dals.set(DalPath.CoreDal, process.env.DATABASE_URL!); } + if (opts.verifier) { + dals.set(DalPath.ViaVerifierDal, process.env.DATABASE_VERIFIER_URL!); + } return dals; } @@ -40,6 +45,7 @@ function getTestDals(opts: DbOpts): Map { if (!opts.prover && !opts.core) { dals.set(DalPath.CoreDal, process.env.TEST_DATABASE_URL!); dals.set(DalPath.ProverDal, process.env.TEST_DATABASE_PROVER_URL!); + dals.set(DalPath.ViaVerifierDal, process.env.TEST_DATABASE_VERIFIER_URL!); } if (opts.prover) { 
dals.set(DalPath.ProverDal, process.env.TEST_DATABASE_PROVER_URL!); @@ -47,6 +53,9 @@ function getTestDals(opts: DbOpts): Map { if (opts.core) { dals.set(DalPath.CoreDal, process.env.TEST_DATABASE_URL!); } + if (opts.verifier) { + dals.set(DalPath.ViaVerifierDal, process.env.TEST_DATABASE_VERIFIER_URL!); + } return dals; } diff --git a/infrastructure/via/src/env.ts b/infrastructure/via/src/env.ts index 5b38d1e50..747034c48 100644 --- a/infrastructure/via/src/env.ts +++ b/infrastructure/via/src/env.ts @@ -46,7 +46,7 @@ export async function gitHooks() { } } -export function set(environment: string, print: boolean = false) { +export function set(environment: string, print: boolean = false, soft: boolean = false) { if (!fs.existsSync(`etc/env/target/${environment}.env`) && !fs.existsSync(`etc/env/configs/${environment}.toml`)) { console.error( `Unknown environment: ${environment}.\nCreate an environment file etc/env/target/${environment}.env or etc/env/configs/${environment}.toml` @@ -60,7 +60,11 @@ export function set(environment: string, print: boolean = false) { // No .env file found - we should compile it! config.compileConfig(environment); } - reload(); + + // Only reload if we did NOT pass the --soft flag + if (!soft) { + reload(environment); + } get(print); } @@ -76,6 +80,15 @@ export function reload(environment?: string) { } } +export function load_from_file(environment?: string) { + environment = environment ?? get(); + const envFile = (process.env.ENV_FILE = `etc/env/target/${environment}.env`); + const env = dotenv.parse(fs.readFileSync(envFile)); + for (const envVar in env) { + process.env[envVar] = env[envVar]; + } +} + // loads environment variables export function load() { fs.mkdirSync('etc/env/target', { recursive: true }); @@ -141,8 +154,14 @@ export function mergeInitToEnv() { } export const command = new Command('env') - .arguments('[env_name]') .description('get or set via environment') - .action((environment?: string) => { - environment ? 
set(environment, true) : get(true); + .option('--soft', 'Skip reloading the environment') + .action((cmd: Command) => { + // If user typed `cli env dev --soft`, then: + // cmd.args[0] = 'dev' + // cmd.soft = true + const environment = cmd.args[0]; + const { soft } = cmd.opts(); + + environment ? set(environment, /*print=*/ true, soft) : get(/*print=*/ true); }); diff --git a/infrastructure/via/src/helpers.ts b/infrastructure/via/src/helpers.ts new file mode 100644 index 000000000..a0f36907a --- /dev/null +++ b/infrastructure/via/src/helpers.ts @@ -0,0 +1,16 @@ +import * as fs from 'fs/promises'; +import * as dotenv from 'dotenv'; + +export async function updateEnvVariable(envFilePath: string, variableName: string, newValue: string) { + const envFileContent = await fs.readFile(envFilePath, 'utf-8'); + const envConfig = dotenv.parse(envFileContent); + + envConfig[variableName] = newValue; + + let newEnvContent = ''; + for (const key in envConfig) { + newEnvContent += `${key}=${envConfig[key]}\n`; + } + + await fs.writeFile(envFilePath, newEnvContent, 'utf-8'); +} diff --git a/infrastructure/via/src/index.ts b/infrastructure/via/src/index.ts index eb1db3717..0cb1bba61 100644 --- a/infrastructure/via/src/index.ts +++ b/infrastructure/via/src/index.ts @@ -15,9 +15,10 @@ import { command as db } from './database'; import * as env from './env'; import { command as transactions } from './transactions'; import { command as bootstrap } from './bootstrap'; -import { command as verifier } from './verifier'; +import { verifierCommand as verifier } from './verifier'; import { command as celestia } from './celestia'; import { command as btc_explorer } from './btc_explorer'; +import { command as token } from './token'; const COMMANDS = [ server, @@ -35,6 +36,7 @@ const COMMANDS = [ verifier, celestia, btc_explorer, + token, completion(program as Command) ]; diff --git a/infrastructure/via/src/init.ts b/infrastructure/via/src/init.ts index fe5b6a0c6..0976dcc4f 100644 --- 
a/infrastructure/via/src/init.ts +++ b/infrastructure/via/src/init.ts @@ -9,10 +9,10 @@ import * as contract from './contract'; import * as db from './database'; import * as docker from './docker'; import * as env from './env'; -import * as config from './config'; import * as run from './run'; -// import * as server from './server'; +import { Mode } from './types'; import { createVolumes, up } from './up'; +import path from 'path'; // Checks if all required tools are installed with the correct versions const checkEnv = async (): Promise => { @@ -56,7 +56,9 @@ const initSetup = async ({ await announced('Checking environment', checkEnv()); await announced('Checking git hooks', env.gitHooks()); await announced('Create volumes', createVolumes()); - await announced('Setting up containers', up(docker.VIA_DOCKER_COMPOSE)); + const envFilePath = path.join(process.env.VIA_HOME!, `etc/env/l2-inits/${process.env.VIA_ENV}.init.env`); + + await announced('Setting up containers', up(docker.VIA_DOCKER_COMPOSE, envFilePath)); } await announced('Compiling JS packages', run.yarn()); @@ -67,13 +69,24 @@ const initSetup = async ({ ]); }; -const initDatabase = async (shouldCheck: boolean = true): Promise => { - await announced('Drop postgres db', db.drop({ core: true, prover: true })); - await announced('Setup postgres db', db.setup({ core: true, prover: true }, shouldCheck)); +const initDatabase = async ( + shouldCheck: boolean = true, + core = true, + prover = true, + verifier = false +): Promise => { + await announced('Drop postgres db', db.drop({ core, prover, verifier })); + await announced('Setup postgres db', db.setup({ core, prover, verifier }, shouldCheck)); await announced('Clean rocksdb', clean(`db/${process.env.VIA_ENV!}`)); await announced('Clean backups', clean(`backups/${process.env.VIA_ENV!}`)); }; +const initVerifierSetup = async (skipEnvSetup: boolean): Promise => { + await announced(`Initializing the verifier'}`); + await announced('Checking environment', 
checkEnv()); + await initDatabase(true, false, false, true); +}; + // Deploys ERC20 and WETH tokens to localhost // ? // type DeployTestTokensOptions = { envFile?: string }; @@ -113,6 +126,7 @@ type InitDevCmdActionOptions = InitSetupOptions & { baseTokenName?: string; localLegacyBridgeTesting?: boolean; shouldCheckPostgres: boolean; // Whether to perform `cargo sqlx prepare --check` + mode: Mode; }; export const initDevCmdAction = async ({ skipEnvSetup, @@ -144,6 +158,22 @@ export const initDevCmdAction = async ({ } }; +export const initVerifierDevCmdAction = async ({ skipEnvSetup }: InitDevCmdActionOptions): Promise => { + await initVerifierSetup(skipEnvSetup); +}; + +const init = async (options: InitDevCmdActionOptions) => { + switch (options.mode) { + case Mode.SEQUENCER: + return await initDevCmdAction(options); + case Mode.VERIFIER: + case Mode.COORDINATOR: + return await initVerifierDevCmdAction(options); + default: + throw new Error('Invalid init mode'); + } +}; + // ########################### Command Definitions ########################### export const initCommand = new Command('init') .option('--skip-submodules-checkout') @@ -152,10 +182,11 @@ export const initCommand = new Command('init') .option('--base-token-name ', 'base token name') // ? // .option('--validium-mode', 'deploy contracts in Validium mode') .option('--run-observability', 'run observability suite') + .option('--mode [type]', 'init mode', Mode.SEQUENCER) .option( '--local-legacy-bridge-testing', 'used to test LegacyBridge compatibily. 
The chain will have the same id as the era chain id, while eraChainId in L2SharedBridge will be 0' ) .option('--should-check-postgres', 'Whether to perform cargo sqlx prepare --check during database setup', true) .description('Deploys the shared bridge and registers a hyperchain locally, as quickly as possible.') - .action(initDevCmdAction); + .action(init); diff --git a/infrastructure/via/src/token.ts b/infrastructure/via/src/token.ts new file mode 100644 index 000000000..fe29fb162 --- /dev/null +++ b/infrastructure/via/src/token.ts @@ -0,0 +1,161 @@ +import { Command } from 'commander'; +import { Wallet, Provider, Contract } from 'zksync-ethers'; +import { ethers } from 'ethers'; +import * as utils from 'utils'; + +const DEFAULT_DEPOSITOR_PRIVATE_KEY = 'cVZduZu265sWeAqFYygoDEE1FZ7wV9rpW5qdqjRkUehjaUMWLT1R'; +const DEFAULT_DEPOSITOR_PRIVATE_KEY_OP_RETURN = 'cQa1WGJQWT5suej9XZRBoAe4JsFtvswq5N3LWu7QboLJuZJewJSp'; +const DEFAULT_NETWORK = 'regtest'; +const DEFAULT_RPC_URL = 'http://0.0.0.0:18443'; +const DEFAULT_RPC_USERNAME = 'rpcuser'; +const DEFAULT_RPC_PASSWORD = 'rpcpassword'; + +// 0x36615Cf349d7F6344891B1e7CA7C72883F5dc049 +const DEFAULT_L2_PRIVATE_KEY = '0x7726827caac94a7f9e1b160f7ea819f172f7b6f9d2a97f992c38edeab82d4110'; +const DEFAULT_L2_RPC_URL = 'http://0.0.0.0:3050'; +const L2_BASE_TOKEN = '0x000000000000000000000000000000000000800a'; + +async function deposit( + amount: number, + receiverL2Address: string, + senderPrivateKey: string, + network: String, + rcpUrl: string, + rpcUsername: string, + rpcPassword: string +) { + if (isNaN(amount)) { + console.error('Error: Invalid deposit amount. 
Please provide a valid number.'); + return; + } + process.chdir(`${process.env.VIA_HOME}`); + await utils.spawn( + `cargo run --example deposit -- ${amount} ${receiverL2Address} ${senderPrivateKey} ${network} ${rcpUrl} ${rpcUsername} ${rpcPassword}` + ); +} + +async function depositWithOpReturn( + amount: number, + receiverL2Address: string, + senderPrivateKey: string, + network: String, + rcpUrl: string, + rpcUsername: string, + rpcPassword: string +) { + if (isNaN(amount)) { + console.error('Error: Invalid deposit amount. Please provide a valid number.'); + return; + } + process.chdir(`${process.env.VIA_HOME}`); + await utils.spawn( + `cargo run --example deposit_opreturn -- ${amount} ${receiverL2Address} ${senderPrivateKey} ${network} ${rcpUrl} ${rpcUsername} ${rpcPassword}` + ); +} +async function withdraw(amount: number, receiverL1Address: string, userL2PrivateKey: string, rpcUrl: string) { + if (isNaN(amount)) { + console.error('Error: Invalid withdraw amount. Please provide a valid number.'); + return; + } + + const abi = [ + { + inputs: [ + { + internalType: 'uint256', + name: '_account', + type: 'uint256' + } + ], + name: 'balanceOf', + outputs: [ + { + internalType: 'uint256', + name: '', + type: 'uint256' + } + ], + stateMutability: 'view', + type: 'function' + }, + { + inputs: [ + { + internalType: 'bytes', + name: '_l1Receiver', + type: 'bytes' + } + ], + name: 'withdraw', + outputs: [], + stateMutability: 'payable', + type: 'function' + } + ]; + + const provider = new Provider(rpcUrl); + const wallet = new Wallet(userL2PrivateKey, provider); + const btcAddress = ethers.toUtf8Bytes(receiverL1Address); + const contract = new Contract(L2_BASE_TOKEN, abi, wallet) as any; + + let balance = await contract.balanceOf(wallet.address); + console.log('Balance before withdraw', ethers.formatEther(String(balance))); + const tx = await contract.connect(wallet).withdraw(btcAddress, { value: ethers.parseEther(String(amount)) }); + await tx.wait(); + balance = await 
contract.balanceOf(wallet.address); + console.log('Balance after withdraw', ethers.formatEther(String(balance))); +} + +export const command = new Command('token').description('Bridge BTC L2<>L1'); +command + .command('deposit') + .description('deposit BTC to l2') + .requiredOption('--amount ', 'amount of BTC to deposit', parseFloat) + .requiredOption('--receiver-l2-address ', 'receiver l2 address') + .option('--sender-private-key ', 'sender private key', DEFAULT_DEPOSITOR_PRIVATE_KEY) + .option('--network ', 'network', DEFAULT_NETWORK) + .option('--rpc-url ', 'RPC URL', DEFAULT_RPC_URL) + .option('--rpc-username ', 'RPC username', DEFAULT_RPC_USERNAME) + .option('--rpc-password ', 'RPC password', DEFAULT_RPC_PASSWORD) + .action((cmd: Command) => + deposit( + cmd.amount, + cmd.receiverL2Address, + cmd.senderPrivateKey, + cmd.network, + cmd.rpcUrl, + cmd.rpcUsername, + cmd.rpcPassword + ) + ); + +command + .command('deposit-with-op-return') + .description('deposit BTC to l2 with op-return') + .requiredOption('--amount ', 'amount of BTC to deposit', parseFloat) + .requiredOption('--receiver-l2-address ', 'receiver l2 address') + .option('--sender-private-key ', 'sender private key', DEFAULT_DEPOSITOR_PRIVATE_KEY) + .option('--network ', 'network', DEFAULT_NETWORK) + .option('--rpc-url ', 'RPC URL', DEFAULT_RPC_URL) + .option('--rpc-username ', 'RPC username', DEFAULT_RPC_USERNAME) + .option('--rpc-password ', 'RPC password', DEFAULT_RPC_PASSWORD) + .action((cmd: Command) => + depositWithOpReturn( + cmd.amount, + cmd.receiverL2Address, + cmd.senderPrivateKey, + cmd.network, + cmd.rpcUrl, + cmd.rpcUsername, + cmd.rpcPassword + ) + ); + +command + .command('withdraw') + .description('withdraw BTC to l1') + .requiredOption('--amount ', 'amount of BTC to withdraw', parseFloat) + .requiredOption('--receiver-l1-address ', 'receiver l1 address') + .option('--user-private-key ', 'user private key', DEFAULT_L2_PRIVATE_KEY) + .option('--rpc-url ', 'RPC URL', DEFAULT_L2_RPC_URL) 
+ .action((cmd: Command) => withdraw(cmd.amount, cmd.receiverL1Address, cmd.userPrivateKey, cmd.rpcUrl)); diff --git a/infrastructure/via/src/types.ts b/infrastructure/via/src/types.ts new file mode 100644 index 000000000..b0153cf3a --- /dev/null +++ b/infrastructure/via/src/types.ts @@ -0,0 +1,5 @@ +export enum Mode { + SEQUENCER = 'sequencer', + VERIFIER = 'verifier', + COORDINATOR = 'coordinator' +} diff --git a/infrastructure/via/src/up.ts b/infrastructure/via/src/up.ts index 602d149fc..69f9a50aa 100644 --- a/infrastructure/via/src/up.ts +++ b/infrastructure/via/src/up.ts @@ -21,11 +21,15 @@ export function createVolumes() { fs.mkdirSync(`${process.env.VIA_HOME}/volumes/btc-explorer/mysql`, { recursive: true }); + fs.mkdirSync(`${process.env.VIA_HOME}/volumes/celestia-keys`, { + recursive: true + }); } -export async function up(composeFile?: string) { +export async function up(composeFile?: string, envFilePath?: string) { if (composeFile) { - await utils.spawn(`docker compose -f ${composeFile} up -d`); + const envFile = envFilePath ? 
`--env-file ${envFilePath}` : ''; + await utils.spawn(`docker compose ${envFile} -f ${composeFile} up -d`); } else { await utils.spawn('docker compose up -d'); } diff --git a/infrastructure/via/src/verifier.ts b/infrastructure/via/src/verifier.ts index f376af7d8..310899bbf 100644 --- a/infrastructure/via/src/verifier.ts +++ b/infrastructure/via/src/verifier.ts @@ -1,65 +1,26 @@ -import * as utils from 'utils'; import { Command } from 'commander'; +import * as utils from 'utils'; +import * as env from './env'; +import { updateBootstrapTxidsEnv } from './bootstrap'; +import { updateEnvVariable } from './helpers'; +import path from 'path'; +import { load_from_file } from './env'; -const DEFAULT_DEPOSITOR_PRIVATE_KEY = 'cVZduZu265sWeAqFYygoDEE1FZ7wV9rpW5qdqjRkUehjaUMWLT1R'; -const DEFAULT_NETWORK = 'regtest'; -const DEFAULT_RPC_URL = 'http://0.0.0.0:18443'; -const DEFAULT_RPC_USERNAME = 'rpcuser'; -const DEFAULT_RPC_PASSWORD = 'rpcpassword'; - -async function verifyBatch(batchProofRefRevealTxId: string) { - process.chdir(`${process.env.VIA_HOME}`); - await utils.spawn(`cargo run --example verify_batch -- ${batchProofRefRevealTxId}`); -} +export async function verifier() { + await updateBootstrapTxidsEnv(); -async function deposit( - amount: number, - receiverL2Address: string, - senderPrivateKey: string, - network: String, - rcpUrl: string, - rpcUsername: string, - rpcPassword: string -) { - if (isNaN(amount)) { - console.error('Error: Invalid deposit amount. 
Please provide a valid number.'); - return; - } - process.chdir(`${process.env.VIA_HOME}`); - await utils.spawn( - `cargo run --example deposit -- ${amount} ${receiverL2Address} ${senderPrivateKey} ${network} ${rcpUrl} ${rpcUsername} ${rpcPassword}` - ); -} + console.log(`Starting verifier node...`); -export const command = new Command('verifier').description('verifier network mock'); + env.load_from_file(); -command - .command('verify-batch') - .description('verify batch by batch da ref reveal tx id') - .requiredOption( - '--batch-proof-ref-reveal-tx-id ', - 'reveal tx id for the l1 batch proof to verify' - ) - .action((cmd: Command) => verifyBatch(cmd.batchProofRefRevealTxId)); + await utils.spawn(`cargo run --bin via_verifier`); +} -command - .command('deposit') - .description('deposit BTC to l2') - .requiredOption('--amount ', 'amount of BTC to deposit', parseFloat) - .requiredOption('--receiver-l2-address ', 'receiver l2 address') - .option('--sender-private-key ', 'sender private key', DEFAULT_DEPOSITOR_PRIVATE_KEY) - .option('--network ', 'network', DEFAULT_NETWORK) - .option('--rpc-url ', 'RPC URL', DEFAULT_RPC_URL) - .option('--rpc-username ', 'RPC username', DEFAULT_RPC_USERNAME) - .option('--rpc-password ', 'RPC password', DEFAULT_RPC_PASSWORD) - .action((cmd: Command) => - deposit( - cmd.amount, - cmd.receiverL2Address, - cmd.senderPrivateKey, - cmd.network, - cmd.rpcUrl, - cmd.rpcUsername, - cmd.rpcPassword - ) - ); +export const verifierCommand = new Command('verifier') + .description('start via verifier node') + .action(async (cmd: Command) => { + cmd.chainName ? 
env.reload(cmd.chainName) : env.load(); + await env.load(); + env.get(true); + await verifier(); + }); diff --git a/infrastructure/zk/src/format_sql.ts b/infrastructure/zk/src/format_sql.ts index 09f655f54..fc12e8d9a 100644 --- a/infrastructure/zk/src/format_sql.ts +++ b/infrastructure/zk/src/format_sql.ts @@ -159,7 +159,7 @@ async function formatFile(filePath: string, check: boolean) { export async function formatSqlxQueries(check: boolean) { process.chdir(`${process.env.ZKSYNC_HOME}`); const { stdout: filesRaw } = await utils.exec( - 'find core/lib/dal -type f -name "*.rs" && find prover/crates/lib/prover_dal -type f -name "*.rs"' + 'find core/lib/dal -type f -name "*.rs" && find prover/crates/lib/prover_dal -type f -name "*.rs" && find via_verifier/lib/verifier_dal -type f -name "*.rs"' ); const files = filesRaw.trim().split('\n'); const formatResults = await Promise.all(files.map((file) => formatFile(file, check))); diff --git a/prover/crates/lib/prover_fri_types/src/lib.rs b/prover/crates/lib/prover_fri_types/src/lib.rs index c14bc1905..f89e45ac1 100644 --- a/prover/crates/lib/prover_fri_types/src/lib.rs +++ b/prover/crates/lib/prover_fri_types/src/lib.rs @@ -28,8 +28,8 @@ pub mod keys; pub mod queue; // THESE VALUES SHOULD BE UPDATED ON ANY PROTOCOL UPGRADE OF PROVERS -pub const PROVER_PROTOCOL_VERSION: ProtocolVersionId = ProtocolVersionId::Version24; -pub const PROVER_PROTOCOL_PATCH: VersionPatch = VersionPatch(2); +pub const PROVER_PROTOCOL_VERSION: ProtocolVersionId = ProtocolVersionId::Version26; +pub const PROVER_PROTOCOL_PATCH: VersionPatch = VersionPatch(0); pub const PROVER_PROTOCOL_SEMANTIC_VERSION: ProtocolSemanticVersion = ProtocolSemanticVersion { minor: PROVER_PROTOCOL_VERSION, patch: PROVER_PROTOCOL_PATCH, diff --git a/via-playground/README.md b/via-playground/README.md index 06799ec7b..f8a674603 100644 --- a/via-playground/README.md +++ b/via-playground/README.md @@ -21,7 +21,7 @@ and replace it in `hardhat.config.ts`. 3. 
Run the following command to bridge BTC to L2: ```shell - via verifier deposit \ + via token deposit \ --amount 100 \ --receiver-l2-address 0x36615Cf349d7F6344891B1e7CA7C72883F5dc049 \ --sender-private-key \ diff --git a/via_verifier/CHANGELOG.md b/via_verifier/CHANGELOG.md new file mode 100644 index 000000000..e40c9125f --- /dev/null +++ b/via_verifier/CHANGELOG.md @@ -0,0 +1,8 @@ +# Changelog + + +## [x.x.x] Version + +### Features +### Bug Fixes + diff --git a/via_verifier/bin/verifier_server/Cargo.toml b/via_verifier/bin/verifier_server/Cargo.toml new file mode 100644 index 000000000..0d618795b --- /dev/null +++ b/via_verifier/bin/verifier_server/Cargo.toml @@ -0,0 +1,33 @@ +[package] +name = "via_verifier" +version.workspace = true +edition.workspace = true +authors.workspace = true +homepage.workspace = true +repository.workspace = true +license.workspace = true +keywords.workspace = true +categories.workspace = true + +[dependencies] +zksync_config = { workspace = true, features = ["observability_ext"] } +zksync_env_config.workspace = true +zksync_storage.workspace = true +zksync_utils.workspace = true +zksync_types.workspace = true +zksync_node_framework.workspace = true +zksync_vlog.workspace = true +zksync_core_leftovers.workspace = true +zksync_protobuf_config.workspace = true +zksync_node_api_server.workspace = true +zksync_metadata_calculator.workspace = true +via_da_clients.workspace = true + + +anyhow.workspace = true +clap = { workspace = true, features = ["derive"] } +tokio = { workspace = true, features = ["full"] } +tracing.workspace = true + +[target.'cfg(not(target_env = "msvc"))'.dependencies] +tikv-jemallocator.workspace = true diff --git a/via_verifier/bin/verifier_server/src/config.rs b/via_verifier/bin/verifier_server/src/config.rs new file mode 100644 index 000000000..a80bb05e9 --- /dev/null +++ b/via_verifier/bin/verifier_server/src/config.rs @@ -0,0 +1,108 @@ +use anyhow::Context as _; +use zksync_config::{ + configs::{ + 
api::MerkleTreeApiConfig, + chain::{CircuitBreakerConfig, MempoolConfig, OperationsManagerConfig, StateKeeperConfig}, + consensus::ConsensusSecrets, + fri_prover_group::FriProverGroupConfig, + house_keeper::HouseKeeperConfig, + BasicWitnessInputProducerConfig, FriProofCompressorConfig, FriProverConfig, + FriWitnessGeneratorConfig, ObservabilityConfig, PrometheusConfig, + ProtectiveReadsWriterConfig, + }, + ApiConfig, DADispatcherConfig, DBConfig, ObjectStoreConfig, PostgresConfig, ViaBtcSenderConfig, + ViaBtcWatchConfig, ViaCelestiaConfig, ViaVerifierConfig, +}; +use zksync_core_leftovers::temp_config_store::{decode_yaml_repr, TempConfigStore}; +use zksync_env_config::FromEnv; +use zksync_protobuf_config::proto; + +pub(crate) fn read_consensus_secrets() -> anyhow::Result> { + // Read public config. + let Ok(path) = std::env::var("CONSENSUS_SECRETS_PATH") else { + return Ok(None); + }; + let secrets = std::fs::read_to_string(&path).context(path)?; + Ok(Some( + decode_yaml_repr::(&secrets) + .context("failed decoding YAML")?, + )) +} +// +// pub(crate) fn read_consensus_config() -> anyhow::Result> { +// // Read public config. 
+// let Ok(path) = std::env::var("CONSENSUS_CONFIG_PATH") else { +// return Ok(None); +// }; +// let cfg = std::fs::read_to_string(&path).context(path)?; +// Ok(Some( +// decode_yaml_repr::(&cfg).context("failed decoding YAML")?, +// )) +// } + +pub(crate) fn load_env_config() -> anyhow::Result { + Ok(TempConfigStore { + postgres_config: PostgresConfig::from_env().ok(), + health_check_config: None, + merkle_tree_api_config: MerkleTreeApiConfig::from_env().ok(), + web3_json_rpc_config: None, + circuit_breaker_config: CircuitBreakerConfig::from_env().ok(), + mempool_config: MempoolConfig::from_env().ok(), + network_config: None, + contract_verifier: None, + operations_manager_config: OperationsManagerConfig::from_env().ok(), + state_keeper_config: StateKeeperConfig::from_env().ok(), + house_keeper_config: HouseKeeperConfig::from_env().ok(), + fri_proof_compressor_config: FriProofCompressorConfig::from_env().ok(), + fri_prover_config: FriProverConfig::from_env().ok(), + fri_prover_group_config: FriProverGroupConfig::from_env().ok(), + fri_prover_gateway_config: None, + fri_witness_vector_generator: None, + fri_witness_generator_config: FriWitnessGeneratorConfig::from_env().ok(), + prometheus_config: PrometheusConfig::from_env().ok(), + proof_data_handler_config: None, + api_config: ApiConfig::from_env().ok(), + db_config: DBConfig::from_env().ok(), + eth_sender_config: None, + eth_watch_config: None, + gas_adjuster_config: None, + observability: ObservabilityConfig::from_env().ok(), + snapshot_creator: None, + da_dispatcher_config: DADispatcherConfig::from_env().ok(), + protective_reads_writer_config: ProtectiveReadsWriterConfig::from_env().ok(), + basic_witness_input_producer_config: BasicWitnessInputProducerConfig::from_env().ok(), + core_object_store: ObjectStoreConfig::from_env().ok(), + base_token_adjuster_config: None, + commitment_generator: None, + pruning: None, + snapshot_recovery: None, + external_price_api_client_config: None, + 
external_proof_integration_api_config: None, + experimental_vm_config: None, + prover_job_monitor_config: None, + }) +} + +// TODO: temporary solution, should be removed after the config is refactored +pub(crate) fn via_load_env_config() -> anyhow::Result<( + ViaBtcWatchConfig, + ViaBtcSenderConfig, + ViaCelestiaConfig, + ViaVerifierConfig, +)> { + let btc_watch_config = + ViaBtcWatchConfig::from_env().context("Failed to load BTC watch config")?; + let btc_sender_config = + ViaBtcSenderConfig::from_env().context("Failed to load BTC sender config")?; + let celestia_config = + ViaCelestiaConfig::from_env().context("Failed to load celestia config")?; + let verifier_config = + ViaVerifierConfig::from_env().context("Failed to load verifier config")?; + + Ok(( + btc_watch_config, + btc_sender_config, + celestia_config, + verifier_config, + )) +} diff --git a/via_verifier/bin/verifier_server/src/main.rs b/via_verifier/bin/verifier_server/src/main.rs new file mode 100644 index 000000000..1b02099d8 --- /dev/null +++ b/via_verifier/bin/verifier_server/src/main.rs @@ -0,0 +1,120 @@ +use anyhow::Context as _; +use clap::Parser; +use zksync_config::{ + configs::{DatabaseSecrets, L1Secrets, Secrets}, + ContractsConfig, GenesisConfig, ViaGeneralConfig, +}; +use zksync_env_config::FromEnv; + +mod config; +mod node_builder; + +#[cfg(not(target_env = "msvc"))] +#[global_allocator] +static GLOBAL: tikv_jemallocator::Jemalloc = tikv_jemallocator::Jemalloc; + +#[derive(Debug, Parser)] +#[command(author = "Via verifer", version, about = "Via verifer node", long_about = None)] +struct Cli { + /// Path to the YAML config. If set, it will be used instead of env vars. + #[arg(long)] + config_path: Option, + + /// Path to the YAML with secrets. If set, it will be used instead of env vars. + #[arg(long)] + secrets_path: Option, + + /// Path to the yaml with contracts. If set, it will be used instead of env vars. 
+ #[arg(long)] + contracts_config_path: Option, + + /// Path to the wallets config. If set, it will be used instead of env vars. + #[arg(long)] + wallets_path: Option, + + /// Path to the YAML with genesis configuration. If set, it will be used instead of env vars. + #[arg(long)] + genesis_path: Option, +} + +fn main() -> anyhow::Result<()> { + let opt = Cli::parse(); + + // Load env config + let tmp_config = config::load_env_config()?; + + // Load configurations + let configs = match opt.config_path { + Some(_path) => { + todo!("Load config from file") + } + None => { + let general = tmp_config.general(); + let mut via_general = ViaGeneralConfig::from(general); + + // Load the rest of the configs + let via_configs = config::via_load_env_config()?; + via_general.via_btc_watch_config = Some(via_configs.0); + via_general.via_btc_sender_config = Some(via_configs.1); + via_general.via_celestia_config = Some(via_configs.2); + via_general.via_verifier_config = Some(via_configs.3); + via_general + } + }; + + let secrets = match opt.secrets_path { + Some(_path) => { + todo!("Load secrets from file") + } + None => Secrets { + consensus: config::read_consensus_secrets().context("read_verifier_secrets()")?, + database: DatabaseSecrets::from_env().ok(), + l1: L1Secrets::from_env().ok(), + }, + }; + + let genesis = match opt.genesis_path { + Some(_path) => { + todo!("Load genesis from file") + } + None => GenesisConfig::from_env().context("Failed to load genesis from env")?, + }; + + let wallets = match opt.wallets_path { + Some(_path) => { + todo!("Load config from file"); + } + None => tmp_config.wallets(), + }; + + let mut contracts_config = match opt.contracts_config_path { + Some(_path) => { + todo!("Load contracts from file") + } + None => ContractsConfig::from_env().context("contracts_config")?, + }; + + // Disable ecosystem contracts for now + contracts_config.ecosystem_contracts = None; + + let observability_config = configs + .observability + .clone() + 
.context("Observability config missing")?; + + let node_builder = + node_builder::ViaNodeBuilder::new(configs, wallets, secrets, genesis, contracts_config)?; + + let observability_guard = { + // Observability initialization should be performed within tokio context. + let _context_guard = node_builder.runtime_handle().enter(); + observability_config.install()? + }; + + // Build the node + + let node = node_builder.build()?; + node.run(observability_guard)?; + + Ok(()) +} diff --git a/via_verifier/bin/verifier_server/src/node_builder.rs b/via_verifier/bin/verifier_server/src/node_builder.rs new file mode 100644 index 000000000..86d6b1705 --- /dev/null +++ b/via_verifier/bin/verifier_server/src/node_builder.rs @@ -0,0 +1,176 @@ +use anyhow::Context; +use via_da_clients::celestia::wiring_layer::ViaCelestiaClientWiringLayer; +use zksync_config::{ + configs::{via_verifier::VerifierMode, wallets::Wallets, Secrets}, + ActorRole, ContractsConfig, GenesisConfig, ViaGeneralConfig, +}; +use zksync_node_framework::{ + implementations::layers::{ + circuit_breaker_checker::CircuitBreakerCheckerLayer, + healtcheck_server::HealthCheckLayer, + pools_layer::PoolsLayerBuilder, + sigint::SigintHandlerLayer, + via_btc_sender::{ + vote::ViaBtcVoteInscriptionLayer, vote_manager::ViaInscriptionManagerLayer, + }, + via_btc_watch::BtcWatchLayer, + via_verifier::{ + coordinator_api::ViaCoordinatorApiLayer, verifier::ViaWithdrawalVerifierLayer, + }, + via_verifier_btc_watch::VerifierBtcWatchLayer, + via_zk_verification::ViaBtcProofVerificationLayer, + }, + service::{ZkStackService, ZkStackServiceBuilder}, +}; + +/// Macro that looks into a path to fetch an optional config, +/// and clones it into a variable. +macro_rules! 
try_load_config { + ($path:expr) => { + $path.as_ref().context(stringify!($path))?.clone() + }; +} + +pub struct ViaNodeBuilder { + is_coordinator: bool, + node: ZkStackServiceBuilder, + configs: ViaGeneralConfig, + wallets: Wallets, + genesis_config: GenesisConfig, + contracts_config: ContractsConfig, + secrets: Secrets, +} + +impl ViaNodeBuilder { + pub fn new( + via_general_config: ViaGeneralConfig, + wallets: Wallets, + secrets: Secrets, + genesis_config: GenesisConfig, + contracts_config: ContractsConfig, + ) -> anyhow::Result { + let via_verifier_config = try_load_config!(via_general_config.via_verifier_config); + let is_coordinator = via_verifier_config.verifier_mode == VerifierMode::COORDINATOR; + Ok(Self { + is_coordinator, + node: ZkStackServiceBuilder::new().context("Cannot create ZkStackServiceBuilder")?, + configs: via_general_config, + wallets, + genesis_config, + contracts_config, + secrets, + }) + } + + pub fn runtime_handle(&self) -> tokio::runtime::Handle { + self.node.runtime_handle() + } + + fn add_sigint_handler_layer(mut self) -> anyhow::Result { + self.node.add_layer(SigintHandlerLayer); + Ok(self) + } + + fn add_via_celestia_da_client_layer(mut self) -> anyhow::Result { + let celestia_config = try_load_config!(self.configs.via_celestia_config); + println!("{:?}", celestia_config); + self.node + .add_layer(ViaCelestiaClientWiringLayer::new(celestia_config)); + Ok(self) + } + + fn add_healthcheck_layer(mut self) -> anyhow::Result { + let healthcheck_config = try_load_config!(self.configs.api_config).healthcheck; + self.node.add_layer(HealthCheckLayer(healthcheck_config)); + Ok(self) + } + + fn add_circuit_breaker_checker_layer(mut self) -> anyhow::Result { + let circuit_breaker_config = try_load_config!(self.configs.circuit_breaker_config); + self.node + .add_layer(CircuitBreakerCheckerLayer(circuit_breaker_config)); + Ok(self) + } + + fn add_btc_sender_layer(mut self) -> anyhow::Result { + let btc_sender_config = 
try_load_config!(self.configs.via_btc_sender_config); + self.node + .add_layer(ViaBtcVoteInscriptionLayer::new(btc_sender_config.clone())); + self.node + .add_layer(ViaInscriptionManagerLayer::new(btc_sender_config)); + Ok(self) + } + + // VIA related layers + fn add_verifier_btc_watcher_layer(mut self) -> anyhow::Result { + let mut btc_watch_config = try_load_config!(self.configs.via_btc_watch_config); + assert_eq!( + btc_watch_config.actor_role, + ActorRole::Verifier, + "Verifier role is expected" + ); + self.node + .add_layer(VerifierBtcWatchLayer::new(btc_watch_config)); + Ok(self) + } + + fn add_pools_layer(mut self) -> anyhow::Result { + let config = try_load_config!(self.configs.postgres_config); + let secrets = try_load_config!(self.secrets.database); + let pools_layer = PoolsLayerBuilder::empty(config, secrets) + .with_verifier(true) + .build(); + self.node.add_layer(pools_layer); + Ok(self) + } + + fn add_verifier_coordinator_api_layer(mut self) -> anyhow::Result { + let via_verifier_config = try_load_config!(self.configs.via_verifier_config); + let via_btc_sender_config = try_load_config!(self.configs.via_btc_sender_config); + self.node.add_layer(ViaCoordinatorApiLayer { + config: via_verifier_config, + btc_sender_config: via_btc_sender_config, + }); + Ok(self) + } + + fn add_withdrawal_verifier_task_layer(mut self) -> anyhow::Result { + let via_verifier_config = try_load_config!(self.configs.via_verifier_config); + let via_btc_sender_config = try_load_config!(self.configs.via_btc_sender_config); + self.node.add_layer(ViaWithdrawalVerifierLayer { + config: via_verifier_config, + btc_sender_config: via_btc_sender_config, + }); + Ok(self) + } + + fn add_zkp_verification_layer(mut self) -> anyhow::Result { + let via_verifier_config = try_load_config!(self.configs.via_verifier_config); + let via_btc_watcher_config = try_load_config!(self.configs.via_btc_watch_config); + self.node.add_layer(ViaBtcProofVerificationLayer { + config: via_verifier_config, + 
btc_watcher_config: via_btc_watcher_config, + }); + Ok(self) + } + + pub fn build(mut self) -> anyhow::Result { + self = self + .add_sigint_handler_layer()? + .add_healthcheck_layer()? + .add_circuit_breaker_checker_layer()? + .add_pools_layer()? + .add_btc_sender_layer()? + .add_verifier_btc_watcher_layer()? + .add_via_celestia_da_client_layer()? + .add_zkp_verification_layer()?; + + if self.is_coordinator { + self = self.add_verifier_coordinator_api_layer()? + } + + self = self.add_withdrawal_verifier_task_layer()?; + + Ok(self.node.build()) + } +} diff --git a/via_verifier/lib/verifier_dal/.sqlx/query-110825e736943e4ef68110e8d7af3af3edce16051e8987a27ea207bfe9b65502.json b/via_verifier/lib/verifier_dal/.sqlx/query-110825e736943e4ef68110e8d7af3af3edce16051e8987a27ea207bfe9b65502.json new file mode 100644 index 000000000..1a0bcb627 --- /dev/null +++ b/via_verifier/lib/verifier_dal/.sqlx/query-110825e736943e4ef68110e8d7af3af3edce16051e8987a27ea207bfe9b65502.json @@ -0,0 +1,82 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n *\n FROM\n via_btc_inscriptions_request_history\n WHERE\n inscription_request_id = $1\n ORDER BY\n id DESC\n LIMIT\n 1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "commit_tx_id", + "type_info": "Varchar" + }, + { + "ordinal": 2, + "name": "reveal_tx_id", + "type_info": "Varchar" + }, + { + "ordinal": 3, + "name": "inscription_request_id", + "type_info": "Int8" + }, + { + "ordinal": 4, + "name": "signed_commit_tx", + "type_info": "Bytea" + }, + { + "ordinal": 5, + "name": "signed_reveal_tx", + "type_info": "Bytea" + }, + { + "ordinal": 6, + "name": "actual_fees", + "type_info": "Int8" + }, + { + "ordinal": 7, + "name": "confirmed_at", + "type_info": "Timestamp" + }, + { + "ordinal": 8, + "name": "sent_at_block", + "type_info": "Int8" + }, + { + "ordinal": 9, + "name": "created_at", + "type_info": "Timestamp" + }, + { + "ordinal": 10, + "name": 
"updated_at", + "type_info": "Timestamp" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + false, + false, + false, + false, + false, + false, + false, + true, + false, + true, + false + ] + }, + "hash": "110825e736943e4ef68110e8d7af3af3edce16051e8987a27ea207bfe9b65502" +} diff --git a/via_verifier/lib/verifier_dal/.sqlx/query-175807bfd3b6618ab60ede8cb56ddc773bf8e85190606d1b6f2203ce8b5aba2e.json b/via_verifier/lib/verifier_dal/.sqlx/query-175807bfd3b6618ab60ede8cb56ddc773bf8e85190606d1b6f2203ce8b5aba2e.json new file mode 100644 index 000000000..df41de9be --- /dev/null +++ b/via_verifier/lib/verifier_dal/.sqlx/query-175807bfd3b6618ab60ede8cb56ddc773bf8e85190606d1b6f2203ce8b5aba2e.json @@ -0,0 +1,22 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n canonical_tx_hash\n FROM\n via_transactions\n WHERE\n status IS NULL\n ORDER BY\n priority_id ASC\n LIMIT\n $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "canonical_tx_hash", + "type_info": "Bytea" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + false + ] + }, + "hash": "175807bfd3b6618ab60ede8cb56ddc773bf8e85190606d1b6f2203ce8b5aba2e" +} diff --git a/via_verifier/lib/verifier_dal/.sqlx/query-1c5df79bec4f47dc94562f5d27fbeeda15ca4daf5620896833c3cdd4c909a156.json b/via_verifier/lib/verifier_dal/.sqlx/query-1c5df79bec4f47dc94562f5d27fbeeda15ca4daf5620896833c3cdd4c909a156.json new file mode 100644 index 000000000..1faa23e5d --- /dev/null +++ b/via_verifier/lib/verifier_dal/.sqlx/query-1c5df79bec4f47dc94562f5d27fbeeda15ca4daf5620896833c3cdd4c909a156.json @@ -0,0 +1,19 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO\n via_transactions (priority_id, tx_id, receiver, value, calldata, canonical_tx_hash)\n VALUES\n ($1, $2, $3, $4, $5, $6)\n ON CONFLICT (tx_id) DO NOTHING\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8", + "Bytea", + "Varchar", + "Int8", + "Bytea", + "Bytea" + ] + }, + "nullable": [] + }, + 
"hash": "1c5df79bec4f47dc94562f5d27fbeeda15ca4daf5620896833c3cdd4c909a156" +} diff --git a/via_verifier/lib/verifier_dal/.sqlx/query-2082d20e3b547a7385c174892d96857119c57c7ff5194a546d24fec0e3cf3059.json b/via_verifier/lib/verifier_dal/.sqlx/query-2082d20e3b547a7385c174892d96857119c57c7ff5194a546d24fec0e3cf3059.json new file mode 100644 index 000000000..c6bf0d9f7 --- /dev/null +++ b/via_verifier/lib/verifier_dal/.sqlx/query-2082d20e3b547a7385c174892d96857119c57c7ff5194a546d24fec0e3cf3059.json @@ -0,0 +1,56 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n via_btc_inscriptions_request.*\n FROM\n via_btc_inscriptions_request\n JOIN via_btc_inscriptions_request_history ON via_btc_inscriptions_request.id = via_btc_inscriptions_request_history.inscription_request_id\n AND via_btc_inscriptions_request_history.sent_at_block IS NOT NULL\n AND via_btc_inscriptions_request.confirmed_inscriptions_request_history_id IS NULL\n AND via_btc_inscriptions_request_history.id = (\n SELECT\n id\n FROM\n via_btc_inscriptions_request_history\n WHERE\n inscription_request_id = via_btc_inscriptions_request.id\n AND via_btc_inscriptions_request_history.sent_at_block IS NOT NULL\n ORDER BY\n created_at DESC\n LIMIT\n 1\n )\n ORDER BY\n id\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "request_type", + "type_info": "Varchar" + }, + { + "ordinal": 2, + "name": "inscription_message", + "type_info": "Bytea" + }, + { + "ordinal": 3, + "name": "predicted_fee", + "type_info": "Int8" + }, + { + "ordinal": 4, + "name": "confirmed_inscriptions_request_history_id", + "type_info": "Int8" + }, + { + "ordinal": 5, + "name": "created_at", + "type_info": "Timestamp" + }, + { + "ordinal": 6, + "name": "updated_at", + "type_info": "Timestamp" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + false, + false, + true, + true, + true, + false, + false + ] + }, + "hash": 
"2082d20e3b547a7385c174892d96857119c57c7ff5194a546d24fec0e3cf3059" +} diff --git a/via_verifier/lib/verifier_dal/.sqlx/query-2147add1c47a8b86de081f060458cc9601afc6a4f86f930c9ceff02fae5c02dc.json b/via_verifier/lib/verifier_dal/.sqlx/query-2147add1c47a8b86de081f060458cc9601afc6a4f86f930c9ceff02fae5c02dc.json new file mode 100644 index 000000000..17d7311da --- /dev/null +++ b/via_verifier/lib/verifier_dal/.sqlx/query-2147add1c47a8b86de081f060458cc9601afc6a4f86f930c9ceff02fae5c02dc.json @@ -0,0 +1,58 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n via_btc_inscriptions_request.*\n FROM\n via_btc_inscriptions_request\n LEFT JOIN via_btc_inscriptions_request_history ON via_btc_inscriptions_request.id = via_btc_inscriptions_request_history.inscription_request_id\n WHERE\n via_btc_inscriptions_request_history.inscription_request_id IS NULL\n ORDER BY\n via_btc_inscriptions_request.id\n LIMIT\n $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "request_type", + "type_info": "Varchar" + }, + { + "ordinal": 2, + "name": "inscription_message", + "type_info": "Bytea" + }, + { + "ordinal": 3, + "name": "predicted_fee", + "type_info": "Int8" + }, + { + "ordinal": 4, + "name": "confirmed_inscriptions_request_history_id", + "type_info": "Int8" + }, + { + "ordinal": 5, + "name": "created_at", + "type_info": "Timestamp" + }, + { + "ordinal": 6, + "name": "updated_at", + "type_info": "Timestamp" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + false, + false, + true, + true, + true, + false, + false + ] + }, + "hash": "2147add1c47a8b86de081f060458cc9601afc6a4f86f930c9ceff02fae5c02dc" +} diff --git a/via_verifier/lib/verifier_dal/.sqlx/query-3229aefe26d77c22cee31dd506cd62244613761e41e811705611d75fbe48ad1a.json b/via_verifier/lib/verifier_dal/.sqlx/query-3229aefe26d77c22cee31dd506cd62244613761e41e811705611d75fbe48ad1a.json new file mode 100644 index 000000000..25653cf10 
--- /dev/null +++ b/via_verifier/lib/verifier_dal/.sqlx/query-3229aefe26d77c22cee31dd506cd62244613761e41e811705611d75fbe48ad1a.json @@ -0,0 +1,28 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO\n via_btc_inscriptions_request_history (\n commit_tx_id,\n reveal_tx_id,\n inscription_request_id,\n signed_commit_tx,\n signed_reveal_tx,\n actual_fees,\n sent_at_block,\n created_at,\n updated_at\n )\n VALUES\n ($1, $2, $3, $4, $5, $6, $7, NOW(), NOW())\n RETURNING\n id\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Varchar", + "Varchar", + "Int8", + "Bytea", + "Bytea", + "Int8", + "Int8" + ] + }, + "nullable": [ + false + ] + }, + "hash": "3229aefe26d77c22cee31dd506cd62244613761e41e811705611d75fbe48ad1a" +} diff --git a/via_verifier/lib/verifier_dal/.sqlx/query-32a1a262d383bab472b28dedbde93c3a0a389b0c96da70be09697d71874f49f4.json b/via_verifier/lib/verifier_dal/.sqlx/query-32a1a262d383bab472b28dedbde93c3a0a389b0c96da70be09697d71874f49f4.json new file mode 100644 index 000000000..1a6e3e10c --- /dev/null +++ b/via_verifier/lib/verifier_dal/.sqlx/query-32a1a262d383bab472b28dedbde93c3a0a389b0c96da70be09697d71874f49f4.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE via_votable_transactions\n SET\n withdrawal_tx_id = $1\n WHERE\n is_finalized = TRUE\n AND is_verified = TRUE\n AND withdrawal_tx_id IS NULL\n AND l1_batch_number = $2\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Bytea", + "Int8" + ] + }, + "nullable": [] + }, + "hash": "32a1a262d383bab472b28dedbde93c3a0a389b0c96da70be09697d71874f49f4" +} diff --git a/via_verifier/lib/verifier_dal/.sqlx/query-42d4dac4050c296b9f1804dcf6b0073159cb97bf34ebf2740e3f40b863515cce.json b/via_verifier/lib/verifier_dal/.sqlx/query-42d4dac4050c296b9f1804dcf6b0073159cb97bf34ebf2740e3f40b863515cce.json new file mode 100644 index 000000000..2dedade5f --- /dev/null +++ 
b/via_verifier/lib/verifier_dal/.sqlx/query-42d4dac4050c296b9f1804dcf6b0073159cb97bf34ebf2740e3f40b863515cce.json @@ -0,0 +1,29 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n COUNT(*) FILTER (\n WHERE\n vote = TRUE\n ) AS ok_votes,\n COUNT(*) AS total_votes\n FROM\n via_votes\n WHERE\n l1_batch_number = $1\n AND tx_id = $2\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "ok_votes", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "total_votes", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Int8", + "Bytea" + ] + }, + "nullable": [ + null, + null + ] + }, + "hash": "42d4dac4050c296b9f1804dcf6b0073159cb97bf34ebf2740e3f40b863515cce" +} diff --git a/via_verifier/lib/verifier_dal/.sqlx/query-4f3219a13e972e4c7b13af9d22c55e871be4ac71acac33d4662af5cfc4c6bff9.json b/via_verifier/lib/verifier_dal/.sqlx/query-4f3219a13e972e4c7b13af9d22c55e871be4ac71acac33d4662af5cfc4c6bff9.json new file mode 100644 index 000000000..8ccbb4649 --- /dev/null +++ b/via_verifier/lib/verifier_dal/.sqlx/query-4f3219a13e972e4c7b13af9d22c55e871be4ac71acac33d4662af5cfc4c6bff9.json @@ -0,0 +1,28 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n pubdata_blob_id,\n tx_id\n FROM\n via_votable_transactions\n WHERE\n is_finalized = TRUE\n AND is_verified = TRUE\n AND withdrawal_tx_id IS NULL\n AND l1_batch_number = $1\n LIMIT\n 1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "pubdata_blob_id", + "type_info": "Varchar" + }, + { + "ordinal": 1, + "name": "tx_id", + "type_info": "Bytea" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + false, + false + ] + }, + "hash": "4f3219a13e972e4c7b13af9d22c55e871be4ac71acac33d4662af5cfc4c6bff9" +} diff --git a/via_verifier/lib/verifier_dal/.sqlx/query-5644db74e4626d2c689b1a24c8b7a23c11c6079a7d4f715a7387d9a96a91c52c.json b/via_verifier/lib/verifier_dal/.sqlx/query-5644db74e4626d2c689b1a24c8b7a23c11c6079a7d4f715a7387d9a96a91c52c.json new file mode 100644 index 
000000000..2e85bc0df --- /dev/null +++ b/via_verifier/lib/verifier_dal/.sqlx/query-5644db74e4626d2c689b1a24c8b7a23c11c6079a7d4f715a7387d9a96a91c52c.json @@ -0,0 +1,60 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO\n via_btc_inscriptions_request (request_type, inscription_message, predicted_fee, created_at, updated_at)\n VALUES\n ($1, $2, $3, NOW(), NOW())\n RETURNING\n *\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "request_type", + "type_info": "Varchar" + }, + { + "ordinal": 2, + "name": "inscription_message", + "type_info": "Bytea" + }, + { + "ordinal": 3, + "name": "predicted_fee", + "type_info": "Int8" + }, + { + "ordinal": 4, + "name": "confirmed_inscriptions_request_history_id", + "type_info": "Int8" + }, + { + "ordinal": 5, + "name": "created_at", + "type_info": "Timestamp" + }, + { + "ordinal": 6, + "name": "updated_at", + "type_info": "Timestamp" + } + ], + "parameters": { + "Left": [ + "Varchar", + "Bytea", + "Int8" + ] + }, + "nullable": [ + false, + false, + true, + true, + true, + false, + false + ] + }, + "hash": "5644db74e4626d2c689b1a24c8b7a23c11c6079a7d4f715a7387d9a96a91c52c" +} diff --git a/via_verifier/lib/verifier_dal/.sqlx/query-6851a0c44cca95d6f0f21dddc056e1dcef9dd65f5751547525cc9b0839836da5.json b/via_verifier/lib/verifier_dal/.sqlx/query-6851a0c44cca95d6f0f21dddc056e1dcef9dd65f5751547525cc9b0839836da5.json new file mode 100644 index 000000000..6e898fb47 --- /dev/null +++ b/via_verifier/lib/verifier_dal/.sqlx/query-6851a0c44cca95d6f0f21dddc056e1dcef9dd65f5751547525cc9b0839836da5.json @@ -0,0 +1,26 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n l1_batch_number,\n tx_id\n FROM\n via_votable_transactions\n WHERE\n is_verified = FALSE\n ORDER BY\n l1_batch_number ASC\n LIMIT\n 1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "l1_batch_number", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "tx_id", + 
"type_info": "Bytea" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + false, + false + ] + }, + "hash": "6851a0c44cca95d6f0f21dddc056e1dcef9dd65f5751547525cc9b0839836da5" +} diff --git a/via_verifier/lib/verifier_dal/.sqlx/query-7fe235092b7f64cb5c6755f1f571daff666fe04dd45246eb94c961eed1dba2d7.json b/via_verifier/lib/verifier_dal/.sqlx/query-7fe235092b7f64cb5c6755f1f571daff666fe04dd45246eb94c961eed1dba2d7.json new file mode 100644 index 000000000..1b93c7993 --- /dev/null +++ b/via_verifier/lib/verifier_dal/.sqlx/query-7fe235092b7f64cb5c6755f1f571daff666fe04dd45246eb94c961eed1dba2d7.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE via_btc_inscriptions_request\n SET\n updated_at = NOW(),\n confirmed_inscriptions_request_history_id = $2\n WHERE\n id = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8", + "Int8" + ] + }, + "nullable": [] + }, + "hash": "7fe235092b7f64cb5c6755f1f571daff666fe04dd45246eb94c961eed1dba2d7" +} diff --git a/via_verifier/lib/verifier_dal/.sqlx/query-81fbd3f476db005b3260f6600aea3cdd314652a88cd6009a9fca9358b6aa4e59.json b/via_verifier/lib/verifier_dal/.sqlx/query-81fbd3f476db005b3260f6600aea3cdd314652a88cd6009a9fca9358b6aa4e59.json new file mode 100644 index 000000000..414285372 --- /dev/null +++ b/via_verifier/lib/verifier_dal/.sqlx/query-81fbd3f476db005b3260f6600aea3cdd314652a88cd6009a9fca9358b6aa4e59.json @@ -0,0 +1,20 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n MIN(l1_batch_number) as \"l1_batch_number\"\n FROM via_votable_transactions\n WHERE\n is_finalized = FALSE \n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "l1_batch_number", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + null + ] + }, + "hash": "81fbd3f476db005b3260f6600aea3cdd314652a88cd6009a9fca9358b6aa4e59" +} diff --git a/via_verifier/lib/verifier_dal/.sqlx/query-8958c9555cb62efc73eccc2215d7b9c39534c83e3738db90e7111345df0281a2.json 
b/via_verifier/lib/verifier_dal/.sqlx/query-8958c9555cb62efc73eccc2215d7b9c39534c83e3738db90e7111345df0281a2.json new file mode 100644 index 000000000..4ed034e04 --- /dev/null +++ b/via_verifier/lib/verifier_dal/.sqlx/query-8958c9555cb62efc73eccc2215d7b9c39534c83e3738db90e7111345df0281a2.json @@ -0,0 +1,22 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n withdrawal_tx_id\n FROM via_votable_transactions\n WHERE\n l1_batch_number = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "withdrawal_tx_id", + "type_info": "Bytea" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + true + ] + }, + "hash": "8958c9555cb62efc73eccc2215d7b9c39534c83e3738db90e7111345df0281a2" +} diff --git a/via_verifier/lib/verifier_dal/.sqlx/query-9c5faf8349565e8eedb040d4a72d8690e72af5ae37d0059be1fa43a432ec1375.json b/via_verifier/lib/verifier_dal/.sqlx/query-9c5faf8349565e8eedb040d4a72d8690e72af5ae37d0059be1fa43a432ec1375.json new file mode 100644 index 000000000..963590678 --- /dev/null +++ b/via_verifier/lib/verifier_dal/.sqlx/query-9c5faf8349565e8eedb040d4a72d8690e72af5ae37d0059be1fa43a432ec1375.json @@ -0,0 +1,28 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n l1_batch_status,\n tx_id\n FROM\n via_votable_transactions\n WHERE\n l1_batch_number = $1\n AND is_verified = TRUE\n LIMIT\n 1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "l1_batch_status", + "type_info": "Bool" + }, + { + "ordinal": 1, + "name": "tx_id", + "type_info": "Bytea" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + false, + false + ] + }, + "hash": "9c5faf8349565e8eedb040d4a72d8690e72af5ae37d0059be1fa43a432ec1375" +} diff --git a/via_verifier/lib/verifier_dal/.sqlx/query-ad6d50d69c52bacd18be392d583f6d91830eac4268a83112781a6d9c521c5be9.json b/via_verifier/lib/verifier_dal/.sqlx/query-ad6d50d69c52bacd18be392d583f6d91830eac4268a83112781a6d9c521c5be9.json new file mode 100644 index 000000000..5a7d4a2de --- /dev/null +++ 
b/via_verifier/lib/verifier_dal/.sqlx/query-ad6d50d69c52bacd18be392d583f6d91830eac4268a83112781a6d9c521c5be9.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE via_votable_transactions\n SET\n is_finalized = TRUE,\n updated_at = NOW()\n WHERE\n l1_batch_number = $1\n AND tx_id = $2\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8", + "Bytea" + ] + }, + "nullable": [] + }, + "hash": "ad6d50d69c52bacd18be392d583f6d91830eac4268a83112781a6d9c521c5be9" +} diff --git a/via_verifier/lib/verifier_dal/.sqlx/query-b78c21cb05f5d94f2dddaf1a216a233d81fc65d08215d6057e750ad72fbd7cce.json b/via_verifier/lib/verifier_dal/.sqlx/query-b78c21cb05f5d94f2dddaf1a216a233d81fc65d08215d6057e750ad72fbd7cce.json new file mode 100644 index 000000000..ee5f7c934 --- /dev/null +++ b/via_verifier/lib/verifier_dal/.sqlx/query-b78c21cb05f5d94f2dddaf1a216a233d81fc65d08215d6057e750ad72fbd7cce.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE via_btc_inscriptions_request_history\n SET\n updated_at = NOW(),\n confirmed_at = NOW()\n WHERE\n id = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [] + }, + "hash": "b78c21cb05f5d94f2dddaf1a216a233d81fc65d08215d6057e750ad72fbd7cce" +} diff --git a/via_verifier/lib/verifier_dal/.sqlx/query-bac043d7cb5f1b9cee546a34fa265dd1b36c6d94d1c307d66d2133d2277f62a7.json b/via_verifier/lib/verifier_dal/.sqlx/query-bac043d7cb5f1b9cee546a34fa265dd1b36c6d94d1c307d66d2133d2277f62a7.json new file mode 100644 index 000000000..77dc305a2 --- /dev/null +++ b/via_verifier/lib/verifier_dal/.sqlx/query-bac043d7cb5f1b9cee546a34fa265dd1b36c6d94d1c307d66d2133d2277f62a7.json @@ -0,0 +1,32 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n l1_batch_number,\n pubdata_blob_id,\n tx_id\n FROM\n via_votable_transactions\n WHERE\n is_finalized = TRUE\n AND is_verified = TRUE\n AND withdrawal_tx_id IS NULL\n ORDER BY\n l1_batch_number ASC\n ", + "describe": { + 
"columns": [ + { + "ordinal": 0, + "name": "l1_batch_number", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "pubdata_blob_id", + "type_info": "Varchar" + }, + { + "ordinal": 2, + "name": "tx_id", + "type_info": "Bytea" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + false, + false, + false + ] + }, + "hash": "bac043d7cb5f1b9cee546a34fa265dd1b36c6d94d1c307d66d2133d2277f62a7" +} diff --git a/via_verifier/lib/verifier_dal/.sqlx/query-c00edea696d125effe23b50c656970ac92a60580e047d5a43b77e56ab5f3d2b4.json b/via_verifier/lib/verifier_dal/.sqlx/query-c00edea696d125effe23b50c656970ac92a60580e047d5a43b77e56ab5f3d2b4.json new file mode 100644 index 000000000..7fff300d2 --- /dev/null +++ b/via_verifier/lib/verifier_dal/.sqlx/query-c00edea696d125effe23b50c656970ac92a60580e047d5a43b77e56ab5f3d2b4.json @@ -0,0 +1,23 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n is_finalized\n FROM\n via_votable_transactions\n WHERE\n l1_batch_number = $1\n AND tx_id = $2\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "is_finalized", + "type_info": "Bool" + } + ], + "parameters": { + "Left": [ + "Int8", + "Bytea" + ] + }, + "nullable": [ + false + ] + }, + "hash": "c00edea696d125effe23b50c656970ac92a60580e047d5a43b77e56ab5f3d2b4" +} diff --git a/via_verifier/lib/verifier_dal/.sqlx/query-c2f1e15f46b72570d02c7b8be08024ec90890f94199b039933296c3f99ba63bf.json b/via_verifier/lib/verifier_dal/.sqlx/query-c2f1e15f46b72570d02c7b8be08024ec90890f94199b039933296c3f99ba63bf.json new file mode 100644 index 000000000..b7391ab5c --- /dev/null +++ b/via_verifier/lib/verifier_dal/.sqlx/query-c2f1e15f46b72570d02c7b8be08024ec90890f94199b039933296c3f99ba63bf.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE via_transactions\n SET\n status = $2\n WHERE\n canonical_tx_hash = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Bytea", + "Bool" + ] + }, + "nullable": [] + }, + "hash": 
"c2f1e15f46b72570d02c7b8be08024ec90890f94199b039933296c3f99ba63bf" +} diff --git a/via_verifier/lib/verifier_dal/.sqlx/query-ca8a2d4afee25b6cd41bcb88af78eef1a09c549c5c62c77b5527f771ae6370b0.json b/via_verifier/lib/verifier_dal/.sqlx/query-ca8a2d4afee25b6cd41bcb88af78eef1a09c549c5c62c77b5527f771ae6370b0.json new file mode 100644 index 000000000..d6681f247 --- /dev/null +++ b/via_verifier/lib/verifier_dal/.sqlx/query-ca8a2d4afee25b6cd41bcb88af78eef1a09c549c5c62c77b5527f771ae6370b0.json @@ -0,0 +1,22 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT EXISTS(\n SELECT 1\n FROM via_l1_batch_vote_inscription_request\n WHERE l1_batch_number = $1\n )\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "exists", + "type_info": "Bool" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + null + ] + }, + "hash": "ca8a2d4afee25b6cd41bcb88af78eef1a09c549c5c62c77b5527f771ae6370b0" +} diff --git a/via_verifier/lib/verifier_dal/.sqlx/query-d574f696c92de6af8a597071dad510b32864e18832c7f6d4bd54c14783f54cca.json b/via_verifier/lib/verifier_dal/.sqlx/query-d574f696c92de6af8a597071dad510b32864e18832c7f6d4bd54c14783f54cca.json new file mode 100644 index 000000000..9133c893a --- /dev/null +++ b/via_verifier/lib/verifier_dal/.sqlx/query-d574f696c92de6af8a597071dad510b32864e18832c7f6d4bd54c14783f54cca.json @@ -0,0 +1,17 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO\n via_votes (l1_batch_number, tx_id, verifier_address, vote)\n VALUES\n ($1, $2, $3, $4)\n ON CONFLICT (l1_batch_number, tx_id, verifier_address) DO NOTHING\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8", + "Bytea", + "Text", + "Bool" + ] + }, + "nullable": [] + }, + "hash": "d574f696c92de6af8a597071dad510b32864e18832c7f6d4bd54c14783f54cca" +} diff --git a/via_verifier/lib/verifier_dal/.sqlx/query-dcc4dfd0b7fd4968ba8d7f26fe9a4a2263af9c4aa330638ffdc8c380aafa6d3a.json 
b/via_verifier/lib/verifier_dal/.sqlx/query-dcc4dfd0b7fd4968ba8d7f26fe9a4a2263af9c4aa330638ffdc8c380aafa6d3a.json new file mode 100644 index 000000000..f44a753ea --- /dev/null +++ b/via_verifier/lib/verifier_dal/.sqlx/query-dcc4dfd0b7fd4968ba8d7f26fe9a4a2263af9c4aa330638ffdc8c380aafa6d3a.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO\n via_l1_batch_vote_inscription_request (l1_batch_number, vote_l1_batch_inscription_id, created_at, updated_at)\n VALUES\n ($1, $2, NOW(), NOW())\n ON CONFLICT DO NOTHING\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8", + "Int8" + ] + }, + "nullable": [] + }, + "hash": "dcc4dfd0b7fd4968ba8d7f26fe9a4a2263af9c4aa330638ffdc8c380aafa6d3a" +} diff --git a/via_verifier/lib/verifier_dal/.sqlx/query-e1237653c8bb6b8df57852f48136f71979a3e3826c8cb240c0f1e37bc1bbbdf9.json b/via_verifier/lib/verifier_dal/.sqlx/query-e1237653c8bb6b8df57852f48136f71979a3e3826c8cb240c0f1e37bc1bbbdf9.json new file mode 100644 index 000000000..57ef4050b --- /dev/null +++ b/via_verifier/lib/verifier_dal/.sqlx/query-e1237653c8bb6b8df57852f48136f71979a3e3826c8cb240c0f1e37bc1bbbdf9.json @@ -0,0 +1,19 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO\n via_votable_transactions (\n l1_batch_number,\n tx_id,\n da_identifier,\n blob_id,\n pubdata_reveal_tx_id,\n pubdata_blob_id\n )\n VALUES\n ($1, $2, $3, $4, $5, $6)\n ON CONFLICT (l1_batch_number, tx_id) DO NOTHING\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8", + "Bytea", + "Varchar", + "Varchar", + "Varchar", + "Varchar" + ] + }, + "nullable": [] + }, + "hash": "e1237653c8bb6b8df57852f48136f71979a3e3826c8cb240c0f1e37bc1bbbdf9" +} diff --git a/via_verifier/lib/verifier_dal/.sqlx/query-ea3062d8c4eb8f234ba6fbc145734c78fcf487984d098ef103bb228a787e6e2e.json b/via_verifier/lib/verifier_dal/.sqlx/query-ea3062d8c4eb8f234ba6fbc145734c78fcf487984d098ef103bb228a787e6e2e.json new file mode 100644 index 000000000..9f36c543c --- /dev/null 
+++ b/via_verifier/lib/verifier_dal/.sqlx/query-ea3062d8c4eb8f234ba6fbc145734c78fcf487984d098ef103bb228a787e6e2e.json @@ -0,0 +1,20 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT COUNT(priority_id) as priority_id FROM via_transactions;\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "priority_id", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + null + ] + }, + "hash": "ea3062d8c4eb8f234ba6fbc145734c78fcf487984d098ef103bb228a787e6e2e" +} diff --git a/via_verifier/lib/verifier_dal/.sqlx/query-eb4413d3d75a4a014f741d6f23486ffccdb4ea2a1495525fe76be9c1e77580c9.json b/via_verifier/lib/verifier_dal/.sqlx/query-eb4413d3d75a4a014f741d6f23486ffccdb4ea2a1495525fe76be9c1e77580c9.json new file mode 100644 index 000000000..c1fda0d3d --- /dev/null +++ b/via_verifier/lib/verifier_dal/.sqlx/query-eb4413d3d75a4a014f741d6f23486ffccdb4ea2a1495525fe76be9c1e77580c9.json @@ -0,0 +1,20 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n MAX(l1_batch_number) AS max_batch_number\n FROM\n via_votable_transactions\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "max_batch_number", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + null + ] + }, + "hash": "eb4413d3d75a4a014f741d6f23486ffccdb4ea2a1495525fe76be9c1e77580c9" +} diff --git a/via_verifier/lib/verifier_dal/.sqlx/query-ed6c76995db0b440fcce87a2acdf5f2c1e476cdcbcca9a61c1af5f0234fc22a5.json b/via_verifier/lib/verifier_dal/.sqlx/query-ed6c76995db0b440fcce87a2acdf5f2c1e476cdcbcca9a61c1af5f0234fc22a5.json new file mode 100644 index 000000000..24761a046 --- /dev/null +++ b/via_verifier/lib/verifier_dal/.sqlx/query-ed6c76995db0b440fcce87a2acdf5f2c1e476cdcbcca9a61c1af5f0234fc22a5.json @@ -0,0 +1,22 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n 1 AS cnt\n FROM\n via_transactions\n WHERE\n tx_id = $1\n LIMIT\n 1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "cnt", + "type_info": "Int4" + } + ], + 
"parameters": { + "Left": [ + "Bytea" + ] + }, + "nullable": [ + null + ] + }, + "hash": "ed6c76995db0b440fcce87a2acdf5f2c1e476cdcbcca9a61c1af5f0234fc22a5" +} diff --git a/via_verifier/lib/verifier_dal/.sqlx/query-f9bb84f6a995fd6590f96491066703fbb5f734284c380b316be30fd38aac8675.json b/via_verifier/lib/verifier_dal/.sqlx/query-f9bb84f6a995fd6590f96491066703fbb5f734284c380b316be30fd38aac8675.json new file mode 100644 index 000000000..1f2be9fbe --- /dev/null +++ b/via_verifier/lib/verifier_dal/.sqlx/query-f9bb84f6a995fd6590f96491066703fbb5f734284c380b316be30fd38aac8675.json @@ -0,0 +1,16 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE via_votable_transactions\n SET\n is_verified = TRUE,\n l1_batch_status = $3,\n updated_at = NOW()\n WHERE\n l1_batch_number = $1\n AND tx_id = $2\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8", + "Bytea", + "Bool" + ] + }, + "nullable": [] + }, + "hash": "f9bb84f6a995fd6590f96491066703fbb5f734284c380b316be30fd38aac8675" +} diff --git a/via_verifier/lib/verifier_dal/Cargo.toml b/via_verifier/lib/verifier_dal/Cargo.toml new file mode 100644 index 000000000..b6442e39f --- /dev/null +++ b/via_verifier/lib/verifier_dal/Cargo.toml @@ -0,0 +1,37 @@ +[package] +name = "via_verifier_dal" +description = "Via Verifier DAL" +version.workspace = true +edition.workspace = true +authors.workspace = true +homepage.workspace = true +repository.workspace = true +license.workspace = true +keywords.workspace = true +categories.workspace = true + +[dependencies] +anyhow.workspace = true +zksync_db_connection.workspace = true +zksync_basic_types.workspace = true +zksync_types.workspace = true +thiserror.workspace = true + +bitcoin = { version = "0.32.2" } +strum = { workspace = true, features = ["derive"] } +sqlx = { workspace = true, features = [ + "runtime-tokio", + "tls-native-tls", + "macros", + "postgres", + "bigdecimal", + "rust_decimal", + "chrono", + "json", + "migrate", + "ipnetwork", +] } + +[dev-dependencies] 
+tokio = { workspace = true, features = ["full"] } +rand = { workspace = true } diff --git a/via_verifier/lib/verifier_dal/doc/Tables.md b/via_verifier/lib/verifier_dal/doc/Tables.md new file mode 100644 index 000000000..e69de29bb diff --git a/via_verifier/lib/verifier_dal/migrations/20240906134623_add_via_btc_inscription_requests.down.sql b/via_verifier/lib/verifier_dal/migrations/20240906134623_add_via_btc_inscription_requests.down.sql new file mode 100644 index 000000000..f87e9b335 --- /dev/null +++ b/via_verifier/lib/verifier_dal/migrations/20240906134623_add_via_btc_inscription_requests.down.sql @@ -0,0 +1,2 @@ +DROP TABLE via_btc_inscriptions_request_history; +DROP TABLE via_btc_inscriptions_request; \ No newline at end of file diff --git a/via_verifier/lib/verifier_dal/migrations/20240906134623_add_via_btc_inscription_requests.up.sql b/via_verifier/lib/verifier_dal/migrations/20240906134623_add_via_btc_inscription_requests.up.sql new file mode 100644 index 000000000..1b2a41839 --- /dev/null +++ b/via_verifier/lib/verifier_dal/migrations/20240906134623_add_via_btc_inscription_requests.up.sql @@ -0,0 +1,34 @@ +CREATE TABLE "via_btc_inscriptions_request" ( + "id" BIGINT GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, + "request_type" varchar NOT NULL, + "inscription_message" BYTEA, + "predicted_fee" bigint, + "confirmed_inscriptions_request_history_id" bigint UNIQUE, + "created_at" timestamp NOT NULL DEFAULT 'now()', + "updated_at" timestamp NOT NULL +); + +CREATE TABLE "via_btc_inscriptions_request_history" ( + "id" BIGINT GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, + "commit_tx_id" varchar UNIQUE NOT NULL, + "reveal_tx_id" varchar UNIQUE NOT NULL, + "inscription_request_id" bigint NOT NULL, + "signed_commit_tx" BYTEA NOT NULL, + "signed_reveal_tx" BYTEA NOT NULL, + "actual_fees" bigint NOT NULL, + "confirmed_at" timestamp DEFAULT null, + "sent_at_block" bigint NOT NULL, + "created_at" timestamp DEFAULT 'now()', + "updated_at" timestamp NOT NULL +); + +CREATE 
TABLE "via_l1_batch_vote_inscription_request" ( + "l1_batch_number" bigint UNIQUE NOT NULL, + "vote_l1_batch_inscription_id" bigint UNIQUE NOT NULL, + "created_at" timestamp NOT NULL DEFAULT 'now()', + "updated_at" timestamp NOT NULL +); + +ALTER TABLE "via_btc_inscriptions_request_history" ADD FOREIGN KEY ("inscription_request_id") REFERENCES "via_btc_inscriptions_request" ("id") ON DELETE CASCADE ON UPDATE NO ACTION; +ALTER TABLE "via_btc_inscriptions_request" ADD FOREIGN KEY ("confirmed_inscriptions_request_history_id") REFERENCES "via_btc_inscriptions_request_history" ("id"); +ALTER TABLE "via_l1_batch_vote_inscription_request" ADD FOREIGN KEY ("vote_l1_batch_inscription_id") REFERENCES "via_btc_inscriptions_request" ("id"); diff --git a/via_verifier/lib/verifier_dal/migrations/20250112053854_create_via_votes.up.down.sql b/via_verifier/lib/verifier_dal/migrations/20250112053854_create_via_votes.up.down.sql new file mode 100644 index 000000000..9bce83ce9 --- /dev/null +++ b/via_verifier/lib/verifier_dal/migrations/20250112053854_create_via_votes.up.down.sql @@ -0,0 +1,2 @@ +DROP TABLE IF EXISTS via_votes; +DROP TABLE IF EXISTS via_votable_transactions; diff --git a/via_verifier/lib/verifier_dal/migrations/20250112053854_create_via_votes.up.up.sql b/via_verifier/lib/verifier_dal/migrations/20250112053854_create_via_votes.up.up.sql new file mode 100644 index 000000000..a000a61cf --- /dev/null +++ b/via_verifier/lib/verifier_dal/migrations/20250112053854_create_via_votes.up.up.sql @@ -0,0 +1,26 @@ +CREATE TABLE IF NOT EXISTS via_votable_transactions ( + l1_batch_number BIGINT UNIQUE NOT NULL, + tx_id BYTEA, + da_identifier VARCHAR NOT NULL, + blob_id VARCHAR NOT NULL, + pubdata_blob_id VARCHAR NOT NULL, + pubdata_reveal_tx_id VARCHAR NOT NULL, + withdrawal_tx_id BYTEA, + is_finalized BOOLEAN NOT NULL DEFAULT FALSE, + is_verified BOOLEAN NOT NULL DEFAULT FALSE, + l1_batch_status BOOLEAN NOT NULL DEFAULT FALSE, + created_at TIMESTAMP NOT NULL DEFAULT NOW(), + 
updated_at TIMESTAMP NOT NULL DEFAULT NOW(), + PRIMARY KEY (l1_batch_number, tx_id) +); + +CREATE TABLE IF NOT EXISTS via_votes ( + l1_batch_number BIGINT NOT NULL, + tx_id BYTEA NOT NULL, + verifier_address TEXT NOT NULL, + vote BOOLEAN NOT NULL, + created_at TIMESTAMP NOT NULL DEFAULT NOW(), + PRIMARY KEY (l1_batch_number, tx_id, verifier_address), + FOREIGN KEY (l1_batch_number, tx_id) REFERENCES via_votable_transactions (l1_batch_number, tx_id) ON DELETE CASCADE +); + diff --git a/via_verifier/lib/verifier_dal/migrations/20250207164238_via_add_transactions_dal.down.sql b/via_verifier/lib/verifier_dal/migrations/20250207164238_via_add_transactions_dal.down.sql new file mode 100644 index 000000000..40028abab --- /dev/null +++ b/via_verifier/lib/verifier_dal/migrations/20250207164238_via_add_transactions_dal.down.sql @@ -0,0 +1 @@ +DROP TABLE IF EXISTS via_transactions; \ No newline at end of file diff --git a/via_verifier/lib/verifier_dal/migrations/20250207164238_via_add_transactions_dal.up.sql b/via_verifier/lib/verifier_dal/migrations/20250207164238_via_add_transactions_dal.up.sql new file mode 100644 index 000000000..7d3c82c0c --- /dev/null +++ b/via_verifier/lib/verifier_dal/migrations/20250207164238_via_add_transactions_dal.up.sql @@ -0,0 +1,14 @@ +CREATE TABLE IF NOT EXISTS via_transactions ( + "priority_id" BIGINT NOT NULL, + "tx_id" BYTEA NOT NULL, + "receiver" VARCHAR NOT NULL, + "value" BIGINT NOT NULL, + "calldata" BYTEA, + "canonical_tx_hash" BYTEA NOT NULL, + "status" BOOLEAN, + created_at TIMESTAMP NOT NULL DEFAULT NOW(), + updated_at TIMESTAMP NOT NULL DEFAULT NOW(), + PRIMARY KEY (tx_id) +); + +CREATE INDEX idx_via_transactions_priority ON via_transactions(priority_id); diff --git a/via_verifier/lib/verifier_dal/src/lib.rs b/via_verifier/lib/verifier_dal/src/lib.rs new file mode 100644 index 000000000..e7806acd7 --- /dev/null +++ b/via_verifier/lib/verifier_dal/src/lib.rs @@ -0,0 +1,67 @@ +//! Data access layer (DAL) for ZKsync Era. 
+ +// Linter settings. +#![warn(clippy::cast_lossless)] + +pub use sqlx::{types::BigDecimal, Error as SqlxError}; +use via_transactions_dal::ViaTransactionsDal; +use zksync_db_connection::connection::DbMarker; +pub use zksync_db_connection::{ + connection::{Connection, IsolationLevel}, + connection_pool::{ConnectionPool, ConnectionPoolBuilder}, + error::{DalError, DalResult}, +}; + +use crate::{ + via_blocks_dal::ViaBlocksDal, via_btc_sender_dal::ViaBtcSenderDal, via_votes_dal::ViaVotesDal, +}; + +pub mod models; +pub mod via_blocks_dal; +pub mod via_btc_sender_dal; +pub mod via_transactions_dal; +pub mod via_votes_dal; + +#[cfg(test)] +mod tests; + +// This module is private and serves as a way to seal the trait. +mod private { + pub trait Sealed {} +} + +// Here we are making the trait sealed, because it should be public to function correctly, but we don't +// want to allow any other downstream implementations of this trait. +pub trait VerifierDal<'a>: private::Sealed +where + Self: 'a, +{ + fn via_votes_dal(&mut self) -> ViaVotesDal<'_, 'a>; + fn via_btc_sender_dal(&mut self) -> ViaBtcSenderDal<'_, 'a>; + fn via_block_dal(&mut self) -> ViaBlocksDal<'_, 'a>; + fn via_transactions_dal(&mut self) -> ViaTransactionsDal<'_, 'a>; +} + +#[derive(Clone, Debug)] +pub struct Verifier; + +// Implement the marker trait for the Core to be able to use it in Connection. +impl DbMarker for Verifier {} +// Implement the sealed trait for the struct itself. 
+impl private::Sealed for Connection<'_, Verifier> {} + +impl<'a> VerifierDal<'a> for Connection<'a, Verifier> { + fn via_votes_dal(&mut self) -> ViaVotesDal<'_, 'a> { + ViaVotesDal { storage: self } + } + + fn via_btc_sender_dal(&mut self) -> ViaBtcSenderDal<'_, 'a> { + ViaBtcSenderDal { storage: self } + } + fn via_block_dal(&mut self) -> ViaBlocksDal<'_, 'a> { + ViaBlocksDal { storage: self } + } + fn via_transactions_dal(&mut self) -> ViaTransactionsDal<'_, 'a> { + ViaTransactionsDal { storage: self } + } +} diff --git a/via_verifier/lib/verifier_dal/src/migrations.rs b/via_verifier/lib/verifier_dal/src/migrations.rs new file mode 100644 index 000000000..67eb15e38 --- /dev/null +++ b/via_verifier/lib/verifier_dal/src/migrations.rs @@ -0,0 +1,16 @@ +use anyhow::Context; +use sqlx::{migrate::Migrator, PgPool}; +use std::path::Path; + +/// Runs migrations for the verifier database. +pub async fn migrate(pool: &PgPool) -> anyhow::Result<()> { + let path = Path::new(env!("CARGO_MANIFEST_DIR")).join("migrations"); + let migrator = Migrator::new(path) + .await + .context("Failed to create migrator")?; + migrator + .run(pool) + .await + .context("Failed to run migrations")?; + Ok(()) +} \ No newline at end of file diff --git a/via_verifier/lib/verifier_dal/src/models/mod.rs b/via_verifier/lib/verifier_dal/src/models/mod.rs new file mode 100644 index 000000000..c641ab7e2 --- /dev/null +++ b/via_verifier/lib/verifier_dal/src/models/mod.rs @@ -0,0 +1 @@ +pub mod storage_btc_inscription_request; diff --git a/via_verifier/lib/verifier_dal/src/models/storage_btc_inscription_request.rs b/via_verifier/lib/verifier_dal/src/models/storage_btc_inscription_request.rs new file mode 100644 index 000000000..baf0d71f3 --- /dev/null +++ b/via_verifier/lib/verifier_dal/src/models/storage_btc_inscription_request.rs @@ -0,0 +1,62 @@ +use std::str::FromStr; + +use bitcoin::Txid; +use sqlx::types::chrono::NaiveDateTime; +use zksync_types::btc_sender::{ViaBtcInscriptionRequest, 
ViaBtcInscriptionRequestHistory}; + +#[derive(Debug, Clone)] +pub struct ViaStorageBtcInscriptionRequest { + pub id: i64, + pub request_type: String, + pub inscription_message: Option>, + pub predicted_fee: Option, + pub confirmed_inscriptions_request_history_id: Option, + pub created_at: NaiveDateTime, + pub updated_at: NaiveDateTime, +} + +#[derive(Clone, Debug)] +pub struct ViaStorageBtcInscriptionRequestHistory { + pub id: i64, + pub commit_tx_id: String, + pub reveal_tx_id: String, + pub inscription_request_id: i64, + pub signed_commit_tx: Option>, + pub signed_reveal_tx: Option>, + pub actual_fees: i64, + pub sent_at_block: i64, + pub confirmed_at: Option, + pub created_at: Option, + pub updated_at: Option, +} + +impl From for ViaBtcInscriptionRequest { + fn from(req: ViaStorageBtcInscriptionRequest) -> ViaBtcInscriptionRequest { + ViaBtcInscriptionRequest { + id: req.id, + request_type: req.request_type, + inscription_message: req.inscription_message, + confirmed_inscriptions_request_history_id: req + .confirmed_inscriptions_request_history_id, + predicted_fee: req.predicted_fee, + created_at: req.created_at, + updated_at: req.updated_at, + } + } +} + +impl From for ViaBtcInscriptionRequestHistory { + fn from(history: ViaStorageBtcInscriptionRequestHistory) -> ViaBtcInscriptionRequestHistory { + ViaBtcInscriptionRequestHistory { + id: history.id, + commit_tx_id: Txid::from_str(&history.commit_tx_id).unwrap(), + reveal_tx_id: Txid::from_str(&history.reveal_tx_id).unwrap(), + inscription_request_id: history.inscription_request_id, + sent_at_block: history.sent_at_block, + signed_commit_tx: history.signed_commit_tx, + signed_reveal_tx: history.signed_reveal_tx, + actual_fees: history.actual_fees, + confirmed_at: history.confirmed_at, + } + } +} diff --git a/via_verifier/lib/verifier_dal/src/tests/mod.rs b/via_verifier/lib/verifier_dal/src/tests/mod.rs new file mode 100644 index 000000000..f29b90636 --- /dev/null +++ 
b/via_verifier/lib/verifier_dal/src/tests/mod.rs @@ -0,0 +1,68 @@ +use rand::random; +use zksync_db_connection::{connection::Connection, connection_pool::ConnectionPool}; +use zksync_types::H256; + +use crate::{Verifier, VerifierDal}; + +// Helper functions for testing +async fn create_test_connection() -> Connection<'static, Verifier> { + let connection_pool = ConnectionPool::::test_pool().await; + connection_pool.connection().await.unwrap() +} + +fn mock_via_vote() -> (u32, H256, String, bool) { + ( + 1, // l1_batch_number + H256::random(), + "0x1234567890123456789012345678901234567890".to_string(), // verifier_address + random::(), + ) +} + +#[tokio::test] +async fn test_via_vote_workflow() { + let mut storage = create_test_connection().await; + + // Create test data + let (l1_batch_number, tx_id, verifier_address, vote) = mock_via_vote(); + + // First insert a votable transaction + storage + .via_votes_dal() + .insert_votable_transaction( + l1_batch_number, + tx_id, + "test_da_id".to_string(), + "test_blob_id".to_string(), + "test_pubdata_tx_id".to_string(), + "test_pubdata_blob_id".to_string(), + ) + .await + .unwrap(); + + // Test inserting a vote + storage + .via_votes_dal() + .insert_vote(l1_batch_number, tx_id, &verifier_address, vote) + .await + .unwrap(); + + // Test getting vote count + let (ok_votes, total_votes) = storage + .via_votes_dal() + .get_vote_count(l1_batch_number, tx_id) + .await + .unwrap(); + + assert_eq!(total_votes, 1); + assert_eq!(ok_votes, if vote { 1 } else { 0 }); + + // Test finalizing transaction + let is_finalized = storage + .via_votes_dal() + .finalize_transaction_if_needed(l1_batch_number, tx_id, 0.5, 1) + .await + .unwrap(); + + assert_eq!(is_finalized, vote); // Should be finalized if the vote was true +} diff --git a/via_verifier/lib/verifier_dal/src/via_blocks_dal.rs b/via_verifier/lib/verifier_dal/src/via_blocks_dal.rs new file mode 100644 index 000000000..847055f7f --- /dev/null +++ 
b/via_verifier/lib/verifier_dal/src/via_blocks_dal.rs @@ -0,0 +1,82 @@ +use zksync_db_connection::{ + connection::Connection, + error::DalResult, + instrument::{InstrumentExt, Instrumented}, +}; +use zksync_types::{ + via_verifier_btc_inscription_operations::ViaVerifierBtcInscriptionRequestType, L1BatchNumber, +}; + +use crate::Verifier; + +#[derive(Debug)] +pub struct ViaBlocksDal<'a, 'c> { + pub(crate) storage: &'a mut Connection<'c, Verifier>, +} + +impl ViaBlocksDal<'_, '_> { + pub async fn insert_vote_l1_batch_inscription_request_id( + &mut self, + batch_number: L1BatchNumber, + inscription_request_id: i64, + inscription_request: ViaVerifierBtcInscriptionRequestType, + ) -> DalResult<()> { + match inscription_request { + ViaVerifierBtcInscriptionRequestType::VoteOnchain => { + let instrumentation = Instrumented::new("set_inscription_request_tx_id#commit") + .with_arg("batch_number", &batch_number) + .with_arg("inscription_request_id", &inscription_request_id); + + let query = sqlx::query!( + r#" + INSERT INTO + via_l1_batch_vote_inscription_request (l1_batch_number, vote_l1_batch_inscription_id, created_at, updated_at) + VALUES + ($1, $2, NOW(), NOW()) + ON CONFLICT DO NOTHING + "#, + i64::from(batch_number.0), + inscription_request_id as i32, + ); + let result = instrumentation + .clone() + .with(query) + .execute(self.storage) + .await?; + + if result.rows_affected() == 0 { + let err = instrumentation.constraint_error(anyhow::anyhow!( + "Failed to insert into 'via_l1_batch_vote_inscription_request': \ + No rows were affected. This could be due to a conflict or invalid input values. 
\ + batch_number: {:?}, inscription_request_id: {:?}", + i64::from(batch_number.0), + inscription_request_id as i32 + )); + return Err(err); + } + Ok(()) + } + } + } + + pub async fn check_vote_l1_batch_inscription_request_if_exists( + &mut self, + batch_number: i64, + ) -> DalResult<bool> { + let exists = sqlx::query_scalar!( + r#" + SELECT EXISTS( + SELECT 1 + FROM via_l1_batch_vote_inscription_request + WHERE l1_batch_number = $1 + ) + "#, + batch_number + ) + .instrument("check_vote_l1_batch_inscription_request_id_exists") + .fetch_one(self.storage) + .await?; + + Ok(exists.unwrap_or(false)) + } +} diff --git a/via_verifier/lib/verifier_dal/src/via_btc_sender_dal.rs b/via_verifier/lib/verifier_dal/src/via_btc_sender_dal.rs new file mode 100644 index 000000000..dd2535ae3 --- /dev/null +++ b/via_verifier/lib/verifier_dal/src/via_btc_sender_dal.rs @@ -0,0 +1,215 @@ +use anyhow::Context; +use zksync_db_connection::connection::Connection; +use zksync_types::btc_sender::{ViaBtcInscriptionRequest, ViaBtcInscriptionRequestHistory}; + +use crate::{ + models::storage_btc_inscription_request::{ + ViaStorageBtcInscriptionRequest, ViaStorageBtcInscriptionRequestHistory, + }, + Verifier, +}; + +#[derive(Debug)] +pub struct ViaBtcSenderDal<'a, 'c> { + pub(crate) storage: &'a mut Connection<'c, Verifier>, +} + +impl ViaBtcSenderDal<'_, '_> { + pub async fn via_save_btc_inscriptions_request( + &mut self, + inscription_request_type: String, + inscription_message: Vec<u8>, + predicted_fee: u64, + ) -> sqlx::Result<ViaBtcInscriptionRequest> { + let inscription_request = sqlx::query_as!( + ViaBtcInscriptionRequest, + r#" + INSERT INTO + via_btc_inscriptions_request (request_type, inscription_message, predicted_fee, created_at, updated_at) + VALUES + ($1, $2, $3, NOW(), NOW()) + RETURNING + * + "#, + inscription_request_type, + inscription_message, + predicted_fee as i64, + ) + .fetch_one(self.storage.conn()) + .await?; + Ok(inscription_request) + } + + pub async fn get_inflight_inscriptions( + &mut self, + ) -> 
sqlx::Result> { + let txs = sqlx::query_as!( + ViaStorageBtcInscriptionRequest, + r#" + SELECT + via_btc_inscriptions_request.* + FROM + via_btc_inscriptions_request + JOIN via_btc_inscriptions_request_history ON via_btc_inscriptions_request.id = via_btc_inscriptions_request_history.inscription_request_id + AND via_btc_inscriptions_request_history.sent_at_block IS NOT NULL + AND via_btc_inscriptions_request.confirmed_inscriptions_request_history_id IS NULL + AND via_btc_inscriptions_request_history.id = ( + SELECT + id + FROM + via_btc_inscriptions_request_history + WHERE + inscription_request_id = via_btc_inscriptions_request.id + AND via_btc_inscriptions_request_history.sent_at_block IS NOT NULL + ORDER BY + created_at DESC + LIMIT + 1 + ) + ORDER BY + id + "# + ) + .fetch_all(self.storage.conn()) + .await?; + Ok(txs.into_iter().map(|tx| tx.into()).collect()) + } + + pub async fn list_new_inscription_request( + &mut self, + limit: i64, + ) -> sqlx::Result> { + let txs = sqlx::query_as!( + ViaStorageBtcInscriptionRequest, + r#" + SELECT + via_btc_inscriptions_request.* + FROM + via_btc_inscriptions_request + LEFT JOIN via_btc_inscriptions_request_history ON via_btc_inscriptions_request.id = via_btc_inscriptions_request_history.inscription_request_id + WHERE + via_btc_inscriptions_request_history.inscription_request_id IS NULL + ORDER BY + via_btc_inscriptions_request.id + LIMIT + $1 + "#, + limit, + ) + .fetch_all(self.storage.conn()) + .await?; + Ok(txs.into_iter().map(|tx| tx.into()).collect()) + } + + #[allow(clippy::too_many_arguments)] + pub async fn insert_inscription_request_history( + &mut self, + commit_tx_id: String, + reveal_tx_id: String, + inscription_request_id: i64, + signed_commit_tx: Vec, + signed_reveal_tx: Vec, + actual_fees: i64, + sent_at_block: i64, + ) -> sqlx::Result> { + Ok(sqlx::query!( + r#" + INSERT INTO + via_btc_inscriptions_request_history ( + commit_tx_id, + reveal_tx_id, + inscription_request_id, + signed_commit_tx, + 
signed_reveal_tx, + actual_fees, + sent_at_block, + created_at, + updated_at + ) + VALUES + ($1, $2, $3, $4, $5, $6, $7, NOW(), NOW()) + RETURNING + id + "#, + commit_tx_id, + reveal_tx_id, + inscription_request_id, + signed_commit_tx, + signed_reveal_tx, + actual_fees, + sent_at_block as i32 + ) + .fetch_optional(self.storage.conn()) + .await? + .map(|row| row.id as u32)) + } + + pub async fn get_last_inscription_request_history( + &mut self, + inscription_request_id: i64, + ) -> sqlx::Result> { + let inscription_request_history = sqlx::query_as!( + ViaStorageBtcInscriptionRequestHistory, + r#" + SELECT + * + FROM + via_btc_inscriptions_request_history + WHERE + inscription_request_id = $1 + ORDER BY + id DESC + LIMIT + 1 + "#, + inscription_request_id + ) + .fetch_optional(self.storage.conn()) + .await?; + + Ok(inscription_request_history.map(ViaBtcInscriptionRequestHistory::from)) + } + + pub async fn confirm_inscription( + &mut self, + inscriptions_request_id: i64, + inscriptions_request_history_id: i64, + ) -> anyhow::Result<()> { + let mut transaction = self + .storage + .start_transaction() + .await + .context("start_transaction")?; + + sqlx::query!( + r#" + UPDATE via_btc_inscriptions_request_history + SET + updated_at = NOW(), + confirmed_at = NOW() + WHERE + id = $1 + "#, + inscriptions_request_history_id + ) + .execute(transaction.conn()) + .await?; + + sqlx::query!( + r#" + UPDATE via_btc_inscriptions_request + SET + updated_at = NOW(), + confirmed_inscriptions_request_history_id = $2 + WHERE + id = $1 + "#, + inscriptions_request_id, + inscriptions_request_history_id + ) + .execute(transaction.conn()) + .await?; + + transaction.commit().await.context("commit()") + } +} diff --git a/via_verifier/lib/verifier_dal/src/via_transactions_dal.rs b/via_verifier/lib/verifier_dal/src/via_transactions_dal.rs new file mode 100644 index 000000000..016fd1ace --- /dev/null +++ b/via_verifier/lib/verifier_dal/src/via_transactions_dal.rs @@ -0,0 +1,124 @@ +use 
zksync_db_connection::{connection::Connection, error::DalResult, instrument::InstrumentExt}; +use zksync_types::H256; + +use crate::Verifier; + +#[derive(Debug)] +pub struct ViaTransactionsDal<'a, 'c> { + pub(crate) storage: &'a mut Connection<'c, Verifier>, +} + +impl ViaTransactionsDal<'_, '_> { + pub async fn insert_transaction( + &mut self, + priority_id: i64, + tx_id: H256, + receiver: String, + value: i64, + calldata: Vec, + canonical_tx_hash: H256, + ) -> DalResult<()> { + sqlx::query!( + r#" + INSERT INTO + via_transactions (priority_id, tx_id, receiver, value, calldata, canonical_tx_hash) + VALUES + ($1, $2, $3, $4, $5, $6) + ON CONFLICT (tx_id) DO NOTHING + "#, + priority_id, + tx_id.as_bytes(), + receiver, + value, + calldata, + canonical_tx_hash.as_bytes(), + ) + .instrument("insert_transaction") + .fetch_optional(self.storage) + .await?; + + Ok(()) + } + + pub async fn get_last_priority_id(&mut self) -> DalResult { + let priority_id = sqlx::query_scalar!( + r#" + SELECT COUNT(priority_id) as priority_id FROM via_transactions; + "# + ) + .instrument("get_last_priority_id") + .fetch_one(self.storage) + .await?; + + Ok(priority_id.unwrap_or(0)) + } + + pub async fn list_transactions_not_processed(&mut self, limit: i64) -> DalResult>> { + let rows = sqlx::query!( + r#" + SELECT + canonical_tx_hash + FROM + via_transactions + WHERE + status IS NULL + ORDER BY + priority_id ASC + LIMIT + $1 + "#, + limit + ) + .instrument("list_transactions") + .fetch_all(self.storage) + .await?; + + let canonical_tx_hashs: Vec> = + rows.into_iter().map(|row| row.canonical_tx_hash).collect(); + Ok(canonical_tx_hashs) + } + + pub async fn update_transaction( + &mut self, + canonical_tx_hash: &H256, + status: bool, + ) -> DalResult<()> { + sqlx::query!( + r#" + UPDATE via_transactions + SET + status = $2 + WHERE + canonical_tx_hash = $1 + "#, + canonical_tx_hash.as_bytes(), + status + ) + .instrument("update_transaction") + .fetch_optional(self.storage) + .await?; + + Ok(()) + 
} + + pub async fn transaction_exists_with_txid(&mut self, tx_id: &H256) -> DalResult { + let exists = sqlx::query!( + r#" + SELECT + 1 AS cnt + FROM + via_transactions + WHERE + tx_id = $1 + LIMIT + 1 + "#, + tx_id.as_bytes(), + ) + .instrument("transaction_exists_with_txid") + .fetch_optional(self.storage) + .await?; + + Ok(exists.is_some()) + } +} diff --git a/via_verifier/lib/verifier_dal/src/via_votes_dal.rs b/via_verifier/lib/verifier_dal/src/via_votes_dal.rs new file mode 100644 index 000000000..489ba362e --- /dev/null +++ b/via_verifier/lib/verifier_dal/src/via_votes_dal.rs @@ -0,0 +1,402 @@ +use zksync_db_connection::{connection::Connection, error::DalResult, instrument::InstrumentExt}; +use zksync_types::H256; + +use crate::Verifier; + +pub struct ViaVotesDal<'c, 'a> { + pub(crate) storage: &'c mut Connection<'a, Verifier>, +} + +impl ViaVotesDal<'_, '_> { + /// Inserts a new row in `via_votable_transactions`. + /// Notice we haven’t changed this since the PK is still (l1_batch_number, tx_id). + pub async fn insert_votable_transaction( + &mut self, + l1_batch_number: u32, + tx_id: H256, + da_identifier: String, + blob_id: String, + pubdata_reveal_tx_id: String, + pubdata_blob_id: String, + ) -> DalResult<()> { + sqlx::query!( + r#" + INSERT INTO + via_votable_transactions ( + l1_batch_number, + tx_id, + da_identifier, + blob_id, + pubdata_reveal_tx_id, + pubdata_blob_id + ) + VALUES + ($1, $2, $3, $4, $5, $6) + ON CONFLICT (l1_batch_number, tx_id) DO NOTHING + "#, + i64::from(l1_batch_number), + tx_id.as_bytes(), + da_identifier, + blob_id, + pubdata_reveal_tx_id, + pubdata_blob_id + ) + .instrument("insert_votable_transaction") + .fetch_optional(self.storage) + .await?; + + Ok(()) + } + + /// Inserts a new vote row in `via_votes`. + /// Now requires `l1_batch_number` as part of the primary key / FK. 
+ pub async fn insert_vote( + &mut self, + l1_batch_number: u32, + tx_id: H256, + verifier_address: &str, + vote: bool, + ) -> DalResult<()> { + sqlx::query!( + r#" + INSERT INTO + via_votes (l1_batch_number, tx_id, verifier_address, vote) + VALUES + ($1, $2, $3, $4) + ON CONFLICT (l1_batch_number, tx_id, verifier_address) DO NOTHING + "#, + l1_batch_number as i32, + tx_id.as_bytes(), + verifier_address, + vote + ) + .instrument("insert_vote") + .fetch_optional(self.storage) + .await?; + + Ok(()) + } + + /// Returns (ok_votes, total_votes) for the given `(l1_batch_number, tx_id)`. + /// Must also filter on `l1_batch_number`. + pub async fn get_vote_count( + &mut self, + l1_batch_number: u32, + tx_id: H256, + ) -> DalResult<(i64, i64)> { + let row = sqlx::query!( + r#" + SELECT + COUNT(*) FILTER ( + WHERE + vote = TRUE + ) AS ok_votes, + COUNT(*) AS total_votes + FROM + via_votes + WHERE + l1_batch_number = $1 + AND tx_id = $2 + "#, + l1_batch_number as i32, + tx_id.as_bytes() + ) + .instrument("get_vote_count") + .fetch_one(self.storage) + .await?; + + let ok_votes = row.ok_votes.unwrap_or(0); + let total_votes = row.total_votes.unwrap_or(0); + Ok((ok_votes, total_votes)) + } + + /// Marks the transaction as finalized if #ok_votes / #total_votes >= threshold. + /// Must use `(l1_batch_number, tx_id)` in both vote counting and the UPDATE statement. 
+ pub async fn finalize_transaction_if_needed( + &mut self, + l1_batch_number: u32, + tx_id: H256, + threshold: f64, + number_of_verifiers: usize, + ) -> DalResult<bool> { + let row = sqlx::query!( + r#" + SELECT + is_finalized + FROM + via_votable_transactions + WHERE + l1_batch_number = $1 + AND tx_id = $2 + "#, + i64::from(l1_batch_number), + tx_id.as_bytes() + ) + .instrument("check_if_already_finalized") + .fetch_one(self.storage) + .await?; + + if row.is_finalized { + return Ok(false); + } + + let (ok_votes, _total_votes) = self.get_vote_count(l1_batch_number, tx_id).await?; + let is_threshold_reached = (ok_votes as f64) / (number_of_verifiers as f64) >= threshold; + + if is_threshold_reached { + sqlx::query!( + r#" + UPDATE via_votable_transactions + SET + is_finalized = TRUE, + updated_at = NOW() + WHERE + l1_batch_number = $1 + AND tx_id = $2 + "#, + i64::from(l1_batch_number), + tx_id.as_bytes() + ) + .instrument("finalize_transaction_if_needed") + .execute(self.storage) + .await?; + } + + Ok(is_threshold_reached) + } + + pub async fn get_last_inserted_block(&mut self) -> DalResult<Option<u32>> { + let row = sqlx::query!( + r#" + SELECT + MAX(l1_batch_number) AS max_batch_number + FROM + via_votable_transactions + "# + ) + .instrument("get_last_inserted_block") + .fetch_one(self.storage) + .await?; + + Ok(row.max_batch_number.map(|n| n as u32)) + } + + pub async fn verify_votable_transaction( + &mut self, + l1_batch_number: u32, + tx_id: H256, + l1_batch_status: bool, + ) -> DalResult<()> { + sqlx::query!( + r#" + UPDATE via_votable_transactions + SET + is_verified = TRUE, + l1_batch_status = $3, + updated_at = NOW() + WHERE + l1_batch_number = $1 + AND tx_id = $2 + "#, + i64::from(l1_batch_number), + tx_id.as_bytes(), + l1_batch_status + ) + .instrument("verify_transaction") + .execute(self.storage) + .await?; + Ok(()) + } + + pub async fn get_first_non_finalized_block(&mut self) -> DalResult<Option<i64>> { + let l1_block_number = sqlx::query_scalar!( + r#" + SELECT + 
MIN(l1_batch_number) as "l1_batch_number" + FROM via_votable_transactions + WHERE + is_finalized = FALSE + "#, + ) + .instrument("get_last_block_finilized") + .fetch_optional(self.storage) + .await? + .flatten(); + + Ok(l1_block_number) + } + + pub async fn get_verifier_vote_status( + &mut self, + block_number: i64, + ) -> DalResult)>> { + let row = sqlx::query!( + r#" + SELECT + l1_batch_status, + tx_id + FROM + via_votable_transactions + WHERE + l1_batch_number = $1 + AND is_verified = TRUE + LIMIT + 1 + "#, + block_number + ) + .instrument("get_verifier_vote_status") + .fetch_optional(self.storage) + .await?; + + let result = row.map(|r| { + let l1_batch_status = r.l1_batch_status; + let tx_id = r.tx_id; + (l1_batch_status, tx_id) + }); + + Ok(result) + } + + /// Retrieve the first not executed block. (Similar to `get_first_not_finilized_block`, just with `is_verified = FALSE`). + pub async fn get_first_not_verified_block(&mut self) -> DalResult)>> { + let row = sqlx::query!( + r#" + SELECT + l1_batch_number, + tx_id + FROM + via_votable_transactions + WHERE + is_verified = FALSE + ORDER BY + l1_batch_number ASC + LIMIT + 1 + "#, + ) + .instrument("get_first_not_executed_block") + .fetch_optional(self.storage) + .await?; + + let result = row.map(|r| { + let l1_batch_number = r.l1_batch_number; + let tx_id = r.tx_id; + (l1_batch_number, tx_id) + }); + + Ok(result) + } + + pub async fn get_finalized_block_and_non_processed_withdrawal( + &mut self, + l1_batch_number: i64, + ) -> DalResult)>> { + // Query the database to fetch the desired row + let result = sqlx::query!( + r#" + SELECT + pubdata_blob_id, + tx_id + FROM + via_votable_transactions + WHERE + is_finalized = TRUE + AND is_verified = TRUE + AND withdrawal_tx_id IS NULL + AND l1_batch_number = $1 + LIMIT + 1 + "#, + l1_batch_number + ) + .instrument("get_finalized_block_and_non_processed_withdrawal") + .fetch_optional(self.storage) // Use fetch_optional to handle None results + .await?; + + // Map the 
result into the desired output format + let mapped_result = result.map(|row| (row.pubdata_blob_id, row.tx_id)); + + Ok(mapped_result) + } + + pub async fn get_finalized_blocks_and_non_processed_withdrawals( + &mut self, + ) -> DalResult)>> { + let rows = sqlx::query!( + r#" + SELECT + l1_batch_number, + pubdata_blob_id, + tx_id + FROM + via_votable_transactions + WHERE + is_finalized = TRUE + AND is_verified = TRUE + AND withdrawal_tx_id IS NULL + ORDER BY + l1_batch_number ASC + "#, + ) + .instrument("get_finalized_blocks_and_non_processed_withdrawals") + .fetch_all(self.storage) + .await?; + + // Map the rows into a Vec<(l1_batch_number, pubdata_blob_id, tx_id)> + let result: Vec<(i64, String, Vec)> = rows + .into_iter() + .map(|r| (r.l1_batch_number, r.pubdata_blob_id, r.tx_id)) + .collect(); + + Ok(result) + } + + pub async fn mark_vote_transaction_as_processed_withdrawals( + &mut self, + tx_id: H256, + l1_batch_number: i64, + ) -> DalResult<()> { + sqlx::query!( + r#" + UPDATE via_votable_transactions + SET + withdrawal_tx_id = $1 + WHERE + is_finalized = TRUE + AND is_verified = TRUE + AND withdrawal_tx_id IS NULL + AND l1_batch_number = $2 + "#, + tx_id.as_bytes(), + l1_batch_number + ) + .instrument("mark_vote_transaction_as_processed_withdrawals") + .execute(self.storage) + .await?; + + Ok(()) + } + + pub async fn get_vote_transaction_withdrawal_tx( + &mut self, + l1_batch_number: i64, + ) -> DalResult>> { + let withdrawal_tx_id = sqlx::query_scalar!( + r#" + SELECT + withdrawal_tx_id + FROM via_votable_transactions + WHERE + l1_batch_number = $1 + "#, + l1_batch_number + ) + .instrument("get_vote_transaction_withdrawal_tx") + .fetch_optional(self.storage) + .await? 
+ .flatten(); + + Ok(withdrawal_tx_id) + } +} diff --git a/via_verifier/lib/via_da_client/Cargo.toml b/via_verifier/lib/via_da_client/Cargo.toml new file mode 100644 index 000000000..002b9dce6 --- /dev/null +++ b/via_verifier/lib/via_da_client/Cargo.toml @@ -0,0 +1,22 @@ +[package] +name = "via_da_client" +version.workspace = true +edition.workspace = true +authors.workspace = true +homepage.workspace = true +repository.workspace = true +license.workspace = true +keywords.workspace = true +categories.workspace = true + +[dependencies] +anyhow.workspace = true +zksync_types.workspace = true +zksync_utils.workspace = true + +byteorder = "1.4" + +[dev-dependencies] +hex.workspace = true +rand = "0.8" + diff --git a/via_verifier/lib/via_da_client/src/lib.rs b/via_verifier/lib/via_da_client/src/lib.rs new file mode 100644 index 000000000..a3715fa02 --- /dev/null +++ b/via_verifier/lib/via_da_client/src/lib.rs @@ -0,0 +1,2 @@ +pub mod pubdata; +pub mod types; diff --git a/via_verifier/lib/via_da_client/src/pubdata.rs b/via_verifier/lib/via_da_client/src/pubdata.rs new file mode 100644 index 000000000..d3f01ad75 --- /dev/null +++ b/via_verifier/lib/via_da_client/src/pubdata.rs @@ -0,0 +1,207 @@ +use std::io::{Cursor, Read}; + +use anyhow::Context; +use byteorder::{BigEndian, ReadBytesExt}; + +use crate::types::L1MessengerL2ToL1Log; + +#[derive(Debug, Clone, Default)] +pub struct Pubdata { + pub user_logs: Vec, + pub l2_to_l1_messages: Vec>, +} + +impl Pubdata { + pub fn encode_pubdata(self) -> Vec { + let mut l1_messenger_pubdata = vec![]; + + // Encoding user L2->L1 logs. + // Format: `[(numberOfL2ToL1Logs as u32) || l2tol1logs[1] || ... || l2tol1logs[n]]` + l1_messenger_pubdata.extend((self.user_logs.len() as u32).to_be_bytes()); + for l2tol1log in self.user_logs { + l1_messenger_pubdata.extend(l2tol1log.encode_packed()); + } + + // Encoding L2->L1 messages + // Format: `[(numberOfMessages as u32) || (messages[1].len() as u32) || messages[1] || ... 
|| (messages[n].len() as u32) || messages[n]]` + l1_messenger_pubdata.extend((self.l2_to_l1_messages.len() as u32).to_be_bytes()); + for message in self.l2_to_l1_messages { + l1_messenger_pubdata.extend((message.len() as u32).to_be_bytes()); + l1_messenger_pubdata.extend(message); + } + + l1_messenger_pubdata + } + + pub fn decode_pubdata(pubdata: Vec<u8>) -> anyhow::Result<Self> { + let mut cursor = Cursor::new(pubdata); + let mut user_logs = Vec::new(); + let mut l2_to_l1_messages = Vec::new(); + + // Decode user L2->L1 logs + let num_user_logs = cursor + .read_u32::<BigEndian>() + .context("Failed to decode num user logs")? as usize; + for _ in 0..num_user_logs { + let log = L1MessengerL2ToL1Log::decode_packed(&mut cursor)?; + user_logs.push(log); + } + + // Decode L2->L1 messages + let num_messages = cursor.read_u32::<BigEndian>()? as usize; + for _ in 0..num_messages { + let message_len = cursor.read_u32::<BigEndian>()? as usize; + let mut message = vec![0u8; message_len]; + cursor + .read_exact(&mut message) + .context("Read l2 to l1 message")?; + l2_to_l1_messages.push(message); + } + + Ok(Pubdata { + user_logs, + l2_to_l1_messages, + }) + } +} + +/// Helper function to read a specific number of bytes +#[allow(unused)] +fn read_bytes<R: Read>(reader: &mut R, num_bytes: usize) -> anyhow::Result<Vec<u8>> { + let mut buffer = vec![0u8; num_bytes]; + reader.read_exact(&mut buffer)?; + Ok(buffer) +} + +#[cfg(test)] +mod tests { + use std::str::FromStr; + + use hex::encode; + use zksync_types::{web3::keccak256, Address, H256}; + + use super::*; + use crate::types::L2_BASE_TOKEN_SYSTEM_CONTRACT_ADDR; + + fn generate_random_hex(len: usize) -> String { + // Generate random bytes + let random_bytes: Vec<u8> = (0..len).map(|_| rand::random::<u8>()).collect(); + + // Convert bytes to hex and return it + encode(random_bytes) + } + + #[test] + fn test_decode_l1_messager_l2_to_l1_log() { + let message = L1MessengerL2ToL1Log { + l2_shard_id: 0, + is_service: true, + tx_number_in_block: 5, + sender: 
Address::from_str(L2_BASE_TOKEN_SYSTEM_CONTRACT_ADDR).unwrap(), + key: H256::random(), + value: H256::random(), + }; + let encoded_messages = message.encode_packed(); + + let mut cursor = Cursor::new(encoded_messages); + let decoded = L1MessengerL2ToL1Log::decode_packed(&mut cursor).unwrap(); + assert_eq!(message.l2_shard_id, decoded.l2_shard_id); + assert_eq!(message.is_service, decoded.is_service); + assert_eq!(message.tx_number_in_block, decoded.tx_number_in_block); + assert_eq!(message.sender, decoded.sender); + assert_eq!(message.key, decoded.key); + assert_eq!(message.value, decoded.value); + } + + #[test] + fn test_decode_pubdata_with_single_l1_messager_l2_to_l1_log() { + let message = L1MessengerL2ToL1Log { + l2_shard_id: 0, + is_service: true, + tx_number_in_block: 5, + sender: Address::from_str(L2_BASE_TOKEN_SYSTEM_CONTRACT_ADDR).unwrap(), + key: H256::random(), + value: H256::random(), + }; + + let pubdata = Pubdata { + user_logs: vec![message.clone()], + l2_to_l1_messages: vec![hex::decode("deadbeef").unwrap()], + }; + + let encoded_pubdata = pubdata.encode_pubdata(); + let pubdata_input = Pubdata::decode_pubdata(encoded_pubdata).unwrap(); + + let decoded_message = pubdata_input.user_logs[0].clone(); + assert_eq!(pubdata_input.user_logs.len(), 1); + assert_eq!(decoded_message.l2_shard_id, message.clone().l2_shard_id); + assert_eq!(decoded_message.is_service, message.clone().is_service); + assert_eq!( + decoded_message.tx_number_in_block, + message.clone().tx_number_in_block + ); + assert_eq!(decoded_message.sender, message.clone().sender); + assert_eq!(decoded_message.key, message.clone().key); + assert_eq!(decoded_message.value, message.clone().value); + } + + #[test] + fn test_decode_pubdata_with_many_l1_messager_l2_to_l1_log() { + let len: usize = 5; + let mut user_logs: Vec = Vec::new(); + let mut l2_to_l1_messages: Vec> = Vec::new(); + for _ in 0..len { + let log = L1MessengerL2ToL1Log { + l2_shard_id: 0, + is_service: true, + tx_number_in_block: 
5, + sender: Address::from_str(L2_BASE_TOKEN_SYSTEM_CONTRACT_ADDR).unwrap(), + key: H256::from_str(&generate_random_hex(32)).unwrap(), + value: H256::from_str(&generate_random_hex(32)).unwrap(), + }; + user_logs.push(log.clone()); + l2_to_l1_messages.push(hex::decode("deadbeef").unwrap()); + } + + let pubdata = Pubdata { + user_logs: user_logs.clone(), + l2_to_l1_messages, + }; + + let encoded_pubdata = pubdata.encode_pubdata(); + let pubdata_input = Pubdata::decode_pubdata(encoded_pubdata).unwrap(); + + let decoded_logs = pubdata_input.user_logs.clone(); + let decoded_messages = pubdata_input.l2_to_l1_messages.clone(); + assert_eq!(pubdata_input.user_logs.len(), len); + assert_eq!(pubdata_input.l2_to_l1_messages.len(), len); + for i in 0..len { + let decoded_log = decoded_logs[i].clone(); + let msg_log = user_logs[i].clone(); + + assert_eq!(decoded_log.l2_shard_id, msg_log.clone().l2_shard_id); + assert_eq!(decoded_log.is_service, msg_log.clone().is_service); + assert_eq!( + decoded_log.tx_number_in_block, + msg_log.clone().tx_number_in_block + ); + assert_eq!(decoded_log.sender, msg_log.clone().sender); + assert_eq!(decoded_log.key, msg_log.clone().key); + assert_eq!(decoded_log.value, msg_log.clone().value); + + // l2 to l1 message + let decoded_message = decoded_messages[i].clone(); + assert_eq!(decoded_message, hex::decode("deadbeef").unwrap()); + } + } + + #[test] + fn test_decode_pubdata_with_single_real_l1_messager_l2_to_l1_log() { + let input = 
"00000001000100000000000000000000000000000000000000008008000000000000000000000000000000000000000000000000000000000000800aa1fd131a17718668a78581197d19972abd907b7b343b9694e02246d18c3801c500000001000000506c0960f962637274317178326c6b30756e756b6d3830716d65706a703439687766397a36786e7a307337336b396a35360000000000000000000000000000000000000000000000000000000005f5e10000000000010001280400032c1818e4770f08c05b28829d7d5f9d401d492c7432c166dfecf4af04238ea323009d7042e8fb0f249338d18505e5ba1d4a546e9d21f47c847ca725ff53ac29f740ca1bbc31cc849a8092a36f9a321e17412dee200b956038af1c2dc83430a0e8b000d3e2c6760d91078e517a2cb882cd3c9551de3ab5f30d554d51b17e3744cf92b0cf368ce957aed709b985423cd3ba11615de01ecafa15eb9a11bc6cdef4f6327900436ef22b96a07224eb06f0eecfecc184033da7db2a5fb58f867f17298b896b55000000420901000000362205f5e1000000003721032b8b14000000382209216c140000003a8901000000000000000000000000000000170000003b8902000000000000000000000000000000170000003e890200000000000000000000000000000017"; + let encoded_pubdata = hex::decode(input).unwrap(); + let pubdata_input = Pubdata::decode_pubdata(encoded_pubdata).unwrap(); + + let hash = keccak256(&pubdata_input.l2_to_l1_messages[0].clone()); + assert_eq!(H256::from(hash), pubdata_input.user_logs[0].value); + } +} diff --git a/via_verifier/lib/via_da_client/src/types.rs b/via_verifier/lib/via_da_client/src/types.rs new file mode 100644 index 000000000..310d6cfec --- /dev/null +++ b/via_verifier/lib/via_da_client/src/types.rs @@ -0,0 +1,109 @@ +use std::io::Read; + +use anyhow::Context; +use byteorder::{BigEndian, ReadBytesExt}; +use zksync_types::{Address, H160, H256, U256}; +use zksync_utils::{u256_to_bytes_be, u256_to_h256}; + +/// The function selector used in L2 to compute the message. +pub const WITHDRAW_FUNC_SIG: &str = "finalizeEthWithdrawal(uint256,uint256,uint16,bytes,bytes32[])"; + +/// The L2 BaseToken address. +pub const L2_BASE_TOKEN_SYSTEM_CONTRACT_ADDR: &str = "000000000000000000000000000000000000800a"; + +/// The L2 Bootloader address. 
+pub const L2_BOOTLOADER_CONTRACT_ADDR: &str = "0x0000000000000000000000000000000000008001"; + +#[derive(Clone, Debug, Default)] +#[allow(unused)] +pub struct L2BridgeLogMetadata { + pub log: L1MessengerL2ToL1Log, + pub message: Vec, +} + +/// Corresponds to the following solidity event: +/// ```solidity +/// struct L2ToL1Log { +/// uint8 l2ShardId; +/// bool isService; +/// uint16 txNumberInBlock; +/// address sender; +/// bytes32 key; +/// bytes32 value; +/// } +/// ``` +#[derive(Clone, Debug, Default)] +pub struct L1MessengerL2ToL1Log { + /// l2ShardId The shard identifier, 0 - rollup, 1 - porter + /// All other values are not used but are reserved for the future + pub l2_shard_id: u8, + /// isService A boolean flag that is part of the log along with `key`, `value`, and `sender` address. + /// This field is required formally but does not have any special meaning + pub is_service: bool, + /// txNumberInBatch The L2 transaction number in a Batch, in which the log was sent + pub tx_number_in_block: u16, + /// sender The L2 address which sent the log + pub sender: H160, + /// key The 32 bytes of information that was sent in the log + pub key: H256, + /// value The 32 bytes of information that was sent in the log + pub value: H256, +} + +impl L1MessengerL2ToL1Log { + pub fn encode_packed(&self) -> Vec { + let mut res: Vec = vec![]; + res.push(self.l2_shard_id); + res.push(self.is_service as u8); + res.extend_from_slice(&self.tx_number_in_block.to_be_bytes()); + res.extend_from_slice(self.sender.as_bytes()); + res.extend(u256_to_bytes_be(&U256::from_big_endian(&self.key.0))); + res.extend(u256_to_bytes_be(&U256::from_big_endian(&self.value.0))); + res + } + + pub fn decode_packed(reader: &mut R) -> anyhow::Result { + // Read `l2_shard_id` (1 byte) + let l2_shard_id = reader.read_u8().context("Failed to read l2_shard_id")?; + + // Read `is_service` (1 byte, a boolean stored as 0 or 1) + let is_service_byte = reader.read_u8().context("Failed to read is_service byte")?; 
+ let is_service = is_service_byte != 0; // 0 -> false, non-zero -> true + + // Read `tx_number_in_block` (2 bytes, u16) + let tx_number_in_block = reader + .read_u16::() + .context("Failed to read tx_number_in_block")?; + + // Read `sender` (address is 20 bytes) + let mut sender_bytes = [0u8; 20]; + reader + .read_exact(&mut sender_bytes) + .context("Failed to read sender address")?; + let sender = Address::from(sender_bytes); + + // Read `key` (U256 is 32 bytes) + let key_bytes = _read_bytes(reader, 32).context("Failed to read key bytes")?; + let key = u256_to_h256(U256::from_big_endian(&key_bytes)); + + // Read `value` (U256 is 32 bytes) + let value_bytes = _read_bytes(reader, 32).context("Failed to read value bytes")?; + let value = u256_to_h256(U256::from_big_endian(&value_bytes)); + + Ok(L1MessengerL2ToL1Log { + l2_shard_id, + is_service, + tx_number_in_block, + sender, + key, + value, + }) + } +} + +/// Helper function to read a specific number of bytes +fn _read_bytes(reader: &mut R, num_bytes: usize) -> anyhow::Result> { + let mut buffer = vec![0u8; num_bytes]; + reader.read_exact(&mut buffer)?; + Ok(buffer) +} diff --git a/via_verifier/lib/via_musig2/Cargo.toml b/via_verifier/lib/via_musig2/Cargo.toml new file mode 100644 index 000000000..130780d80 --- /dev/null +++ b/via_verifier/lib/via_musig2/Cargo.toml @@ -0,0 +1,50 @@ +[package] +name = "via_musig2" +description = "Via Network Musig2 Wrapper" +version.workspace = true +edition.workspace = true +authors = ["Via Network"] +homepage.workspace = true +repository.workspace = true +license.workspace = true +keywords.workspace = true +categories.workspace = true + +[dependencies] +rand.workspace = true +hex.workspace = true +via_btc_client.workspace = true +anyhow.workspace = true +tracing.workspace = true +tracing-subscriber.workspace = true +serde_json.workspace = true +serde.workspace = true +reqwest.workspace = true +bitcoincore-rpc = "0.19.0" +bitcoin = { version = "0.32.2", features = ["serde"] } 
+musig2 = "0.2.0" +secp256k1_musig2 = { package = "secp256k1", version = "0.30.0", features = [ + "rand", + "hashes", +] } +tokio = { version = "1.0", features = ["full"] } +axum = "0.6" +uuid = { version = "1.3", features = ["v4"] } +hyper = { version = "0.14", features = ["full"] } +base64 = "0.21" + + + +[[example]] +name = "key_generation_setup" +path = "examples/key_generation_setup.rs" + + +[[example]] +name = "withdrawal" +path = "examples/withdrawal.rs" + + +[[example]] +name = "coordinator" +path = "examples/coordinator.rs" diff --git a/via_verifier/lib/via_musig2/examples/coordinator.rs b/via_verifier/lib/via_musig2/examples/coordinator.rs new file mode 100644 index 000000000..90e466fec --- /dev/null +++ b/via_verifier/lib/via_musig2/examples/coordinator.rs @@ -0,0 +1,529 @@ +use std::{clone::Clone, collections::HashMap, str::FromStr, sync::Arc, time::Duration}; + +use axum::{ + extract::{Path, State}, + http::StatusCode, + routing::{get, post}, + Json, Router, +}; +use base64::Engine; +use bitcoin::{hashes::Hash, Address, Amount, Network, Txid}; +use hyper::Server; +use musig2::{BinaryEncoding, CompactSignature, PartialSignature, PubNonce}; +use rand::thread_rng; +use secp256k1_musig2::{PublicKey, Secp256k1, SecretKey}; +use serde::{Deserialize, Serialize}; +use tokio::sync::RwLock; +use tracing::{info, instrument}; +use uuid::Uuid; +use via_btc_client::{ + types::BitcoinNetwork, + withdrawal_builder::{UnsignedWithdrawalTx, WithdrawalBuilder, WithdrawalRequest}, +}; +use via_musig2::{verify_signature, Signer}; + +#[derive(Clone)] +#[allow(dead_code)] +struct AppState { + signer: Arc>, + signing_sessions: Arc>>, + unsigned_txs: Arc>>, + bridge_address: Address, + all_pubkeys: Vec, + num_signers: usize, +} + +#[derive(Debug, Clone)] +#[allow(dead_code)] +struct SigningSession { + session_id: String, + tx_id: String, + received_nonces: HashMap, + received_sigs: HashMap, + final_signature: Option, + message: Vec, +} + +/// Data posted by other signers to 
submit their nonce
+#[derive(Serialize, Deserialize, Debug)]
+struct NoncePair {
+    signer_index: usize,
+    nonce: String, // Base64 encoded
+}
+
+/// Data posted by other signers to submit their partial signature
+#[derive(Serialize, Deserialize, Debug)]
+struct PartialSignaturePair {
+    signer_index: usize,
+    signature: String, // Base64 encoded
+}
+
+#[derive(Serialize, Deserialize, Clone)]
+struct SigningSessionResponse {
+    session_id: String,
+    message_to_sign: String,   // hex-encoded message (txid)
+    aggregated_pubkey: String, // hex-encoded aggregated pubkey
+    required_signers: usize,
+    received_nonces: usize,
+    received_partial_signatures: usize,
+    final_signature: Option<String>, // hex-encoded final signature if present
+}
+
+#[tokio::main]
+async fn main() -> anyhow::Result<()> {
+    tracing_subscriber::fmt::init();
+
+    // Setup coordinator keys and signers
+    let mut rng = thread_rng();
+    let secret_key = SecretKey::new(&mut rng);
+
+    let secp = Secp256k1::new();
+    let public_key = PublicKey::from_secret_key(&secp, &secret_key);
+    let other_pubkey_1 = PublicKey::from_secret_key(&secp, &SecretKey::new(&mut rng));
+    let other_pubkey_2 = PublicKey::from_secret_key(&secp, &SecretKey::new(&mut rng));
+
+    let all_pubkeys = vec![public_key, other_pubkey_1, other_pubkey_2];
+    let coordinator_signer = Signer::new(secret_key, 0, all_pubkeys.clone())?;
+
+    // Create test bridge address
+    let bridge_address =
+        Address::from_str("bcrt1pxqkh0g270lucjafgngmwv7vtgc8mk9j5y4j8fnrxm77yunuh398qfv8tqp")?
+ .require_network(Network::Regtest)?; + + let state = AppState { + signer: Arc::new(RwLock::new(coordinator_signer)), + signing_sessions: Arc::new(RwLock::new(HashMap::new())), + unsigned_txs: Arc::new(RwLock::new(HashMap::new())), + bridge_address, + all_pubkeys: all_pubkeys.clone(), + num_signers: 3, + }; + + // Start coordinator server in one task + let server_state = state.clone(); + let server_task = tokio::spawn(async move { + run_coordinator_server(server_state).await.unwrap(); + }); + + // Wait a bit for the server to start + tokio::time::sleep(Duration::from_secs(1)).await; + + // Create one signing session for demonstration + let session_id = create_signing_session(&state).await?; + + // Now simulate other verifiers (signer_index = 1 and 2) + // Each verifier has their own keys: + let mut rng = thread_rng(); + let verifier1_sk = SecretKey::new(&mut rng); + let verifier1_signer = Signer::new(verifier1_sk, 1, all_pubkeys.clone())?; + + let mut rng = thread_rng(); + let verifier2_sk = SecretKey::new(&mut rng); + let verifier2_signer = Signer::new(verifier2_sk, 2, all_pubkeys)?; + + // Spawn tasks for verifier polling + let verifier1_task = tokio::spawn(run_verifier_polling( + "http://0.0.0.0:3000".to_string(), + session_id.session_id.clone(), + verifier1_signer, + )); + + let verifier2_task = tokio::spawn(run_verifier_polling( + "http://0.0.0.0:3000".to_string(), + session_id.session_id.clone(), + verifier2_signer, + )); + + // Run all concurrently + let _ = tokio::join!(server_task, verifier1_task, verifier2_task); + + Ok(()) +} + +async fn run_coordinator_server(state: AppState) -> anyhow::Result<()> { + let app = Router::new() + .route("/session/new", post(create_session_handler)) + .route("/session/:id", get(get_session)) + .route("/session/:id/nonce", post(submit_nonce)) + .route("/session/:id/partial", post(submit_partial_signature)) + .route("/session/:id/signature", get(get_final_signature)) + .route("/session/:id/nonces", get(get_nonces)) + 
.with_state(state); + + info!("Starting coordinator server on 0.0.0.0:3000"); + Server::bind(&"0.0.0.0:3000".parse()?) + .serve(app.into_make_service()) + .await?; + + Ok(()) +} + +async fn run_verifier_polling( + base_url: String, + session_id: String, + mut signer: Signer, +) -> anyhow::Result<()> { + use reqwest::Client; + + let client = Client::new(); + + loop { + // Fetch session info + let url = format!("{}/session/{}", base_url, session_id); + let resp = client.get(&url).send().await?; + if resp.status().as_u16() == StatusCode::NOT_FOUND.as_u16() { + // Session might not exist yet, wait and retry + tokio::time::sleep(Duration::from_secs(2)).await; + continue; + } + if !resp.status().is_success() { + println!( + "Verifier polling: Error fetching session info: {:?}", + resp.text().await? + ); + tokio::time::sleep(Duration::from_secs(2)).await; + continue; + } + + let session_info: SigningSessionResponse = resp.json().await?; + if session_info.final_signature.is_some() { + println!( + "Verifier {}: Final signature obtained! 
{:?}", + signer.signer_index(), + session_info.final_signature + ); + break; + } + + // We need to see if we have submitted our nonce and partial signature + // If we have not submitted nonce and partial sig yet, we do so if needed: + if session_info.received_nonces < session_info.required_signers { + // We need to submit nonce if not already submitted + // Start signing session if not started: + let message = hex::decode(&session_info.message_to_sign)?; + if signer.has_not_started() { + signer.start_signing_session(message)?; + } + + if !signer.has_submitted_nonce() { + // Submit our nonce + let nonce = signer + .our_nonce() + .ok_or_else(|| anyhow::anyhow!("No nonce available"))?; + let nonce_b64 = base64::engine::general_purpose::STANDARD.encode(nonce.to_bytes()); + let nonce_pair = NoncePair { + signer_index: signer.signer_index(), + nonce: nonce_b64, + }; + let nonce_url = format!("{}/session/{}/nonce", base_url, session_id); + let resp = client.post(&nonce_url).json(&nonce_pair).send().await?; + if !resp.status().is_success() { + println!( + "Verifier {}: Error submitting nonce: {:?}", + signer.signer_index(), + resp.text().await? 
+ ); + } else { + signer.mark_nonce_submitted(); + } + } + } else if session_info.received_partial_signatures < session_info.required_signers { + // All nonces are in, we can finalize first round and create partial signature if not done + if !signer.has_created_partial_sig() { + // We need to fetch all nonces from the coordinator + let nonces_url = format!("{}/session/{}/nonces", base_url, session_id); + let resp = client.get(&nonces_url).send().await?; + let nonces: HashMap = resp.json().await?; + + // Process each nonce + for (idx, nonce_b64) in nonces { + if idx != signer.signer_index() { + let nonce_bytes = + base64::engine::general_purpose::STANDARD.decode(nonce_b64)?; + let nonce = PubNonce::from_bytes(&nonce_bytes)?; + signer + .receive_nonce(idx, nonce.clone()) + .map_err(|e| anyhow::anyhow!("Failed to receive nonce: {}", e))?; + } + } + + let partial_sig = signer.create_partial_signature()?; + let sig_b64 = + base64::engine::general_purpose::STANDARD.encode(partial_sig.serialize()); + let sig_pair = PartialSignaturePair { + signer_index: signer.signer_index(), + signature: sig_b64, + }; + let partial_url = format!("{}/session/{}/partial", base_url, session_id); + let resp = client.post(&partial_url).json(&sig_pair).send().await?; + if !resp.status().is_success() { + println!( + "Verifier {}: Error submitting partial signature: {:?}", + signer.signer_index(), + resp.text().await? 
+ ); + } else { + signer.mark_partial_sig_submitted(); + } + } + } else { + // waiting for final signature + } + + tokio::time::sleep(Duration::from_secs(2)).await; + } + + Ok(()) +} + +// Handler to create a new signing session for a withdrawal transaction +#[instrument(skip(state))] +async fn create_session_handler( + State(state): State, +) -> Result, StatusCode> { + create_signing_session(&state) + .await + .map(Json) + .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR) +} + +// GET /session/:id +#[instrument(skip(state))] +async fn get_session( + State(state): State, + Path(session_id): Path, +) -> Result, StatusCode> { + let sessions = state.signing_sessions.read().await; + let session = sessions.get(&session_id).ok_or(StatusCode::NOT_FOUND)?; + + let signer = state.signer.read().await; + let resp = SigningSessionResponse { + session_id: session.session_id.clone(), + message_to_sign: hex::encode(&session.message), + aggregated_pubkey: hex::encode(signer.aggregated_pubkey().serialize()), + required_signers: state.num_signers, + received_nonces: session.received_nonces.len(), + received_partial_signatures: session.received_sigs.len(), + final_signature: session + .final_signature + .as_ref() + .map(|sig| hex::encode(sig.serialize())), + }; + Ok(Json(resp)) +} + +// POST /session/:id/nonce +#[instrument(skip(state))] +async fn submit_nonce( + State(state): State, + Path(session_id): Path, + Json(nonce_pair): Json, +) -> Result { + let decoded_nonce = base64::engine::general_purpose::STANDARD + .decode(&nonce_pair.nonce) + .map_err(|_| StatusCode::BAD_REQUEST)?; + let pub_nonce = PubNonce::from_bytes(&decoded_nonce).map_err(|_| StatusCode::BAD_REQUEST)?; + + { + let mut sessions = state.signing_sessions.write().await; + let session = sessions.get_mut(&session_id).ok_or(StatusCode::NOT_FOUND)?; + + session + .received_nonces + .insert(nonce_pair.signer_index, pub_nonce); + + // If all nonces are collected, coordinator finalizes and create partial sig + if 
session.received_nonces.len() == state.num_signers { + let mut signer = state.signer.write().await; + for (&i, nonce) in &session.received_nonces { + if i != 0 { + signer + .receive_nonce(i, nonce.clone()) + .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?; + } + } + let partial_sig = signer + .create_partial_signature() + .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?; + session.received_sigs.insert(0, partial_sig); + } + } + + Ok(StatusCode::OK) +} + +// POST /session/:id/partial +#[instrument(skip(state))] +async fn submit_partial_signature( + State(state): State, + Path(session_id): Path, + Json(sig_pair): Json, +) -> Result { + let decoded_sig = base64::engine::general_purpose::STANDARD + .decode(&sig_pair.signature) + .map_err(|_| StatusCode::BAD_REQUEST)?; + let partial_sig = + PartialSignature::from_slice(&decoded_sig).map_err(|_| StatusCode::BAD_REQUEST)?; + + { + let mut sessions = state.signing_sessions.write().await; + let session = sessions.get_mut(&session_id).ok_or(StatusCode::NOT_FOUND)?; + + session + .received_sigs + .insert(sig_pair.signer_index, partial_sig); + + if session.received_sigs.len() == state.num_signers { + let mut signer = state.signer.write().await; + for (&i, psig) in &session.received_sigs { + if i != 0 { + signer + .receive_partial_signature(i, *psig) + .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?; + } + } + + let final_sig = signer + .create_final_signature() + .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?; + session.final_signature = Some(final_sig); + + // Verify final sig + let agg_pub = signer.aggregated_pubkey(); + verify_signature(agg_pub, final_sig, &session.message) + .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?; + } + } + + Ok(StatusCode::OK) +} + +// GET /session/:id/signature +#[instrument(skip(state))] +async fn get_final_signature( + State(state): State, + Path(session_id): Path, +) -> Result, StatusCode> { + let sessions = state.signing_sessions.read().await; + let session = 
sessions.get(&session_id).ok_or(StatusCode::NOT_FOUND)?; + + if let Some(final_sig) = &session.final_signature { + Ok(Json(hex::encode(final_sig.serialize()))) + } else { + Err(StatusCode::NOT_FOUND) + } +} + +async fn create_signing_session(state: &AppState) -> anyhow::Result { + let unsigned_tx = { + let withdrawal_builder = create_test_withdrawal_builder(&state.bridge_address).await?; + let withdrawals = create_test_withdrawal_requests()?; + let proof_txid = Txid::hash(&[0x42; 32]); + + withdrawal_builder + .create_unsigned_withdrawal_tx(withdrawals, proof_txid) + .await? + }; + + // Create unique session ID + let session_id = Uuid::new_v4().to_string(); + let tx_id = unsigned_tx.txid.to_string(); + + { + let mut utxos = state.unsigned_txs.write().await; + utxos.insert(tx_id.clone(), unsigned_tx.clone()); + } + + // TODO: extract sighash and sign it and broadcast in last step + let message = unsigned_tx.tx.compute_txid().as_byte_array().to_vec(); + { + let mut signer = state.signer.write().await; + signer.start_signing_session(message.clone())?; + } + + let session = SigningSession { + session_id: session_id.clone(), + tx_id, + received_nonces: HashMap::new(), + received_sigs: HashMap::new(), + final_signature: None, + message: message.clone(), + }; + + { + let mut sessions = state.signing_sessions.write().await; + sessions.insert(session_id.clone(), session); + } + + // Coordinator is signer_index 0, so insert coordinator's nonce: + { + #[allow(unused_mut)] + let mut signer = state.signer.write().await; + let coordinator_nonce = signer.our_nonce().expect("nonce should be generated"); + let mut sessions = state.signing_sessions.write().await; + let session = sessions.get_mut(&session_id).unwrap(); + session.received_nonces.insert(0, coordinator_nonce); + } + + let signer = state.signer.read().await; + Ok(SigningSessionResponse { + session_id, + message_to_sign: hex::encode(message), + aggregated_pubkey: hex::encode(signer.aggregated_pubkey().serialize()), + 
required_signers: state.num_signers, + received_nonces: 1, + received_partial_signatures: 0, + final_signature: None, + }) +} + +// Mock a WithdrawalBuilder +async fn create_test_withdrawal_builder( + bridge_address: &Address, +) -> anyhow::Result { + let rpc_url = "http://localhost:18443"; + let network = BitcoinNetwork::Regtest; + let auth = bitcoincore_rpc::Auth::None; + let builder = WithdrawalBuilder::new(rpc_url, network, auth, bridge_address.clone()).await?; + Ok(builder) +} + +// Mock withdrawal requests +fn create_test_withdrawal_requests() -> anyhow::Result> { + // Just create two withdrawal requests for demonstration + let addr1 = + Address::from_str("bcrt1pv6dtdf0vrrj6ntas926v8vw9u0j3mga29vmfnxh39zfxya83p89qz9ze3l")? + .require_network(Network::Regtest)?; + let addr2 = Address::from_str("bcrt1qxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyz0abcd")? + .require_network(Network::Regtest)?; + + let requests = vec![ + WithdrawalRequest { + address: addr1, + amount: Amount::from_btc(0.1)?, + }, + WithdrawalRequest { + address: addr2, + amount: Amount::from_btc(0.05)?, + }, + ]; + Ok(requests) +} + +async fn get_nonces( + State(state): State, + Path(session_id): Path, +) -> Result>, StatusCode> { + let sessions = state.signing_sessions.read().await; + let session = sessions.get(&session_id).ok_or(StatusCode::NOT_FOUND)?; + + let mut nonces = HashMap::new(); + for (&idx, nonce) in &session.received_nonces { + nonces.insert( + idx, + base64::engine::general_purpose::STANDARD.encode(nonce.to_bytes()), + ); + } + + Ok(Json(nonces)) +} diff --git a/via_verifier/lib/via_musig2/examples/key_generation_setup.rs b/via_verifier/lib/via_musig2/examples/key_generation_setup.rs new file mode 100644 index 000000000..6c79d334c --- /dev/null +++ b/via_verifier/lib/via_musig2/examples/key_generation_setup.rs @@ -0,0 +1,100 @@ +use std::{env, str::FromStr}; + +use bitcoin::{Address as BitcoinAddress, Network}; +use musig2::KeyAggContext; +use rand::rngs::OsRng; +use 
secp256k1_musig2::{PublicKey, Secp256k1, SecretKey};
+
+#[derive(Debug)]
+#[allow(dead_code)]
+struct ContributorOutput {
+    secret_key: Option<SecretKey>,
+    public_key: PublicKey,
+}
+
+#[derive(Debug)]
+#[allow(dead_code)]
+struct CoordinatorOutput {
+    participant_count: usize,
+    bridge_address: BitcoinAddress,
+}
+
+fn generate_keypair() -> (SecretKey, PublicKey) {
+    let mut rng = OsRng;
+    let secp = Secp256k1::new();
+    let secret_key = SecretKey::new(&mut rng);
+    let public_key = PublicKey::from_secret_key(&secp, &secret_key);
+    (secret_key, public_key)
+}
+
+fn create_bridge_address(
+    pubkeys: Vec<PublicKey>,
+) -> Result<BitcoinAddress, Box<dyn std::error::Error>> {
+    let secp = bitcoin::secp256k1::Secp256k1::new();
+
+    let musig_key_agg_cache = KeyAggContext::new(pubkeys)?;
+
+    let agg_pubkey = musig_key_agg_cache.aggregated_pubkey::<PublicKey>();
+    let (xonly_agg_key, _) = agg_pubkey.x_only_public_key();
+
+    // Convert to bitcoin XOnlyPublicKey first
+    let internal_key = bitcoin::XOnlyPublicKey::from_slice(&xonly_agg_key.serialize())?;
+
+    // Use internal_key for address creation
+    let address = BitcoinAddress::p2tr(&secp, internal_key, None, Network::Regtest);
+
+    Ok(address)
+}
+
+fn main() -> Result<(), Box<dyn std::error::Error>> {
+    let args: Vec<String> = env::args().collect();
+
+    if args.len() < 2 {
+        return Err(
+            "Usage: contributor [optional_public_key] OR coordinator public_key1 public_key2 ..."
+ .into(), + ); + } + + match args[1].as_str() { + "contributor" => { + let output = if args.len() > 2 { + // Use provided public key + let public_key = PublicKey::from_str(&args[2])?; + ContributorOutput { + secret_key: None, + public_key, + } + } else { + // Generate new keypair + let (secret_key, public_key) = generate_keypair(); + ContributorOutput { + secret_key: Some(secret_key), + public_key, + } + }; + println!("{:?}", output); + Ok(()) + } + "coordinator" => { + if args.len() <= 2 { + return Err("Error: Coordinator needs at least one public key".into()); + } + + let mut pubkeys = Vec::new(); + for pubkey_str in args.iter().skip(2) { + let public_key = PublicKey::from_str(pubkey_str).unwrap(); + pubkeys.push(public_key); + } + + let bridge_address = create_bridge_address(pubkeys)?; + let output = CoordinatorOutput { + participant_count: args.len() - 2, + bridge_address, + }; + println!("{:?}", output); + Ok(()) + } + _ => Err("Invalid role. Use 'contributor' or 'coordinator'".into()), + } +} diff --git a/via_verifier/lib/via_musig2/examples/withdrawal.rs b/via_verifier/lib/via_musig2/examples/withdrawal.rs new file mode 100644 index 000000000..3576159fe --- /dev/null +++ b/via_verifier/lib/via_musig2/examples/withdrawal.rs @@ -0,0 +1,251 @@ +// SPDX-License-Identifier: CC0-1.0 + +//! Demonstrate creating a transaction that spends to and from p2tr outputs with musig2. 
+ +use std::str::FromStr; + +use bitcoin::{ + hashes::Hash, + key::Keypair, + locktime::absolute, + secp256k1::{Secp256k1, SecretKey}, + sighash::{Prevouts, SighashCache, TapSighashType}, + transaction, Address, Amount, Network, PrivateKey, ScriptBuf, Sequence, TapTweakHash, + Transaction, TxIn, TxOut, Witness, +}; +use musig2::{secp::Scalar, KeyAggContext}; +use rand::Rng; +use secp256k1_musig2::schnorr::Signature; +use via_btc_client::{inscriber::Inscriber, types::NodeAuth}; + +const RPC_URL: &str = "http://0.0.0.0:18443"; +const RPC_USERNAME: &str = "rpcuser"; +const RPC_PASSWORD: &str = "rpcpassword"; +const NETWORK: Network = Network::Regtest; +const PK: &str = "cRaUbRSn8P8cXUcg6cMZ7oTZ1wbDjktYTsbdGw62tuqqD9ttQWMm"; +const SPEND_AMOUNT: Amount = Amount::from_sat(5_000_000); + +#[tokio::main] +async fn main() -> Result<(), Box> { + let secp = Secp256k1::new(); + + // Get a keypair we control. In a real application these would come from a stored secret. + let private_key_1 = + PrivateKey::from_wif("cVZduZu265sWeAqFYygoDEE1FZ7wV9rpW5qdqjRkUehjaUMWLT1R").unwrap(); + + let private_key_2 = + PrivateKey::from_wif("cUWA5dZXc6NwLovW3Kr9DykfY5ysFigKZM5Annzty7J8a43Fe2YF").unwrap(); + + let private_key_3 = + PrivateKey::from_wif("cRaUbRSn8P8cXUcg6cMZ7oTZ1wbDjktYTsbdGw62tuqqD9ttQWMm").unwrap(); + + let secret_key_1 = SecretKey::from_slice(&private_key_1.inner.secret_bytes()).unwrap(); + let secret_key_2 = SecretKey::from_slice(&private_key_2.inner.secret_bytes()).unwrap(); + let secret_key_3 = SecretKey::from_slice(&private_key_3.inner.secret_bytes()).unwrap(); + + let keypair_1 = Keypair::from_secret_key(&secp, &secret_key_1); + let keypair_2 = Keypair::from_secret_key(&secp, &secret_key_2); + let keypair_3 = Keypair::from_secret_key(&secp, &secret_key_3); + + let (internal_key_1, parity_1) = keypair_1.x_only_public_key(); + let (internal_key_2, parity_2) = keypair_2.x_only_public_key(); + let (internal_key_3, parity_3) = keypair_3.x_only_public_key(); + + // 
------------------------------------------- + // Key aggregation (MuSig2) + // ------------------------------------------- + let pubkeys = vec![ + musig2::secp256k1::PublicKey::from_slice(&internal_key_1.public_key(parity_1).serialize()) + .unwrap(), + musig2::secp256k1::PublicKey::from_slice(&internal_key_2.public_key(parity_2).serialize()) + .unwrap(), + musig2::secp256k1::PublicKey::from_slice(&internal_key_3.public_key(parity_3).serialize()) + .unwrap(), + ]; + + let mut musig_key_agg_cache = KeyAggContext::new(pubkeys)?; + + let agg_pubkey = musig_key_agg_cache.aggregated_pubkey::(); + let (xonly_agg_key, _) = agg_pubkey.x_only_public_key(); + + // Convert to bitcoin XOnlyPublicKey first + let internal_key = bitcoin::XOnlyPublicKey::from_slice(&xonly_agg_key.serialize())?; + + // Calculate taproot tweak + let tap_tweak = TapTweakHash::from_key_and_tweak(internal_key, None); + let tweak = tap_tweak.to_scalar(); + let tweak_bytes = tweak.to_be_bytes(); + let tweak = secp256k1_musig2::Scalar::from_be_bytes(tweak_bytes).unwrap(); + + // Apply tweak to the key aggregation context before signing + musig_key_agg_cache = musig_key_agg_cache.with_xonly_tweak(tweak)?; + + // Use internal_key for address creation + let address = Address::p2tr(&secp, internal_key, None, NETWORK); + + println!("address: {}", address); + + // ------------------------------------------- + // Connect to Bitcoin node (adjust RPC credentials and URL) + // ------------------------------------------- + let inscriber = Inscriber::new( + RPC_URL, + NETWORK, + NodeAuth::UserPass(RPC_USERNAME.to_string(), RPC_PASSWORD.to_string()), + PK, + None, + ) + .await?; + let client = inscriber.get_client().await; + + // ------------------------------------------- + // Fetching UTXOs from node + // ------------------------------------------- + let utxos = client.fetch_utxos(&address).await?; + + // Get an unspent output that is locked to the key above that we control. 
+ // In a real application these would come from the chain. + let (dummy_out_point, dummy_utxo) = utxos[0].clone(); + + let change_amount = dummy_utxo.value - SPEND_AMOUNT - Amount::from_sat(1000); + + // Get an address to send to. + let address = receivers_address(); + + // The input for the transaction we are constructing. + let input = TxIn { + previous_output: dummy_out_point, // The dummy output we are spending. + script_sig: ScriptBuf::default(), // For a p2tr script_sig is empty. + sequence: Sequence::ENABLE_RBF_NO_LOCKTIME, + witness: Witness::default(), // Filled in after signing. + }; + + // The spend output is locked to a key controlled by the receiver. + let spend = TxOut { + value: SPEND_AMOUNT, + script_pubkey: address.script_pubkey(), + }; + + // The change output is locked to a key controlled by us. + let change = TxOut { + value: change_amount, + script_pubkey: ScriptBuf::new_p2tr(&secp, internal_key, None), // Change comes back to us. + }; + + // The transaction we want to sign and broadcast. + let mut unsigned_tx = Transaction { + version: transaction::Version::TWO, // Post BIP-68. + lock_time: absolute::LockTime::ZERO, // Ignore the locktime. + input: vec![input], // Input goes into index 0. + output: vec![spend, change], // Outputs, order does not matter. + }; + let input_index = 0; + + // Get the sighash to sign. 
+ let sighash_type = TapSighashType::Default; + let prevouts = vec![dummy_utxo]; + let prevouts = Prevouts::All(&prevouts); + + let mut sighasher = SighashCache::new(&mut unsigned_tx); + let sighash = sighasher + .taproot_key_spend_signature_hash(input_index, &prevouts, sighash_type) + .expect("failed to construct sighash"); + + // ------------------------------------------- + // MuSig2 Signing Process + // ------------------------------------------- + use musig2::{FirstRound, SecNonceSpices}; + use rand::thread_rng; + + // Convert bitcoin::SecretKey to musig2::SecretKey for each participant + let secret_key_1 = musig2::secp256k1::SecretKey::from_slice(&secret_key_1[..]).unwrap(); + let secret_key_2 = musig2::secp256k1::SecretKey::from_slice(&secret_key_2[..]).unwrap(); + let secret_key_3 = musig2::secp256k1::SecretKey::from_slice(&secret_key_3[..]).unwrap(); + + // First round: Generate nonces + let mut first_round_1 = FirstRound::new( + musig_key_agg_cache.clone(), // Use tweaked context + thread_rng().gen::<[u8; 32]>(), + 0, + SecNonceSpices::new() + .with_seckey(secret_key_1) + .with_message(&sighash.to_byte_array()), + )?; + + let mut first_round_2 = FirstRound::new( + musig_key_agg_cache.clone(), + thread_rng().gen::<[u8; 32]>(), + 1, + SecNonceSpices::new() + .with_seckey(secret_key_2) + .with_message(&sighash.to_byte_array()), + )?; + + let mut first_round_3 = FirstRound::new( + musig_key_agg_cache.clone(), + thread_rng().gen::<[u8; 32]>(), + 2, + SecNonceSpices::new() + .with_seckey(secret_key_3) + .with_message(&sighash.to_byte_array()), + )?; + + // Exchange nonces + let nonce_1 = first_round_1.our_public_nonce(); + let nonce_2 = first_round_2.our_public_nonce(); + let nonce_3 = first_round_3.our_public_nonce(); + + first_round_1.receive_nonce(1, nonce_2.clone())?; + first_round_1.receive_nonce(2, nonce_3.clone())?; + first_round_2.receive_nonce(0, nonce_1.clone())?; + first_round_2.receive_nonce(2, nonce_3.clone())?; + first_round_3.receive_nonce(0, 
nonce_1.clone())?; + first_round_3.receive_nonce(1, nonce_2.clone())?; + + // Second round: Create partial signatures + let binding = sighash.to_byte_array(); + let mut second_round_1 = first_round_1.finalize(secret_key_1, &binding)?; + let second_round_2 = first_round_2.finalize(secret_key_2, &binding)?; + let second_round_3 = first_round_3.finalize(secret_key_3, &binding)?; + // Combine partial signatures + let partial_sig_2: [u8; 32] = second_round_2.our_signature(); + let partial_sig_3: [u8; 32] = second_round_3.our_signature(); + + second_round_1.receive_signature(1, Scalar::from_slice(&partial_sig_2).unwrap())?; + second_round_1.receive_signature(2, Scalar::from_slice(&partial_sig_3).unwrap())?; + + let final_signature: Signature = second_round_1.finalize()?; + + // Update the witness stack with the aggregated signature + let signature = bitcoin::taproot::Signature { + signature: bitcoin::secp256k1::schnorr::Signature::from_slice( + &final_signature.to_byte_array(), + )?, + sighash_type, + }; + *sighasher.witness_mut(input_index).unwrap() = Witness::p2tr_key_spend(&signature); + + // Get the signed transaction + let tx = sighasher.into_transaction(); + + // BOOM! Transaction signed and ready to broadcast. + println!("{:#?}", tx); + + let tx_hex = bitcoin::consensus::encode::serialize_hex(&tx); + let res = client.broadcast_signed_transaction(&tx_hex).await?; + println!("res: {:?}", res); + + Ok(()) +} + +/// A dummy address for the receiver. +/// +/// We lock the spend output to the key associated with this address. +/// +/// (FWIW this is an arbitrary mainnet address from block 805222.) 
+fn receivers_address() -> Address { + Address::from_str("bc1p0dq0tzg2r780hldthn5mrznmpxsxc0jux5f20fwj0z3wqxxk6fpqm7q0va") + .expect("a valid address") + .require_network(Network::Bitcoin) + .expect("valid address for mainnet") +} diff --git a/via_verifier/lib/via_musig2/src/lib.rs b/via_verifier/lib/via_musig2/src/lib.rs new file mode 100644 index 000000000..91617db46 --- /dev/null +++ b/via_verifier/lib/via_musig2/src/lib.rs @@ -0,0 +1,300 @@ +use std::fmt; + +use bitcoin::TapTweakHash; +use musig2::{ + verify_single, CompactSignature, FirstRound, KeyAggContext, PartialSignature, PubNonce, + SecNonceSpices, SecondRound, +}; +use secp256k1_musig2::{PublicKey, Secp256k1, SecretKey}; + +#[derive(Debug)] +pub enum MusigError { + Musig2Error(String), + InvalidSignerIndex, + MissingNonces, + MissingPartialSignatures, + InvalidState(String), +} + +impl fmt::Display for MusigError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + MusigError::Musig2Error(e) => write!(f, "MuSig2 error: {}", e), + MusigError::InvalidSignerIndex => write!(f, "Invalid signer index"), + MusigError::MissingNonces => write!(f, "Missing required nonces"), + MusigError::MissingPartialSignatures => { + write!(f, "Missing required partial signatures") + } + MusigError::InvalidState(s) => write!(f, "Invalid state: {}", s), + } + } +} + +impl std::error::Error for MusigError {} + +/// Represents a single signer in the MuSig2 protocol +pub struct Signer { + secret_key: SecretKey, + public_key: PublicKey, + signer_index: usize, + key_agg_ctx: KeyAggContext, + first_round: Option, + second_round: Option>>, + message: Vec, + nonce_submitted: bool, + partial_sig_submitted: bool, +} + +impl fmt::Debug for Signer { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("Signer") + .field("public_key", &self.public_key) + .field("signer_index", &self.signer_index) + .field("key_agg_ctx", &self.key_agg_ctx) + .field("message", &self.message) + 
.field("nonce_submitted", &self.nonce_submitted) + .field("partial_sig_submitted", &self.partial_sig_submitted) + .finish() + } +} + +impl Signer { + /// Create a new signer with the given secret key and index + pub fn new( + secret_key: SecretKey, + signer_index: usize, + all_pubkeys: Vec, + ) -> Result { + let secp = Secp256k1::new(); + let public_key = PublicKey::from_secret_key(&secp, &secret_key); + + // Verify that signer_index is valid and matches the public key + if signer_index >= all_pubkeys.len() { + return Err(MusigError::InvalidSignerIndex); + } + if all_pubkeys[signer_index] != public_key { + return Err(MusigError::Musig2Error( + "Public key at signer_index does not match derived public key".into(), + )); + } + + let mut musig_key_agg_cache = + KeyAggContext::new(all_pubkeys).map_err(|e| MusigError::Musig2Error(e.to_string()))?; + + let agg_pubkey = musig_key_agg_cache.aggregated_pubkey::(); + let (xonly_agg_key, _) = agg_pubkey.x_only_public_key(); + + // Convert to bitcoin XOnlyPublicKey first + let internal_key = bitcoin::XOnlyPublicKey::from_slice(&xonly_agg_key.serialize()) + .map_err(|e| { + MusigError::Musig2Error(format!( + "Failed to convert to bitcoin XOnlyPublicKey: {}", + e + )) + })?; + + // Calculate taproot tweak + let tap_tweak = TapTweakHash::from_key_and_tweak(internal_key, None); + let tweak = tap_tweak.to_scalar(); + let tweak_bytes = tweak.to_be_bytes(); + let musig2_compatible_tweak = secp256k1_musig2::Scalar::from_be_bytes(tweak_bytes).unwrap(); + // Apply tweak to the key aggregation context before signing + musig_key_agg_cache = musig_key_agg_cache + .with_xonly_tweak(musig2_compatible_tweak) + .map_err(|e| MusigError::Musig2Error(format!("Failed to apply tweak: {}", e)))?; + + Ok(Self { + secret_key, + public_key, + signer_index, + key_agg_ctx: musig_key_agg_cache, + first_round: None, + second_round: None, + message: Vec::new(), + nonce_submitted: false, + partial_sig_submitted: false, + }) + } + + /// Get the aggregated 
public key for all signers + pub fn aggregated_pubkey(&self) -> PublicKey { + self.key_agg_ctx.aggregated_pubkey() + } + + /// Start the signing session with a message + pub fn start_signing_session(&mut self, message: Vec) -> Result { + self.message = message.clone(); + + let msg_array = message.as_slice(); + + let first_round = FirstRound::new( + self.key_agg_ctx.clone(), + rand::random::<[u8; 32]>(), + self.signer_index, + SecNonceSpices::new() + .with_seckey(self.secret_key) + .with_message(&msg_array), + ) + .map_err(|e| MusigError::Musig2Error(e.to_string()))?; + + let nonce = first_round.our_public_nonce(); + self.first_round = Some(first_round); + Ok(nonce) + } + + /// Receive a nonce from another participant + pub fn receive_nonce( + &mut self, + signer_index: usize, + nonce: PubNonce, + ) -> Result<(), MusigError> { + let first_round = self + .first_round + .as_mut() + .ok_or_else(|| MusigError::InvalidState("First round not initialized".into()))?; + + first_round + .receive_nonce(signer_index, nonce) + .map_err(|e| MusigError::Musig2Error(e.to_string()))?; + Ok(()) + } + + /// Create partial signature + pub fn create_partial_signature(&mut self) -> Result { + let msg_array = self.message.clone(); + + let first_round = self + .first_round + .take() + .ok_or_else(|| MusigError::InvalidState("First round not initialized".into()))?; + + let second_round = first_round + .finalize(self.secret_key, msg_array) + .map_err(|e| MusigError::Musig2Error(e.to_string()))?; + + let partial_sig = second_round.our_signature(); + self.second_round = Some(second_round); + Ok(partial_sig) + } + + /// Receive partial signature from another signer + pub fn receive_partial_signature( + &mut self, + signer_index: usize, + partial_sig: PartialSignature, + ) -> Result<(), MusigError> { + let second_round = self + .second_round + .as_mut() + .ok_or_else(|| MusigError::InvalidState("Second round not initialized".into()))?; + + second_round + .receive_signature(signer_index, 
partial_sig) + .map_err(|e| MusigError::Musig2Error(e.to_string()))?; + Ok(()) + } + + /// Create final signature + pub fn create_final_signature(&mut self) -> Result { + let second_round = self + .second_round + .take() + .ok_or_else(|| MusigError::InvalidState("Second round not initialized".into()))?; + + second_round + .finalize() + .map_err(|e| MusigError::Musig2Error(e.to_string())) + } + + pub fn signer_index(&self) -> usize { + self.signer_index + } + + pub fn has_not_started(&self) -> bool { + self.first_round.is_none() + } + + pub fn has_submitted_nonce(&self) -> bool { + self.nonce_submitted + } + + pub fn mark_nonce_submitted(&mut self) { + self.nonce_submitted = true; + } + + pub fn our_nonce(&self) -> Option { + self.first_round + .as_ref() + .map(|round| round.our_public_nonce()) + } + + pub fn has_created_partial_sig(&self) -> bool { + self.second_round.is_some() + } + + pub fn mark_partial_sig_submitted(&mut self) { + self.partial_sig_submitted = true; + } +} + +/// Helper function to verify a complete signature +pub fn verify_signature( + pubkey: PublicKey, + signature: CompactSignature, + message: &[u8], +) -> Result<(), MusigError> { + verify_single(pubkey, signature, message).map_err(|e| MusigError::Musig2Error(e.to_string())) +} + +#[cfg(test)] +mod tests { + use rand::rngs::OsRng; + + use super::*; + + #[test] + fn test_signer_lifecycle() -> Result<(), MusigError> { + let mut rng = OsRng; + let secret_key_1 = SecretKey::new(&mut rng); + let secret_key_2 = SecretKey::new(&mut rng); + + let secp = Secp256k1::new(); + let public_key_1 = PublicKey::from_secret_key(&secp, &secret_key_1); + let public_key_2 = PublicKey::from_secret_key(&secp, &secret_key_2); + + let pubkeys = vec![public_key_1, public_key_2]; + + let mut signer1 = Signer::new(secret_key_1, 0, pubkeys.clone())?; + let mut signer2 = Signer::new(secret_key_2, 1, pubkeys)?; + + // Generate and exchange nonces + let message = b"test message".to_vec(); + let nonce1 = 
signer1.start_signing_session(message.clone())?; + let nonce2 = signer2.start_signing_session(message.clone())?; + + signer1.receive_nonce(1, nonce2)?; + signer2.receive_nonce(0, nonce1)?; + + // Create partial signatures + let partial_sig1 = signer1.create_partial_signature()?; + let partial_sig2 = signer2.create_partial_signature()?; + + // Exchange partial signatures + signer1.receive_partial_signature(1, partial_sig2)?; + signer2.receive_partial_signature(0, partial_sig1)?; + + // Create final signatures + let final_sig1 = signer1.create_final_signature()?; + let final_sig2 = signer2.create_final_signature()?; + + assert_eq!( + final_sig1.serialize(), + final_sig2.serialize(), + "Final signatures should match" + ); + // Verify the signature + verify_signature(signer1.aggregated_pubkey(), final_sig1, &message)?; + + Ok(()) + } +} diff --git a/via_verifier/lib/via_verification/Cargo.toml b/via_verifier/lib/via_verification/Cargo.toml new file mode 100644 index 000000000..71d8c22bd --- /dev/null +++ b/via_verifier/lib/via_verification/Cargo.toml @@ -0,0 +1,26 @@ +[package] +name = "via_verification" +version = "0.1.0" +edition = "2021" + +[dependencies] +circuit_definitions = {git = "https://github.com/matter-labs/zksync-protocol.git", tag = "v0.150.16"} + +zksync_types.workspace = true + +hex.workspace = true +serde_json.workspace = true +serde.workspace = true +clap = { workspace = true, features = ["derive"]} +anyhow.workspace = true +reqwest = { workspace = true, features = ["json"]} +tokio.workspace = true +ethers = {version = "1"} +sha3.workspace = true +once_cell.workspace = true +primitive-types = "0.12.2" +tracing.workspace = true +tracing-subscriber.workspace = true +thiserror.workspace = true +async-trait.workspace = true + diff --git a/via_verifier/lib/via_verification/examples/zksync-era-verification-cli/abis/IVerifier.json b/via_verifier/lib/via_verification/examples/zksync-era-verification-cli/abis/IVerifier.json new file mode 100644 index 
000000000..2685bb996 --- /dev/null +++ b/via_verifier/lib/via_verification/examples/zksync-era-verification-cli/abis/IVerifier.json @@ -0,0 +1,44 @@ +[ + { + "inputs": [], + "name": "verificationKeyHash", + "outputs": [ + { + "internalType": "bytes32", + "name": "vkHash", + "type": "bytes32" + } + ], + "stateMutability": "pure", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256[]", + "name": "", + "type": "uint256[]" + }, + { + "internalType": "uint256[]", + "name": "", + "type": "uint256[]" + }, + { + "internalType": "uint256[]", + "name": "", + "type": "uint256[]" + } + ], + "name": "verify", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "view", + "type": "function" + } +] diff --git a/via_verifier/lib/via_verification/examples/zksync-era-verification-cli/abis/IZkSync.json b/via_verifier/lib/via_verification/examples/zksync-era-verification-cli/abis/IZkSync.json new file mode 100644 index 000000000..07096eb84 --- /dev/null +++ b/via_verifier/lib/via_verification/examples/zksync-era-verification-cli/abis/IZkSync.json @@ -0,0 +1,2384 @@ +[ + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "uint256", + "name": "batchNumber", + "type": "uint256" + }, + { + "indexed": true, + "internalType": "bytes32", + "name": "batchHash", + "type": "bytes32" + }, + { + "indexed": true, + "internalType": "bytes32", + "name": "commitment", + "type": "bytes32" + } + ], + "name": "BlockCommit", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "uint256", + "name": "batchNumber", + "type": "uint256" + }, + { + "indexed": true, + "internalType": "bytes32", + "name": "batchHash", + "type": "bytes32" + }, + { + "indexed": true, + "internalType": "bytes32", + "name": "commitment", + "type": "bytes32" + } + ], + "name": "BlockExecution", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": false, + 
"internalType": "uint256", + "name": "totalBatchesCommitted", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "totalBatchesVerified", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "totalBatchesExecuted", + "type": "uint256" + } + ], + "name": "BlocksRevert", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "uint256", + "name": "previousLastVerifiedBatch", + "type": "uint256" + }, + { + "indexed": true, + "internalType": "uint256", + "name": "currentLastVerifiedBatch", + "type": "uint256" + } + ], + "name": "BlocksVerification", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "components": [ + { + "components": [ + { + "internalType": "address", + "name": "facet", + "type": "address" + }, + { + "internalType": "enum Diamond.Action", + "name": "action", + "type": "uint8" + }, + { + "internalType": "bool", + "name": "isFreezable", + "type": "bool" + }, + { + "internalType": "bytes4[]", + "name": "selectors", + "type": "bytes4[]" + } + ], + "internalType": "struct Diamond.FacetCut[]", + "name": "facetCuts", + "type": "tuple[]" + }, + { + "internalType": "address", + "name": "initAddress", + "type": "address" + }, + { + "internalType": "bytes", + "name": "initCalldata", + "type": "bytes" + } + ], + "indexed": false, + "internalType": "struct Diamond.DiamondCutData", + "name": "diamondCut", + "type": "tuple" + } + ], + "name": "ExecuteUpgrade", + "type": "event" + }, + { + "anonymous": false, + "inputs": [], + "name": "Freeze", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": false, + "internalType": "bool", + "name": "isPorterAvailable", + "type": "bool" + } + ], + "name": "IsPorterAvailableStatusUpdate", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "oldAdmin", + "type": "address" + }, + { + "indexed": 
true, + "internalType": "address", + "name": "newAdmin", + "type": "address" + } + ], + "name": "NewAdmin", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": false, + "internalType": "uint128", + "name": "oldNominator", + "type": "uint128" + }, + { + "indexed": false, + "internalType": "uint128", + "name": "oldDenominator", + "type": "uint128" + }, + { + "indexed": false, + "internalType": "uint128", + "name": "newNominator", + "type": "uint128" + }, + { + "indexed": false, + "internalType": "uint128", + "name": "newDenominator", + "type": "uint128" + } + ], + "name": "NewBaseTokenMultiplier", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "components": [ + { + "internalType": "enum PubdataPricingMode", + "name": "pubdataPricingMode", + "type": "uint8" + }, + { + "internalType": "uint32", + "name": "batchOverheadL1Gas", + "type": "uint32" + }, + { + "internalType": "uint32", + "name": "maxPubdataPerBatch", + "type": "uint32" + }, + { + "internalType": "uint32", + "name": "maxL2GasPerBatch", + "type": "uint32" + }, + { + "internalType": "uint32", + "name": "priorityTxMaxPubdata", + "type": "uint32" + }, + { + "internalType": "uint64", + "name": "minimalL2GasPrice", + "type": "uint64" + } + ], + "indexed": false, + "internalType": "struct FeeParams", + "name": "oldFeeParams", + "type": "tuple" + }, + { + "components": [ + { + "internalType": "enum PubdataPricingMode", + "name": "pubdataPricingMode", + "type": "uint8" + }, + { + "internalType": "uint32", + "name": "batchOverheadL1Gas", + "type": "uint32" + }, + { + "internalType": "uint32", + "name": "maxPubdataPerBatch", + "type": "uint32" + }, + { + "internalType": "uint32", + "name": "maxL2GasPerBatch", + "type": "uint32" + }, + { + "internalType": "uint32", + "name": "priorityTxMaxPubdata", + "type": "uint32" + }, + { + "internalType": "uint64", + "name": "minimalL2GasPrice", + "type": "uint64" + } + ], + "indexed": false, + "internalType": "struct FeeParams", + 
"name": "newFeeParams", + "type": "tuple" + } + ], + "name": "NewFeeParams", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "oldPendingAdmin", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "newPendingAdmin", + "type": "address" + } + ], + "name": "NewPendingAdmin", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": false, + "internalType": "uint256", + "name": "txId", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "bytes32", + "name": "txHash", + "type": "bytes32" + }, + { + "indexed": false, + "internalType": "uint64", + "name": "expirationTimestamp", + "type": "uint64" + }, + { + "components": [ + { + "internalType": "uint256", + "name": "txType", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "from", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "to", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "gasLimit", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "gasPerPubdataByteLimit", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "maxFeePerGas", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "maxPriorityFeePerGas", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "paymaster", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "nonce", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "value", + "type": "uint256" + }, + { + "internalType": "uint256[4]", + "name": "reserved", + "type": "uint256[4]" + }, + { + "internalType": "bytes", + "name": "data", + "type": "bytes" + }, + { + "internalType": "bytes", + "name": "signature", + "type": "bytes" + }, + { + "internalType": "uint256[]", + "name": "factoryDeps", + "type": "uint256[]" + }, + { + "internalType": "bytes", + "name": "paymasterInput", + "type": "bytes" + }, + { + 
"internalType": "bytes", + "name": "reservedDynamic", + "type": "bytes" + } + ], + "indexed": false, + "internalType": "struct L2CanonicalTransaction", + "name": "transaction", + "type": "tuple" + }, + { + "indexed": false, + "internalType": "bytes[]", + "name": "factoryDeps", + "type": "bytes[]" + } + ], + "name": "NewPriorityRequest", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": false, + "internalType": "uint256", + "name": "oldPriorityTxMaxGasLimit", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "newPriorityTxMaxGasLimit", + "type": "uint256" + } + ], + "name": "NewPriorityTxMaxGasLimit", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": false, + "internalType": "address", + "name": "oldTransactionFilterer", + "type": "address" + }, + { + "indexed": false, + "internalType": "address", + "name": "newTransactionFilterer", + "type": "address" + } + ], + "name": "NewTransactionFilterer", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "components": [ + { + "components": [ + { + "internalType": "address", + "name": "facet", + "type": "address" + }, + { + "internalType": "enum Diamond.Action", + "name": "action", + "type": "uint8" + }, + { + "internalType": "bool", + "name": "isFreezable", + "type": "bool" + }, + { + "internalType": "bytes4[]", + "name": "selectors", + "type": "bytes4[]" + } + ], + "internalType": "struct Diamond.FacetCut[]", + "name": "facetCuts", + "type": "tuple[]" + }, + { + "internalType": "address", + "name": "initAddress", + "type": "address" + }, + { + "internalType": "bytes", + "name": "initCalldata", + "type": "bytes" + } + ], + "indexed": false, + "internalType": "struct Diamond.DiamondCutData", + "name": "diamondCut", + "type": "tuple" + }, + { + "indexed": true, + "internalType": "uint256", + "name": "proposalId", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "bytes32", + "name": 
"proposalSalt", + "type": "bytes32" + } + ], + "name": "ProposeTransparentUpgrade", + "type": "event" + }, + { + "anonymous": false, + "inputs": [], + "name": "Unfreeze", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "validatorAddress", + "type": "address" + }, + { + "indexed": false, + "internalType": "bool", + "name": "isActive", + "type": "bool" + } + ], + "name": "ValidatorStatusUpdate", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": false, + "internalType": "enum PubdataPricingMode", + "name": "validiumMode", + "type": "uint8" + } + ], + "name": "ValidiumModeStatusUpdate", + "type": "event" + }, + { + "inputs": [], + "name": "acceptAdmin", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [], + "name": "baseTokenGasPriceMultiplierDenominator", + "outputs": [ + { + "internalType": "uint128", + "name": "", + "type": "uint128" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "baseTokenGasPriceMultiplierNominator", + "outputs": [ + { + "internalType": "uint128", + "name": "", + "type": "uint128" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "components": [ + { + "internalType": "address", + "name": "sender", + "type": "address" + }, + { + "internalType": "address", + "name": "contractL2", + "type": "address" + }, + { + "internalType": "uint256", + "name": "mintValue", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "l2Value", + "type": "uint256" + }, + { + "internalType": "bytes", + "name": "l2Calldata", + "type": "bytes" + }, + { + "internalType": "uint256", + "name": "l2GasLimit", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "l2GasPerPubdataByteLimit", + "type": "uint256" + }, + { + "internalType": "bytes[]", + "name": "factoryDeps", + "type": "bytes[]" + }, + { + "internalType": 
"address", + "name": "refundRecipient", + "type": "address" + } + ], + "internalType": "struct BridgehubL2TransactionRequest", + "name": "_request", + "type": "tuple" + } + ], + "name": "bridgehubRequestL2Transaction", + "outputs": [ + { + "internalType": "bytes32", + "name": "canonicalTxHash", + "type": "bytes32" + } + ], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "components": [ + { + "internalType": "enum PubdataPricingMode", + "name": "pubdataPricingMode", + "type": "uint8" + }, + { + "internalType": "uint32", + "name": "batchOverheadL1Gas", + "type": "uint32" + }, + { + "internalType": "uint32", + "name": "maxPubdataPerBatch", + "type": "uint32" + }, + { + "internalType": "uint32", + "name": "maxL2GasPerBatch", + "type": "uint32" + }, + { + "internalType": "uint32", + "name": "priorityTxMaxPubdata", + "type": "uint32" + }, + { + "internalType": "uint64", + "name": "minimalL2GasPrice", + "type": "uint64" + } + ], + "internalType": "struct FeeParams", + "name": "_newFeeParams", + "type": "tuple" + } + ], + "name": "changeFeeParams", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "components": [ + { + "internalType": "uint64", + "name": "batchNumber", + "type": "uint64" + }, + { + "internalType": "bytes32", + "name": "batchHash", + "type": "bytes32" + }, + { + "internalType": "uint64", + "name": "indexRepeatedStorageChanges", + "type": "uint64" + }, + { + "internalType": "uint256", + "name": "numberOfLayer1Txs", + "type": "uint256" + }, + { + "internalType": "bytes32", + "name": "priorityOperationsHash", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "l2LogsTreeRoot", + "type": "bytes32" + }, + { + "internalType": "uint256", + "name": "timestamp", + "type": "uint256" + }, + { + "internalType": "bytes32", + "name": "commitment", + "type": "bytes32" + } + ], + "internalType": "struct IExecutor.StoredBatchInfo", + "name": "_lastCommittedBatchData", + 
"type": "tuple" + }, + { + "components": [ + { + "internalType": "uint64", + "name": "batchNumber", + "type": "uint64" + }, + { + "internalType": "uint64", + "name": "timestamp", + "type": "uint64" + }, + { + "internalType": "uint64", + "name": "indexRepeatedStorageChanges", + "type": "uint64" + }, + { + "internalType": "bytes32", + "name": "newStateRoot", + "type": "bytes32" + }, + { + "internalType": "uint256", + "name": "numberOfLayer1Txs", + "type": "uint256" + }, + { + "internalType": "bytes32", + "name": "priorityOperationsHash", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "bootloaderHeapInitialContentsHash", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "eventsQueueStateHash", + "type": "bytes32" + }, + { + "internalType": "bytes", + "name": "systemLogs", + "type": "bytes" + }, + { + "internalType": "bytes", + "name": "pubdataCommitments", + "type": "bytes" + } + ], + "internalType": "struct IExecutor.CommitBatchInfo[]", + "name": "_newBatchesData", + "type": "tuple[]" + } + ], + "name": "commitBatches", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "_chainId", + "type": "uint256" + }, + { + "components": [ + { + "internalType": "uint64", + "name": "batchNumber", + "type": "uint64" + }, + { + "internalType": "bytes32", + "name": "batchHash", + "type": "bytes32" + }, + { + "internalType": "uint64", + "name": "indexRepeatedStorageChanges", + "type": "uint64" + }, + { + "internalType": "uint256", + "name": "numberOfLayer1Txs", + "type": "uint256" + }, + { + "internalType": "bytes32", + "name": "priorityOperationsHash", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "l2LogsTreeRoot", + "type": "bytes32" + }, + { + "internalType": "uint256", + "name": "timestamp", + "type": "uint256" + }, + { + "internalType": "bytes32", + "name": "commitment", + "type": "bytes32" + } + ], + "internalType": "struct 
IExecutor.StoredBatchInfo", + "name": "_lastCommittedBatchData", + "type": "tuple" + }, + { + "components": [ + { + "internalType": "uint64", + "name": "batchNumber", + "type": "uint64" + }, + { + "internalType": "uint64", + "name": "timestamp", + "type": "uint64" + }, + { + "internalType": "uint64", + "name": "indexRepeatedStorageChanges", + "type": "uint64" + }, + { + "internalType": "bytes32", + "name": "newStateRoot", + "type": "bytes32" + }, + { + "internalType": "uint256", + "name": "numberOfLayer1Txs", + "type": "uint256" + }, + { + "internalType": "bytes32", + "name": "priorityOperationsHash", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "bootloaderHeapInitialContentsHash", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "eventsQueueStateHash", + "type": "bytes32" + }, + { + "internalType": "bytes", + "name": "systemLogs", + "type": "bytes" + }, + { + "internalType": "bytes", + "name": "pubdataCommitments", + "type": "bytes" + } + ], + "internalType": "struct IExecutor.CommitBatchInfo[]", + "name": "_newBatchesData", + "type": "tuple[]" + } + ], + "name": "commitBatchesSharedBridge", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "components": [ + { + "internalType": "uint64", + "name": "batchNumber", + "type": "uint64" + }, + { + "internalType": "bytes32", + "name": "batchHash", + "type": "bytes32" + }, + { + "internalType": "uint64", + "name": "indexRepeatedStorageChanges", + "type": "uint64" + }, + { + "internalType": "uint256", + "name": "numberOfLayer1Txs", + "type": "uint256" + }, + { + "internalType": "bytes32", + "name": "priorityOperationsHash", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "l2LogsTreeRoot", + "type": "bytes32" + }, + { + "internalType": "uint256", + "name": "timestamp", + "type": "uint256" + }, + { + "internalType": "bytes32", + "name": "commitment", + "type": "bytes32" + } + ], + "internalType": "struct 
IExecutor.StoredBatchInfo[]", + "name": "_batchesData", + "type": "tuple[]" + } + ], + "name": "executeBatches", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "_chainId", + "type": "uint256" + }, + { + "components": [ + { + "internalType": "uint64", + "name": "batchNumber", + "type": "uint64" + }, + { + "internalType": "bytes32", + "name": "batchHash", + "type": "bytes32" + }, + { + "internalType": "uint64", + "name": "indexRepeatedStorageChanges", + "type": "uint64" + }, + { + "internalType": "uint256", + "name": "numberOfLayer1Txs", + "type": "uint256" + }, + { + "internalType": "bytes32", + "name": "priorityOperationsHash", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "l2LogsTreeRoot", + "type": "bytes32" + }, + { + "internalType": "uint256", + "name": "timestamp", + "type": "uint256" + }, + { + "internalType": "bytes32", + "name": "commitment", + "type": "bytes32" + } + ], + "internalType": "struct IExecutor.StoredBatchInfo[]", + "name": "_batchesData", + "type": "tuple[]" + } + ], + "name": "executeBatchesSharedBridge", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "components": [ + { + "components": [ + { + "internalType": "address", + "name": "facet", + "type": "address" + }, + { + "internalType": "enum Diamond.Action", + "name": "action", + "type": "uint8" + }, + { + "internalType": "bool", + "name": "isFreezable", + "type": "bool" + }, + { + "internalType": "bytes4[]", + "name": "selectors", + "type": "bytes4[]" + } + ], + "internalType": "struct Diamond.FacetCut[]", + "name": "facetCuts", + "type": "tuple[]" + }, + { + "internalType": "address", + "name": "initAddress", + "type": "address" + }, + { + "internalType": "bytes", + "name": "initCalldata", + "type": "bytes" + } + ], + "internalType": "struct Diamond.DiamondCutData", + "name": "_diamondCut", + "type": "tuple" + } + ], + 
"name": "executeUpgrade", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "bytes4", + "name": "_selector", + "type": "bytes4" + } + ], + "name": "facetAddress", + "outputs": [ + { + "internalType": "address", + "name": "facet", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "facetAddresses", + "outputs": [ + { + "internalType": "address[]", + "name": "facets", + "type": "address[]" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_facet", + "type": "address" + } + ], + "name": "facetFunctionSelectors", + "outputs": [ + { + "internalType": "bytes4[]", + "name": "", + "type": "bytes4[]" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "facets", + "outputs": [ + { + "components": [ + { + "internalType": "address", + "name": "addr", + "type": "address" + }, + { + "internalType": "bytes4[]", + "name": "selectors", + "type": "bytes4[]" + } + ], + "internalType": "struct IGetters.Facet[]", + "name": "", + "type": "tuple[]" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "_l2BatchNumber", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "_l2MessageIndex", + "type": "uint256" + }, + { + "internalType": "uint16", + "name": "_l2TxNumberInBatch", + "type": "uint16" + }, + { + "internalType": "bytes", + "name": "_message", + "type": "bytes" + }, + { + "internalType": "bytes32[]", + "name": "_merkleProof", + "type": "bytes32[]" + } + ], + "name": "finalizeEthWithdrawal", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [], + "name": "freezeDiamond", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [], + "name": "getAdmin", + "outputs": [ + { 
+ "internalType": "address", + "name": "", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getBaseToken", + "outputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getBaseTokenBridge", + "outputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getBridgehub", + "outputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getFirstUnprocessedPriorityTx", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getL2BootloaderBytecodeHash", + "outputs": [ + { + "internalType": "bytes32", + "name": "", + "type": "bytes32" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getL2DefaultAccountBytecodeHash", + "outputs": [ + { + "internalType": "bytes32", + "name": "", + "type": "bytes32" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getL2SystemContractsUpgradeBatchNumber", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getL2SystemContractsUpgradeTxHash", + "outputs": [ + { + "internalType": "bytes32", + "name": "", + "type": "bytes32" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getName", + "outputs": [ + { + "internalType": "string", + "name": "", + "type": "string" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": 
"getPendingAdmin", + "outputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getPriorityQueueSize", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getPriorityTxMaxGasLimit", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getProtocolVersion", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getPubdataPricingMode", + "outputs": [ + { + "internalType": "enum PubdataPricingMode", + "name": "", + "type": "uint8" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getStateTransitionManager", + "outputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getTotalBatchesCommitted", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getTotalBatchesExecuted", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getTotalBatchesVerified", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getTotalPriorityTxs", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { 
+ "inputs": [], + "name": "getVerifier", + "outputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getVerifierParams", + "outputs": [ + { + "components": [ + { + "internalType": "bytes32", + "name": "recursionNodeLevelVkHash", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "recursionLeafLevelVkHash", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "recursionCircuitsSetVksHash", + "type": "bytes32" + } + ], + "internalType": "struct VerifierParams", + "name": "", + "type": "tuple" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "isDiamondStorageFrozen", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "_l2BatchNumber", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "_l2MessageIndex", + "type": "uint256" + } + ], + "name": "isEthWithdrawalFinalized", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_facet", + "type": "address" + } + ], + "name": "isFacetFreezable", + "outputs": [ + { + "internalType": "bool", + "name": "isFreezable", + "type": "bool" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "bytes4", + "name": "_selector", + "type": "bytes4" + } + ], + "name": "isFunctionFreezable", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_address", + "type": "address" + } + ], + "name": "isValidator", + "outputs": [ + { + "internalType": 
"bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "_batchNumber", + "type": "uint256" + } + ], + "name": "l2LogsRootHash", + "outputs": [ + { + "internalType": "bytes32", + "name": "merkleRoot", + "type": "bytes32" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "_gasPrice", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "_l2GasLimit", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "_l2GasPerPubdataByteLimit", + "type": "uint256" + } + ], + "name": "l2TransactionBaseCost", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "priorityQueueFrontOperation", + "outputs": [ + { + "components": [ + { + "internalType": "bytes32", + "name": "canonicalTxHash", + "type": "bytes32" + }, + { + "internalType": "uint64", + "name": "expirationTimestamp", + "type": "uint64" + }, + { + "internalType": "uint192", + "name": "layer2Tip", + "type": "uint192" + } + ], + "internalType": "struct PriorityOperation", + "name": "", + "type": "tuple" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "components": [ + { + "internalType": "uint64", + "name": "batchNumber", + "type": "uint64" + }, + { + "internalType": "bytes32", + "name": "batchHash", + "type": "bytes32" + }, + { + "internalType": "uint64", + "name": "indexRepeatedStorageChanges", + "type": "uint64" + }, + { + "internalType": "uint256", + "name": "numberOfLayer1Txs", + "type": "uint256" + }, + { + "internalType": "bytes32", + "name": "priorityOperationsHash", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "l2LogsTreeRoot", + "type": "bytes32" + }, + { + "internalType": "uint256", + "name": "timestamp", + "type": 
"uint256" + }, + { + "internalType": "bytes32", + "name": "commitment", + "type": "bytes32" + } + ], + "internalType": "struct IExecutor.StoredBatchInfo", + "name": "_prevBatch", + "type": "tuple" + }, + { + "components": [ + { + "internalType": "uint64", + "name": "batchNumber", + "type": "uint64" + }, + { + "internalType": "bytes32", + "name": "batchHash", + "type": "bytes32" + }, + { + "internalType": "uint64", + "name": "indexRepeatedStorageChanges", + "type": "uint64" + }, + { + "internalType": "uint256", + "name": "numberOfLayer1Txs", + "type": "uint256" + }, + { + "internalType": "bytes32", + "name": "priorityOperationsHash", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "l2LogsTreeRoot", + "type": "bytes32" + }, + { + "internalType": "uint256", + "name": "timestamp", + "type": "uint256" + }, + { + "internalType": "bytes32", + "name": "commitment", + "type": "bytes32" + } + ], + "internalType": "struct IExecutor.StoredBatchInfo[]", + "name": "_committedBatches", + "type": "tuple[]" + }, + { + "components": [ + { + "internalType": "uint256[]", + "name": "recursiveAggregationInput", + "type": "uint256[]" + }, + { + "internalType": "uint256[]", + "name": "serializedProof", + "type": "uint256[]" + } + ], + "internalType": "struct IExecutor.ProofInput", + "name": "_proof", + "type": "tuple" + } + ], + "name": "proveBatches", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "_chainId", + "type": "uint256" + }, + { + "components": [ + { + "internalType": "uint64", + "name": "batchNumber", + "type": "uint64" + }, + { + "internalType": "bytes32", + "name": "batchHash", + "type": "bytes32" + }, + { + "internalType": "uint64", + "name": "indexRepeatedStorageChanges", + "type": "uint64" + }, + { + "internalType": "uint256", + "name": "numberOfLayer1Txs", + "type": "uint256" + }, + { + "internalType": "bytes32", + "name": "priorityOperationsHash", + "type": 
"bytes32" + }, + { + "internalType": "bytes32", + "name": "l2LogsTreeRoot", + "type": "bytes32" + }, + { + "internalType": "uint256", + "name": "timestamp", + "type": "uint256" + }, + { + "internalType": "bytes32", + "name": "commitment", + "type": "bytes32" + } + ], + "internalType": "struct IExecutor.StoredBatchInfo", + "name": "_prevBatch", + "type": "tuple" + }, + { + "components": [ + { + "internalType": "uint64", + "name": "batchNumber", + "type": "uint64" + }, + { + "internalType": "bytes32", + "name": "batchHash", + "type": "bytes32" + }, + { + "internalType": "uint64", + "name": "indexRepeatedStorageChanges", + "type": "uint64" + }, + { + "internalType": "uint256", + "name": "numberOfLayer1Txs", + "type": "uint256" + }, + { + "internalType": "bytes32", + "name": "priorityOperationsHash", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "l2LogsTreeRoot", + "type": "bytes32" + }, + { + "internalType": "uint256", + "name": "timestamp", + "type": "uint256" + }, + { + "internalType": "bytes32", + "name": "commitment", + "type": "bytes32" + } + ], + "internalType": "struct IExecutor.StoredBatchInfo[]", + "name": "_committedBatches", + "type": "tuple[]" + }, + { + "components": [ + { + "internalType": "uint256[]", + "name": "recursiveAggregationInput", + "type": "uint256[]" + }, + { + "internalType": "uint256[]", + "name": "serializedProof", + "type": "uint256[]" + } + ], + "internalType": "struct IExecutor.ProofInput", + "name": "_proof", + "type": "tuple" + } + ], + "name": "proveBatchesSharedBridge", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "bytes32", + "name": "_l2TxHash", + "type": "bytes32" + }, + { + "internalType": "uint256", + "name": "_l2BatchNumber", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "_l2MessageIndex", + "type": "uint256" + }, + { + "internalType": "uint16", + "name": "_l2TxNumberInBatch", + "type": "uint16" + }, + { + 
"internalType": "bytes32[]", + "name": "_merkleProof", + "type": "bytes32[]" + }, + { + "internalType": "enum TxStatus", + "name": "_status", + "type": "uint8" + } + ], + "name": "proveL1ToL2TransactionStatus", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "_batchNumber", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "_index", + "type": "uint256" + }, + { + "components": [ + { + "internalType": "uint8", + "name": "l2ShardId", + "type": "uint8" + }, + { + "internalType": "bool", + "name": "isService", + "type": "bool" + }, + { + "internalType": "uint16", + "name": "txNumberInBatch", + "type": "uint16" + }, + { + "internalType": "address", + "name": "sender", + "type": "address" + }, + { + "internalType": "bytes32", + "name": "key", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "value", + "type": "bytes32" + } + ], + "internalType": "struct L2Log", + "name": "_log", + "type": "tuple" + }, + { + "internalType": "bytes32[]", + "name": "_proof", + "type": "bytes32[]" + } + ], + "name": "proveL2LogInclusion", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "_batchNumber", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "_index", + "type": "uint256" + }, + { + "components": [ + { + "internalType": "uint16", + "name": "txNumberInBatch", + "type": "uint16" + }, + { + "internalType": "address", + "name": "sender", + "type": "address" + }, + { + "internalType": "bytes", + "name": "data", + "type": "bytes" + } + ], + "internalType": "struct L2Message", + "name": "_message", + "type": "tuple" + }, + { + "internalType": "bytes32[]", + "name": "_proof", + "type": "bytes32[]" + } + ], + "name": 
"proveL2MessageInclusion", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_contractL2", + "type": "address" + }, + { + "internalType": "uint256", + "name": "_l2Value", + "type": "uint256" + }, + { + "internalType": "bytes", + "name": "_calldata", + "type": "bytes" + }, + { + "internalType": "uint256", + "name": "_l2GasLimit", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "_l2GasPerPubdataByteLimit", + "type": "uint256" + }, + { + "internalType": "bytes[]", + "name": "_factoryDeps", + "type": "bytes[]" + }, + { + "internalType": "address", + "name": "_refundRecipient", + "type": "address" + } + ], + "name": "requestL2Transaction", + "outputs": [ + { + "internalType": "bytes32", + "name": "canonicalTxHash", + "type": "bytes32" + } + ], + "stateMutability": "payable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "_newLastBatch", + "type": "uint256" + } + ], + "name": "revertBatches", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "_chainId", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "_newLastBatch", + "type": "uint256" + } + ], + "name": "revertBatchesSharedBridge", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_newPendingAdmin", + "type": "address" + } + ], + "name": "setPendingAdmin", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "bool", + "name": "_zkPorterIsAvailable", + "type": "bool" + } + ], + "name": "setPorterAvailability", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": 
"_newPriorityTxMaxGasLimit", + "type": "uint256" + } + ], + "name": "setPriorityTxMaxGasLimit", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "enum PubdataPricingMode", + "name": "_pricingMode", + "type": "uint8" + } + ], + "name": "setPubdataPricingMode", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint128", + "name": "_nominator", + "type": "uint128" + }, + { + "internalType": "uint128", + "name": "_denominator", + "type": "uint128" + } + ], + "name": "setTokenMultiplier", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_transactionFilterer", + "type": "address" + } + ], + "name": "setTransactionFilterer", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_validator", + "type": "address" + }, + { + "internalType": "bool", + "name": "_active", + "type": "bool" + } + ], + "name": "setValidator", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "_batchNumber", + "type": "uint256" + } + ], + "name": "storedBatchHash", + "outputs": [ + { + "internalType": "bytes32", + "name": "", + "type": "bytes32" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "transferEthToSharedBridge", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [], + "name": "unfreezeDiamond", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "_protocolVersion", + "type": "uint256" + }, + { + "components": [ + { + "components": [ + { + "internalType": "address", + "name": "facet", + "type": "address" + }, + { + 
"internalType": "enum Diamond.Action", + "name": "action", + "type": "uint8" + }, + { + "internalType": "bool", + "name": "isFreezable", + "type": "bool" + }, + { + "internalType": "bytes4[]", + "name": "selectors", + "type": "bytes4[]" + } + ], + "internalType": "struct Diamond.FacetCut[]", + "name": "facetCuts", + "type": "tuple[]" + }, + { + "internalType": "address", + "name": "initAddress", + "type": "address" + }, + { + "internalType": "bytes", + "name": "initCalldata", + "type": "bytes" + } + ], + "internalType": "struct Diamond.DiamondCutData", + "name": "_cutData", + "type": "tuple" + } + ], + "name": "upgradeChainFromVersion", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + } + ] \ No newline at end of file diff --git a/via_verifier/lib/via_verification/examples/zksync-era-verification-cli/contract.rs b/via_verifier/lib/via_verification/examples/zksync-era-verification-cli/contract.rs new file mode 100644 index 000000000..58ac144b0 --- /dev/null +++ b/via_verifier/lib/via_verification/examples/zksync-era-verification-cli/contract.rs @@ -0,0 +1,113 @@ +use std::str::FromStr; + +use async_trait::async_trait; +use ethers::{ + abi::Abi, + contract::Contract, + prelude::{Address, Http, Provider}, + types::H256, +}; +use tracing::debug; +use via_verification::{ + errors::VerificationError, l1_data_fetcher::L1DataFetcher, proof::ViaZKProof, + public_inputs::generate_inputs, +}; + +use crate::fetching::{fetch_batch_protocol_version, fetch_l1_commit_data, fetch_proof_from_l1}; + +pub struct ContractConfig { + pub provider: Provider, + pub diamond_proxy_contract: Contract>, + pub verifier_contract_abi: Abi, +} + +impl ContractConfig { + pub fn new(l1_rpc_url: &str) -> Result { + let provider = Provider::::try_from(l1_rpc_url) + .map_err(|e| VerificationError::ProviderError(e.to_string()))?; + + let diamond_proxy_abi: Abi = serde_json::from_slice(include_bytes!("abis/IZkSync.json")) + .map_err(|e| VerificationError::Other(e.to_string()))?; 
+ let verifier_contract_abi: Abi = + serde_json::from_slice(include_bytes!("abis/IVerifier.json")) + .map_err(|e| VerificationError::Other(e.to_string()))?; + + // Diamond proxy contract address on mainnet. + let diamond_proxy_address = Address::from_str("0x32400084c286cf3e17e7b677ea9583e60a000324") + .map_err(|e| VerificationError::Other(e.to_string()))?; + + let diamond_proxy_contract = + Contract::new(diamond_proxy_address, diamond_proxy_abi, provider.clone()); + + Ok(Self { + provider, + diamond_proxy_contract, + verifier_contract_abi, + }) + } +} + +#[async_trait] +impl L1DataFetcher for ContractConfig { + async fn get_verification_key_hash( + &self, + block_number: u64, + ) -> Result { + let verifier_address: Address = self + .diamond_proxy_contract + .method::<_, Address>("getVerifier", ())? + .block(block_number) + .call() + .await + .map_err(|e| VerificationError::ContractError(e.to_string()))?; + + let verifier_contract = Contract::new( + verifier_address, + self.verifier_contract_abi.clone(), + self.provider.clone(), + ); + + let vk_hash: H256 = verifier_contract + .method::<_, H256>("verificationKeyHash", ())? 
+ .block(block_number) + .call() + .await + .map_err(|e| VerificationError::ContractError(e.to_string()))?; + + Ok(vk_hash) + } + + async fn get_protocol_version(&self, batch_number: u64) -> Result { + fetch_batch_protocol_version(batch_number).await + } + + async fn get_proof_from_l1( + &self, + batch_number: u64, + ) -> Result<(ViaZKProof, u64), VerificationError> { + let protocol_version = self.get_protocol_version(batch_number).await?; + let protocol_version_id = protocol_version.parse::().map_err(|_| { + VerificationError::FetchError("Failed to parse protocol version".to_string()) + })?; + debug!( + "Protocol version: {} for batch # {}", + protocol_version, batch_number + ); + + let (mut proof, block_number) = fetch_proof_from_l1( + batch_number, + self.provider.url().as_ref(), + protocol_version_id, + ) + .await?; + let batch_l1_data = + fetch_l1_commit_data(batch_number, self.provider.url().as_ref()).await?; + let inputs = generate_inputs( + &batch_l1_data.prev_batch_commitment, + &batch_l1_data.curr_batch_commitment, + ); + proof.proof.inputs = inputs.clone(); + + Ok((proof, block_number)) + } +} diff --git a/via_verifier/lib/via_verification/examples/zksync-era-verification-cli/fetching.rs b/via_verifier/lib/via_verification/examples/zksync-era-verification-cli/fetching.rs new file mode 100644 index 000000000..b6c451eb6 --- /dev/null +++ b/via_verifier/lib/via_verification/examples/zksync-era-verification-cli/fetching.rs @@ -0,0 +1,423 @@ +use std::str::FromStr; + +use circuit_definitions::{ + circuit_definitions::aux_layer::ZkSyncSnarkWrapperCircuit, + snark_wrapper::franklin_crypto::bellman::bn256::Bn256, +}; +use ethers::{ + abi::{ethabi, ethereum_types, Abi, Function, Token}, + contract::BaseContract, + providers::{Http, Middleware, Provider}, + types::TxHash, +}; +use once_cell::sync::Lazy; +use primitive_types::{H256, U256}; +use reqwest::StatusCode; +use via_verification::{ + crypto::deserialize_proof, errors::VerificationError, proof::ViaZKProof, 
types::BatchL1Data, +}; + +use crate::types::{JSONL2RPCResponse, JSONL2SyncRPCResponse, L1BatchRangeJson}; + +static BLOCK_COMMIT_EVENT_SIGNATURE: Lazy = Lazy::new(|| { + ethabi::long_signature( + "BlockCommit", + &[ + ethabi::ParamType::Uint(256), + ethabi::ParamType::FixedBytes(32), + ethabi::ParamType::FixedBytes(32), + ], + ) +}); + +// Fetches given batch information from Era RPC +pub async fn fetch_batch_protocol_version(batch_number: u64) -> Result { + tracing::info!( + "Fetching batch {} protocol version from zkSync Era mainnet", + batch_number + ); + + let domain = "https://mainnet.era.zksync.io"; + + let client = reqwest::Client::new(); + + let response = client + .post(domain) + .header("Content-Type", "application/json") + .body(format!( + r#"{{ + "jsonrpc": "2.0", + "method": "zks_getL1BatchBlockRange", + "params": [{}], + "id": "1" + }}"#, + batch_number + )) + .send() + .await?; + + if response.status().is_success() { + let json = response.json::().await?; + let l2_block_hex = json.result[0].clone(); + let without_prefix = l2_block_hex.trim_start_matches("0x"); + let l2_block = i64::from_str_radix(without_prefix, 16); + + let response_2 = client + .post(domain) + .header("Content-Type", "application/json") + .body(format!( + r#"{{ + "jsonrpc": "2.0", + "method": "en_syncL2Block", + "params": [{}, false], + "id": "1" + }}"#, + l2_block.unwrap() + )) + .send() + .await?; + + if response_2.status().is_success() { + let json_2 = response_2.json::().await?; + let version = json_2 + .result + .protocol_version + .strip_prefix("Version") + .unwrap(); + + tracing::info!("Batch {} has protocol version {}", batch_number, version); + + Ok(version.to_string()) + } else { + Err(VerificationError::Other( + "Failed to fetch protocol version".to_string(), + )) + } + } else { + Err(VerificationError::Other( + "Failed to fetch protocol version".to_string(), + )) + } +} + +// Fetches given batch information from Era RPC +pub async fn fetch_batch_commit_tx( + 
batch_number: u64, +) -> Result<(String, Option), VerificationError> { + tracing::info!( + "Fetching batch {} information from zkSync Era", + batch_number + ); + + let domain = "https://mainnet.era.zksync.io"; + let client = reqwest::Client::new(); + + let response = client + .post(domain) + .header("Content-Type", "application/json") + .body(format!( + r#"{{ + "jsonrpc": "2.0", + "method": "zks_getL1BatchDetails", + "params": [{}, false], + "id": "1" + }}"#, + batch_number + )) + .send() + .await?; + + if response.status().is_success() { + let json = response.json::().await?; + + Ok((json.result.commit_tx_hash, json.result.prove_tx_hash)) + } else { + Err(VerificationError::FetchError( + "Failed to fetch batch commit transaction".to_string(), + )) + } +} + +pub async fn fetch_l1_commit_data( + batch_number: u64, + rpc_url: &str, +) -> Result { + let client = Provider::::try_from(rpc_url).expect("Failed to connect to provider"); + + let contract_abi: Abi = Abi::load(&include_bytes!("abis/IZkSync.json")[..]).unwrap(); + let (function_name, fallback_fn_name) = ("commitBatchesSharedBridge", Some("commitBatches")); + + let function = contract_abi.functions_by_name(function_name).unwrap()[0].clone(); + let fallback_function = + fallback_fn_name.map(|fn_name| contract_abi.functions_by_name(fn_name).unwrap()[0].clone()); + + let previous_batch_number = batch_number - 1; + let address = + ethereum_types::Address::from_str("32400084c286cf3e17e7b677ea9583e60a000324").unwrap(); + + let mut roots = vec![]; + let mut l1_block_number = 0; + let mut prev_batch_commitment = H256::default(); + let mut curr_batch_commitment = H256::default(); + for b_number in [previous_batch_number, batch_number] { + let (commit_tx, _) = fetch_batch_commit_tx(b_number).await?; + + let tx = client + .get_transaction(TxHash::from_str(&commit_tx).unwrap()) + .await?; + + let tx = tx.unwrap(); + l1_block_number = tx.block_number.unwrap().as_u64(); + let calldata = tx.input.to_vec(); + + let found_data 
= + find_state_data_from_log(b_number, &function, fallback_function.clone(), &calldata)?; + + let found_data = found_data.unwrap(); + + let batch_commitment = client + .get_transaction_receipt(tx.hash) + .await? + .unwrap() + .logs + .iter() + .find(|log| { + log.address == address + && log.topics.len() == 4 + && log.topics[0] == *BLOCK_COMMIT_EVENT_SIGNATURE + && log.topics[1] == H256::from_low_u64_be(b_number) + }) + .map(|log| log.topics[3]); + + if batch_commitment.is_none() { + return Err(VerificationError::FetchError( + "Failed to find batch commitment".to_string(), + )); + } + + if b_number == previous_batch_number { + prev_batch_commitment = batch_commitment.unwrap(); + } else { + curr_batch_commitment = batch_commitment.unwrap(); + } + + roots.push(found_data); + } + + assert_eq!(roots.len(), 2); + + let (previous_enumeration_counter, previous_root) = roots[0].clone(); + let (new_enumeration_counter, new_root) = roots[1].clone(); + + tracing::info!( + "Will be verifying a proof for state transition from root {} to root {}", + format!("0x{}", hex::encode(&previous_root)), + format!("0x{}", hex::encode(&new_root)) + ); + + let base_contract: BaseContract = contract_abi.into(); + let contract_instance = base_contract.into_contract::>(address, client); + let bootloader_code_hash = contract_instance + .method::<_, H256>("getL2BootloaderBytecodeHash", ()) + .unwrap() + .block(l1_block_number) + .call() + .await + .unwrap(); + let default_aa_code_hash = contract_instance + .method::<_, H256>("getL2DefaultAccountBytecodeHash", ()) + .unwrap() + .block(l1_block_number) + .call() + .await + .unwrap(); + + tracing::info!( + "Will be using bootloader code hash {} and default AA code hash {}", + format!("0x{}", hex::encode(bootloader_code_hash.as_bytes())), + format!("0x{}", hex::encode(default_aa_code_hash.as_bytes())) + ); + let result = BatchL1Data { + previous_enumeration_counter, + previous_root, + new_enumeration_counter, + new_root, + bootloader_hash: 
*bootloader_code_hash.as_fixed_bytes(), + default_aa_hash: *default_aa_code_hash.as_fixed_bytes(), + prev_batch_commitment, + curr_batch_commitment, + }; + + Ok(result) +} + +fn find_state_data_from_log( + batch_number: u64, + function: &Function, + fallback_function: Option, + calldata: &[u8], +) -> Result)>, VerificationError> { + use ethers::abi; + + if calldata.len() < 5 { + return Err(VerificationError::FetchError( + "Calldata is too short".to_string(), + )); + } + + let mut parsed_input = function.decode_input(&calldata[4..]).unwrap_or_else(|_| { + fallback_function + .unwrap() + .decode_input(&calldata[4..]) + .unwrap() + }); + + let second_param = parsed_input.pop().unwrap(); + let first_param = parsed_input.pop().unwrap(); + + let abi::Token::Tuple(first_param) = first_param else { + return Err(VerificationError::FetchError( + "Failed to parse first param".to_string(), + )); + }; + + let abi::Token::Uint(_previous_l2_block_number) = first_param[0].clone() else { + return Err(VerificationError::FetchError( + "Failed to parse first param".to_string(), + )); + }; + // if _previous_l2_block_number.0[0] != batch_number { + // return Err(VerificationError::FetchError( + // "Batch number mismatch".to_string(), + // )); + // } + let abi::Token::Uint(previous_enumeration_index) = first_param[2].clone() else { + return Err(VerificationError::FetchError( + "Failed to parse second param".to_string(), + )); + }; + let _previous_enumeration_index = previous_enumeration_index.0[0]; + + let abi::Token::Array(inner) = second_param else { + return Err(VerificationError::FetchError( + "Failed to parse second param".to_string(), + )); + }; + + let mut found_params = None; + + for inner in inner.into_iter() { + let abi::Token::Tuple(inner) = inner else { + return Err(VerificationError::FetchError( + "Failed to parse inner tuple".to_string(), + )); + }; + let abi::Token::Uint(new_l2_block_number) = inner[0].clone() else { + return Err(VerificationError::FetchError( + "Failed to 
parse new l2 block number".to_string(), + )); + }; + let new_l2_block_number = new_l2_block_number.0[0]; + if new_l2_block_number == batch_number { + let abi::Token::Uint(new_enumeration_index) = inner[2].clone() else { + return Err(VerificationError::FetchError( + "Failed to parse new enumeration index".to_string(), + )); + }; + let new_enumeration_index = new_enumeration_index.0[0]; + + let abi::Token::FixedBytes(state_root) = inner[3].clone() else { + return Err(VerificationError::FetchError( + "Failed to parse state root".to_string(), + )); + }; + + assert_eq!(state_root.len(), 32); + + found_params = Some((new_enumeration_index, state_root)); + } else { + continue; + } + } + + Ok(found_params) +} + +pub(crate) async fn fetch_proof_from_l1( + batch_number: u64, + rpc_url: &str, + protocol_version: u16, +) -> Result<(ViaZKProof, u64), VerificationError> { + let client = Provider::::try_from(rpc_url).expect("Failed to connect to provider"); + + let contract_abi: Abi = Abi::load(&include_bytes!("abis/IZkSync.json")[..]).unwrap(); + + let function_name = if protocol_version < 23 { + "proveBatches" + } else { + "proveBatchesSharedBridge" + }; + + let function = contract_abi.functions_by_name(function_name).unwrap()[0].clone(); + + let (_, prove_tx) = fetch_batch_commit_tx(batch_number) + .await + .map_err(|_| StatusCode::BAD_REQUEST) + .unwrap(); + + if prove_tx.is_none() { + let msg = format!( + "Proof doesn't exist for batch {}, please try again soon. 
Exiting...", + batch_number, + ); + tracing::error!("{}", msg); + return Err(VerificationError::FetchError(msg)); + }; + + let tx = client + .get_transaction(TxHash::from_str(&prove_tx.unwrap()).unwrap()) + .await + .map_err(|_| StatusCode::BAD_REQUEST) + .unwrap() + .unwrap(); + + let l1_block_number = tx.block_number.unwrap().as_u64(); + let calldata = tx.input.to_vec(); + + let parsed_input = function.decode_input(&calldata[4..]).unwrap(); + + let Token::Tuple(proof) = parsed_input.as_slice().last().unwrap() else { + return Err(VerificationError::FetchError( + "Failed to parse proof from input".to_string(), + )); + }; + + assert_eq!(proof.len(), 2); + + let Token::Array(serialized_proof) = proof[1].clone() else { + return Err(VerificationError::FetchError( + "Failed to parse proof from input".to_string(), + )); + }; + + let proof = serialized_proof + .iter() + .filter_map(|e| { + if let Token::Uint(x) = e { + Some(*x) + } else { + None + } + }) + .collect::>(); + + if serialized_proof.is_empty() { + let msg = format!("Proof doesn't exist for batch {}, exiting...", batch_number,); + tracing::error!("{}", msg); + return Err(VerificationError::FetchError(msg)); + } + + let x: circuit_definitions::snark_wrapper::franklin_crypto::bellman::plonk::better_better_cs::proof::Proof = deserialize_proof(proof); + + Ok((ViaZKProof { proof: x }, l1_block_number)) +} diff --git a/via_verifier/lib/via_verification/examples/zksync-era-verification-cli/main.rs b/via_verifier/lib/via_verification/examples/zksync-era-verification-cli/main.rs new file mode 100644 index 000000000..c716d3323 --- /dev/null +++ b/via_verifier/lib/via_verification/examples/zksync-era-verification-cli/main.rs @@ -0,0 +1,52 @@ +mod contract; +mod fetching; +mod types; + +use clap::Parser; +use tracing::{error, info}; +use via_verification::{ + errors::VerificationError, l1_data_fetcher::L1DataFetcher, verification::verify_snark, +}; + +use crate::contract::ContractConfig; + +#[derive(Debug, Parser)] 
+#[command(author = "Via", version, about = "Boojum CLI verifier")] +struct Cli { + /// Batch number to check proof for + #[arg(long, default_value = "493000")] + batch: u64, +} + +#[tokio::main] +async fn main() -> Result<(), VerificationError> { + tracing_subscriber::fmt() + .with_max_level(tracing::Level::INFO) + .init(); + + let args = Cli::parse(); + + let batch_number = args.batch; + let l1_rpc = "https://rpc.ankr.com/eth".to_string(); + + info!( + "Starting Boojum CLI verifier with config: l1_rpc={}; batch #{}", + l1_rpc, batch_number + ); + + let contract = ContractConfig::new(&l1_rpc)?; + + let (proof, block_number) = contract.get_proof_from_l1(batch_number).await?; + + // Verify the proof + let verify_resp = verify_snark(&contract, proof, batch_number, block_number).await; + + if let Ok(input) = verify_resp { + info!("VERIFIED"); + info!("Public input: {}", input); + } else { + error!("Failed to verify proof due to an error : {:?}", verify_resp); + } + + Ok(()) +} diff --git a/via_verifier/lib/via_verification/examples/zksync-era-verification-cli/types.rs b/via_verifier/lib/via_verification/examples/zksync-era-verification-cli/types.rs new file mode 100644 index 000000000..ff75565b1 --- /dev/null +++ b/via_verifier/lib/via_verification/examples/zksync-era-verification-cli/types.rs @@ -0,0 +1,30 @@ +use serde::{Deserialize, Serialize}; + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct L1BatchRangeJson { + pub result: Vec, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct JSONL2SyncRPCResponse { + pub result: L2SyncDetails, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct L2SyncDetails { + #[serde(rename = "protocolVersion")] + pub protocol_version: String, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct JSONL2RPCResponse { + pub result: L1BatchJson, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct L1BatchJson { + #[serde(rename = "commitTxHash")] + pub commit_tx_hash: String, 
+ #[serde(rename = "proveTxHash")] + pub prove_tx_hash: Option, +} diff --git a/via_verifier/lib/via_verification/keys/protocol_version/24/scheduler_key.json b/via_verifier/lib/via_verification/keys/protocol_version/24/scheduler_key.json new file mode 100644 index 000000000..2e02eafe6 --- /dev/null +++ b/via_verifier/lib/via_verification/keys/protocol_version/24/scheduler_key.json @@ -0,0 +1,399 @@ +{ + "n": 16777215, + "num_inputs": 1, + "state_width": 4, + "num_witness_polys": 0, + "gate_setup_commitments": [ + { + "x": [ + 14543631136906534221, + 11532161447842416044, + 11114175029926010938, + 1228896787564295039 + ], + "y": [ + 13293602262342424489, + 8897930584356943159, + 13256028170406220369, + 3214939367598363288 + ], + "infinity": false + }, + { + "x": [ + 11488992528554025682, + 12016824828223971094, + 11942004360057333370, + 316831626296641307 + ], + "y": [ + 304673622018339856, + 7139037552557818730, + 12475560967982555143, + 1055588351918295250 + ], + "infinity": false + }, + { + "x": [ + 2274984630539920017, + 5398167177582250136, + 16440396753384808945, + 1037682586893548769 + ], + "y": [ + 10168660308952593373, + 16526369642614237721, + 569062739734175056, + 155645558476901406 + ], + "infinity": false + }, + { + "x": [ + 14005362797509427677, + 2662603874351919260, + 14261489165672308143, + 1470528288349794782 + ], + "y": [ + 11144229651170108862, + 11439490264313454962, + 114993091474760680, + 1037267173208738614 + ], + "infinity": false + }, + { + "x": [ + 10726125240955612787, + 1916320162213728495, + 1058608086768277905, + 1651114031905829493 + ], + "y": [ + 13237242732587628574, + 4774776044666137690, + 14401013098807103799, + 2514139699916115771 + ], + "infinity": false + }, + { + "x": [ + 14434760601334248377, + 5316938318287831815, + 6221098547630910324, + 980422841280734466 + ], + "y": [ + 9201886393750447942, + 3840149540273146267, + 18179910191622136829, + 1563809864380914603 + ], + "infinity": false + }, + { + "x": [ + 
9586697317366528906, + 2325800863365957883, + 1243781259615311278, + 3048012003267036960 + ], + "y": [ + 612821620743617231, + 1510385666449513894, + 9368337288452385056, + 2949736812933507034 + ], + "infinity": false + }, + { + "x": [ + 11830690209042008764, + 11761396005838073769, + 18271188400274886574, + 2896734446482773484 + ], + "y": [ + 1890606551566554401, + 10220931290312275762, + 3256711195869515344, + 2466626485328709457 + ], + "infinity": false + } + ], + "gate_selectors_commitments": [ + { + "x": [ + 10865727529243127085, + 4083978853392244827, + 14303622309482785753, + 2263042021033673595 + ], + "y": [ + 3019601017411802529, + 880444282195426618, + 9998743525359587628, + 2891421025832200233 + ], + "infinity": false + }, + { + "x": [ + 5208608554346323426, + 8575970970223832576, + 2966209169082345602, + 239576408267301488 + ], + "y": [ + 17715084817752316452, + 2726293100894160682, + 17920596859559317135, + 3485576345363305439 + ], + "infinity": false + } + ], + "permutation_commitments": [ + { + "x": [ + 14761045450946573029, + 17157644513453531531, + 2555518804134782053, + 1415819224310783987 + ], + "y": [ + 17265629196749977462, + 4128711855633066822, + 8435602817910411328, + 1408116296902303196 + ], + "infinity": false + }, + { + "x": [ + 3307267823832528482, + 2406249680085831639, + 9091964031261402109, + 2846274000290842933 + ], + "y": [ + 17374905554931807856, + 6690578002079222163, + 11809376320193686210, + 2676076649992974574 + ], + "infinity": false + }, + { + "x": [ + 3159118708748226574, + 5508845413629697013, + 13350869305506486049, + 689297560178790472 + ], + "y": [ + 15696011303896469684, + 12551611148155235140, + 14438660833518031207, + 425021756161657108 + ], + "infinity": false + }, + { + "x": [ + 18349397811516917436, + 4473982696343317918, + 13070312540813307819, + 2109468484629113245 + ], + "y": [ + 13254534552549721008, + 17388411854346636521, + 17875890960520499518, + 1062184221180884481 + ], + "infinity": false + } + ], + 
"total_lookup_entries_length": 1787472, + "lookup_selector_commitment": { + "x": [ + 9324906502432882695, + 14977861238256290580, + 12538013124354067293, + 3408438202312564138 + ], + "y": [ + 14942105932194201701, + 12210090881357612547, + 14774705021036784261, + 2531694948512337448 + ], + "infinity": false + }, + "lookup_tables_commitments": [ + { + "x": [ + 10873859091125335643, + 3906092213625635374, + 17046157606087980048, + 3193402705223440293 + ], + "y": [ + 10158946293873382504, + 2171386304067884865, + 6918663094168980658, + 350601565475975409 + ], + "infinity": false + }, + { + "x": [ + 12822112641313049260, + 3646552465186399021, + 10324071010773924047, + 2209084192380614662 + ], + "y": [ + 11045141628975531869, + 12589678537679955590, + 3065046617868727674, + 2099447669854151830 + ], + "infinity": false + }, + { + "x": [ + 11395032673621937545, + 3000063650268118516, + 7857619430005721792, + 805706808484810738 + ], + "y": [ + 6817063666434679427, + 1646386051225388537, + 4677946977082722827, + 1369650305976868514 + ], + "infinity": false + }, + { + "x": [ + 2885179371868476351, + 159944842081142878, + 6092294387055034894, + 213843603626505240 + ], + "y": [ + 11868113133779277990, + 8509646480531194854, + 14088068011597639414, + 707070630614027545 + ], + "infinity": false + } + ], + "lookup_table_type_commitment": { + "x": [ + 1732877442096985191, + 7537030715658833452, + 14073502080301311448, + 2178792007727681099 + ], + "y": [ + 8513095304113652904, + 6581396660744182779, + 13939755637576387431, + 2477157044961106453 + ], + "infinity": false + }, + "non_residues": [ + [ + 5, + 0, + 0, + 0 + ], + [ + 7, + 0, + 0, + 0 + ], + [ + 10, + 0, + 0, + 0 + ] + ], + "g2_elements": [ + { + "x": { + "c0": [ + 5106727233969649389, + 7440829307424791261, + 4785637993704342649, + 1729627375292849782 + ], + "c1": [ + 10945020018377822914, + 17413811393473931026, + 8241798111626485029, + 1841571559660931130 + ] + }, + "y": { + "c0": [ + 5541340697920699818, + 
16416156555105522555, + 5380518976772849807, + 1353435754470862315 + ], + "c1": [ + 6173549831154472795, + 13567992399387660019, + 17050234209342075797, + 650358724130500725 + ] + }, + "infinity": false + }, + { + "x": { + "c0": [ + 9089143573911733168, + 11482283522806384523, + 13585589533905622862, + 79029415676722370 + ], + "c1": [ + 5692040832573735873, + 16884514497384809355, + 16717166481813659368, + 2742131088506155463 + ] + }, + "y": { + "c0": [ + 9604638503594647125, + 1289961608472612514, + 6217038149984805214, + 2521661352385209130 + ], + "c1": [ + 17168069778630926308, + 11309277837895768996, + 15154989611154567813, + 359271377050603491 + ] + }, + "infinity": false + } + ] + } \ No newline at end of file diff --git a/via_verifier/lib/via_verification/keys/protocol_version/25/scheduler_key.json b/via_verifier/lib/via_verification/keys/protocol_version/25/scheduler_key.json new file mode 100644 index 000000000..25921368c --- /dev/null +++ b/via_verifier/lib/via_verification/keys/protocol_version/25/scheduler_key.json @@ -0,0 +1,399 @@ +{ + "n": 16777215, + "num_inputs": 1, + "state_width": 4, + "num_witness_polys": 0, + "gate_setup_commitments": [ + { + "x": [ + 14543631136906534221, + 11532161447842416044, + 11114175029926010938, + 1228896787564295039 + ], + "y": [ + 13293602262342424489, + 8897930584356943159, + 13256028170406220369, + 3214939367598363288 + ], + "infinity": false + }, + { + "x": [ + 11488992528554025682, + 12016824828223971094, + 11942004360057333370, + 316831626296641307 + ], + "y": [ + 304673622018339856, + 7139037552557818730, + 12475560967982555143, + 1055588351918295250 + ], + "infinity": false + }, + { + "x": [ + 2274984630539920017, + 5398167177582250136, + 16440396753384808945, + 1037682586893548769 + ], + "y": [ + 10168660308952593373, + 16526369642614237721, + 569062739734175056, + 155645558476901406 + ], + "infinity": false + }, + { + "x": [ + 14005362797509427677, + 2662603874351919260, + 14261489165672308143, + 
1470528288349794782 + ], + "y": [ + 11144229651170108862, + 11439490264313454962, + 114993091474760680, + 1037267173208738614 + ], + "infinity": false + }, + { + "x": [ + 10726125240955612787, + 1916320162213728495, + 1058608086768277905, + 1651114031905829493 + ], + "y": [ + 13237242732587628574, + 4774776044666137690, + 14401013098807103799, + 2514139699916115771 + ], + "infinity": false + }, + { + "x": [ + 14434760601334248377, + 5316938318287831815, + 6221098547630910324, + 980422841280734466 + ], + "y": [ + 9201886393750447942, + 3840149540273146267, + 18179910191622136829, + 1563809864380914603 + ], + "infinity": false + }, + { + "x": [ + 9586697317366528906, + 2325800863365957883, + 1243781259615311278, + 3048012003267036960 + ], + "y": [ + 612821620743617231, + 1510385666449513894, + 9368337288452385056, + 2949736812933507034 + ], + "infinity": false + }, + { + "x": [ + 11830690209042008764, + 11761396005838073769, + 18271188400274886574, + 2896734446482773484 + ], + "y": [ + 1890606551566554401, + 10220931290312275762, + 3256711195869515344, + 2466626485328709457 + ], + "infinity": false + } + ], + "gate_selectors_commitments": [ + { + "x": [ + 10865727529243127085, + 4083978853392244827, + 14303622309482785753, + 2263042021033673595 + ], + "y": [ + 3019601017411802529, + 880444282195426618, + 9998743525359587628, + 2891421025832200233 + ], + "infinity": false + }, + { + "x": [ + 5208608554346323426, + 8575970970223832576, + 2966209169082345602, + 239576408267301488 + ], + "y": [ + 17715084817752316452, + 2726293100894160682, + 17920596859559317135, + 3485576345363305439 + ], + "infinity": false + } + ], + "permutation_commitments": [ + { + "x": [ + 14761045450946573029, + 17157644513453531531, + 2555518804134782053, + 1415819224310783987 + ], + "y": [ + 17265629196749977462, + 4128711855633066822, + 8435602817910411328, + 1408116296902303196 + ], + "infinity": false + }, + { + "x": [ + 3307267823832528482, + 2406249680085831639, + 9091964031261402109, + 
2846274000290842933 + ], + "y": [ + 17374905554931807856, + 6690578002079222163, + 11809376320193686210, + 2676076649992974574 + ], + "infinity": false + }, + { + "x": [ + 3159118708748226574, + 5508845413629697013, + 13350869305506486049, + 689297560178790472 + ], + "y": [ + 15696011303896469684, + 12551611148155235140, + 14438660833518031207, + 425021756161657108 + ], + "infinity": false + }, + { + "x": [ + 18349397811516917436, + 4473982696343317918, + 13070312540813307819, + 2109468484629113245 + ], + "y": [ + 13254534552549721008, + 17388411854346636521, + 17875890960520499518, + 1062184221180884481 + ], + "infinity": false + } + ], + "total_lookup_entries_length": 1787472, + "lookup_selector_commitment": { + "x": [ + 9324906502432882695, + 14977861238256290580, + 12538013124354067293, + 3408438202312564138 + ], + "y": [ + 14942105932194201701, + 12210090881357612547, + 14774705021036784261, + 2531694948512337448 + ], + "infinity": false + }, + "lookup_tables_commitments": [ + { + "x": [ + 10873859091125335643, + 3906092213625635374, + 17046157606087980048, + 3193402705223440293 + ], + "y": [ + 10158946293873382504, + 2171386304067884865, + 6918663094168980658, + 350601565475975409 + ], + "infinity": false + }, + { + "x": [ + 12822112641313049260, + 3646552465186399021, + 10324071010773924047, + 2209084192380614662 + ], + "y": [ + 11045141628975531869, + 12589678537679955590, + 3065046617868727674, + 2099447669854151830 + ], + "infinity": false + }, + { + "x": [ + 11395032673621937545, + 3000063650268118516, + 7857619430005721792, + 805706808484810738 + ], + "y": [ + 6817063666434679427, + 1646386051225388537, + 4677946977082722827, + 1369650305976868514 + ], + "infinity": false + }, + { + "x": [ + 2885179371868476351, + 159944842081142878, + 6092294387055034894, + 213843603626505240 + ], + "y": [ + 11868113133779277990, + 8509646480531194854, + 14088068011597639414, + 707070630614027545 + ], + "infinity": false + } + ], + "lookup_table_type_commitment": { + 
"x": [ + 1732877442096985191, + 7537030715658833452, + 14073502080301311448, + 2178792007727681099 + ], + "y": [ + 8513095304113652904, + 6581396660744182779, + 13939755637576387431, + 2477157044961106453 + ], + "infinity": false + }, + "non_residues": [ + [ + 5, + 0, + 0, + 0 + ], + [ + 7, + 0, + 0, + 0 + ], + [ + 10, + 0, + 0, + 0 + ] + ], + "g2_elements": [ + { + "x": { + "c0": [ + 5106727233969649389, + 7440829307424791261, + 4785637993704342649, + 1729627375292849782 + ], + "c1": [ + 10945020018377822914, + 17413811393473931026, + 8241798111626485029, + 1841571559660931130 + ] + }, + "y": { + "c0": [ + 5541340697920699818, + 16416156555105522555, + 5380518976772849807, + 1353435754470862315 + ], + "c1": [ + 6173549831154472795, + 13567992399387660019, + 17050234209342075797, + 650358724130500725 + ] + }, + "infinity": false + }, + { + "x": { + "c0": [ + 9089143573911733168, + 11482283522806384523, + 13585589533905622862, + 79029415676722370 + ], + "c1": [ + 5692040832573735873, + 16884514497384809355, + 16717166481813659368, + 2742131088506155463 + ] + }, + "y": { + "c0": [ + 9604638503594647125, + 1289961608472612514, + 6217038149984805214, + 2521661352385209130 + ], + "c1": [ + 17168069778630926308, + 11309277837895768996, + 15154989611154567813, + 359271377050603491 + ] + }, + "infinity": false + } + ] +} diff --git a/via_verifier/lib/via_verification/keys/protocol_version/26/scheduler_key.json b/via_verifier/lib/via_verification/keys/protocol_version/26/scheduler_key.json new file mode 100644 index 000000000..2e02eafe6 --- /dev/null +++ b/via_verifier/lib/via_verification/keys/protocol_version/26/scheduler_key.json @@ -0,0 +1,399 @@ +{ + "n": 16777215, + "num_inputs": 1, + "state_width": 4, + "num_witness_polys": 0, + "gate_setup_commitments": [ + { + "x": [ + 14543631136906534221, + 11532161447842416044, + 11114175029926010938, + 1228896787564295039 + ], + "y": [ + 13293602262342424489, + 8897930584356943159, + 13256028170406220369, + 3214939367598363288 
+ ], + "infinity": false + }, + { + "x": [ + 11488992528554025682, + 12016824828223971094, + 11942004360057333370, + 316831626296641307 + ], + "y": [ + 304673622018339856, + 7139037552557818730, + 12475560967982555143, + 1055588351918295250 + ], + "infinity": false + }, + { + "x": [ + 2274984630539920017, + 5398167177582250136, + 16440396753384808945, + 1037682586893548769 + ], + "y": [ + 10168660308952593373, + 16526369642614237721, + 569062739734175056, + 155645558476901406 + ], + "infinity": false + }, + { + "x": [ + 14005362797509427677, + 2662603874351919260, + 14261489165672308143, + 1470528288349794782 + ], + "y": [ + 11144229651170108862, + 11439490264313454962, + 114993091474760680, + 1037267173208738614 + ], + "infinity": false + }, + { + "x": [ + 10726125240955612787, + 1916320162213728495, + 1058608086768277905, + 1651114031905829493 + ], + "y": [ + 13237242732587628574, + 4774776044666137690, + 14401013098807103799, + 2514139699916115771 + ], + "infinity": false + }, + { + "x": [ + 14434760601334248377, + 5316938318287831815, + 6221098547630910324, + 980422841280734466 + ], + "y": [ + 9201886393750447942, + 3840149540273146267, + 18179910191622136829, + 1563809864380914603 + ], + "infinity": false + }, + { + "x": [ + 9586697317366528906, + 2325800863365957883, + 1243781259615311278, + 3048012003267036960 + ], + "y": [ + 612821620743617231, + 1510385666449513894, + 9368337288452385056, + 2949736812933507034 + ], + "infinity": false + }, + { + "x": [ + 11830690209042008764, + 11761396005838073769, + 18271188400274886574, + 2896734446482773484 + ], + "y": [ + 1890606551566554401, + 10220931290312275762, + 3256711195869515344, + 2466626485328709457 + ], + "infinity": false + } + ], + "gate_selectors_commitments": [ + { + "x": [ + 10865727529243127085, + 4083978853392244827, + 14303622309482785753, + 2263042021033673595 + ], + "y": [ + 3019601017411802529, + 880444282195426618, + 9998743525359587628, + 2891421025832200233 + ], + "infinity": false + }, + { + 
"x": [ + 5208608554346323426, + 8575970970223832576, + 2966209169082345602, + 239576408267301488 + ], + "y": [ + 17715084817752316452, + 2726293100894160682, + 17920596859559317135, + 3485576345363305439 + ], + "infinity": false + } + ], + "permutation_commitments": [ + { + "x": [ + 14761045450946573029, + 17157644513453531531, + 2555518804134782053, + 1415819224310783987 + ], + "y": [ + 17265629196749977462, + 4128711855633066822, + 8435602817910411328, + 1408116296902303196 + ], + "infinity": false + }, + { + "x": [ + 3307267823832528482, + 2406249680085831639, + 9091964031261402109, + 2846274000290842933 + ], + "y": [ + 17374905554931807856, + 6690578002079222163, + 11809376320193686210, + 2676076649992974574 + ], + "infinity": false + }, + { + "x": [ + 3159118708748226574, + 5508845413629697013, + 13350869305506486049, + 689297560178790472 + ], + "y": [ + 15696011303896469684, + 12551611148155235140, + 14438660833518031207, + 425021756161657108 + ], + "infinity": false + }, + { + "x": [ + 18349397811516917436, + 4473982696343317918, + 13070312540813307819, + 2109468484629113245 + ], + "y": [ + 13254534552549721008, + 17388411854346636521, + 17875890960520499518, + 1062184221180884481 + ], + "infinity": false + } + ], + "total_lookup_entries_length": 1787472, + "lookup_selector_commitment": { + "x": [ + 9324906502432882695, + 14977861238256290580, + 12538013124354067293, + 3408438202312564138 + ], + "y": [ + 14942105932194201701, + 12210090881357612547, + 14774705021036784261, + 2531694948512337448 + ], + "infinity": false + }, + "lookup_tables_commitments": [ + { + "x": [ + 10873859091125335643, + 3906092213625635374, + 17046157606087980048, + 3193402705223440293 + ], + "y": [ + 10158946293873382504, + 2171386304067884865, + 6918663094168980658, + 350601565475975409 + ], + "infinity": false + }, + { + "x": [ + 12822112641313049260, + 3646552465186399021, + 10324071010773924047, + 2209084192380614662 + ], + "y": [ + 11045141628975531869, + 12589678537679955590, 
+ 3065046617868727674, + 2099447669854151830 + ], + "infinity": false + }, + { + "x": [ + 11395032673621937545, + 3000063650268118516, + 7857619430005721792, + 805706808484810738 + ], + "y": [ + 6817063666434679427, + 1646386051225388537, + 4677946977082722827, + 1369650305976868514 + ], + "infinity": false + }, + { + "x": [ + 2885179371868476351, + 159944842081142878, + 6092294387055034894, + 213843603626505240 + ], + "y": [ + 11868113133779277990, + 8509646480531194854, + 14088068011597639414, + 707070630614027545 + ], + "infinity": false + } + ], + "lookup_table_type_commitment": { + "x": [ + 1732877442096985191, + 7537030715658833452, + 14073502080301311448, + 2178792007727681099 + ], + "y": [ + 8513095304113652904, + 6581396660744182779, + 13939755637576387431, + 2477157044961106453 + ], + "infinity": false + }, + "non_residues": [ + [ + 5, + 0, + 0, + 0 + ], + [ + 7, + 0, + 0, + 0 + ], + [ + 10, + 0, + 0, + 0 + ] + ], + "g2_elements": [ + { + "x": { + "c0": [ + 5106727233969649389, + 7440829307424791261, + 4785637993704342649, + 1729627375292849782 + ], + "c1": [ + 10945020018377822914, + 17413811393473931026, + 8241798111626485029, + 1841571559660931130 + ] + }, + "y": { + "c0": [ + 5541340697920699818, + 16416156555105522555, + 5380518976772849807, + 1353435754470862315 + ], + "c1": [ + 6173549831154472795, + 13567992399387660019, + 17050234209342075797, + 650358724130500725 + ] + }, + "infinity": false + }, + { + "x": { + "c0": [ + 9089143573911733168, + 11482283522806384523, + 13585589533905622862, + 79029415676722370 + ], + "c1": [ + 5692040832573735873, + 16884514497384809355, + 16717166481813659368, + 2742131088506155463 + ] + }, + "y": { + "c0": [ + 9604638503594647125, + 1289961608472612514, + 6217038149984805214, + 2521661352385209130 + ], + "c1": [ + 17168069778630926308, + 11309277837895768996, + 15154989611154567813, + 359271377050603491 + ] + }, + "infinity": false + } + ] + } \ No newline at end of file diff --git 
a/via_verifier/lib/via_verification/src/crypto.rs b/via_verifier/lib/via_verification/src/crypto.rs new file mode 100644 index 000000000..4211fecdd --- /dev/null +++ b/via_verifier/lib/via_verification/src/crypto.rs @@ -0,0 +1,293 @@ +use circuit_definitions::{ + boojum::pairing::bn256::Fq, + ethereum_types::U256, + snark_wrapper::franklin_crypto::bellman::{ + bn256::{Bn256, Fr}, + plonk::better_better_cs::{ + cs::{Circuit, VerificationKey}, + proof::Proof, + }, + CurveAffine, Engine, PrimeField, PrimeFieldRepr, + }, +}; +use primitive_types::H256; +use sha3::{Digest, Keccak256}; + +/// Transforms a U256 element into a prime field element. +fn u256_to_scalar(el: &U256) -> F +where + F::Repr: PrimeFieldRepr + Default, +{ + let mut bytes = [0u8; 32]; + el.to_big_endian(&mut bytes); + + let mut repr = F::Repr::default(); + repr.read_be(&bytes[..]) + .expect("Failed to read bytes into field representation"); + + F::from_repr(repr).expect("Failed to convert U256 to scalar") +} + +/// Transforms a point represented as a pair of U256 into its affine representation. +fn deserialize_g1(point: (U256, U256)) -> ::G1Affine { + if point == (U256::zero(), U256::zero()) { + return ::G1Affine::zero(); + } + + let x_scalar = u256_to_scalar(&point.0); + let y_scalar = u256_to_scalar(&point.1); + + ::G1Affine::from_xy_unchecked(x_scalar, y_scalar) +} + +/// Transforms a field element represented as U256 into the field representation. +fn deserialize_fe(felt: U256) -> Fr { + u256_to_scalar(&felt) +} + +/// Deserializes a proof from a vector of U256 elements. 
+pub fn deserialize_proof>(mut proof: Vec) -> Proof { + let opening_proof_at_z_omega = { + let y = proof + .pop() + .expect("Missing y-coordinate for opening_proof_at_z_omega"); + let x = proof + .pop() + .expect("Missing x-coordinate for opening_proof_at_z_omega"); + deserialize_g1((x, y)) + }; + + let opening_proof_at_z = { + let y = proof + .pop() + .expect("Missing y-coordinate for opening_proof_at_z"); + let x = proof + .pop() + .expect("Missing x-coordinate for opening_proof_at_z"); + deserialize_g1((x, y)) + }; + + let linearization_poly_opening_at_z = deserialize_fe( + proof + .pop() + .expect("Missing linearization_poly_opening_at_z"), + ); + let quotient_poly_opening_at_z = + deserialize_fe(proof.pop().expect("Missing quotient_poly_opening_at_z")); + let lookup_table_type_poly_opening_at_z = deserialize_fe( + proof + .pop() + .expect("Missing lookup_table_type_poly_opening_at_z"), + ); + let lookup_selector_poly_opening_at_z = deserialize_fe( + proof + .pop() + .expect("Missing lookup_selector_poly_opening_at_z"), + ); + let lookup_t_poly_opening_at_z_omega = deserialize_fe( + proof + .pop() + .expect("Missing lookup_t_poly_opening_at_z_omega"), + ); + let lookup_t_poly_opening_at_z = + deserialize_fe(proof.pop().expect("Missing lookup_t_poly_opening_at_z")); + let lookup_grand_product_opening_at_z_omega = deserialize_fe( + proof + .pop() + .expect("Missing lookup_grand_product_opening_at_z_omega"), + ); + let lookup_s_poly_opening_at_z_omega = deserialize_fe( + proof + .pop() + .expect("Missing lookup_s_poly_opening_at_z_omega"), + ); + let copy_permutation_grand_product_opening_at_z_omega = deserialize_fe( + proof + .pop() + .expect("Missing copy_permutation_grand_product_opening_at_z_omega"), + ); + + let mut copy_permutation_polys_openings_at_z = vec![]; + for _ in 0..3 { + copy_permutation_polys_openings_at_z.push(deserialize_fe( + proof + .pop() + .expect("Missing copy_permutation_polys_openings_at_z"), + )); + } + 
copy_permutation_polys_openings_at_z.reverse(); + + let gate_selectors_openings_at_z = vec![( + 0_usize, + deserialize_fe(proof.pop().expect("Missing gate_selectors_openings_at_z")), + )]; + + let state_polys_openings_at_dilations = { + let fe = deserialize_fe( + proof + .pop() + .expect("Missing state_polys_openings_at_dilations"), + ); + vec![(1_usize, 3_usize, fe)] + }; + + let mut state_polys_openings_at_z = vec![]; + for _ in 0..4 { + state_polys_openings_at_z.push(deserialize_fe( + proof.pop().expect("Missing state_polys_openings_at_z"), + )); + } + state_polys_openings_at_z.reverse(); + + let mut quotient_poly_parts_commitments = vec![]; + for _ in 0..4 { + let y = proof + .pop() + .expect("Missing y-coordinate for quotient_poly_parts_commitments"); + let x = proof + .pop() + .expect("Missing x-coordinate for quotient_poly_parts_commitments"); + quotient_poly_parts_commitments.push(deserialize_g1((x, y))); + } + quotient_poly_parts_commitments.reverse(); + + let lookup_grand_product_commitment = { + let y = proof + .pop() + .expect("Missing y-coordinate for lookup_grand_product_commitment"); + let x = proof + .pop() + .expect("Missing x-coordinate for lookup_grand_product_commitment"); + deserialize_g1((x, y)) + }; + + let lookup_s_poly_commitment = { + let y = proof + .pop() + .expect("Missing y-coordinate for lookup_s_poly_commitment"); + let x = proof + .pop() + .expect("Missing x-coordinate for lookup_s_poly_commitment"); + deserialize_g1((x, y)) + }; + + let copy_permutation_grand_product_commitment = { + let y = proof + .pop() + .expect("Missing y-coordinate for copy_permutation_grand_product_commitment"); + let x = proof + .pop() + .expect("Missing x-coordinate for copy_permutation_grand_product_commitment"); + deserialize_g1((x, y)) + }; + + let mut state_polys_commitments = vec![]; + for _ in 0..4 { + let y = proof + .pop() + .expect("Missing y-coordinate for state_polys_commitments"); + let x = proof + .pop() + .expect("Missing x-coordinate for 
state_polys_commitments"); + state_polys_commitments.push(deserialize_g1((x, y))); + } + state_polys_commitments.reverse(); + + let mut proof_obj: Proof = Proof::empty(); + + proof_obj.state_polys_commitments = state_polys_commitments; + proof_obj.copy_permutation_grand_product_commitment = copy_permutation_grand_product_commitment; + proof_obj.lookup_s_poly_commitment = Some(lookup_s_poly_commitment); + proof_obj.lookup_grand_product_commitment = Some(lookup_grand_product_commitment); + proof_obj.quotient_poly_parts_commitments = quotient_poly_parts_commitments; + proof_obj.state_polys_openings_at_z = state_polys_openings_at_z; + proof_obj.state_polys_openings_at_dilations = state_polys_openings_at_dilations; + proof_obj.gate_selectors_openings_at_z = gate_selectors_openings_at_z; + proof_obj.copy_permutation_polys_openings_at_z = copy_permutation_polys_openings_at_z; + proof_obj.copy_permutation_grand_product_opening_at_z_omega = + copy_permutation_grand_product_opening_at_z_omega; + proof_obj.lookup_s_poly_opening_at_z_omega = Some(lookup_s_poly_opening_at_z_omega); + proof_obj.lookup_grand_product_opening_at_z_omega = + Some(lookup_grand_product_opening_at_z_omega); + proof_obj.lookup_t_poly_opening_at_z = Some(lookup_t_poly_opening_at_z); + proof_obj.lookup_t_poly_opening_at_z_omega = Some(lookup_t_poly_opening_at_z_omega); + proof_obj.lookup_selector_poly_opening_at_z = Some(lookup_selector_poly_opening_at_z); + proof_obj.lookup_table_type_poly_opening_at_z = Some(lookup_table_type_poly_opening_at_z); + proof_obj.quotient_poly_opening_at_z = quotient_poly_opening_at_z; + proof_obj.linearization_poly_opening_at_z = linearization_poly_opening_at_z; + proof_obj.opening_proof_at_z = opening_proof_at_z; + proof_obj.opening_proof_at_z_omega = opening_proof_at_z_omega; + + proof_obj +} + +/// Serialize a point's coordinates into a vector +fn serialize_point(point: &E::G1Affine, mut buffer: &mut Vec) -> anyhow::Result<()> { + let (x, y) = point.as_xy(); + 
#[allow(clippy::needless_borrows_for_generic_args)] + x.into_repr() + .write_be(&mut buffer) + .expect("Failed to write x coordinate"); + + #[allow(clippy::needless_borrows_for_generic_args)] + y.into_repr() + .write_be(&mut buffer) + .expect("Failed to write y coordinate"); + Ok(()) +} + +/// Calculates the hash of a verification key. +pub(crate) fn calculate_verification_key_hash>( + verification_key: &VerificationKey, +) -> H256 { + let mut res = Vec::new(); + + // Serialize gate setup commitments. + for gate_setup in &verification_key.gate_setup_commitments { + serialize_point::(gate_setup, &mut res) + .expect("Failed to serialize gate setup commitment"); + } + + // Serialize gate selectors commitments. + for gate_selector in &verification_key.gate_selectors_commitments { + serialize_point::(gate_selector, &mut res) + .expect("Failed to serialize gate selectors commitment"); + } + + // Serialize permutation commitments. + for permutation in &verification_key.permutation_commitments { + serialize_point::(permutation, &mut res) + .expect("Failed to serialize permutation commitment"); + } + + // Serialize lookup selector commitment if present. + if let Some(lookup_selector) = &verification_key.lookup_selector_commitment { + serialize_point::(lookup_selector, &mut res) + .expect("Failed to serialize lookup selector commitment"); + } + + // Serialize lookup tables commitments. + for table_commit in &verification_key.lookup_tables_commitments { + serialize_point::(table_commit, &mut res) + .expect("Failed to serialize lookup tables commitment"); + } + + // Serialize table type commitment if present. + if let Some(lookup_table) = &verification_key.lookup_table_type_commitment { + serialize_point::(lookup_table, &mut res) + .expect("Failed to serialize lookup table type commitment"); + } + + // Serialize flag for using recursive part. 
+ Fq::default() + .into_repr() + .write_be(&mut res) + .expect("Failed to write recursive flag"); + + // Compute Keccak256 hash of the serialized data. + let mut hasher = Keccak256::new(); + hasher.update(&res); + let computed_vk_hash = hasher.finalize(); + + H256::from_slice(&computed_vk_hash) +} diff --git a/via_verifier/lib/via_verification/src/errors.rs b/via_verifier/lib/via_verification/src/errors.rs new file mode 100644 index 000000000..fd66523b3 --- /dev/null +++ b/via_verifier/lib/via_verification/src/errors.rs @@ -0,0 +1,56 @@ +use ethers::abi::ethabi; +use thiserror::Error; + +#[derive(Error, Debug)] +pub enum VerificationError { + #[error("Network not supported")] + UnsupportedNetwork, + + #[error("Failed to fetch data: {0}")] + FetchError(String), + + #[error("Verification key hash mismatch")] + VerificationKeyHashMismatch, + + #[error("Proof verification failed")] + ProofVerificationFailed, + + #[error("Invalid proof")] + InvalidProof, + + #[error("Abi error: {0}")] + AbiError(ethers::contract::AbiError), + + #[error("Provider error: {0}")] + ProviderError(String), + + #[error("Contract error: {0}")] + ContractError(String), + + #[error("Other error: {0}")] + Other(String), +} + +impl From for VerificationError { + fn from(e: reqwest::Error) -> Self { + VerificationError::FetchError(e.to_string()) + } +} + +impl From for VerificationError { + fn from(e: ethers::providers::ProviderError) -> Self { + VerificationError::ProviderError(e.to_string()) + } +} + +impl From for VerificationError { + fn from(e: ethabi::Error) -> Self { + VerificationError::Other(e.to_string()) + } +} + +impl From for VerificationError { + fn from(e: ethers::contract::AbiError) -> Self { + VerificationError::AbiError(e) + } +} diff --git a/via_verifier/lib/via_verification/src/l1_data_fetcher.rs b/via_verifier/lib/via_verification/src/l1_data_fetcher.rs new file mode 100644 index 000000000..112f8d108 --- /dev/null +++ b/via_verifier/lib/via_verification/src/l1_data_fetcher.rs @@ 
-0,0 +1,21 @@ +use async_trait::async_trait; +use primitive_types::H256; + +use crate::{errors::VerificationError, proof::ViaZKProof}; + +/// Trait for fetching data from L1 necessary for verification. +#[async_trait] +pub trait L1DataFetcher { + /// Fetches the verification key hash from L1 for a given block number. + async fn get_verification_key_hash(&self, block_number: u64) + -> Result; + + /// Fetches the protocol version for a given batch number. + async fn get_protocol_version(&self, batch_number: u64) -> Result; + + /// Fetches proof data from L1 for a given batch number. + async fn get_proof_from_l1( + &self, + batch_number: u64, + ) -> Result<(ViaZKProof, u64), VerificationError>; +} diff --git a/via_verifier/lib/via_verification/src/lib.rs b/via_verifier/lib/via_verification/src/lib.rs new file mode 100644 index 000000000..02c3af456 --- /dev/null +++ b/via_verifier/lib/via_verification/src/lib.rs @@ -0,0 +1,8 @@ +pub mod crypto; +pub mod errors; +pub mod l1_data_fetcher; +pub mod proof; +pub mod public_inputs; +pub mod types; +pub mod utils; +pub mod verification; diff --git a/via_verifier/lib/via_verification/src/proof.rs b/via_verifier/lib/via_verification/src/proof.rs new file mode 100644 index 000000000..8938deaef --- /dev/null +++ b/via_verifier/lib/via_verification/src/proof.rs @@ -0,0 +1,60 @@ +use circuit_definitions::snark_wrapper::franklin_crypto::bellman::plonk::{ + better_better_cs::{setup::VerificationKey, verifier::verify}, + commitments::transcript::keccak_transcript::RollingKeccakTranscript, +}; +// Re-export the necessary types from the `circuit_definitions` crate. +pub use circuit_definitions::{ + circuit_definitions::aux_layer::ZkSyncSnarkWrapperCircuit, + snark_wrapper::franklin_crypto::bellman::{ + bn256::Bn256, plonk::better_better_cs::proof::Proof as ZkSyncProof, + }, +}; + +use crate::{errors::VerificationError, types::Fr}; + +/// Trait for a proof that can be verified. 
+pub trait ProofTrait { + /// Verifies the proof with the given verification key and public inputs. + fn verify( + &self, + verification_key: VerificationKey, + ) -> Result; + + /// Returns the public inputs of the proof. + fn get_public_inputs(&self) -> &[Fr]; +} + +/// A struct representing an L1 batch proof. +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] +pub struct ViaZKProof { + pub proof: ZkSyncProof, +} + +impl ProofTrait for ViaZKProof { + fn verify( + &self, + vk: VerificationKey, + ) -> Result { + // Ensure the proof's 'n' matches the verification key's 'n'. + let mut scheduler_proof = self.proof.clone(); + scheduler_proof.n = vk.n; + + tracing::debug!("Verifying proof with n = {}", scheduler_proof.n); + + // Verify the proof + verify::<_, _, RollingKeccakTranscript<_>>(&vk, &scheduler_proof, None) + .map_err(|_| VerificationError::ProofVerificationFailed) + } + + fn get_public_inputs(&self) -> &[Fr] { + &self.proof.inputs + } +} + +impl Default for ViaZKProof { + fn default() -> Self { + Self { + proof: ZkSyncProof::empty(), + } + } +} diff --git a/via_verifier/lib/via_verification/src/public_inputs.rs b/via_verifier/lib/via_verification/src/public_inputs.rs new file mode 100644 index 000000000..11ad541e3 --- /dev/null +++ b/via_verifier/lib/via_verification/src/public_inputs.rs @@ -0,0 +1,28 @@ +use circuit_definitions::snark_wrapper::franklin_crypto::bellman::{ + pairing::bn256::Fr, PrimeField, +}; +use ethers::types::U256; +use primitive_types::H256; +use sha3::{Digest, Keccak256}; + +use crate::utils::to_fixed_bytes; + +/// Computes the public inputs for a given batch commiements. +/// Public inputs require us to fetch multiple data from L1 (like state hash etc). 
+pub fn generate_inputs(prev_batch_commitment: &H256, curr_batch_commitment: &H256) -> Vec { + // Prepare the input fields + let input_fields = [ + prev_batch_commitment.to_fixed_bytes(), + curr_batch_commitment.to_fixed_bytes(), + ]; + let encoded_input_params = input_fields.into_iter().flatten().collect::>(); + + // Compute the Keccak256 hash of the input parameters + let input_keccak_hash = to_fixed_bytes(&Keccak256::digest(&encoded_input_params)); + let input_u256 = U256::from_big_endian(&input_keccak_hash); + + // Shift the input as per the protocol's requirement + let shifted_input = input_u256 >> 32; + + vec![Fr::from_str(&shifted_input.to_string()).unwrap()] +} diff --git a/via_verifier/lib/via_verification/src/types.rs b/via_verifier/lib/via_verification/src/types.rs new file mode 100644 index 000000000..196e09c24 --- /dev/null +++ b/via_verifier/lib/via_verification/src/types.rs @@ -0,0 +1,19 @@ +pub use circuit_definitions::snark_wrapper::franklin_crypto::bellman::bn256::Fr; +use ethers::types::H256; +use serde::{Deserialize, Serialize}; + +#[derive(Debug, Default, Clone, Serialize, Deserialize)] +pub struct BatchL1Data { + pub previous_enumeration_counter: u64, + pub previous_root: Vec, + // Enumeration counter (used for L2 -> L1 communication). + pub new_enumeration_counter: u64, + // Storage root. + pub new_root: Vec, + // Hash of the account abstraction code. + pub default_aa_hash: [u8; 32], + // Hash of the bootloader.yul code. 
+ pub bootloader_hash: [u8; 32], + pub prev_batch_commitment: H256, + pub curr_batch_commitment: H256, +} diff --git a/via_verifier/lib/via_verification/src/utils.rs b/via_verifier/lib/via_verification/src/utils.rs new file mode 100644 index 000000000..cc1347346 --- /dev/null +++ b/via_verifier/lib/via_verification/src/utils.rs @@ -0,0 +1,110 @@ +use std::{env, fs, path::PathBuf}; + +use circuit_definitions::{ + boojum::pairing::bn256::Bn256, circuit_definitions::aux_layer::ZkSyncSnarkWrapperCircuit, + snark_wrapper::franklin_crypto::bellman::plonk::better_better_cs::cs::VerificationKey, +}; +use tracing::debug; + +use crate::{ + crypto::calculate_verification_key_hash, errors::VerificationError, + l1_data_fetcher::L1DataFetcher, +}; + +/// Load the verification key for a given batch number. +pub async fn load_verification_key( + l1_data_fetcher: &F, + batch_number: u64, + l1_block_number: u64, +) -> Result, VerificationError> { + let protocol_version = l1_data_fetcher.get_protocol_version(batch_number).await?; + + let file_path = format!( + "keys/protocol_version/{}/scheduler_key.json", + protocol_version + ); + let base_dir = + env::var("CARGO_MANIFEST_DIR").map_err(|e| VerificationError::Other(e.to_string()))?; + let base_path = PathBuf::from(base_dir); + let file = base_path.join(&file_path); + + // Load the verification key from the specified file. + let verification_key_content = fs::read_to_string(file).map_err(|e| { + VerificationError::Other(format!( + "Failed to read verification key from {}: {}", + file_path, e + )) + })?; + let vk_inner: VerificationKey = + serde_json::from_str(&verification_key_content).map_err(|e| { + VerificationError::Other(format!("Failed to deserialize verification key: {}", e)) + })?; + + // Get the verification key hash from L1. + let vk_hash_from_l1 = l1_data_fetcher + .get_verification_key_hash(l1_block_number) + .await?; + + // Calculate the verification key hash from the verification key. 
+ let computed_vk_hash = calculate_verification_key_hash(&vk_inner); + + // Check that the verification key hash from L1 matches the computed hash. + debug!("Verification Key Hash Check:"); + debug!( + " Verification Key Hash from L1: 0x{}", + hex::encode(vk_hash_from_l1) + ); + debug!( + " Computed Verification Key Hash: 0x{}", + hex::encode(computed_vk_hash) + ); + + (computed_vk_hash == vk_hash_from_l1) + .then_some(vk_inner) + .ok_or(VerificationError::VerificationKeyHashMismatch) +} + +/// Load the verification key for a given batch number. +pub async fn load_verification_key_without_l1_check( + protocol_version: String, +) -> Result, VerificationError> { + let key_path = match env::var("VIA_VK_KEY_PATH") { + Ok(path) => { + let file_path = format!("protocol_version/{}/scheduler_key.json", protocol_version); + let base_path = PathBuf::from(path); + base_path.join(&file_path) + } + Err(_) => { + // from VIA_HOME + let base_dir = + env::var("VIA_HOME").map_err(|e| VerificationError::Other(e.to_string()))?; + let base_path = PathBuf::from(base_dir); + let file_path = format!( + "via_verifier/lib/via_verification/keys/protocol_version/{}/scheduler_key.json", + protocol_version + ); + base_path.join(&file_path) + } + }; + + // Load the verification key from the specified file. 
+ let verification_key_content = fs::read_to_string(key_path.clone()).map_err(|e| { + VerificationError::Other(format!( + "Failed to read verification key from {:?}: {}", + key_path, e + )) + })?; + let vk_inner: VerificationKey = + serde_json::from_str(&verification_key_content).map_err(|e| { + VerificationError::Other(format!("Failed to deserialize verification key: {}", e)) + })?; + + Ok(vk_inner) +} + +pub(crate) fn to_fixed_bytes(ins: &[u8]) -> [u8; 32] { + let mut result = [0u8; 32]; + result.copy_from_slice(ins); + + result +} diff --git a/via_verifier/lib/via_verification/src/verification.rs b/via_verifier/lib/via_verification/src/verification.rs new file mode 100644 index 000000000..28b2e8971 --- /dev/null +++ b/via_verifier/lib/via_verification/src/verification.rs @@ -0,0 +1,29 @@ +use crate::{ + errors::VerificationError, l1_data_fetcher::L1DataFetcher, proof::ProofTrait, types::Fr, + utils::load_verification_key, +}; + +/// Verifies a SNARK proof with a given verification key, checking the verification key hash if provided. +/// Returns the public input, auxiliary witness, and computed VK hash on success. +pub async fn verify_snark( + l1_data_fetcher: &F, + proof: P, + batch_number: u64, + l1_block_number: u64, +) -> Result { + let vk_inner = load_verification_key(l1_data_fetcher, batch_number, l1_block_number).await?; + + // Verify the proof. + if !proof.verify(vk_inner)? { + return Err(VerificationError::ProofVerificationFailed); + } + + // Extract the public input from the proof. 
+ let public_inputs = proof.get_public_inputs(); + let public_input = public_inputs + .first() + .cloned() + .ok_or_else(|| VerificationError::Other("No public inputs found in proof".to_string()))?; + + Ok(public_input) +} diff --git a/via_verifier/lib/via_withdrawal_client/Cargo.toml b/via_verifier/lib/via_withdrawal_client/Cargo.toml new file mode 100644 index 000000000..8325e43d7 --- /dev/null +++ b/via_verifier/lib/via_withdrawal_client/Cargo.toml @@ -0,0 +1,35 @@ +[package] +name = "via_withdrawal_client" +version.workspace = true +edition.workspace = true +authors.workspace = true +homepage.workspace = true +repository.workspace = true +license.workspace = true +keywords.workspace = true +categories.workspace = true + +[dependencies] +hex.workspace = true +anyhow.workspace = true +zksync_basic_types.workspace = true +zksync_da_client.workspace = true +zksync_config.workspace = true +zksync_types.workspace = true +zksync_utils.workspace = true +tokio.workspace = true +tracing.workspace = true +tracing-subscriber.workspace = true +via_btc_client.workspace = true +via_da_client.workspace = true + +bitcoin = { version = "0.32.2", features = ["serde"] } + +[dev-dependencies] +zksync_dal.workspace = true +dotenv = "0.15" +via_da_clients.workspace = true + +[[example]] +name = "withdraw" +path = "examples/withdraw.rs" diff --git a/via_verifier/lib/via_withdrawal_client/examples/withdraw.rs b/via_verifier/lib/via_withdrawal_client/examples/withdraw.rs new file mode 100644 index 000000000..40bf79c90 --- /dev/null +++ b/via_verifier/lib/via_withdrawal_client/examples/withdraw.rs @@ -0,0 +1,70 @@ +use std::{env, str::FromStr}; + +use anyhow::{Context, Result}; +use tracing::info; +use via_da_clients::celestia::client::CelestiaClient; +use via_withdrawal_client::client::WithdrawalClient; +use zksync_config::ViaCelestiaConfig; +use zksync_da_client::DataAvailabilityClient; +use zksync_dal::{ConnectionPool, Core, CoreDal}; +use zksync_types::{url::SensitiveUrl, 
L1BatchNumber}; + +const DEFAULT_DATABASE_URL: &str = "postgres://postgres:notsecurepassword@0.0.0.0:5432/via"; +const DEFAULT_CELESTIA: &str = "http://0.0.0.0:26658"; + +#[tokio::main] +async fn main() -> Result<()> { + tracing_subscriber::fmt() + .with_max_level(tracing::Level::INFO) + .init(); + let home = env::var("VIA_HOME").context("VIA HOME not set")?; + let _ = dotenv::from_path(home.clone() + "/etc/env/target/via.env"); + + let celestia_auth_token = env::var("VIA_CELESTIA_CLIENT_AUTH_TOKEN")?; + + let args: Vec = env::args().collect(); + let block_number = args[1].parse::().unwrap(); + info!("Fetch withdrawals in block {}", block_number); + + // Connect to db + let url = SensitiveUrl::from_str(DEFAULT_DATABASE_URL).unwrap(); + let connection_pool = ConnectionPool::::builder(url, 100) + .build() + .await + .unwrap(); + let l1_batch_number = L1BatchNumber::from(block_number); + let mut storage = connection_pool.connection().await.unwrap(); + + let header_res = storage + .via_data_availability_dal() + .get_da_blob(l1_batch_number) + .await + .unwrap(); + if header_res.is_none() { + info!("DA for block not exists yet"); + return Ok(()); + } + + let header = header_res.unwrap(); + + let da_config = ViaCelestiaConfig { + api_node_url: String::from(DEFAULT_CELESTIA), + auth_token: celestia_auth_token, + blob_size_limit: 1973786, + }; + + // Connect to withdrawl client + let client = CelestiaClient::new(da_config).await?; + let da_client: Box = Box::new(client); + let withdrawal_client = WithdrawalClient::new(da_client, bitcoin::Network::Regtest); + + let withdrawals = withdrawal_client + .get_withdrawals(header.blob_id.as_str()) + .await?; + + info!("--------------------------------------------------------"); + info!("Withdrawals {:?}", withdrawals); + info!("--------------------------------------------------------"); + + Ok(()) +} diff --git a/via_verifier/lib/via_withdrawal_client/src/client.rs b/via_verifier/lib/via_withdrawal_client/src/client.rs new file 
mode 100644 index 000000000..cd052f083 --- /dev/null +++ b/via_verifier/lib/via_withdrawal_client/src/client.rs @@ -0,0 +1,149 @@ +use std::{collections::HashMap, str::FromStr}; + +use bitcoin::Network; +use via_btc_client::withdrawal_builder::WithdrawalRequest; +use via_da_client::{ + pubdata::Pubdata, + types::{L2BridgeLogMetadata, L2_BASE_TOKEN_SYSTEM_CONTRACT_ADDR}, +}; +use zksync_da_client::DataAvailabilityClient; +use zksync_types::{web3::keccak256, H160, H256}; + +use crate::withdraw::parse_l2_withdrawal_message; + +#[derive(Debug)] +pub struct WithdrawalClient { + network: Network, + client: Box, +} + +impl WithdrawalClient { + pub fn new(client: Box, network: Network) -> Self { + Self { client, network } + } + + pub async fn get_withdrawals(&self, blob_id: &str) -> anyhow::Result> { + let pubdata_bytes = self.fetch_pubdata(blob_id).await?; + let pubdata = Pubdata::decode_pubdata(pubdata_bytes)?; + let l2_bridge_metadata = WithdrawalClient::list_l2_bridge_metadata(&pubdata); + let withdrawals = WithdrawalClient::get_valid_withdrawals(self.network, l2_bridge_metadata); + Ok(withdrawals) + } + + async fn fetch_pubdata(&self, blob_id: &str) -> anyhow::Result> { + let response = self.client.get_inclusion_data(blob_id).await?; + if let Some(inclusion_data) = response { + return Ok(inclusion_data.data); + }; + Ok(Vec::new()) + } + + fn l2_to_l1_messages_hashmap(pubdata: &Pubdata) -> HashMap> { + let mut hashes: HashMap> = HashMap::new(); + for message in &pubdata.l2_to_l1_messages { + let hash = H256::from(keccak256(message)); + hashes.insert(hash, message.clone()); + } + hashes + } + + fn list_l2_bridge_metadata(pubdata: &Pubdata) -> Vec { + let mut withdrawals: Vec = Vec::new(); + let l2_bridges_hash = + H256::from(H160::from_str(L2_BASE_TOKEN_SYSTEM_CONTRACT_ADDR).unwrap()); + let l2_to_l1_messages_hashmap = WithdrawalClient::l2_to_l1_messages_hashmap(pubdata); + for log in pubdata.user_logs.clone() { + // Ignore the logs if not from emitted from the L2 
bridge contract + if log.key != l2_bridges_hash { + continue; + }; + + withdrawals.push(L2BridgeLogMetadata { + message: l2_to_l1_messages_hashmap[&log.value].clone(), + log, + }); + } + withdrawals + } + + fn get_valid_withdrawals( + network: Network, + l2_bridge_logs_metadata: Vec, + ) -> Vec { + let mut withdrawal_requests: Vec = Vec::new(); + for l2_bridge_log_metadata in l2_bridge_logs_metadata { + let withdrawal_request = + parse_l2_withdrawal_message(l2_bridge_log_metadata.message, network); + + if let Ok(req) = withdrawal_request { + withdrawal_requests.push(req) + } + } + withdrawal_requests + } +} + +#[cfg(test)] +mod tests { + use std::str::FromStr; + + use bitcoin::{Address, Amount}; + + use super::*; + + #[test] + fn test_l2_to_l1_messages_hashmap() { + let input = "00000001000100000000000000000000000000000000000000008008000000000000000000000000000000000000000000000000000000000000800aa1fd131a17718668a78581197d19972abd907b7b343b9694e02246d18c3801c500000001000000506c0960f962637274317178326c6b30756e756b6d3830716d65706a703439687766397a36786e7a307337336b396a35360000000000000000000000000000000000000000000000000000000005f5e10000000000010001280400032c1818e4770f08c05b28829d7d5f9d401d492c7432c166dfecf4af04238ea323009d7042e8fb0f249338d18505e5ba1d4a546e9d21f47c847ca725ff53ac29f740ca1bbc31cc849a8092a36f9a321e17412dee200b956038af1c2dc83430a0e8b000d3e2c6760d91078e517a2cb882cd3c9551de3ab5f30d554d51b17e3744cf92b0cf368ce957aed709b985423cd3ba11615de01ecafa15eb9a11bc6cdef4f6327900436ef22b96a07224eb06f0eecfecc184033da7db2a5fb58f867f17298b896b55000000420901000000362205f5e1000000003721032b8b14000000382209216c140000003a8901000000000000000000000000000000170000003b8902000000000000000000000000000000170000003e890200000000000000000000000000000017"; + let encoded_pubdata = hex::decode(input).unwrap(); + let pubdata = Pubdata::decode_pubdata(encoded_pubdata).unwrap(); + + let hashes = WithdrawalClient::l2_to_l1_messages_hashmap(&pubdata); + let hash = pubdata.user_logs[0].value; + 
assert_eq!(hashes[&hash], pubdata.l2_to_l1_messages[0]); + } + + #[test] + fn test_list_l2_bridge_metadata() { + let input = "00000001000100000000000000000000000000000000000000008008000000000000000000000000000000000000000000000000000000000000800aa1fd131a17718668a78581197d19972abd907b7b343b9694e02246d18c3801c500000001000000506c0960f962637274317178326c6b30756e756b6d3830716d65706a703439687766397a36786e7a307337336b396a35360000000000000000000000000000000000000000000000000000000005f5e10000000000010001280400032c1818e4770f08c05b28829d7d5f9d401d492c7432c166dfecf4af04238ea323009d7042e8fb0f249338d18505e5ba1d4a546e9d21f47c847ca725ff53ac29f740ca1bbc31cc849a8092a36f9a321e17412dee200b956038af1c2dc83430a0e8b000d3e2c6760d91078e517a2cb882cd3c9551de3ab5f30d554d51b17e3744cf92b0cf368ce957aed709b985423cd3ba11615de01ecafa15eb9a11bc6cdef4f6327900436ef22b96a07224eb06f0eecfecc184033da7db2a5fb58f867f17298b896b55000000420901000000362205f5e1000000003721032b8b14000000382209216c140000003a8901000000000000000000000000000000170000003b8902000000000000000000000000000000170000003e890200000000000000000000000000000017"; + let encoded_pubdata = hex::decode(input).unwrap(); + let pubdata = Pubdata::decode_pubdata(encoded_pubdata).unwrap(); + + let hashes = WithdrawalClient::l2_to_l1_messages_hashmap(&pubdata); + let hash = pubdata.user_logs[0].value; + assert_eq!(hashes[&hash], pubdata.l2_to_l1_messages[0]); + + let l2_bridge_logs_metadata = WithdrawalClient::list_l2_bridge_metadata(&pubdata); + assert_eq!(l2_bridge_logs_metadata.len(), 1); + assert_eq!( + l2_bridge_logs_metadata[0].message, + pubdata.clone().l2_to_l1_messages[0] + ); + assert_eq!( + l2_bridge_logs_metadata[0].log.value, + pubdata.user_logs[0].value + ); + } + + #[test] + fn test_get_valid_withdrawals() { + let input = 
"00000001000100000000000000000000000000000000000000008008000000000000000000000000000000000000000000000000000000000000800aa1fd131a17718668a78581197d19972abd907b7b343b9694e02246d18c3801c500000001000000506c0960f962637274317178326c6b30756e756b6d3830716d65706a703439687766397a36786e7a307337336b396a35360000000000000000000000000000000000000000000000000000000005f5e10000000000010001280400032c1818e4770f08c05b28829d7d5f9d401d492c7432c166dfecf4af04238ea323009d7042e8fb0f249338d18505e5ba1d4a546e9d21f47c847ca725ff53ac29f740ca1bbc31cc849a8092a36f9a321e17412dee200b956038af1c2dc83430a0e8b000d3e2c6760d91078e517a2cb882cd3c9551de3ab5f30d554d51b17e3744cf92b0cf368ce957aed709b985423cd3ba11615de01ecafa15eb9a11bc6cdef4f6327900436ef22b96a07224eb06f0eecfecc184033da7db2a5fb58f867f17298b896b55000000420901000000362205f5e1000000003721032b8b14000000382209216c140000003a8901000000000000000000000000000000170000003b8902000000000000000000000000000000170000003e890200000000000000000000000000000017"; + let encoded_pubdata = hex::decode(input).unwrap(); + let pubdata = Pubdata::decode_pubdata(encoded_pubdata).unwrap(); + + let hashes = WithdrawalClient::l2_to_l1_messages_hashmap(&pubdata); + let hash = pubdata.user_logs[0].value; + assert_eq!(hashes[&hash], pubdata.l2_to_l1_messages[0]); + + let l2_bridge_logs_metadata = WithdrawalClient::list_l2_bridge_metadata(&pubdata); + let withdrawals = + WithdrawalClient::get_valid_withdrawals(Network::Regtest, l2_bridge_logs_metadata); + let expected_user_address = + Address::from_str("bcrt1qx2lk0unukm80qmepjp49hwf9z6xnz0s73k9j56") + .unwrap() + .assume_checked(); + assert_eq!(withdrawals.len(), 1); + assert_eq!(&withdrawals[0].address, &expected_user_address); + let expected_amount = Amount::from_sat(100000000); + assert_eq!(&withdrawals[0].amount, &expected_amount); + } +} diff --git a/via_verifier/lib/via_withdrawal_client/src/lib.rs b/via_verifier/lib/via_withdrawal_client/src/lib.rs new file mode 100644 index 000000000..6b131403c --- /dev/null +++ 
b/via_verifier/lib/via_withdrawal_client/src/lib.rs @@ -0,0 +1,2 @@ +pub mod client; +mod withdraw; diff --git a/via_verifier/lib/via_withdrawal_client/src/withdraw.rs b/via_verifier/lib/via_withdrawal_client/src/withdraw.rs new file mode 100644 index 000000000..92cf2f5fc --- /dev/null +++ b/via_verifier/lib/via_withdrawal_client/src/withdraw.rs @@ -0,0 +1,83 @@ +use std::str::FromStr; + +use anyhow::Context; +use bitcoin::{Address as BitcoinAddress, Amount, Network}; +use via_btc_client::withdrawal_builder::WithdrawalRequest; +use via_da_client::types::WITHDRAW_FUNC_SIG; +use zksync_basic_types::{web3::keccak256, U256}; + +pub fn parse_l2_withdrawal_message( + l2_to_l1_message: Vec, + network: Network, +) -> anyhow::Result { + // We check that the message is long enough to read the data. + // Please note that there are two versions of the message: + // The message that is sent by `withdraw(address _l1Receiver)` + // It should be equal to the length of the bytes4 function signature + bytes l1Receiver + uint256 amount = 4 + X + 32. + let message_len = l2_to_l1_message.len(); + let address_size = message_len - 36; + if message_len <= 36 { + return Err(anyhow::format_err!("Invalid message length.")); + } + + let func_selector_bytes = &l2_to_l1_message[0..4]; + if func_selector_bytes != _get_withdraw_function_selector() { + return Err(anyhow::format_err!("Invalid message function selector.")); + } + + // The address bytes represent the l1 receiver + let address_bytes = &l2_to_l1_message[4..4 + address_size]; + let address_str = + String::from_utf8(address_bytes.to_vec()).context("Parse address to string")?; + let address = BitcoinAddress::from_str(&address_str) + .context("parse bitcoin address")? 
+ .require_network(network)?; + + // The last 32 bytes represent the amount (uint256) + let amount_bytes = &l2_to_l1_message[address_size + 4..]; + let amount = Amount::from_sat(U256::from_big_endian(amount_bytes).as_u64()); + + Ok(WithdrawalRequest { address, amount }) +} + +/// Get the withdrawal function selector. +fn _get_withdraw_function_selector() -> Vec { + let hash = keccak256(WITHDRAW_FUNC_SIG.as_bytes()); + hash[0..4].to_vec() +} + +#[cfg(test)] +mod tests { + use std::str::FromStr; + + use super::*; + + #[test] + fn test_parse_l2_withdrawal_message_when_address_bech32m() { + // Example transaction: https://etherscan.io/tx/0x70afe07734e9b0c2d8393ab2a51fda5ac2cfccc80a01cc4a5cf587eaea3c4610 + let l2_to_l1_message = hex::decode("6c0960f96263317179383267617732687466643573736c706c70676d7a346b74663979336b37706163323232366b30776c6a6c6d7733617466773571776d346176340000000000000000000000000000000000000000000000000de0b6b3a7640000").unwrap(); + let expected_receiver = BitcoinAddress::from_str( + "bc1qy82gaw2htfd5sslplpgmz4ktf9y3k7pac2226k0wljlmw3atfw5qwm4av4", + ) + .unwrap() + .assume_checked(); + let expected_amount = Amount::from_sat(1000000000000000000); + let res = parse_l2_withdrawal_message(l2_to_l1_message, Network::Bitcoin).unwrap(); + + assert_eq!(res.address, expected_receiver); + assert_eq!(res.amount, expected_amount); + } + + #[test] + fn test_parse_l2_withdrawal_message_when_address_p2pkh() { + let l2_to_l1_message = hex::decode("6c0960f93141317a5031655035514765666932444d505466544c35534c6d7637446976664e610000000000000000000000000000000000000000000000000de0b6b3a7640000").unwrap(); + let expected_receiver = BitcoinAddress::from_str("1A1zP1eP5QGefi2DMPTfTL5SLmv7DivfNa") + .unwrap() + .assume_checked(); + let expected_amount = Amount::from_sat(1000000000000000000); + let res = parse_l2_withdrawal_message(l2_to_l1_message, Network::Bitcoin).unwrap(); + + assert_eq!(res.address, expected_receiver); + assert_eq!(res.amount, expected_amount); + } +} diff 
--git a/via_verifier/node/via_btc_sender/Cargo.toml b/via_verifier/node/via_btc_sender/Cargo.toml new file mode 100644 index 000000000..258df7a31 --- /dev/null +++ b/via_verifier/node/via_btc_sender/Cargo.toml @@ -0,0 +1,34 @@ +[package] +name = "via_verifier_btc_sender" +description = "VIA Verifier Bitcoin Sender" +version.workspace = true +edition.workspace = true +authors = [ "VIA Protocol Team"] +homepage.workspace = true +repository.workspace = true +license.workspace = true +keywords.workspace = true +categories.workspace = true + +[dependencies] +via_btc_client.workspace = true +via_da_dispatcher.workspace = true +via_verifier_dal.workspace = true +zksync_config.workspace = true +zksync_object_store.workspace = true +zksync_l1_contract_interface.workspace = true +zksync_types.workspace = true +zksync_contracts.workspace = true +bitcoin = { version = "0.32.2", features = ["serde"] } +bincode = "1.3" + +tokio.workspace = true +anyhow.workspace = true +thiserror.workspace = true +async-trait.workspace = true +tracing.workspace = true +chrono.workspace = true +hex = "0.4" + +[dev-dependencies] +zksync_node_test_utils.workspace = true diff --git a/via_verifier/node/via_btc_sender/src/btc_inscription_manager.rs b/via_verifier/node/via_btc_sender/src/btc_inscription_manager.rs new file mode 100644 index 000000000..ddbffaec0 --- /dev/null +++ b/via_verifier/node/via_btc_sender/src/btc_inscription_manager.rs @@ -0,0 +1,214 @@ +use anyhow::{Context, Result}; +use bincode::serialize; +use tokio::sync::watch; +use via_btc_client::{inscriber::Inscriber, traits::Serializable, types::InscriptionMessage}; +use via_verifier_dal::{Connection, ConnectionPool, Verifier, VerifierDal}; +use zksync_config::ViaBtcSenderConfig; +use zksync_types::btc_sender::ViaBtcInscriptionRequest; + +use crate::config::BLOCK_RESEND; + +#[derive(Debug)] +pub struct ViaBtcInscriptionManager { + inscriber: Inscriber, + config: ViaBtcSenderConfig, + pool: ConnectionPool, +} + +impl 
ViaBtcInscriptionManager { + pub async fn new( + inscriber: Inscriber, + pool: ConnectionPool, + config: ViaBtcSenderConfig, + ) -> anyhow::Result { + Ok(Self { + inscriber, + config, + pool, + }) + } + + pub async fn run(mut self, mut stop_receiver: watch::Receiver) -> anyhow::Result<()> { + let mut timer = tokio::time::interval(self.config.poll_interval()); + let pool = self.pool.clone(); + + while !*stop_receiver.borrow_and_update() { + tokio::select! { + _ = timer.tick() => { /* continue iterations */ } + _ = stop_receiver.changed() => break, + } + + let mut storage = pool.connection_tagged("via_btc_sender").await?; + + match self.loop_iteration(&mut storage).await { + Ok(()) => { + tracing::info!("Inscription manager task finished"); + } + Err(err) => { + tracing::error!("Failed to process btc_sender_inscription_manager: {err}"); + } + } + } + + tracing::info!("Stop signal received, btc_sender is shutting down"); + Ok(()) + } + + async fn loop_iteration( + &mut self, + storage: &mut Connection<'_, Verifier>, + ) -> Result<(), anyhow::Error> { + self.update_inscription_status_or_resend(storage).await?; + self.send_new_inscription_txs(storage).await?; + Ok(()) + } + + async fn update_inscription_status_or_resend( + &mut self, + storage: &mut Connection<'_, Verifier>, + ) -> anyhow::Result<()> { + let inflight_inscriptions = storage + .via_btc_sender_dal() + .get_inflight_inscriptions() + .await?; + + for inscription in inflight_inscriptions { + if let Some(last_inscription_history) = storage + .via_btc_sender_dal() + .get_last_inscription_request_history(inscription.id) + .await? 
+ { + let is_confirmed = self + .inscriber + .get_client() + .await + .check_tx_confirmation( + &last_inscription_history.reveal_tx_id, + self.config.block_confirmations(), + ) + .await?; + + if is_confirmed { + storage + .via_btc_sender_dal() + .confirm_inscription(inscription.id, last_inscription_history.id) + .await?; + tracing::info!( + "Inscription confirmed {reveal_tx}", + reveal_tx = last_inscription_history.reveal_tx_id, + ); + } else { + let current_block = self + .inscriber + .get_client() + .await + .fetch_block_height() + .await + .context("context")?; + + if last_inscription_history.sent_at_block + BLOCK_RESEND as i64 + > current_block as i64 + { + continue; + } + + tracing::warn!( + "Inscription {reveal_tx} stuck for more than {BLOCK_RESEND} block.", + reveal_tx = last_inscription_history.reveal_tx_id + ); + } + } + } + + Ok(()) + } + + async fn send_new_inscription_txs( + &mut self, + storage: &mut Connection<'_, Verifier>, + ) -> anyhow::Result<()> { + let number_inflight_txs = storage + .via_btc_sender_dal() + .get_inflight_inscriptions() + .await? 
+ .len(); + + tracing::debug!( + "Inflight inscriptions: {count}", + count = number_inflight_txs + ); + + let number_of_available_slots_for_inscription_txs = self + .config + .max_txs_in_flight() + .saturating_sub(number_inflight_txs as i64); + + tracing::debug!( + "Available slots to process inscriptions: {count}", + count = number_of_available_slots_for_inscription_txs + ); + + if number_of_available_slots_for_inscription_txs > 0 { + let list_new_inscription_request = storage + .via_btc_sender_dal() + .list_new_inscription_request(number_of_available_slots_for_inscription_txs) + .await?; + + for inscription in list_new_inscription_request { + self.send_inscription_tx(storage, &inscription).await?; + } + } + Ok(()) + } + + pub(crate) async fn send_inscription_tx( + &mut self, + storage: &mut Connection<'_, Verifier>, + tx: &ViaBtcInscriptionRequest, + ) -> anyhow::Result<()> { + let sent_at_block = self + .inscriber + .get_client() + .await + .fetch_block_height() + .await + .context("Error to fetch current block number")? 
as i64; + + let input = + InscriptionMessage::from_bytes(&tx.inscription_message.clone().unwrap_or_default()); + + let inscribe_info = self + .inscriber + .inscribe(input) + .await + .context("Sent inscription tx")?; + + let signed_commit_tx = + serialize(&inscribe_info.final_commit_tx.tx).context("Serilize the commit tx")?; + let signed_reveal_tx = + serialize(&inscribe_info.final_reveal_tx.tx).context("Serilize the reveal tx")?; + + let actual_fees = inscribe_info.reveal_tx_output_info._reveal_fee + + inscribe_info.commit_tx_output_info.commit_tx_fee; + + tracing::info!( + "New inscription created {commit_tx} {reveal_tx}", + commit_tx = inscribe_info.final_commit_tx.txid, + reveal_tx = inscribe_info.final_reveal_tx.txid, + ); + + storage + .via_btc_sender_dal() + .insert_inscription_request_history( + inscribe_info.final_commit_tx.txid.to_string(), + inscribe_info.final_reveal_tx.txid.to_string(), + tx.id, + signed_commit_tx, + signed_reveal_tx, + actual_fees.to_sat() as i64, + sent_at_block, + ) + .await?; + Ok(()) + } +} diff --git a/via_verifier/node/via_btc_sender/src/btc_vote_inscription.rs b/via_verifier/node/via_btc_sender/src/btc_vote_inscription.rs new file mode 100644 index 000000000..bbb315d59 --- /dev/null +++ b/via_verifier/node/via_btc_sender/src/btc_vote_inscription.rs @@ -0,0 +1,148 @@ +use anyhow::Context; +use bitcoin::{hashes::Hash, Txid}; +use tokio::sync::watch; +use via_btc_client::{ + traits::Serializable, + types::{InscriptionMessage, ValidatorAttestationInput, Vote}, +}; +use via_verifier_dal::{Connection, ConnectionPool, Verifier, VerifierDal}; +use zksync_config::ViaBtcSenderConfig; +use zksync_types::{ + via_verifier_btc_inscription_operations::ViaVerifierBtcInscriptionRequestType, L1BatchNumber, +}; + +#[derive(Debug)] +pub struct ViaVoteInscription { + pool: ConnectionPool, + config: ViaBtcSenderConfig, +} + +impl ViaVoteInscription { + pub async fn new( + pool: ConnectionPool, + config: ViaBtcSenderConfig, + ) -> anyhow::Result { + 
Ok(Self { pool, config }) + } + + pub async fn run(mut self, mut stop_receiver: watch::Receiver) -> anyhow::Result<()> { + let mut timer = tokio::time::interval(self.config.poll_interval()); + let pool = self.pool.clone(); + + while !*stop_receiver.borrow_and_update() { + tokio::select! { + _ = timer.tick() => { /* continue iterations */ } + _ = stop_receiver.changed() => break, + } + + let mut storage = pool + .connection_tagged("via_btc_inscription_creator") + .await?; + + match self.loop_iteration(&mut storage).await { + Ok(()) => { + tracing::info!("Verifier vote inscription task finished"); + } + Err(err) => { + tracing::error!("Failed to process Verifier btc_sender_inscription: {err}"); + } + } + } + + tracing::info!("Stop signal received, Verifier btc_sender_inscription is shutting down"); + Ok(()) + } + + pub async fn loop_iteration( + &mut self, + storage: &mut Connection<'_, Verifier>, + ) -> anyhow::Result<()> { + if let Some((l1_batch_number, vote, tx_id)) = self.get_voting_operation(storage).await? 
{ + tracing::info!("New voting operation ready to be processed"); + let mut transaction = storage.start_transaction().await?; + let inscription_message = self.construct_voting_inscription_message(vote, tx_id)?; + + let inscription_request = transaction + .via_btc_sender_dal() + .via_save_btc_inscriptions_request( + ViaVerifierBtcInscriptionRequestType::VoteOnchain.to_string(), + InscriptionMessage::to_bytes(&inscription_message), + 0, + ) + .await + .context("Via save btc inscriptions request")?; + + transaction + .via_block_dal() + .insert_vote_l1_batch_inscription_request_id( + l1_batch_number, + inscription_request.id, + ViaVerifierBtcInscriptionRequestType::VoteOnchain, + ) + .await + .context("Via set inscription request id")?; + transaction.commit().await?; + } + Ok(()) + } + + pub async fn get_voting_operation( + &mut self, + storage: &mut Connection<'_, Verifier>, + ) -> anyhow::Result)>> { + if let Some(batch_number) = storage + .via_votes_dal() + .get_first_non_finalized_block() + .await? + { + // Check if already created a voting inscription + let exists = storage + .via_block_dal() + .check_vote_l1_batch_inscription_request_if_exists(batch_number) + .await?; + if exists { + return Ok(None); + } + + if let Some((vote, tx_id)) = storage + .via_votes_dal() + .get_verifier_vote_status(batch_number) + .await? + { + return Ok(Some(( + L1BatchNumber::from(batch_number as u32), + vote, + tx_id, + ))); + } + } + Ok(None) + } + + pub fn construct_voting_inscription_message( + &self, + vote: bool, + tx_id: Vec, + ) -> anyhow::Result { + let attestation = if vote { Vote::Ok } else { Vote::NotOk }; + + // Convert H256 bytes to Txid + let txid = Self::h256_to_txid(&tx_id)?; + + let input = ValidatorAttestationInput { + reference_txid: txid, + attestation, + }; + Ok(InscriptionMessage::ValidatorAttestation(input)) + } + + /// Converts H256 bytes (from the DB) to a Txid by reversing the byte order. 
+ fn h256_to_txid(h256_bytes: &[u8]) -> anyhow::Result { + if h256_bytes.len() != 32 { + return Err(anyhow::anyhow!("H256 must be 32 bytes")); + } + let mut reversed_bytes = h256_bytes.to_vec(); + reversed_bytes.reverse(); + Txid::from_slice(&reversed_bytes).context("Failed to convert H256 to Txid") + } +} diff --git a/via_verifier/node/via_btc_sender/src/config.rs b/via_verifier/node/via_btc_sender/src/config.rs new file mode 100644 index 000000000..56ce5556d --- /dev/null +++ b/via_verifier/node/via_btc_sender/src/config.rs @@ -0,0 +1,2 @@ +// Number of blocks to wait before increasing the inscription fee. +pub const BLOCK_RESEND: u32 = 6; diff --git a/via_verifier/node/via_btc_sender/src/lib.rs b/via_verifier/node/via_btc_sender/src/lib.rs new file mode 100644 index 000000000..099f87cf9 --- /dev/null +++ b/via_verifier/node/via_btc_sender/src/lib.rs @@ -0,0 +1,5 @@ +pub mod btc_inscription_manager; +pub mod btc_vote_inscription; +mod config; +#[cfg(test)] +mod tests; diff --git a/via_verifier/node/via_btc_sender/src/tests/mod.rs b/via_verifier/node/via_btc_sender/src/tests/mod.rs new file mode 100644 index 000000000..bc4b47358 --- /dev/null +++ b/via_verifier/node/via_btc_sender/src/tests/mod.rs @@ -0,0 +1,3 @@ +#[cfg(test)] +mod utils; +mod vote_inscription_test; diff --git a/via_verifier/node/via_btc_sender/src/tests/utils.rs b/via_verifier/node/via_btc_sender/src/tests/utils.rs new file mode 100644 index 000000000..838e88a3b --- /dev/null +++ b/via_verifier/node/via_btc_sender/src/tests/utils.rs @@ -0,0 +1,37 @@ +use via_btc_client::inscriber::test_utils::{ + get_mock_inscriber_and_conditions, MockBitcoinOpsConfig, +}; +use via_verifier_dal::{ConnectionPool, Verifier}; +use zksync_config::{configs::via_btc_sender::ProofSendingMode, ViaBtcSenderConfig}; + +use crate::btc_inscription_manager::ViaBtcInscriptionManager; + +pub fn get_btc_sender_config( + max_aggregated_blocks_to_commit: i32, + max_aggregated_proofs_to_commit: i32, +) -> ViaBtcSenderConfig { + 
ViaBtcSenderConfig { + actor_role: "sender".to_string(), + network: "testnet".to_string(), + private_key: "0x0".to_string(), + rpc_password: "password".to_string(), + rpc_url: "password".to_string(), + rpc_user: "rpc".to_string(), + poll_interval: 5000, + da_identifier: "CELESTIA".to_string(), + max_aggregated_blocks_to_commit, + max_aggregated_proofs_to_commit, + max_txs_in_flight: 1, + proof_sending_mode: ProofSendingMode::SkipEveryProof, + block_confirmations: 0, + } +} + +pub async fn get_inscription_manager_mock( + pool: ConnectionPool, + config: ViaBtcSenderConfig, + mock_btc_ops_config: MockBitcoinOpsConfig, +) -> ViaBtcInscriptionManager { + let inscriber = get_mock_inscriber_and_conditions(mock_btc_ops_config); + Result::unwrap(ViaBtcInscriptionManager::new(inscriber, pool, config).await) +} diff --git a/via_verifier/node/via_btc_sender/src/tests/vote_inscription_test.rs b/via_verifier/node/via_btc_sender/src/tests/vote_inscription_test.rs new file mode 100644 index 000000000..f01abb668 --- /dev/null +++ b/via_verifier/node/via_btc_sender/src/tests/vote_inscription_test.rs @@ -0,0 +1,283 @@ +#[cfg(test)] +mod tests { + + use tokio::{sync::watch, time}; + use via_btc_client::{ + inscriber::test_utils::MockBitcoinOpsConfig, traits::Serializable, + types::InscriptionMessage, + }; + use via_verifier_dal::{Connection, ConnectionPool, Verifier, VerifierDal}; + use zksync_config::ViaBtcSenderConfig; + use zksync_types::{L1BatchNumber, H256}; + + use crate::{ + btc_vote_inscription::ViaVoteInscription, + tests::utils::{get_btc_sender_config, get_inscription_manager_mock}, + }; + + pub struct ViaVoteInscriptionTest { + pub aggregator: ViaVoteInscription, + pub storage: Connection<'static, Verifier>, + } + + impl ViaVoteInscriptionTest { + pub async fn new( + pool: ConnectionPool, + mut config: Option, + ) -> Self { + let storage = pool.connection().await.unwrap(); + + if config.is_none() { + config = Some(ViaBtcSenderConfig::for_tests()); + } + let aggregator = 
ViaVoteInscription::new(pool, config.unwrap()) + .await + .unwrap(); + + Self { + aggregator, + storage, + } + } + } + + // Get the current operation (commitBatch or commitProof) to execute when there is no batches. Should return 'None' + #[tokio::test] + async fn test_get_next_ready_vote_operation() { + let pool = ConnectionPool::::test_pool().await; + let mut aggregator_test = ViaVoteInscriptionTest::new(pool, None).await; + + let tx_id = H256::random(); + let _ = aggregator_test + .storage + .via_votes_dal() + .insert_votable_transaction( + 1, + tx_id, + "".to_string(), + "".to_string(), + "".to_string(), + "".to_string(), + ) + .await; + + let op = aggregator_test + .aggregator + .get_voting_operation(&mut aggregator_test.storage) + .await + .unwrap(); + assert!(op.is_none()); + + let _ = aggregator_test + .storage + .via_votes_dal() + .verify_votable_transaction(1, tx_id, true) + .await; + + let op = aggregator_test + .aggregator + .get_voting_operation(&mut aggregator_test.storage) + .await + .unwrap(); + assert!(op.is_some()); + let (l1_block_number, vote, tx_id_vec) = op.unwrap(); + assert_eq!(l1_block_number, L1BatchNumber::from(1)); + assert!(vote); + assert_eq!(H256::from_slice(&tx_id_vec), tx_id); + + let inscription = aggregator_test + .aggregator + .construct_voting_inscription_message(vote, tx_id_vec) + .unwrap(); + + aggregator_test + .aggregator + .loop_iteration(&mut aggregator_test.storage) + .await + .unwrap(); + + let inscriptions = aggregator_test + .storage + .via_btc_sender_dal() + .list_new_inscription_request(10) + .await + .unwrap(); + assert_eq!(inscriptions.len(), 1); + + assert_eq!( + InscriptionMessage::from_bytes( + inscriptions + .first() + .unwrap() + .inscription_message + .as_ref() + .unwrap() + ), + inscription + ); + } + + #[tokio::test] + async fn test_verifier_vote_inscription_manager() { + let pool = ConnectionPool::::test_pool().await; + let config = get_btc_sender_config(1, 1); + let mut mock_btc_ops_config = 
MockBitcoinOpsConfig::default(); + mock_btc_ops_config.set_block_height(1); + + let mut aggregator_test = ViaVoteInscriptionTest::new(pool.clone(), None).await; + + let tx_id = H256::random(); + + let _ = aggregator_test + .storage + .via_votes_dal() + .insert_votable_transaction( + 1, + tx_id, + "".to_string(), + "".to_string(), + "".to_string(), + "".to_string(), + ) + .await; + + let _ = aggregator_test + .storage + .via_votes_dal() + .verify_votable_transaction(1, tx_id, true) + .await; + + run_aggregator(pool.clone()).await; + run_manager(pool.clone(), config.clone(), mock_btc_ops_config.clone()).await; + + let inflight_inscriptions_before = aggregator_test + .storage + .via_btc_sender_dal() + .get_inflight_inscriptions() + .await + .unwrap(); + + assert!(!inflight_inscriptions_before.is_empty()); + + let last_inscription_history_before = aggregator_test + .storage + .via_btc_sender_dal() + .get_last_inscription_request_history(inflight_inscriptions_before[0].id) + .await + .unwrap(); + + assert!(last_inscription_history_before.is_some()); + + // Simulate the transaction is stuck for 10 blocks + mock_btc_ops_config.set_block_height(10); + + // THis should create a new inscription_history + run_manager(pool.clone(), config.clone(), mock_btc_ops_config.clone()).await; + + let last_inscription_history_after = aggregator_test + .storage + .via_btc_sender_dal() + .get_last_inscription_request_history(inflight_inscriptions_before[0].id) + .await + .unwrap(); + + assert!(last_inscription_history_after.is_some()); + + assert_ne!( + last_inscription_history_after.unwrap().id, + last_inscription_history_before.unwrap().id + ); + + // Simulate the transaction was processed in next block + mock_btc_ops_config.set_block_height(11); + mock_btc_ops_config.set_tx_confirmation(true); + + run_manager(pool.clone(), config.clone(), mock_btc_ops_config.clone()).await; + + let inflight_inscriptions_after = aggregator_test + .storage + .via_btc_sender_dal() + 
.get_inflight_inscriptions() + .await + .unwrap(); + + assert!(inflight_inscriptions_after.is_empty()); + + let last_inscription_history_after = aggregator_test + .storage + .via_btc_sender_dal() + .get_last_inscription_request_history(inflight_inscriptions_before[0].id) + .await + .unwrap(); + + assert!(last_inscription_history_after + .unwrap() + .confirmed_at + .is_some()); + + // Run the manager to make sure there is no unexpected behavior + run_manager(pool.clone(), config.clone(), mock_btc_ops_config.clone()).await; + } + + async fn run_aggregator(pool: ConnectionPool) { + { + // Create an async channel to break the while loop afer 3 seconds. + let (sender, receiver): (watch::Sender, watch::Receiver) = + watch::channel(false); + + let toggle_handler = tokio::spawn(async move { + let mut toggle = false; + + loop { + time::sleep(time::Duration::from_secs(3)).await; + toggle = !toggle; + if sender.send(toggle).is_err() { + break; + } + println!("Sent: {}", toggle); + } + }); + + let aggregator_test = ViaVoteInscriptionTest::new(pool.clone(), None).await; + + aggregator_test.aggregator.run(receiver).await.unwrap(); + if let Err(e) = toggle_handler.await { + eprintln!("Toggle task failed: {:?}", e); + } + } + } + + async fn run_manager( + pool: ConnectionPool, + config: ViaBtcSenderConfig, + mock_btc_ops_config: MockBitcoinOpsConfig, + ) { + { + // Create an async channel to break the while loop afer 3 seconds. 
+ let (sender, receiver): (watch::Sender, watch::Receiver) = + watch::channel(false); + + let toggle_handler = tokio::spawn(async move { + let mut toggle = false; + + loop { + time::sleep(time::Duration::from_secs(3)).await; + toggle = !toggle; + if sender.send(toggle).is_err() { + break; + } + println!("Sent: {}", toggle); + } + }); + + let inscription_manager_mock = + get_inscription_manager_mock(pool.clone(), config.clone(), mock_btc_ops_config) + .await; + + inscription_manager_mock.run(receiver).await.unwrap(); + if let Err(e) = toggle_handler.await { + eprintln!("Toggle task failed: {:?}", e); + } + } + } +} diff --git a/via_verifier/node/via_btc_watch/Cargo.toml b/via_verifier/node/via_btc_watch/Cargo.toml new file mode 100644 index 000000000..45489839c --- /dev/null +++ b/via_verifier/node/via_btc_watch/Cargo.toml @@ -0,0 +1,30 @@ +[package] +name = "via_verifier_btc_watch" +description = "VIA Bitcoin watcher for verifier" +version.workspace = true +edition.workspace = true +authors.workspace = true +homepage.workspace = true +repository.workspace = true +license.workspace = true +keywords.workspace = true +categories.workspace = true + +[dependencies] +vise.workspace = true +via_btc_client.workspace = true +zksync_shared_metrics.workspace = true +zksync_types.workspace = true +zksync_config.workspace = true + +tokio.workspace = true +anyhow.workspace = true +thiserror.workspace = true +async-trait.workspace = true +tracing.workspace = true +sqlx.workspace = true + +via_verifier_dal.workspace = true + + +[dev-dependencies] diff --git a/via_verifier/node/via_btc_watch/src/lib.rs b/via_verifier/node/via_btc_watch/src/lib.rs new file mode 100644 index 000000000..014d55873 --- /dev/null +++ b/via_verifier/node/via_btc_watch/src/lib.rs @@ -0,0 +1,178 @@ +mod message_processors; +mod metrics; + +use std::time::Duration; + +use anyhow::Context as _; +use tokio::sync::watch; +// re-export via_btc_client types +pub use via_btc_client::types::BitcoinNetwork; +use 
via_btc_client::{ + indexer::BitcoinInscriptionIndexer, + types::{BitcoinTxid, NodeAuth}, +}; +use via_verifier_dal::{Connection, ConnectionPool, Verifier, VerifierDal}; +use zksync_config::ActorRole; + +use self::{ + message_processors::{MessageProcessor, MessageProcessorError}, + metrics::METRICS, +}; +use crate::{ + message_processors::{L1ToL2MessageProcessor, VerifierMessageProcessor}, + metrics::ErrorType, +}; + +const DEFAULT_VOTING_THRESHOLD: f64 = 0.5; + +#[derive(Debug)] +struct BtcWatchState { + last_processed_bitcoin_block: u32, +} + +#[derive(Debug)] +pub struct VerifierBtcWatch { + indexer: BitcoinInscriptionIndexer, + poll_interval: Duration, + confirmations_for_btc_msg: u64, + last_processed_bitcoin_block: u32, + pool: ConnectionPool, + message_processors: Vec>, + btc_blocks_lag: u32, +} + +impl VerifierBtcWatch { + #[allow(clippy::too_many_arguments)] + pub async fn new( + rpc_url: &str, + network: BitcoinNetwork, + node_auth: NodeAuth, + confirmations_for_btc_msg: Option, + bootstrap_txids: Vec, + pool: ConnectionPool, + poll_interval: Duration, + btc_blocks_lag: u32, + actor_role: &ActorRole, + ) -> anyhow::Result { + let indexer = + BitcoinInscriptionIndexer::new(rpc_url, network, node_auth, bootstrap_txids).await?; + let mut storage = pool.connection_tagged("via_btc_watch").await?; + let state = Self::initialize_state(&indexer, &mut storage, btc_blocks_lag).await?; + tracing::info!("initialized state: {state:?}"); + drop(storage); + + assert_eq!(actor_role, &ActorRole::Verifier); + + let message_processors: Vec> = vec![ + Box::new(L1ToL2MessageProcessor::new(indexer.get_state().0)), + Box::new(VerifierMessageProcessor::new(DEFAULT_VOTING_THRESHOLD)), + ]; + + let confirmations_for_btc_msg = confirmations_for_btc_msg.unwrap_or(0); + + // We should not set confirmations_for_btc_msg to 0 for mainnet, + // because we need to wait for some confirmations to be sure that the transaction is included in a block. 
+ if network == BitcoinNetwork::Bitcoin && confirmations_for_btc_msg == 0 { + return Err(anyhow::anyhow!( + "confirmations_for_btc_msg cannot be 0 for mainnet" + )); + } + + Ok(Self { + indexer, + poll_interval, + confirmations_for_btc_msg, + last_processed_bitcoin_block: state.last_processed_bitcoin_block, + pool, + message_processors, + btc_blocks_lag, + }) + } + + async fn initialize_state( + indexer: &BitcoinInscriptionIndexer, + storage: &mut Connection<'_, Verifier>, + btc_blocks_lag: u32, + ) -> anyhow::Result { + let last_processed_bitcoin_block = + match storage.via_votes_dal().get_last_inserted_block().await? { + Some(block) => block.saturating_sub(1), + None => indexer + .fetch_block_height() + .await + .context("cannot get current Bitcoin block")? + .saturating_sub(btc_blocks_lag as u128) as u32, // TODO: remove cast + }; + + // TODO: get the bridge address from the database? + let (_bridge_address, ..) = indexer.get_state(); + + Ok(BtcWatchState { + last_processed_bitcoin_block, + }) + } + + pub async fn run(mut self, mut stop_receiver: watch::Receiver) -> anyhow::Result<()> { + let mut timer = tokio::time::interval(self.poll_interval); + let pool = self.pool.clone(); + + while !*stop_receiver.borrow_and_update() { + tokio::select! { + _ = timer.tick() => { /* continue iterations */ } + _ = stop_receiver.changed() => break, + } + METRICS.btc_poll.inc(); + + let mut storage = pool.connection_tagged("via_btc_watch").await?; + match self.loop_iteration(&mut storage).await { + Ok(()) => { /* everything went fine */ } + Err(MessageProcessorError::Internal(err)) => { + METRICS.errors[&ErrorType::InternalError].inc(); + tracing::error!("Internal error processing new blocks: {err:?}"); + return Err(err); + } + Err(err) => { + tracing::error!("Failed to process new blocks: {err}"); + self.last_processed_bitcoin_block = + Self::initialize_state(&self.indexer, &mut storage, self.btc_blocks_lag) + .await? 
+ .last_processed_bitcoin_block; + } + } + } + + tracing::info!("Stop signal received, via_btc_watch is shutting down"); + Ok(()) + } + + async fn loop_iteration( + &mut self, + storage: &mut Connection<'_, Verifier>, + ) -> Result<(), MessageProcessorError> { + let to_block = self + .indexer + .fetch_block_height() + .await + .map_err(|e| MessageProcessorError::Internal(anyhow::anyhow!(e.to_string())))? + .saturating_sub(self.confirmations_for_btc_msg as u128) as u32; + if to_block <= self.last_processed_bitcoin_block { + return Ok(()); + } + + let messages = self + .indexer + .process_blocks(self.last_processed_bitcoin_block + 1, to_block) + .await + .map_err(|e| MessageProcessorError::Internal(e.into()))?; + + for processor in self.message_processors.iter_mut() { + processor + .process_messages(storage, messages.clone(), &mut self.indexer) + .await + .map_err(|e| MessageProcessorError::Internal(e.into()))?; + } + + self.last_processed_bitcoin_block = to_block; + Ok(()) + } +} diff --git a/via_verifier/node/via_btc_watch/src/message_processors/l1_to_l2.rs b/via_verifier/node/via_btc_watch/src/message_processors/l1_to_l2.rs new file mode 100644 index 000000000..9c784867e --- /dev/null +++ b/via_verifier/node/via_btc_watch/src/message_processors/l1_to_l2.rs @@ -0,0 +1,205 @@ +use via_btc_client::{ + indexer::BitcoinInscriptionIndexer, + types::{BitcoinAddress, FullInscriptionMessage, L1ToL2Message}, +}; +use via_verifier_dal::{Connection, Verifier, VerifierDal}; +use zksync_types::{ + abi::L2CanonicalTransaction, + ethabi::Address, + helpers::unix_timestamp_ms, + l1::{L1Tx, OpProcessingType, PriorityQueueType}, + Execute, L1TxCommonData, PriorityOpId, H256, PRIORITY_OPERATION_L2_TX_TYPE, U256, +}; + +use crate::{ + message_processors::{MessageProcessor, MessageProcessorError}, + metrics::{ErrorType, InscriptionStage, METRICS}, +}; + +#[derive(Debug)] +pub struct L1ToL2Transaction { + priority_id: i64, + tx_id: H256, + receiver: Address, + value: i64, + calldata: 
Vec, + canonical_tx_hash: H256, +} + +#[derive(Debug)] +pub struct L1ToL2MessageProcessor { + bridge_address: BitcoinAddress, +} + +impl L1ToL2MessageProcessor { + pub fn new(bridge_address: BitcoinAddress) -> Self { + Self { bridge_address } + } +} + +#[async_trait::async_trait] +impl MessageProcessor for L1ToL2MessageProcessor { + async fn process_messages( + &mut self, + storage: &mut Connection<'_, Verifier>, + msgs: Vec, + _: &mut BitcoinInscriptionIndexer, + ) -> Result<(), MessageProcessorError> { + let mut priority_ops = Vec::new(); + let last_priority_id = storage + .via_transactions_dal() + .get_last_priority_id() + .await + .map_err(|e| MessageProcessorError::DatabaseError(e.to_string()))?; + let mut next_expected_priority_id = PriorityOpId::from(last_priority_id as u64); + + for msg in msgs { + if let FullInscriptionMessage::L1ToL2Message(l1_to_l2_msg) = msg { + if l1_to_l2_msg + .tx_outputs + .iter() + .any(|output| output.script_pubkey == self.bridge_address.script_pubkey()) + { + let mut tx_id_bytes = l1_to_l2_msg.common.tx_id.as_raw_hash()[..].to_vec(); + tx_id_bytes.reverse(); + let tx_id = H256::from_slice(&tx_id_bytes); + + if storage + .via_transactions_dal() + .transaction_exists_with_txid(&tx_id) + .await + .map_err(|e| MessageProcessorError::DatabaseError(e.to_string()))? 
+ { + tracing::debug!( + "Transaction with tx_id {} already processed, skipping", + tx_id + ); + continue; + } + let serial_id = next_expected_priority_id; + let l1_tx = self.create_l1_tx_from_message(tx_id, serial_id, &l1_to_l2_msg)?; + priority_ops.push(l1_tx); + next_expected_priority_id = next_expected_priority_id.next(); + } + } + } + + if priority_ops.is_empty() { + return Ok(()); + } + + for new_op in priority_ops { + METRICS.inscriptions_processed[&InscriptionStage::Deposit].inc(); + storage + .via_transactions_dal() + .insert_transaction( + new_op.priority_id, + new_op.tx_id, + new_op.receiver.to_string(), + new_op.value, + new_op.calldata, + new_op.canonical_tx_hash, + ) + .await + .map_err(|e| { + METRICS.errors[&ErrorType::DatabaseError].inc(); + MessageProcessorError::DatabaseError(e.to_string()) + })?; + } + + Ok(()) + } +} + +impl L1ToL2MessageProcessor { + fn create_l1_tx_from_message( + &self, + tx_id: H256, + serial_id: PriorityOpId, + msg: &L1ToL2Message, + ) -> Result { + let amount = msg.amount.to_sat() as i64; + let eth_address_l2 = msg.input.receiver_l2_address; + let calldata = msg.input.call_data.clone(); + + let mantissa = U256::from(10_000_000_000u64); // Eth 18 decimals - BTC 8 decimals + let value = U256::from(amount) * mantissa; + let max_fee_per_gas = U256::from(100_000_000u64); + let gas_limit = U256::from(1_000_000u64); + let gas_per_pubdata_limit = U256::from(800u64); + + let mut l1_tx = L1Tx { + execute: Execute { + contract_address: eth_address_l2, + calldata: calldata.clone(), + value: U256::zero(), + factory_deps: vec![], + }, + common_data: L1TxCommonData { + sender: eth_address_l2, + serial_id, + layer_2_tip_fee: U256::zero(), + full_fee: U256::zero(), + max_fee_per_gas, + gas_limit, + gas_per_pubdata_limit, + op_processing_type: OpProcessingType::Common, + priority_queue_type: PriorityQueueType::Deque, + canonical_tx_hash: H256::zero(), + to_mint: value, + refund_recipient: eth_address_l2, + eth_block: 
msg.common.block_height as u64, + }, + received_timestamp_ms: unix_timestamp_ms(), + }; + + let l2_transaction = L2CanonicalTransaction { + tx_type: PRIORITY_OPERATION_L2_TX_TYPE.into(), + from: address_to_u256(&l1_tx.common_data.sender), + to: address_to_u256(&l1_tx.execute.contract_address), + gas_limit: l1_tx.common_data.gas_limit, + gas_per_pubdata_byte_limit: l1_tx.common_data.gas_per_pubdata_limit, + max_fee_per_gas: l1_tx.common_data.max_fee_per_gas, + max_priority_fee_per_gas: U256::zero(), + paymaster: U256::zero(), + nonce: l1_tx.common_data.serial_id.0.into(), + value: l1_tx.execute.value, + reserved: [ + l1_tx.common_data.to_mint, + address_to_u256(&l1_tx.common_data.refund_recipient), + U256::zero(), + U256::zero(), + ], + data: l1_tx.execute.calldata.clone(), + signature: vec![], + factory_deps: vec![], + paymaster_input: vec![], + reserved_dynamic: vec![], + }; + + let canonical_tx_hash = l2_transaction.hash(); + + l1_tx.common_data.canonical_tx_hash = canonical_tx_hash; + + tracing::info!( + "Created L1 transaction with serial id {:?} (block {}) with deposit amount {} and tx hash {}", + l1_tx.common_data.serial_id, + l1_tx.common_data.eth_block, + amount, + l1_tx.common_data.canonical_tx_hash, + ); + + Ok(L1ToL2Transaction { + priority_id: serial_id.0 as i64, + tx_id, + receiver: eth_address_l2, + value: amount, + calldata, + canonical_tx_hash: l1_tx.common_data.canonical_tx_hash, + }) + } +} + +fn address_to_u256(address: &Address) -> U256 { + U256::from_big_endian(&address.0) +} diff --git a/via_verifier/node/via_btc_watch/src/message_processors/mod.rs b/via_verifier/node/via_btc_watch/src/message_processors/mod.rs new file mode 100644 index 000000000..2e38a9801 --- /dev/null +++ b/via_verifier/node/via_btc_watch/src/message_processors/mod.rs @@ -0,0 +1,41 @@ +pub(crate) use l1_to_l2::L1ToL2MessageProcessor; +pub(crate) use verifier::VerifierMessageProcessor; +use via_btc_client::{ + indexer::BitcoinInscriptionIndexer, + types::{BitcoinTxid, 
FullInscriptionMessage, IndexerError}, +}; +use via_verifier_dal::{Connection, Verifier}; +use zksync_types::H256; + +mod l1_to_l2; +mod verifier; + +#[derive(Debug, thiserror::Error)] +pub(super) enum MessageProcessorError { + #[error("internal processing error: {0:?}")] + Internal(#[from] anyhow::Error), + #[error("database error: {0}")] + DatabaseError(String), +} + +impl From for MessageProcessorError { + fn from(err: IndexerError) -> Self { + MessageProcessorError::Internal(err.into()) + } +} + +#[async_trait::async_trait] +pub(super) trait MessageProcessor: 'static + std::fmt::Debug + Send + Sync { + async fn process_messages( + &mut self, + storage: &mut Connection<'_, Verifier>, + msgs: Vec, + indexer: &mut BitcoinInscriptionIndexer, + ) -> Result<(), MessageProcessorError>; +} + +pub(crate) fn convert_txid_to_h256(txid: BitcoinTxid) -> H256 { + let mut tx_id_bytes = txid.as_raw_hash()[..].to_vec(); + tx_id_bytes.reverse(); + H256::from_slice(&tx_id_bytes) +} diff --git a/via_verifier/node/via_btc_watch/src/message_processors/verifier.rs b/via_verifier/node/via_btc_watch/src/message_processors/verifier.rs new file mode 100644 index 000000000..56055469f --- /dev/null +++ b/via_verifier/node/via_btc_watch/src/message_processors/verifier.rs @@ -0,0 +1,148 @@ +use via_btc_client::{indexer::BitcoinInscriptionIndexer, types::FullInscriptionMessage}; +use via_verifier_dal::{Connection, Verifier, VerifierDal}; + +use super::{convert_txid_to_h256, MessageProcessor, MessageProcessorError}; + +#[derive(Debug)] +pub struct VerifierMessageProcessor { + threshold: f64, +} + +impl VerifierMessageProcessor { + pub fn new(threshold: f64) -> Self { + Self { threshold } + } +} + +#[async_trait::async_trait] +impl MessageProcessor for VerifierMessageProcessor { + async fn process_messages( + &mut self, + storage: &mut Connection<'_, Verifier>, + msgs: Vec, + indexer: &mut BitcoinInscriptionIndexer, + ) -> Result<(), MessageProcessorError> { + for msg in msgs { + match msg { + 
ref f @ FullInscriptionMessage::ProofDAReference(ref proof_msg) => { + if let Some(l1_batch_number) = indexer.get_l1_batch_number(f).await { + let mut votes_dal = storage.via_votes_dal(); + + let last_inserted_block = votes_dal + .get_last_inserted_block() + .await + .map_err(|e| MessageProcessorError::DatabaseError(e.to_string()))? + .unwrap_or(0); + + if l1_batch_number.0 != last_inserted_block + 1 { + tracing::warn!( + "Skipping ProofDAReference message with l1_batch_number: {:?}. Last inserted block: {:?}", + l1_batch_number, last_inserted_block + ); + continue; + } + + let tx_id = convert_txid_to_h256(proof_msg.common.tx_id); + + let pubdata_msgs = indexer + .parse_transaction(&proof_msg.input.l1_batch_reveal_txid) + .await?; + + if pubdata_msgs.len() != 1 { + return Err(MessageProcessorError::Internal(anyhow::Error::msg( + "Invalid pubdata msg lenght", + ))); + } + + let inscription = pubdata_msgs[0].clone(); + + let l1_batch_da_ref_inscription = match inscription { + FullInscriptionMessage::L1BatchDAReference(da_msg) => da_msg, + _ => { + return Err(MessageProcessorError::Internal(anyhow::Error::msg( + "Invalid inscription type", + ))) + } + }; + + votes_dal + .insert_votable_transaction( + l1_batch_number.0, + tx_id, + proof_msg.input.da_identifier.clone(), + proof_msg.input.blob_id.clone(), + proof_msg.input.l1_batch_reveal_txid.to_string(), + l1_batch_da_ref_inscription.input.blob_id, + ) + .await + .map_err(|e| MessageProcessorError::DatabaseError(e.to_string()))?; + } else { + tracing::warn!( + "L1BatchNumber not found for ProofDAReference message : {:?}", + proof_msg + ); + } + } + ref f @ FullInscriptionMessage::ValidatorAttestation(ref attestation_msg) => { + if let Some(l1_batch_number) = indexer.get_l1_batch_number(f).await { + let mut votes_dal = storage.via_votes_dal(); + + let reference_txid = + convert_txid_to_h256(attestation_msg.input.reference_txid); + let tx_id = convert_txid_to_h256(attestation_msg.common.tx_id); + + // Vote = true if 
attestation_msg.input.attestation == Vote::Ok + let is_ok = matches!( + attestation_msg.input.attestation, + via_btc_client::types::Vote::Ok + ); + + let p2wpkh_address = attestation_msg + .common + .p2wpkh_address + .as_ref() + .expect("ValidatorAttestation message must have a p2wpkh address"); + votes_dal + .insert_vote( + l1_batch_number.0, + reference_txid, + &p2wpkh_address.to_string(), + is_ok, + ) + .await + .map_err(|e| MessageProcessorError::DatabaseError(e.to_string()))?; + + // Check finalization + if votes_dal + .finalize_transaction_if_needed( + l1_batch_number.0, + reference_txid, + self.threshold, + indexer.get_number_of_verifiers(), + ) + .await + .map_err(|e| MessageProcessorError::DatabaseError(e.to_string()))? + { + tracing::info!( + "Finalizing transaction with tx_id: {:?} and block number: {:?}", + tx_id, + l1_batch_number + ); + } + } + } + // bootstrapping phase is already covered + FullInscriptionMessage::ProposeSequencer(_) + | FullInscriptionMessage::SystemBootstrapping(_) => { + // do nothing + } + // Non-votable messages like L1BatchDAReference or L1ToL2Message are ignored by this processor + FullInscriptionMessage::L1ToL2Message(_) + | FullInscriptionMessage::L1BatchDAReference(_) => { + // do nothing + } + } + } + Ok(()) + } +} diff --git a/via_verifier/node/via_btc_watch/src/metrics.rs b/via_verifier/node/via_btc_watch/src/metrics.rs new file mode 100644 index 000000000..e7174fb2f --- /dev/null +++ b/via_verifier/node/via_btc_watch/src/metrics.rs @@ -0,0 +1,30 @@ +use vise::{Counter, EncodeLabelSet, EncodeLabelValue, Family, Metrics}; + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, EncodeLabelValue, EncodeLabelSet)] +#[metrics(label = "stage", rename_all = "snake_case")] +pub enum InscriptionStage { + Deposit, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, EncodeLabelValue, EncodeLabelSet)] +#[metrics(label = "error_type", rename_all = "snake_case")] +pub enum ErrorType { + InternalError, + DatabaseError, +} + 
+#[derive(Debug, Metrics)] +#[metrics(prefix = "via_verifier_btc_watch")] +pub struct ViaVerifierBtcWatcherMetrics { + /// Number of times Bitcoin was polled. + pub btc_poll: Counter, + + /// Number of inscriptions processed, labeled by type. + pub inscriptions_processed: Family, + + /// Number of errors encountered, labeled by error type. + pub errors: Family, +} + +#[vise::register] +pub static METRICS: vise::Global = vise::Global::new(); diff --git a/via_verifier/node/via_zk_verifier/Cargo.toml b/via_verifier/node/via_zk_verifier/Cargo.toml new file mode 100644 index 000000000..cead363a8 --- /dev/null +++ b/via_verifier/node/via_zk_verifier/Cargo.toml @@ -0,0 +1,36 @@ +[package] +name = "via_zk_verifier" +description = "VIA zk verifier" +version.workspace = true +edition.workspace = true +authors.workspace = true +homepage.workspace = true +repository.workspace = true +license.workspace = true +keywords.workspace = true +categories.workspace = true + +[dependencies] +vise.workspace = true +via_btc_client.workspace = true +zksync_shared_metrics.workspace = true +zksync_dal.workspace = true +zksync_types.workspace = true +zksync_config.workspace = true +zksync_da_client.workspace = true +zksync_prover_interface.workspace = true +via_da_client.workspace = true + +tokio.workspace = true +anyhow.workspace = true +bincode.workspace = true +thiserror.workspace = true +async-trait.workspace = true +tracing.workspace = true +sqlx.workspace = true +serde.workspace = true + +via_verification.workspace = true +via_verifier_dal.workspace = true + +[dev-dependencies] diff --git a/via_verifier/node/via_zk_verifier/src/lib.rs b/via_verifier/node/via_zk_verifier/src/lib.rs new file mode 100644 index 000000000..2b81621d3 --- /dev/null +++ b/via_verifier/node/via_zk_verifier/src/lib.rs @@ -0,0 +1,333 @@ +use std::str::FromStr; + +use anyhow::Context; +use serde::{Deserialize, Serialize}; +use tokio::sync::watch; +use via_btc_client::{ + indexer::BitcoinInscriptionIndexer, + 
types::{ + BitcoinNetwork, BitcoinTxid, FullInscriptionMessage, L1BatchDAReference, NodeAuth, + ProofDAReference, + }, + utils::bytes_to_txid, +}; +use via_da_client::{pubdata::Pubdata, types::L2_BOOTLOADER_CONTRACT_ADDR}; +use via_verification::proof::{ + Bn256, ProofTrait, ViaZKProof, ZkSyncProof, ZkSyncSnarkWrapperCircuit, +}; +use via_verifier_dal::{Connection, ConnectionPool, Verifier, VerifierDal}; +use zksync_config::ViaVerifierConfig; +use zksync_da_client::{types::InclusionData, DataAvailabilityClient}; +use zksync_types::{ + commitment::L1BatchWithMetadata, protocol_version::ProtocolSemanticVersion, H160, H256, +}; + +/// Copy of `zksync_l1_contract_interface::i_executor::methods::ProveBatches` +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ProveBatches { + pub prev_l1_batch: L1BatchWithMetadata, + pub l1_batches: Vec, + pub proofs: Vec, + pub should_verify: bool, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct L1BatchProofForL1 { + pub aggregation_result_coords: [[u8; 32]; 4], + pub scheduler_proof: ZkSyncProof, + pub protocol_version: ProtocolSemanticVersion, +} + +#[derive(Debug)] +pub struct ViaVerifier { + pool: ConnectionPool, + da_client: Box, + indexer: BitcoinInscriptionIndexer, + config: ViaVerifierConfig, +} + +impl ViaVerifier { + pub async fn new( + rpc_url: &str, + network: BitcoinNetwork, + node_auth: NodeAuth, + bootstrap_txids: Vec, + pool: ConnectionPool, + client: Box, + config: ViaVerifierConfig, + ) -> anyhow::Result { + let indexer = + BitcoinInscriptionIndexer::new(rpc_url, network, node_auth, bootstrap_txids).await?; + Ok(Self { + pool, + da_client: client, + indexer, + config, + }) + } + + pub async fn run(mut self, mut stop_receiver: watch::Receiver) -> anyhow::Result<()> { + let mut timer = tokio::time::interval(self.config.polling_interval()); + let pool = self.pool.clone(); + + while !*stop_receiver.borrow_and_update() { + tokio::select! 
{ + _ = timer.tick() => { /* continue iterations */ } + _ = stop_receiver.changed() => break, + } + + let mut storage = pool.connection_tagged("via_zk_verifier").await?; + match self.loop_iteration(&mut storage).await { + Ok(()) => {} + Err(err) => tracing::error!("Failed to process via_zk_verifier: {err}"), + } + } + + tracing::info!("Stop signal received, via_zk_verifier is shutting down"); + Ok(()) + } + + pub async fn loop_iteration( + &mut self, + storage: &mut Connection<'_, Verifier>, + ) -> anyhow::Result<()> { + if let Some((l1_batch_number, mut raw_tx_id)) = storage + .via_votes_dal() + .get_first_not_verified_block() + .await? + { + let db_raw_tx_id = H256::from_slice(&raw_tx_id); + tracing::info!("New non executed block ready to be processed"); + + raw_tx_id.reverse(); + let proof_txid = bytes_to_txid(&raw_tx_id).context("Failed to parse tx_id")?; + tracing::info!("trying to get proof_txid: {}", proof_txid); + let proof_msgs = self.indexer.parse_transaction(&proof_txid).await?; + let proof_msg = self.expect_single_msg(&proof_msgs, "ProofDAReference")?; + + let proof_da = match proof_msg { + FullInscriptionMessage::ProofDAReference(ref a) => a, + _ => { + tracing::error!("Expected ProofDAReference, got something else"); + return Ok(()); + } + }; + + let (proof_blob, batch_tx_id) = self.process_proof_da_reference(proof_da).await?; + + let batch_msgs = self.indexer.parse_transaction(&batch_tx_id).await?; + let batch_msg = self.expect_single_msg(&batch_msgs, "L1BatchDAReference")?; + + let batch_da = match batch_msg { + FullInscriptionMessage::L1BatchDAReference(ref a) => a, + _ => { + tracing::error!("Expected L1BatchDAReference, got something else"); + return Ok(()); + } + }; + + tracing::info!( + "Fetch l1 batch pubdata for blob id {}", + batch_da.input.blob_id + ); + + let (batch_blob, batch_hash) = self.process_batch_da_reference(batch_da).await?; + + let mut is_verified = self + .verify_op_priority_id(storage, l1_batch_number, &batch_blob.data) + 
.await?; + + if is_verified { + is_verified = self.verify_proof(batch_hash, &proof_blob.data).await?; + } + + storage + .via_votes_dal() + .verify_votable_transaction(l1_batch_number as u32, db_raw_tx_id, is_verified) + .await?; + } + + Ok(()) + } + + pub async fn verify_op_priority_id( + &mut self, + storage: &mut Connection<'_, Verifier>, + l1_batch_number: i64, + pubdata: &[u8], + ) -> anyhow::Result { + let pubdata = Pubdata::decode_pubdata(pubdata.to_vec())?; + let mut deposit_logs = Vec::new(); + + for log in &pubdata.user_logs { + if log.sender == H160::from_str(L2_BOOTLOADER_CONTRACT_ADDR)? { + deposit_logs.push(log); + } + } + + let txs = storage + .via_transactions_dal() + .list_transactions_not_processed(deposit_logs.len() as i64) + .await?; + + if txs.len() != deposit_logs.len() { + tracing::error!( + "Verifier did not index all the deposits, expected {} found {}", + deposit_logs.len(), + txs.len() + ); + return Ok(false); + } + + if txs.is_empty() { + tracing::error!("There is no transactions to validate the op priority id",); + return Ok(true); + } + + for (raw_tx_id, deposit_log) in txs.iter().zip(deposit_logs.iter()) { + let db_raw_tx_id = H256::from_slice(raw_tx_id); + if db_raw_tx_id != deposit_log.key { + tracing::error!( + "Sequencer did not process the deposit transactions in series for l1 batch {}, \ + invalid priority id for transaction hash {}", + l1_batch_number, + db_raw_tx_id + ); + return Ok(false); + } + + let status = !deposit_log.value.is_zero(); + storage + .via_transactions_dal() + .update_transaction(&deposit_log.key, status) + .await?; + } + + tracing::info!( + "Priority_id verified successfuly for l1 batch {}", + l1_batch_number + ); + + Ok(true) + } + + /// Helper to ensure there's exactly one message in the array, or log an error. 
+ fn expect_single_msg<'a>( + &self, + msgs: &'a [FullInscriptionMessage], + expected_type: &str, + ) -> anyhow::Result<&'a FullInscriptionMessage> { + match msgs.len() { + 1 => Ok(&msgs[0]), + n => { + tracing::error!("Expected 1 {expected_type} message, got {n}"); + Err(anyhow::anyhow!("Expected exactly 1 message, got {n}")) + } + } + } + + /// Processes a `ProofDAReference` message by retrieving the DA blob + async fn process_proof_da_reference( + &mut self, + proof_msg: &ProofDAReference, + ) -> anyhow::Result<(InclusionData, BitcoinTxid)> { + let blob = self + .da_client + .get_inclusion_data(&proof_msg.input.blob_id) + .await + .context("Failed to get blob")? + .ok_or_else(|| anyhow::anyhow!("Blob not found"))?; + let batch_tx_id = proof_msg.input.l1_batch_reveal_txid; + + Ok((blob, batch_tx_id)) + } + + /// Processes an `L1BatchDAReference` message by retrieving the DA blob + async fn process_batch_da_reference( + &mut self, + batch_msg: &L1BatchDAReference, + ) -> anyhow::Result<(InclusionData, H256)> { + let blob = self + .da_client + .get_inclusion_data(&batch_msg.input.blob_id) + .await + .context("Failed to get blob")? + .ok_or_else(|| anyhow::anyhow!("Blob not found"))?; + let hash = batch_msg.input.l1_batch_hash; + + Ok((blob, hash)) + } + + async fn verify_proof(&self, batch_hash: H256, proof_bytes: &[u8]) -> anyhow::Result { + tracing::info!( + ?batch_hash, + proof_len = proof_bytes.len(), + "Verifying proof" + ); + let proof_data: ProveBatches = bincode::deserialize(proof_bytes)?; + + if proof_data.l1_batches.len() != 1 { + tracing::error!( + "Expected exactly one L1Batch and one proof, got {} and {}", + proof_data.l1_batches.len(), + proof_data.proofs.len() + ); + return Ok(false); + } + + // TODO: decide if we need to verify the batch data (already have batch data from ProofDAReference inscription) + // let batch: PubData... 
= bincode::deserialize(&batch) + // .context("Failed to deserialize L1BatchWithMetadata")?; + + let protocol_version = proof_data.l1_batches[0] + .header + .protocol_version + .unwrap() + .to_string(); + + if !proof_data.should_verify { + tracing::info!( + "Proof verification is disabled for proof with batch number : {:?}", + proof_data.l1_batches[0].header.number + ); + tracing::info!( + "Verifying proof with protocol version: {}", + protocol_version + ); + tracing::info!("Skipping verification"); + Ok(true) + } else { + if proof_data.proofs.len() != 1 { + tracing::error!( + "Expected exactly one proof, got {}", + proof_data.proofs.len() + ); + return Ok(false); + } + + let (prev_commitment, curr_commitment) = ( + proof_data.prev_l1_batch.metadata.commitment, + proof_data.l1_batches[0].metadata.commitment, + ); + let mut proof = proof_data.proofs[0].scheduler_proof.clone(); + + // Put correct inputs + proof.inputs = via_verification::public_inputs::generate_inputs( + &prev_commitment, + &curr_commitment, + ); + + // Verify the proof + let via_proof = ViaZKProof { proof }; + let vk_inner = + via_verification::utils::load_verification_key_without_l1_check(protocol_version) + .await?; + + let is_valid = via_proof.verify(vk_inner)?; + + tracing::info!("Proof verification result: {}", is_valid); + + Ok(is_valid) + } + } +} diff --git a/via_verifier/node/withdrawal_service/Cargo.toml b/via_verifier/node/withdrawal_service/Cargo.toml new file mode 100644 index 000000000..883c378ff --- /dev/null +++ b/via_verifier/node/withdrawal_service/Cargo.toml @@ -0,0 +1,39 @@ +[package] +name = "via_withdrawal_service" +description = "Via Withdrawal Service" +version.workspace = true +edition.workspace = true +authors = ["Via Network"] + + +[dependencies] +hex.workspace = true +uuid = { version = "1.3", features = ["v4"] } +zksync_config.workspace = true +via_verifier_dal.workspace=true +zksync_types.workspace = true +vise.workspace = true +via_btc_client.workspace = true 
+via_musig2.workspace = true +reqwest.workspace = true +via_withdrawal_client.workspace = true + +anyhow.workspace = true +axum.workspace = true +tokio = { workspace = true, features = ["time"] } +tower-http = { workspace = true, features = ["cors"] } +tower = { workspace = true } +tracing.workspace = true +serde.workspace = true +serde_json.workspace = true +bitcoin = { version = "0.32.2", features = ["serde"] } +secp256k1_musig2 = { package = "secp256k1", version = "0.30.0", features = [ + "rand", +] } +musig2 = "0.2.0" +base64 = "0.21" +thiserror = "1.0.57" +sha2.workspace = true +chrono.workspace = true + +[dev-dependencies] diff --git a/via_verifier/node/withdrawal_service/src/auth.rs b/via_verifier/node/withdrawal_service/src/auth.rs new file mode 100644 index 000000000..6bf97744e --- /dev/null +++ b/via_verifier/node/withdrawal_service/src/auth.rs @@ -0,0 +1,76 @@ +use anyhow::Context; +use base64::Engine; +use bitcoin::secp256k1::{Message, PublicKey, Secp256k1, SecretKey}; +use serde::Serialize; +use sha2::{Digest, Sha256}; + +/// Signs a request payload using the verifier's private key. +pub fn sign_request(payload: &T, secret_key: &SecretKey) -> anyhow::Result { + let secp = Secp256k1::new(); + + // Serialize and hash the payload. + let payload_bytes = serde_json::to_vec(payload).context("Failed to serialize payload")?; + let hash = Sha256::digest(&payload_bytes); + let message = Message::from_digest_slice(hash.as_ref()).context("Hash is not 32 bytes")?; + + // Sign the message. + let sig = secp.sign_ecdsa(&message, secret_key); + // Encode the compact 64-byte signature in base64. + let sig_bytes = sig.serialize_compact(); + Ok(base64::engine::general_purpose::STANDARD.encode(sig_bytes)) +} + +/// Verifies a request signature using the verifier's public key. +pub fn verify_signature( + payload: &T, + signature_b64: &str, + public_key: &PublicKey, +) -> anyhow::Result { + let secp = Secp256k1::new(); + + // Decode the base64 signature. 
+ let sig_bytes = base64::engine::general_purpose::STANDARD + .decode(signature_b64) + .context("Failed to decode base64 signature")?; + let sig = bitcoin::secp256k1::ecdsa::Signature::from_compact(&sig_bytes) + .context("Failed to parse signature from compact form")?; + + // Serialize and hash the payload. + let payload_bytes = serde_json::to_vec(payload).context("Failed to serialize payload")?; + let hash = Sha256::digest(&payload_bytes); + let message = Message::from_digest_slice(hash.as_ref()).context("Hash is not 32 bytes")?; + + // Verify the signature. + Ok(secp.verify_ecdsa(&message, &sig, public_key).is_ok()) +} + +#[cfg(test)] +mod tests { + use bitcoin::secp256k1::rand::rngs::OsRng; + use serde_json::json; + + use super::*; + + #[test] + fn test_signature_verification() { + let secp = Secp256k1::new(); + let (secret_key, public_key) = secp.generate_keypair(&mut OsRng); + + let payload = json!({ + "test": "data", + "number": 123 + }); + + // Sign the payload. + let signature = sign_request(&payload, &secret_key).expect("Signature generation failed"); + + // Verify the signature. + assert!(verify_signature(&payload, &signature, &public_key) + .expect("Signature verification failed")); + + // Verify that a wrong public key does not verify. 
+ let (_, wrong_public_key) = secp.generate_keypair(&mut OsRng); + assert!(!verify_signature(&payload, &signature, &wrong_public_key) + .expect("Verification with wrong key unexpectedly succeeded")); + } +} diff --git a/via_verifier/node/withdrawal_service/src/coordinator/api.rs b/via_verifier/node/withdrawal_service/src/coordinator/api.rs new file mode 100644 index 000000000..9eaec8b7f --- /dev/null +++ b/via_verifier/node/withdrawal_service/src/coordinator/api.rs @@ -0,0 +1,40 @@ +use anyhow::Context as _; +use tokio::sync::watch; +use via_btc_client::withdrawal_builder::WithdrawalBuilder; +use via_verifier_dal::{ConnectionPool, Verifier}; +use via_withdrawal_client::client::WithdrawalClient; +use zksync_config::configs::via_verifier::ViaVerifierConfig; + +use crate::coordinator::api_decl::RestApi; + +pub async fn start_coordinator_server( + config: ViaVerifierConfig, + master_connection_pool: ConnectionPool, + withdrawal_builder: WithdrawalBuilder, + withdrawal_client: WithdrawalClient, + mut stop_receiver: watch::Receiver, +) -> anyhow::Result<()> { + let bind_address = config.bind_addr(); + let api = RestApi::new( + config, + master_connection_pool, + withdrawal_builder, + withdrawal_client, + )? 
+ .into_router(); + + let listener = tokio::net::TcpListener::bind(bind_address) + .await + .context("Cannot bind to the specified address")?; + axum::serve(listener, api) + .with_graceful_shutdown(async move { + if stop_receiver.changed().await.is_err() { + tracing::warn!("Stop signal sender for coordinator server was dropped without sending a signal"); + } + tracing::info!("Stop signal received, coordinator server is shutting down"); + }) + .await + .context("coordinator handler server failed")?; + tracing::info!("coordinator handler server shut down"); + Ok(()) +} diff --git a/via_verifier/node/withdrawal_service/src/coordinator/api_decl.rs b/via_verifier/node/withdrawal_service/src/coordinator/api_decl.rs new file mode 100644 index 000000000..d33353a08 --- /dev/null +++ b/via_verifier/node/withdrawal_service/src/coordinator/api_decl.rs @@ -0,0 +1,77 @@ +use std::{str::FromStr, sync::Arc}; + +use axum::middleware; +use tokio::sync::RwLock; +use tower_http::cors::CorsLayer; +use via_btc_client::withdrawal_builder::WithdrawalBuilder; +use via_verifier_dal::{ConnectionPool, Verifier}; +use via_withdrawal_client::client::WithdrawalClient; +use zksync_config::configs::via_verifier::ViaVerifierConfig; + +use crate::{ + coordinator::auth_middleware, + types::{SigningSession, ViaWithdrawalState}, +}; + +pub struct RestApi { + pub master_connection_pool: ConnectionPool, + pub state: ViaWithdrawalState, + pub withdrawal_builder: WithdrawalBuilder, + pub withdrawal_client: WithdrawalClient, +} + +impl RestApi { + pub fn new( + config: ViaVerifierConfig, + master_connection_pool: ConnectionPool, + withdrawal_builder: WithdrawalBuilder, + withdrawal_client: WithdrawalClient, + ) -> anyhow::Result { + let state = ViaWithdrawalState { + signing_session: Arc::new(RwLock::new(SigningSession::default())), + required_signers: config.required_signers, + verifiers_pub_keys: config + .verifiers_pub_keys_str + .iter() + .map(|s| bitcoin::secp256k1::PublicKey::from_str(s).unwrap()) + 
.collect(), + }; + Ok(Self { + master_connection_pool, + state, + withdrawal_builder, + withdrawal_client, + }) + } + + pub fn into_router(self) -> axum::Router<()> { + // Wrap the API state in an Arc. + let shared_state = Arc::new(self); + + // Create middleware layers using from_fn_with_state. + let auth_mw = + middleware::from_fn_with_state(shared_state.clone(), auth_middleware::auth_middleware); + let body_mw = + middleware::from_fn_with_state(shared_state.clone(), auth_middleware::extract_body); + + let router = axum::Router::new() + .route("/new", axum::routing::post(Self::new_session)) + .route("/", axum::routing::get(Self::get_session)) + .route( + "/signature", + axum::routing::post(Self::submit_partial_signature), + ) + .route( + "/signature", + axum::routing::get(Self::get_submitted_signatures), + ) + .route("/nonce", axum::routing::post(Self::submit_nonce)) + .route("/nonce", axum::routing::get(Self::get_nonces)) + .route_layer(body_mw) + .route_layer(auth_mw) + .with_state(shared_state.clone()) + .layer(CorsLayer::permissive()); + + axum::Router::new().nest("/session", router) + } +} diff --git a/via_verifier/node/withdrawal_service/src/coordinator/api_impl.rs b/via_verifier/node/withdrawal_service/src/coordinator/api_impl.rs new file mode 100644 index 000000000..64e241fab --- /dev/null +++ b/via_verifier/node/withdrawal_service/src/coordinator/api_impl.rs @@ -0,0 +1,268 @@ +use std::{collections::HashMap, sync::Arc}; + +use anyhow::Context; +use axum::{extract::State, response::Response, Json}; +use base64::Engine; +use bitcoin::{ + hashes::Hash, + sighash::{Prevouts, SighashCache}, + TapSighashType, Txid, +}; +use musig2::{BinaryEncoding, PubNonce}; +use serde::Serialize; +use tracing::instrument; +use via_btc_client::{traits::Serializable, withdrawal_builder::WithdrawalRequest}; +use via_verifier_dal::VerifierDal; +use zksync_types::H256; + +use super::{api_decl::RestApi, error::ApiError}; +use crate::{ + types::{NoncePair, PartialSignaturePair, 
SigningSession, SigningSessionResponse}, + utils::{decode_signature, encode_signature, h256_to_txid}, +}; + +fn ok_json(data: T) -> Response { + Response::builder() + .status(axum::http::StatusCode::OK) + .body(serde_json::to_string(&data).expect("Failed to serialize")) + .unwrap() +} + +impl RestApi { + #[instrument(skip(self_))] + pub async fn new_session( + State(self_): State>, + ) -> anyhow::Result, ApiError> { + let mut l1_block_number: i64; + + { + let signing_session = self_.state.signing_session.read().await; + l1_block_number = signing_session.l1_block_number; + } + + if l1_block_number != 0 { + let withdrawal_tx = self_ + .master_connection_pool + .connection_tagged("coordinator api") + .await? + .via_votes_dal() + .get_vote_transaction_withdrawal_tx(l1_block_number) + .await?; + + if withdrawal_tx.is_none() { + // The withdrawal process is in progress + return Ok(ok_json(l1_block_number)); + } + } + + // Get the l1 batches finilized but withdrawals not yet processed + let blocks = self_ + .master_connection_pool + .connection_tagged("coordinator") + .await? 
+ .via_votes_dal() + .get_finalized_blocks_and_non_processed_withdrawals() + .await?; + + if blocks.is_empty() { + if l1_block_number != 0 { + self_.reset_session().await; + } + return Ok(ok_json("No block found for processing withdrawals")); + } + + let mut withdrawals_to_process: Vec = Vec::new(); + let mut proof_txid = Txid::all_zeros(); + + tracing::info!( + "Found {} finalized unprocessed L1 batch(es) with withdrawals waiting to be processed", + blocks.len() + ); + + for (block_number, blob_id, proof_tx_id) in blocks.iter() { + let withdrawals: Vec = self_ + .withdrawal_client + .get_withdrawals(blob_id) + .await + .context("Error to get withdrawals from DA")?; + + if !withdrawals.is_empty() { + proof_txid = h256_to_txid(proof_tx_id).context("Invalid proof tx id")?; + l1_block_number = *block_number; + withdrawals_to_process = withdrawals; + tracing::info!( + "L1 batch {} includes withdrawal requests {}", + block_number.clone(), + withdrawals_to_process.len() + ); + break; + } else { + // If there is no withdrawals to process in a batch, update the status and mark it as processed + self_ + .master_connection_pool + .connection_tagged("coordinator") + .await? 
+ .via_votes_dal() + .mark_vote_transaction_as_processed_withdrawals(H256::zero(), *block_number) + .await + .context("Error to mark a vote transaction as processed")?; + tracing::info!( + "There is no withdrawal to process in l1 batch {}", + block_number.clone() + ); + } + } + + if withdrawals_to_process.is_empty() { + self_.reset_session().await; + return Ok(ok_json("There are no withdrawals to process")); + } + + tracing::info!( + "Found withdrawals in the l1 batch {}, total withdrawals: {}", + l1_block_number, + withdrawals_to_process.len() + ); + + let unsigned_tx_result = self_ + .withdrawal_builder + .create_unsigned_withdrawal_tx(withdrawals_to_process, proof_txid) + .await; + + let unsigned_tx = match unsigned_tx_result { + Ok(unsigned_tx) => unsigned_tx, + Err(err) => { + tracing::error!("Invalid unsigned tx for batch {l1_block_number}: {err}"); + return Err(ApiError::InternalServerError( + "Invalid unsigned tx".to_string(), + )); + } + }; + + let mut sighash_cache = SighashCache::new(&unsigned_tx.tx); + let sighash_type = TapSighashType::All; + let mut txout_list = Vec::with_capacity(unsigned_tx.utxos.len()); + + for (_, txout) in unsigned_tx.utxos.clone() { + txout_list.push(txout); + } + let sighash = sighash_cache + .taproot_key_spend_signature_hash(0, &Prevouts::All(&txout_list), sighash_type) + .context("Error taproot_key_spend_signature_hash")?; + + let new_sesssion = SigningSession { + l1_block_number, + received_nonces: HashMap::new(), + received_sigs: HashMap::new(), + message: sighash.to_byte_array().to_vec(), + unsigned_tx: Some(unsigned_tx), + }; + + { + let mut session = self_.state.signing_session.write().await; + *session = new_sesssion; + } + + tracing::info!("New session created for l1 batch {}", l1_block_number); + + return Ok(ok_json(l1_block_number)); + } + + #[instrument(skip(self_))] + pub async fn get_session(State(self_): State>) -> Response { + let session = self_.state.signing_session.read().await; + + let mut unsigned_tx_bytes 
= Vec::new(); + if let Some(unsigned_tx) = self_.state.signing_session.read().await.unsigned_tx.clone() { + unsigned_tx_bytes = unsigned_tx.to_bytes(); + }; + + return ok_json(SigningSessionResponse { + l1_block_number: session.l1_block_number, + message_to_sign: hex::encode(&session.message), + required_signers: self_.state.required_signers, + received_nonces: session.received_nonces.len(), + received_partial_signatures: session.received_sigs.len(), + unsigned_tx: unsigned_tx_bytes, + }); + } + + #[instrument(skip(self_))] + pub async fn submit_nonce( + State(self_): State>, + Json(nonce_pair): Json, + ) -> anyhow::Result, ApiError> { + let decoded_nonce = + match base64::engine::general_purpose::STANDARD.decode(&nonce_pair.nonce) { + Ok(nonce) => nonce, + Err(_) => return Err(ApiError::BadRequest("Invalid nonce pair".to_string())), + }; + + let pub_nonce = match PubNonce::from_bytes(&decoded_nonce) { + Ok(nonce) => nonce, + Err(_) => return Err(ApiError::BadRequest("Invalid pub nonce".to_string())), + }; + + let mut session = self_.state.signing_session.write().await; + + session + .received_nonces + .insert(nonce_pair.signer_index, pub_nonce); + + Ok(ok_json("Success")) + } + + #[instrument(skip(self_))] + pub async fn submit_partial_signature( + State(self_): State>, + Json(sig_pair): Json, + ) -> anyhow::Result, ApiError> { + let partial_sig = match decode_signature(sig_pair.signature) { + Ok(sig) => sig, + Err(_) => { + return Err(ApiError::BadRequest( + "Invalid partial signature submitted".to_string(), + )) + } + }; + + { + let mut session = self_.state.signing_session.write().await; + + session + .received_sigs + .insert(sig_pair.signer_index, partial_sig); + } + Ok(ok_json("Success")) + } + + #[instrument(skip(self_))] + pub async fn get_nonces(State(self_): State>) -> Response { + let session = self_.state.signing_session.read().await; + + let mut nonces = HashMap::new(); + for (&idx, nonce) in &session.received_nonces { + nonces.insert( + idx, + 
base64::engine::general_purpose::STANDARD.encode(nonce.to_bytes()), + ); + } + ok_json(nonces) + } + + #[instrument(skip(self_))] + pub async fn get_submitted_signatures(State(self_): State>) -> Response { + let session = self_.state.signing_session.read().await; + let mut signatures = HashMap::new(); + for (&signer_index, signature) in &session.received_sigs { + let sig = encode_signature(signer_index, *signature).unwrap(); + signatures.insert(signer_index, sig); + } + ok_json(signatures) + } + + pub async fn reset_session(&self) { + let mut session = self.state.signing_session.write().await; + *session = SigningSession::default(); + } +} diff --git a/via_verifier/node/withdrawal_service/src/coordinator/auth_middleware.rs b/via_verifier/node/withdrawal_service/src/coordinator/auth_middleware.rs new file mode 100644 index 000000000..3e34a796c --- /dev/null +++ b/via_verifier/node/withdrawal_service/src/coordinator/auth_middleware.rs @@ -0,0 +1,82 @@ +use std::sync::Arc; + +use axum::{ + body::{self, Body}, + extract::{Request, State}, + middleware::Next, + response::Response, +}; + +use crate::coordinator::{api_decl::RestApi, error::ApiError}; + +pub async fn auth_middleware( + State(state): State>, + request: Request, + next: Next, +) -> Result { + let headers = request.headers(); + + // Extract required headers + let timestamp = headers + .get("X-Timestamp") + .and_then(|h| h.to_str().ok()) + .ok_or_else(|| ApiError::Unauthorized("Missing timestamp header".into()))?; + + let verifier_index = headers + .get("X-Verifier-Index") + .and_then(|h| h.to_str().ok()) + .and_then(|s| s.parse::().ok()) + .ok_or_else(|| ApiError::Unauthorized("Missing or invalid verifier index".into()))?; + + let signature = headers + .get("X-Signature") + .and_then(|h| h.to_str().ok()) + .ok_or_else(|| ApiError::Unauthorized("Missing signature header".into()))?; + + // Validate the verifier index + if verifier_index >= state.state.verifiers_pub_keys.len() { + return 
Err(ApiError::Unauthorized("Invalid verifier index".into())); + } + + let timestamp_now = chrono::Utc::now().timestamp(); + let timestamp_diff = timestamp_now - timestamp.parse::().unwrap(); + + //Todo: move this to config + if timestamp_diff > 10 { + return Err(ApiError::Unauthorized("Timestamp is too old".into())); + } + + // Get the public key for this verifier + let public_key = &state.state.verifiers_pub_keys[verifier_index]; + + // verify timestamp + verifier_index + let payload = serde_json::json!({ + "timestamp": timestamp, + "verifier_index": verifier_index.to_string(), + }); + + // Verify the signature + if !crate::auth::verify_signature(&payload, signature, public_key) + .map_err(|_| ApiError::InternalServerError("Signature verification failed".into()))? + { + return Err(ApiError::Unauthorized( + "Invalid authentication signature".into(), + )); + } + + Ok(next.run(request).await) +} + +pub async fn extract_body( + State(_state): State>, + request: Request, + next: Next, +) -> Result { + let (parts, body) = request.into_parts(); + let bytes = body::to_bytes(body, usize::MAX) + .await + .map_err(|_| ApiError::InternalServerError("Failed to read body".into()))?; + let mut req = Request::from_parts(parts, Body::from(bytes.clone())); + req.extensions_mut().insert(bytes); + Ok(next.run(req).await) +} diff --git a/via_verifier/node/withdrawal_service/src/coordinator/error.rs b/via_verifier/node/withdrawal_service/src/coordinator/error.rs new file mode 100644 index 000000000..36a60179b --- /dev/null +++ b/via_verifier/node/withdrawal_service/src/coordinator/error.rs @@ -0,0 +1,63 @@ +use axum::{ + http::StatusCode, + response::{IntoResponse, Response}, + Json, +}; +use serde::Serialize; +use thiserror::Error; +use tracing::error; +use via_verifier_dal::DalError; + +// Custom error type for API-specific errors +#[derive(Error, Debug)] +pub enum ApiError { + #[error("Invalid input: {0}")] + BadRequest(String), + #[error("Unauthorized: {0}")] + 
Unauthorized(String), + #[error("Unexpected error: {0}")] + InternalServerError(String), +} + +impl From for ApiError { + fn from(error: anyhow::Error) -> Self { + ApiError::InternalServerError(error.to_string()) + } +} + +impl From for ApiError { + fn from(error: DalError) -> Self { + ApiError::InternalServerError(error.to_string()) + } +} + +impl IntoResponse for ApiError { + fn into_response(self) -> Response { + let (status, error_response) = match self { + ApiError::BadRequest(msg) => (StatusCode::BAD_REQUEST, ErrorResponse::new(&msg)), + ApiError::Unauthorized(msg) => (StatusCode::UNAUTHORIZED, ErrorResponse::new(&msg)), + ApiError::InternalServerError(msg) => { + (StatusCode::INTERNAL_SERVER_ERROR, ErrorResponse::new(&msg)) + } + }; + + let response = Json(error_response).into_response(); + (status, response).into_response() + } +} + +// Struct for standardized error responses +#[derive(Serialize)] +struct ErrorResponse { + error: String, + message: String, +} + +impl ErrorResponse { + fn new(message: &E) -> Self { + Self { + error: "Coordinator API Error".to_string(), + message: message.to_string(), + } + } +} diff --git a/via_verifier/node/withdrawal_service/src/coordinator/mod.rs b/via_verifier/node/withdrawal_service/src/coordinator/mod.rs new file mode 100644 index 000000000..3a5fbd0b0 --- /dev/null +++ b/via_verifier/node/withdrawal_service/src/coordinator/mod.rs @@ -0,0 +1,5 @@ +pub mod api; +mod api_decl; +mod api_impl; +mod auth_middleware; +mod error; diff --git a/via_verifier/node/withdrawal_service/src/lib.rs b/via_verifier/node/withdrawal_service/src/lib.rs new file mode 100644 index 000000000..20e491d51 --- /dev/null +++ b/via_verifier/node/withdrawal_service/src/lib.rs @@ -0,0 +1,6 @@ +pub mod coordinator; +pub mod verifier; + +mod auth; +mod types; +mod utils; diff --git a/via_verifier/node/withdrawal_service/src/types.rs b/via_verifier/node/withdrawal_service/src/types.rs new file mode 100644 index 000000000..20f6afccf --- /dev/null +++ 
b/via_verifier/node/withdrawal_service/src/types.rs @@ -0,0 +1,49 @@ +use std::{clone::Clone, collections::HashMap, sync::Arc}; + +use musig2::{PartialSignature, PubNonce}; +use serde::{Deserialize, Serialize}; +use tokio::sync::RwLock; +use via_btc_client::withdrawal_builder::UnsignedWithdrawalTx; + +#[derive(Debug, Clone)] +pub struct ViaWithdrawalState { + pub signing_session: Arc>, + pub required_signers: usize, + pub verifiers_pub_keys: Vec, +} + +#[derive(Default, Debug, Clone)] +pub struct SigningSession { + pub l1_block_number: i64, + pub unsigned_tx: Option, + pub received_nonces: HashMap, + pub received_sigs: HashMap, + pub message: Vec, +} + +/// Data posted by other signers to submit their nonce +#[derive(Serialize, Deserialize, Debug)] +pub struct NoncePair { + pub signer_index: usize, + /// Base64 encoded signer nonce + pub nonce: String, +} + +/// Data posted by other signers to submit their partial signature +#[derive(Serialize, Deserialize, Debug)] +pub struct PartialSignaturePair { + pub signer_index: usize, + /// Base64 encoded signature + pub signature: String, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct SigningSessionResponse { + pub l1_block_number: i64, + /// hex-encoded message (txid) + pub message_to_sign: String, + pub required_signers: usize, + pub unsigned_tx: Vec, + pub received_nonces: usize, + pub received_partial_signatures: usize, +} diff --git a/via_verifier/node/withdrawal_service/src/utils.rs b/via_verifier/node/withdrawal_service/src/utils.rs new file mode 100644 index 000000000..49d39adee --- /dev/null +++ b/via_verifier/node/withdrawal_service/src/utils.rs @@ -0,0 +1,81 @@ +use std::str::FromStr; + +use anyhow::Context; +use base64::Engine; +use bitcoin::{hashes::Hash, PrivateKey, Txid}; +use musig2::{BinaryEncoding, PartialSignature, PubNonce}; +use secp256k1_musig2::{PublicKey, Secp256k1, SecretKey}; +use via_musig2::Signer; + +use crate::types::{NoncePair, PartialSignaturePair}; + +pub fn get_signer( + 
private_key_wif: &str, + verifiers_pub_keys_str: Vec, +) -> anyhow::Result { + let private_key = PrivateKey::from_wif(private_key_wif)?; + let secret_key = SecretKey::from_byte_array(&private_key.inner.secret_bytes()) + .context("Error to compute the coordinator sk")?; + let secp = Secp256k1::new(); + let public_key = PublicKey::from_secret_key(&secp, &secret_key); + + let mut all_pubkeys = Vec::new(); + + let mut signer_index = 0; + + for (i, key) in verifiers_pub_keys_str.iter().enumerate() { + let pk = PublicKey::from_str(key)?; + all_pubkeys.push(pk); + if pk == public_key { + signer_index = i; + } + } + + let signer = Signer::new(secret_key, signer_index, all_pubkeys.clone())?; + Ok(signer) +} + +pub fn decode_signature(signature: String) -> anyhow::Result { + let decoded_sig = base64::engine::general_purpose::STANDARD + .decode(&signature) + .context("error to decode signature")?; + Ok(PartialSignature::from_slice(&decoded_sig)?) +} + +pub fn encode_signature( + signer_index: usize, + partial_sig: PartialSignature, +) -> anyhow::Result { + let sig_b64 = base64::engine::general_purpose::STANDARD.encode(partial_sig.serialize()); + let sig_pair = PartialSignaturePair { + signer_index, + signature: sig_b64, + }; + Ok(sig_pair) +} + +pub fn encode_nonce(signer_index: usize, nonce: PubNonce) -> anyhow::Result { + let nonce = base64::engine::general_purpose::STANDARD.encode(nonce.to_bytes()); + Ok(NoncePair { + signer_index, + nonce, + }) +} + +pub fn decode_nonce(nonce_pair: NoncePair) -> anyhow::Result { + let decoded_nonce = base64::engine::general_purpose::STANDARD + .decode(&nonce_pair.nonce) + .context("error to encode nonde")?; + let pub_nonce = PubNonce::from_bytes(&decoded_nonce)?; + Ok(pub_nonce) +} + +/// Converts H256 bytes (from the DB) to a Txid by reversing the byte order. 
+pub(crate) fn h256_to_txid(h256_bytes: &[u8]) -> anyhow::Result { + if h256_bytes.len() != 32 { + return Err(anyhow::anyhow!("H256 must be 32 bytes")); + } + let mut reversed_bytes = h256_bytes.to_vec(); + reversed_bytes.reverse(); + Txid::from_slice(&reversed_bytes).context("Failed to convert H256 to Txid") +} diff --git a/via_verifier/node/withdrawal_service/src/verifier/mod.rs b/via_verifier/node/withdrawal_service/src/verifier/mod.rs new file mode 100644 index 000000000..51e978fcb --- /dev/null +++ b/via_verifier/node/withdrawal_service/src/verifier/mod.rs @@ -0,0 +1,590 @@ +use std::{collections::HashMap, sync::Arc}; + +use anyhow::{Context, Result}; +use bitcoin::{ + hashes::Hash, + sighash::{Prevouts, SighashCache}, + Amount, TapSighashType, Txid, Witness, +}; +use musig2::{CompactSignature, PartialSignature}; +use reqwest::{header, Client, StatusCode}; +use tokio::sync::watch; +use via_btc_client::{ + traits::{BitcoinOps, Serializable}, + withdrawal_builder::{UnsignedWithdrawalTx, WithdrawalBuilder}, +}; +use via_musig2::{verify_signature, Signer}; +use via_verifier_dal::{ConnectionPool, Verifier, VerifierDal}; +use via_withdrawal_client::client::WithdrawalClient; +use zksync_config::configs::via_verifier::{VerifierMode, ViaVerifierConfig}; +use zksync_types::H256; + +use crate::{ + types::{NoncePair, PartialSignaturePair, SigningSessionResponse}, + utils::{ + decode_nonce, decode_signature, encode_nonce, encode_signature, get_signer, h256_to_txid, + }, +}; + +pub struct ViaWithdrawalVerifier { + master_connection_pool: ConnectionPool, + btc_client: Arc, + config: ViaVerifierConfig, + client: Client, + withdrawal_client: WithdrawalClient, + signer: Signer, + final_sig: Option, +} + +impl ViaWithdrawalVerifier { + pub async fn new( + master_connection_pool: ConnectionPool, + btc_client: Arc, + withdrawal_client: WithdrawalClient, + config: ViaVerifierConfig, + ) -> anyhow::Result { + let signer = get_signer( + &config.private_key.clone(), + 
config.verifiers_pub_keys_str.clone(), + )?; + + Ok(Self { + master_connection_pool, + btc_client, + signer, + client: Client::new(), + withdrawal_client, + config, + final_sig: None, + }) + } + + pub async fn run(mut self, mut stop_receiver: watch::Receiver) -> anyhow::Result<()> { + let mut timer = tokio::time::interval(self.config.polling_interval()); + + while !*stop_receiver.borrow_and_update() { + tokio::select! { + _ = timer.tick() => { /* continue iterations */ } + _ = stop_receiver.changed() => break, + } + + match self.loop_iteration().await { + Ok(()) => {} + Err(err) => { + tracing::error!("Failed to process verifier withdrawal task: {err}"); + } + } + } + + tracing::info!("Stop signal received, verifier withdrawal is shutting down"); + Ok(()) + } + + async fn loop_iteration(&mut self) -> Result<(), anyhow::Error> { + let mut session_info = self.get_session().await?; + + if self.config.verifier_mode == VerifierMode::COORDINATOR { + tracing::info!("create a new session"); + + if session_info.l1_block_number != 0 { + let withdrawal_txid = self + .master_connection_pool + .connection_tagged("coordinator task") + .await? + .via_votes_dal() + .get_vote_transaction_withdrawal_tx(session_info.l1_block_number) + .await?; + + // TODO: refactore the transaction confirmation for the musig2, and implement utxo manager like in the inscriber + // Check if the previous batch musig2 transaction was minted before start a new session. + if let Some(tx) = withdrawal_txid { + let tx_id = Txid::from_slice(&tx)?; + let is_confirmed = self.btc_client.check_tx_confirmation(&tx_id, 1).await?; + if !is_confirmed { + return Ok(()); + } + } + } + + self.create_new_session().await?; + } + + session_info = self.get_session().await?; + if session_info.l1_block_number == 0 { + tracing::info!("Empty session, nothing to process"); + return Ok(()); + } + + if self.config.verifier_mode == VerifierMode::COORDINATOR + && self + .build_and_broadcast_final_transaction(&session_info) + .await? 
+ { + return Ok(()); + } + + let session_signature = self.get_session_signatures().await?; + let session_nonces = self.get_session_nonces().await?; + let verifier_index = self.signer.signer_index(); + + if session_signature.contains_key(&verifier_index) + && session_nonces.contains_key(&verifier_index) + { + return Ok(()); + } + + // Reinit the signer, when a new session is created by the coordinator. + if !session_signature.contains_key(&verifier_index) + && !session_nonces.contains_key(&verifier_index) + && (self.signer.has_created_partial_sig() || self.signer.has_submitted_nonce()) + { + self.reinit_signer()?; + return Ok(()); + } + + if session_info.received_nonces < session_info.required_signers { + let message = hex::decode(&session_info.message_to_sign)?; + + if !self.verify_message(&session_info).await? { + anyhow::bail!("Error when verify the session message"); + } + + if self.signer.has_not_started() { + self.signer.start_signing_session(message)?; + } + + if !session_nonces.contains_key(&verifier_index) { + self.submit_nonce().await?; + } + } else if session_info.received_nonces >= session_info.required_signers { + if self.signer.has_created_partial_sig() { + return Ok(()); + } + self.submit_partial_signature(session_nonces).await?; + } + + Ok(()) + } + + fn create_request_headers(&self) -> anyhow::Result { + let mut headers = header::HeaderMap::new(); + let timestamp = chrono::Utc::now().timestamp().to_string(); + let verifier_index = self.signer.signer_index().to_string(); + + let private_key = bitcoin::PrivateKey::from_wif(&self.config.private_key)?; + let secret_key = private_key.inner; + + // Sign timestamp + verifier_index as a JSON object + let payload = serde_json::json!({ + "timestamp": timestamp, + "verifier_index": verifier_index, + }); + let signature = crate::auth::sign_request(&payload, &secret_key)?; + + headers.insert("X-Timestamp", header::HeaderValue::from_str(×tamp)?); + headers.insert( + "X-Verifier-Index", + 
header::HeaderValue::from_str(&verifier_index)?, + ); + headers.insert("X-Signature", header::HeaderValue::from_str(&signature)?); + + Ok(headers) + } + + async fn get_session(&self) -> anyhow::Result { + let url = format!("{}/session", self.config.url); + let headers = self.create_request_headers()?; + let resp = self + .client + .get(&url) + .headers(headers.clone()) + .send() + .await?; + if resp.status().as_u16() != StatusCode::OK.as_u16() { + anyhow::bail!( + "Error to fetch the session, status: {}, url: {}, headers: {:?}, resp: {:?}", + resp.status(), + url, + headers, + resp.text().await? + ); + } + let session_info: SigningSessionResponse = resp.json().await?; + Ok(session_info) + } + + async fn verify_message(&self, session: &SigningSessionResponse) -> anyhow::Result { + // Get the l1 batches finilized but withdrawals not yet processed + if let Some((blob_id, proof_tx_id)) = self + .master_connection_pool + .connection_tagged("verifier") + .await? + .via_votes_dal() + .get_finalized_block_and_non_processed_withdrawal(session.l1_block_number) + .await? + { + if !self + ._verify_withdrawals(session, &blob_id, proof_tx_id) + .await? 
+ { + return Ok(false); + } + + return self._verify_sighash(session).await; + } + Ok(false) + } + async fn _verify_withdrawals( + &self, + session: &SigningSessionResponse, + blob_id: &str, + proof_tx_id: Vec, + ) -> anyhow::Result { + let withdrawals = self.withdrawal_client.get_withdrawals(blob_id).await?; + let unsigned_tx = UnsignedWithdrawalTx::from_bytes(&session.unsigned_tx); + + // Group withdrawals by address and sum amounts + let mut grouped_withdrawals: HashMap = HashMap::new(); + for w in &withdrawals { + let key = w.address.script_pubkey().to_string(); + *grouped_withdrawals.entry(key).or_insert(Amount::ZERO) = grouped_withdrawals + .get(&key) + .unwrap_or(&Amount::ZERO) + .checked_add(w.amount) + .ok_or_else(|| anyhow::anyhow!("Withdrawal amount overflow when grouping"))?; + } + + let len = grouped_withdrawals.len(); + if len == 0 { + tracing::error!( + "Invalid session, there are no withdrawals to process, l1 batch: {}", + session.l1_block_number + ); + return Ok(false); + } + if len + 2 != unsigned_tx.tx.output.len() { + // Log an error + return Ok(false); + } + + // Verify if all grouped_withdrawals are included with valid amount. 
+ for (i, txout) in unsigned_tx + .tx + .output + .iter() + .enumerate() + .take(unsigned_tx.tx.output.len().saturating_sub(2)) + { + let amount = &grouped_withdrawals[&txout.script_pubkey.to_string()]; + if amount != &txout.value { + tracing::error!( + "Invalid request withdrawal for batch {}, index: {}", + session.l1_block_number, + i + ); + return Ok(false); + } + } + tracing::info!( + "All request withdrawals for batch {} are valid", + session.l1_block_number + ); + + // Verify the OP return + let tx_id = h256_to_txid(&proof_tx_id)?; + let op_return_data = WithdrawalBuilder::create_op_return_script(tx_id)?; + let op_return_tx_out = &unsigned_tx.tx.output[unsigned_tx.tx.output.len() - 2]; + + if op_return_tx_out.script_pubkey.to_string() != op_return_data.to_string() + || op_return_tx_out.value != Amount::ZERO + { + tracing::error!( + "Invalid op return data for l1 batch: {}", + session.l1_block_number + ); + return Ok(false); + } + + Ok(true) + } + + async fn _verify_sighash(&self, session: &SigningSessionResponse) -> anyhow::Result { + // Verify the sighash + let unsigned_tx = UnsignedWithdrawalTx::from_bytes(&session.unsigned_tx); + let mut sighash_cache = SighashCache::new(&unsigned_tx.tx); + + let sighash_type = TapSighashType::All; + let mut txout_list = Vec::with_capacity(unsigned_tx.utxos.len()); + + for (_, txout) in unsigned_tx.utxos.clone() { + txout_list.push(txout); + } + let sighash = sighash_cache + .taproot_key_spend_signature_hash(0, &Prevouts::All(&txout_list), sighash_type) + .context("Error taproot_key_spend_signature_hash")?; + + if session.message_to_sign != sighash.to_string() { + tracing::error!( + "Invalid transaction sighash for session with block id {}", + session.l1_block_number + ); + return Ok(false); + } + tracing::info!("Sighash for batch {} is valid", session.l1_block_number); + Ok(true) + } + + async fn get_session_nonces(&self) -> anyhow::Result> { + let nonces_url = format!("{}/session/nonce", self.config.url); + let headers = 
self.create_request_headers()?; + let resp = self + .client + .get(&nonces_url) + .headers(headers.clone()) + .send() + .await?; + + if resp.status().as_u16() != StatusCode::OK.as_u16() { + anyhow::bail!( + "Error to fetch the session nonces, status: {}, url: {}, headers: {:?}, resp: {:?}", + resp.status(), + nonces_url, + headers, + resp.text().await? + ); + } + let nonces: HashMap = resp.json().await?; + Ok(nonces) + } + + async fn submit_nonce(&mut self) -> anyhow::Result<()> { + let nonce = self + .signer + .our_nonce() + .ok_or_else(|| anyhow::anyhow!("No nonce available"))?; + + let nonce_pair = encode_nonce(self.signer.signer_index(), nonce).unwrap(); + let url = format!("{}/session/nonce", self.config.url); + let headers = self.create_request_headers()?; + let res = self + .client + .post(&url) + .headers(headers.clone()) + .json(&nonce_pair) + .send() + .await?; + + if res.status().is_success() { + self.signer.mark_nonce_submitted(); + Ok(()) + } else { + anyhow::bail!( + "Failed to submit nonce, response: {}, url: {}, headers: {:?}, body: {:?} ", + res.text().await?, + url, + headers, + nonce_pair + ); + } + } + + async fn get_session_signatures(&self) -> anyhow::Result> { + let url = format!("{}/session/signature", self.config.url); + let headers = self.create_request_headers()?; + let resp = self + .client + .get(&url) + .headers(headers.clone()) + .send() + .await?; + + if resp.status().as_u16() != StatusCode::OK.as_u16() { + anyhow::bail!( + "Error to fetch the session signatures, status: {}, url: {}, headers: {:?}, resp: {:?}", + resp.status(), + url, + headers, + resp.text().await? 
+ ); + } + let signatures: HashMap = resp.json().await?; + let mut partial_sigs: HashMap = HashMap::new(); + for (idx, sig) in signatures { + partial_sigs.insert(idx, decode_signature(sig.signature).unwrap()); + } + Ok(partial_sigs) + } + + async fn submit_partial_signature( + &mut self, + session_nonces: HashMap, + ) -> anyhow::Result<()> { + // Process each nonce + for (idx, nonce_b64) in session_nonces { + if idx != self.signer.signer_index() { + let nonce = decode_nonce(NoncePair { + signer_index: idx, + nonce: nonce_b64, + })?; + self.signer + .receive_nonce(idx, nonce.clone()) + .map_err(|e| anyhow::anyhow!("Failed to receive nonce: {}", e))?; + } + } + + let partial_sig = self.signer.create_partial_signature()?; + let sig_pair = encode_signature(self.signer.signer_index(), partial_sig)?; + + let url = format!("{}/session/signature", self.config.url); + let headers = self.create_request_headers()?; + let resp = self + .client + .post(&url) + .headers(headers.clone()) + .json(&sig_pair) + .send() + .await?; + if resp.status().is_success() { + self.signer.mark_partial_sig_submitted(); + Ok(()) + } else { + anyhow::bail!( + "Failed to submit partial signature, response: {}, url: {}, headers: {:?}, body: {:?} ", + resp.text().await?, + url, + headers, + sig_pair + ); + } + } + + fn reinit_signer(&mut self) -> anyhow::Result<()> { + let signer = get_signer( + &self.config.private_key.clone(), + self.config.verifiers_pub_keys_str.clone(), + )?; + self.signer = signer; + self.final_sig = None; + Ok(()) + } + + async fn create_new_session(&mut self) -> anyhow::Result<()> { + let url = format!("{}/session/new", self.config.url); + let headers = self.create_request_headers()?; + let resp = self + .client + .post(&url) + .headers(headers.clone()) + .header(header::CONTENT_TYPE, "application/json") + .send() + .await?; + + if !resp.status().is_success() { + tracing::warn!( + "Failed to create a new session, response: {}, url: {}, headers: {:?}", + resp.text().await?, + 
url, + headers + ); + self.reinit_signer()?; + } + Ok(()) + } + + async fn create_final_signature( + &mut self, + session_info: &SigningSessionResponse, + ) -> anyhow::Result<()> { + if self.final_sig.is_some() { + return Ok(()); + } + + if session_info.received_partial_signatures >= session_info.required_signers { + let signatures = self.get_session_signatures().await?; + for (&i, sig) in &signatures { + if self.signer.signer_index() != i { + self.signer.receive_partial_signature(i, *sig)?; + } + } + + let final_sig = self.signer.create_final_signature()?; + let agg_pub = self.signer.aggregated_pubkey(); + verify_signature( + agg_pub, + final_sig, + &hex::decode(&session_info.message_to_sign)?, + )?; + self.final_sig = Some(final_sig); + + return Ok(()); + } + Ok(()) + } + + fn sign_transaction( + &self, + unsigned_tx: UnsignedWithdrawalTx, + musig2_signature: CompactSignature, + ) -> String { + let mut unsigned_tx = unsigned_tx; + let mut final_sig_with_hashtype = musig2_signature.serialize().to_vec(); + let sighash_type = TapSighashType::All; + final_sig_with_hashtype.push(sighash_type as u8); + for tx in &mut unsigned_tx.tx.input { + tx.witness = Witness::from(vec![final_sig_with_hashtype.clone()]); + } + bitcoin::consensus::encode::serialize_hex(&unsigned_tx.tx) + } + + async fn build_and_broadcast_final_transaction( + &mut self, + session_info: &SigningSessionResponse, + ) -> anyhow::Result { + self.create_final_signature(session_info) + .await + .context("Error create final signature")?; + + if let Some(musig2_signature) = self.final_sig { + let withdrawal_txid = self + .master_connection_pool + .connection_tagged("coordinator task") + .await? 
+ .via_votes_dal() + .get_vote_transaction_withdrawal_tx(session_info.l1_block_number) + .await?; + + if withdrawal_txid.is_some() { + return Ok(false); + } + + let unsigned_tx = UnsignedWithdrawalTx::from_bytes(&session_info.unsigned_tx); + let signed_tx = self.sign_transaction(unsigned_tx.clone(), musig2_signature); + + let txid = self + .btc_client + .broadcast_signed_transaction(&signed_tx) + .await?; + + self.master_connection_pool + .connection_tagged("coordinator task") + .await? + .via_votes_dal() + .mark_vote_transaction_as_processed_withdrawals( + H256::from_slice(&txid.as_raw_hash().to_byte_array()), + session_info.l1_block_number, + ) + .await?; + + tracing::info!( + "New withdrawal transaction processed, l1 batch {} musig2 tx_id {}", + session_info.l1_block_number, + txid + ); + + self.reinit_signer()?; + + return Ok(true); + } + Ok(false) + } +} diff --git a/via_verifier/rust-toolchain b/via_verifier/rust-toolchain new file mode 100644 index 000000000..03c040b91 --- /dev/null +++ b/via_verifier/rust-toolchain @@ -0,0 +1 @@ +nightly-2024-08-01 diff --git a/yarn.lock b/yarn.lock index 9da6ab7d3..c80c9142e 100644 --- a/yarn.lock +++ b/yarn.lock @@ -466,11 +466,6 @@ resolved "https://registry.yarnpkg.com/@eslint-community/regexpp/-/regexpp-4.10.0.tgz#548f6de556857c8bb73bbee70c35dc82a2e74d63" integrity sha512-Cu96Sd2By9mCNTx2iyKOmq10v22jUVQv0lQnlGNy16oE9589yE+QADPbrMGCkA51cKZSg3Pu/aTJVTGfL/qjUA== -"@eslint-community/regexpp@^4.6.1": - version "4.11.0" - resolved "https://registry.yarnpkg.com/@eslint-community/regexpp/-/regexpp-4.11.0.tgz#b0ffd0312b4a3fd2d6f77237e7248a5ad3a680ae" - integrity sha512-G/M/tIiMrTAxEWRfLfQJMmGNX28IxBg4PBz8XqQhqUHLFI6TL2htpIB1iQCj144V5ee/JaKyT9/WZ0MGZWfA7A== - "@eslint/eslintrc@^0.4.3": version "0.4.3" resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-0.4.3.tgz#9e42981ef035beb3dd49add17acb96e8ff6f394c" @@ -2559,6 +2554,14 @@ dependencies: ethers "^5.0.2" +"@typechain/ethers-v6@^0.5.1": + version "0.5.1" + 
resolved "https://registry.yarnpkg.com/@typechain/ethers-v6/-/ethers-v6-0.5.1.tgz#42fe214a19a8b687086c93189b301e2b878797ea" + integrity sha512-F+GklO8jBWlsaVV+9oHaPh5NJdd6rAKN4tklGfInX1Q7h0xPgVLP39Jl3eCulPB5qexI71ZFHwbljx4ZXNfouA== + dependencies: + lodash "^4.17.15" + ts-essentials "^7.0.1" + "@types/abstract-leveldown@*": version "7.2.5" resolved "https://registry.yarnpkg.com/@types/abstract-leveldown/-/abstract-leveldown-7.2.5.tgz#db2cf364c159fb1f12be6cd3549f56387eaf8d73" @@ -2789,6 +2792,13 @@ resolved "https://registry.yarnpkg.com/@types/node/-/node-18.15.13.tgz#f64277c341150c979e42b00e4ac289290c9df469" integrity sha512-N+0kuo9KgrUQ1Sn/ifDXsvg0TTleP7rIy4zOBGECxAljqvqfqpTfzx0Q1NUedOixRMBfe2Whhb056a42cWs26Q== +"@types/node@22.7.5": + version "22.7.5" + resolved "https://registry.yarnpkg.com/@types/node/-/node-22.7.5.tgz#cfde981727a7ab3611a481510b473ae54442b92b" + integrity sha512-jML7s2NAzMWc//QSJ1a3prpk78cOPchGvXJsC3C6R6PSMoooztvRVQEz89gmBTBY1SPMaqo5teB4uNHPdetShQ== + dependencies: + undici-types "~6.19.2" + "@types/node@^10.0.3": version "10.17.60" resolved "https://registry.yarnpkg.com/@types/node/-/node-10.17.60.tgz#35f3d6213daed95da7f0f73e75bcc6980e90597b" @@ -5379,6 +5389,19 @@ ethers@^5.0.2, ethers@^5.7.0, ethers@^5.7.2, ethers@~5.7.0: "@ethersproject/web" "5.7.1" "@ethersproject/wordlists" "5.7.0" +ethers@^6.13.4: + version "6.13.4" + resolved "https://registry.yarnpkg.com/ethers/-/ethers-6.13.4.tgz#bd3e1c3dc1e7dc8ce10f9ffb4ee40967a651b53c" + integrity sha512-21YtnZVg4/zKkCQPjrDj38B1r4nQvTZLopUGMLQ1ePU2zV/joCfDC3t3iKQjWRzjjjbzR+mdAIoikeBRNkdllA== + dependencies: + "@adraffy/ens-normalize" "1.10.1" + "@noble/curves" "1.2.0" + "@noble/hashes" "1.3.2" + "@types/node" "22.7.5" + aes-js "4.0.0-beta.5" + tslib "2.7.0" + ws "8.17.1" + ethers@^6.7.1: version "6.12.1" resolved "https://registry.yarnpkg.com/ethers/-/ethers-6.12.1.tgz#517ff6d66d4fd5433e38e903051da3e57c87ff37" @@ -5786,15 +5809,6 @@ form-data@~2.3.2: combined-stream "^1.0.6" mime-types "^2.1.12" 
-form-data@~2.3.2: - version "2.3.3" - resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.3.3.tgz#dcce52c05f644f298c6a7ab936bd724ceffbf3a6" - integrity sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ== - dependencies: - asynckit "^0.4.0" - combined-stream "^1.0.6" - mime-types "^2.1.12" - fp-ts@1.19.3: version "1.19.3" resolved "https://registry.yarnpkg.com/fp-ts/-/fp-ts-1.19.3.tgz#261a60d1088fbff01f91256f91d21d0caaaaa96f" @@ -7612,13 +7626,6 @@ linkify-it@^4.0.1: dependencies: uc.micro "^1.0.1" -linkify-it@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/linkify-it/-/linkify-it-4.0.1.tgz#01f1d5e508190d06669982ba31a7d9f56a5751ec" - integrity sha512-C7bfi1UZmoj8+PQx22XyeXCuBlokoyWQL5pWSP+EI6nzRylyThouddufc2c1NDIcP9k5agmN9fLpA7VNJfIiqw== - dependencies: - uc.micro "^1.0.1" - load-json-file@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-4.0.0.tgz#2f5f45ab91e33216234fd53adab668eb4ec0993b" @@ -8522,23 +8529,6 @@ ordinal@^1.0.3: resolved "https://registry.yarnpkg.com/ordinal/-/ordinal-1.0.3.tgz#1a3c7726a61728112f50944ad7c35c06ae3a0d4d" integrity sha512-cMddMgb2QElm8G7vdaa02jhUNbTSrhsgAGUz1OokD83uJTwSUn+nKoNoKVVaRa08yF6sgfO7Maou1+bgLd9rdQ== -optionator@^0.9.3: - version "0.9.4" - resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.9.4.tgz#7ea1c1a5d91d764fb282139c88fe11e182a3a734" - integrity sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g== - dependencies: - deep-is "^0.1.3" - fast-levenshtein "^2.0.6" - levn "^0.4.1" - prelude-ls "^1.2.1" - type-check "^0.4.0" - word-wrap "^1.2.5" - -ordinal@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/ordinal/-/ordinal-1.0.3.tgz#1a3c7726a61728112f50944ad7c35c06ae3a0d4d" - integrity sha512-cMddMgb2QElm8G7vdaa02jhUNbTSrhsgAGUz1OokD83uJTwSUn+nKoNoKVVaRa08yF6sgfO7Maou1+bgLd9rdQ== - os-tmpdir@~1.0.2: version "1.0.2" resolved 
"https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274" @@ -9547,13 +9537,6 @@ serialize-javascript@^6.0.2: dependencies: randombytes "^2.1.0" -serialize-javascript@^6.0.2: - version "6.0.2" - resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-6.0.2.tgz#defa1e055c83bf6d59ea805d8da862254eb6a6c2" - integrity sha512-Saa1xPByTTq2gdeFZYLLo+RFE35NHZkAbqZeWNd3BpzppeVisAqpDjcp8dyf6uIvEqJRd46jemmyA4iFIeVk8g== - dependencies: - randombytes "^2.1.0" - set-function-length@^1.2.1: version "1.2.2" resolved "https://registry.yarnpkg.com/set-function-length/-/set-function-length-1.2.2.tgz#aac72314198eaed975cf77b2c3b6b880695e5449" @@ -10476,16 +10459,16 @@ tslib@2.4.0: resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.4.0.tgz#7cecaa7f073ce680a05847aa77be941098f36dc3" integrity sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ== +tslib@2.7.0, tslib@^2.6.2: + version "2.7.0" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.7.0.tgz#d9b40c5c40ab59e8738f297df3087bf1a2690c01" + integrity sha512-gLXCKdN1/j47AiHiOkJN69hJmcbGTHI0ImLmbYLHykhgeN0jVGola9yVjFgzCUklsZQMW55o+dW7IXv3RCXDzA== + tslib@^1.8.1, tslib@^1.9.0, tslib@^1.9.3: version "1.14.1" resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg== -tslib@^2.6.2: - version "2.7.0" - resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.7.0.tgz#d9b40c5c40ab59e8738f297df3087bf1a2690c01" - integrity sha512-gLXCKdN1/j47AiHiOkJN69hJmcbGTHI0ImLmbYLHykhgeN0jVGola9yVjFgzCUklsZQMW55o+dW7IXv3RCXDzA== - tsort@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/tsort/-/tsort-0.0.1.tgz#e2280f5e817f8bf4275657fd0f9aebd44f5a2786" @@ -10687,6 +10670,11 @@ undici-types@~5.26.4: resolved 
"https://registry.yarnpkg.com/undici-types/-/undici-types-5.26.5.tgz#bcd539893d00b56e964fd2657a4866b221a65617" integrity sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA== +undici-types@~6.19.2: + version "6.19.8" + resolved "https://registry.yarnpkg.com/undici-types/-/undici-types-6.19.8.tgz#35111c9d1437ab83a7cdc0abae2f26d88eda0a02" + integrity sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw== + undici@^5.14.0: version "5.28.4" resolved "https://registry.yarnpkg.com/undici/-/undici-5.28.4.tgz#6b280408edb6a1a604a9b20340f45b422e373068" @@ -10972,6 +10960,11 @@ ws@7.4.6: resolved "https://registry.yarnpkg.com/ws/-/ws-7.4.6.tgz#5654ca8ecdeee47c33a9a4bf6d28e2be2980377c" integrity sha512-YmhHDO4MzaDLB+M9ym/mDA5z0naX8j7SIlT8f8z+I0VtzsRbekxEutHSme7NPS2qE8StCYQNUnfWdXta/Yu85A== +ws@8.17.1: + version "8.17.1" + resolved "https://registry.yarnpkg.com/ws/-/ws-8.17.1.tgz#9293da530bb548febc95371d90f9c878727d919b" + integrity sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ== + ws@8.5.0: version "8.5.0" resolved "https://registry.yarnpkg.com/ws/-/ws-8.5.0.tgz#bfb4be96600757fe5382de12c670dab984a1ed4f" @@ -11097,6 +11090,11 @@ zksync-ethers@^5.9.0: dependencies: ethers "~5.7.0" +zksync-ethers@^6.13.1: + version "6.15.2" + resolved "https://registry.yarnpkg.com/zksync-ethers/-/zksync-ethers-6.15.2.tgz#bff003ed346aa4ef9a4c714dd21944c3cfed6673" + integrity sha512-eqFeKVYXyfHYW1Tw0CkCk255zeuFltDbfZfraxpe/Z/idVR1WxeBlKvLLzIM884KVVeghRkConSRlOibhtm6xw== + zksync-ethers@^6.9.0: version "6.9.0" resolved "https://registry.yarnpkg.com/zksync-ethers/-/zksync-ethers-6.9.0.tgz#efaff1d59e2cff837eeda84c4ba59fdca4972a91"