From c9e48980d4ffc1ff8e93395f872e0899496c1073 Mon Sep 17 00:00:00 2001 From: Richard Janis Goldschmidt Date: Mon, 8 Jul 2024 12:01:13 +0200 Subject: [PATCH 01/24] refactor(bridge-withdrawer): move generated contract bindings to crate (#1237) ## Summary Moves the generated Rust bindings for Astria's bridge smart contracts to a free-standing crate. ## Background Breaking the bindings out of the `astria-bridge-withdrawer` binary crate is needed to share code with a CLI tool for manually initiating withdrawals. This patch also takes the opportunity to make the process of regenerating bindings (and committing them to the repo) more similar to how the protobuf bindings are generated: `tools/solidity-compiler` updates the git submodule in `astria-bridge-contracts` and then regenerates all bindings. The bindings are placed in `astria-bridge-contracts/src/generated` and committed to the repository. ## Changes - Create the `crates/astria-bridge-contracts` crate and move the submodule of the same name there - Create the `tools/solidity-compiler` tool and use it to generate Rust bindings for the bridge contracts - Update `astria-bridge-withdrawer` to use `astria-bridge-contracts` as a dependency - Add a job `solidity-contracts-compiled` to ensure that the committed bindings are up to date with the contracts. ## Testing Apart from some types being renamed, nothing has changed. --- .github/workflows/test.yml | 27 +- .gitmodules | 6 +- Cargo.lock | 373 +--- Cargo.toml | 6 +- crates/astria-bridge-contracts/Cargo.toml | 9 + crates/astria-bridge-contracts/README.md | 23 + .../astria-bridge-contracts | 0 .../src}/generated/astria_bridgeable_erc20.rs | 0 .../src}/generated/astria_withdrawer.rs | 0 .../src/generated/i_astria_withdrawer.rs} | 0 .../src/generated/mod.rs | 8 + crates/astria-bridge-contracts/src/lib.rs | 4 + crates/astria-bridge-withdrawer/Cargo.toml | 4 +- crates/astria-bridge-withdrawer/build.rs | 38 - .../src/bridge_withdrawer/ethereum/convert.rs | 12 +- .../ethereum/generated/mod.rs | 15 - .../src/bridge_withdrawer/ethereum/mod.rs | 4 - .../bridge_withdrawer/ethereum/test_utils.rs | 15 +- .../src/bridge_withdrawer/ethereum/watcher.rs | 34 +- justfile | 4 + tools/solidity-compiler/Cargo.lock | 1563 +++++++++++++++++ tools/solidity-compiler/Cargo.toml | 10 + tools/solidity-compiler/README.md | 5 + tools/solidity-compiler/src/main.rs | 43 + 24 files changed, 1744 insertions(+), 459 deletions(-) create mode 100644 crates/astria-bridge-contracts/Cargo.toml create mode 100644 crates/astria-bridge-contracts/README.md rename crates/{astria-bridge-withdrawer => astria-bridge-contracts}/astria-bridge-contracts (100%) rename crates/{astria-bridge-withdrawer/src/bridge_withdrawer/ethereum => astria-bridge-contracts/src}/generated/astria_bridgeable_erc20.rs (100%) rename crates/{astria-bridge-withdrawer/src/bridge_withdrawer/ethereum => astria-bridge-contracts/src}/generated/astria_withdrawer.rs (100%) rename crates/{astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/generated/astria_withdrawer_interface.rs => astria-bridge-contracts/src/generated/i_astria_withdrawer.rs} (100%) create mode 100644 crates/astria-bridge-contracts/src/generated/mod.rs create mode 100644 crates/astria-bridge-contracts/src/lib.rs delete mode 100644 crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/generated/mod.rs create mode 100644 tools/solidity-compiler/Cargo.lock create mode 100644 tools/solidity-compiler/Cargo.toml create mode 100644 tools/solidity-compiler/README.md create mode 100644 tools/solidity-compiler/src/main.rs diff --git
a/.github/workflows/test.yml b/.github/workflows/test.yml index 22dc6f9fde..bed1d4fd3f 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -44,6 +44,31 @@ jobs: exit 1 fi + solidity-contracts-compiled: + runs-on: ubuntu-22.04 + needs: run_checker + if: needs.run_checker.outputs.run_tests == 'true' + steps: + - uses: actions/checkout@v4 + - name: Install Rust toolchain + uses: dtolnay/rust-toolchain@1.76.0 + - uses: Swatinem/rust-cache@v2.7.3 + with: + cache-provider: "buildjet" + - name: Install just + uses: taiki-e/install-action@just + - name: Check if solidity contracts compile to committed Rust sources + run: | + just compile-solidity-contracts + modified=$(git status --porcelain) + if [[ -n "$modified" ]]; then + echo "ERROR: solidity contracts are out of sync with the committed Rust sources" + echo "Recompile locally with \`just compile-solidity-contracts\` and commit to the repository." + echo "Files that reported differences:" + echo "$modified" + exit 1 + fi + compiles: runs-on: buildjet-4vcpu-ubuntu-2204 needs: run_checker @@ -232,7 +257,7 @@ jobs: test: if: ${{ always() && !cancelled() }} - needs: [compiles, protos-compiled, rust, doctest, clippy, lockfile, custom-lints] + needs: [compiles, protos-compiled, solidity-contracts-compiled, rust, doctest, clippy, lockfile, custom-lints] uses: ./.github/workflows/reusable-success.yml with: success: ${{ !contains(needs.*.result, 'failure') }} diff --git a/.gitmodules b/.gitmodules index 83d8292afe..4b3be37842 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,3 +1,3 @@ -[submodule "crates/astria-bridge-withdrawer/astria-bridge-contracts"] - path = crates/astria-bridge-withdrawer/astria-bridge-contracts - url = https://github.com/astriaorg/astria-bridge-contracts.git +[submodule "crates/astria-bridge-contracts/astria-bridge-contracts"] + path = crates/astria-bridge-contracts/astria-bridge-contracts + url = https://github.com/astriaorg/astria-bridge-contracts diff --git a/Cargo.lock b/Cargo.lock index 255c2d638b..cacf34f979 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -459,15 +459,6 @@ version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711" -[[package]] -name = "ascii-canvas" -version = "3.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8824ecca2e851cec16968d54a01dd372ef8f95b244fb84b84e70128be347c3c6" -dependencies = [ - "term", -] - [[package]] name = "assert-json-diff" version = "2.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "47e4f2b81832e72834d7518d8487a0396a28cc408186a2e8854c0f98011faf12" dependencies = [ "serde", "serde_json", ] @@ -493,10 +484,18 @@ dependencies = [ "wait-timeout", ] +[[package]] +name = "astria-bridge-contracts" +version = "0.1.0" +dependencies = [ + "ethers", +] + [[package]] name = "astria-bridge-withdrawer" version = "0.1.0" dependencies = [ + "astria-bridge-contracts", "astria-build-info", "astria-config", "astria-core", @@ -1224,21 +1223,6 @@ dependencies = [ "zeroize", ] -[[package]] -name = "bit-set" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1" -dependencies = [ - "bit-vec", -] - -[[package]] -name = "bit-vec" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb" - [[package]] name = "bitflags" version = "1.3.2" @@ -1452,16 +1436,6 @@ dependencies = [ "serde", ] -[[package]] -name = "bzip2" -version = "0.4.4" -source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bdb116a6ef3f6c3698828873ad02c3014b3c85cadb88496095628e3ef1e347f8" -dependencies = [ - "bzip2-sys", - "libc", -] - [[package]] name = "bzip2-sys" version = "0.1.11+1.0.8" @@ -2080,16 +2054,6 @@ dependencies = [ "crossbeam-utils", ] -[[package]] -name = "crossbeam-deque" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "613f8cc01fe9cf1a3eb3d7f488fd2fa8388403e97039e2f73692932e291a770d" -dependencies = [ - "crossbeam-epoch", - "crossbeam-utils", -] - [[package]] name = "crossbeam-epoch" version = "0.9.18" @@ -2665,15 +2629,6 @@ dependencies = [ "zeroize", ] -[[package]] -name = "ena" -version = "0.14.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c533630cf40e9caa44bd91aadc88a75d75a4c3a12b4cfde353cbed41daa1e1f1" -dependencies = [ - "log", -] - [[package]] name = "encode_unicode" version = "0.3.6" @@ -2854,11 +2809,9 @@ dependencies = [ "ethers-addressbook", "ethers-contract", "ethers-core", - "ethers-etherscan", "ethers-middleware", "ethers-providers", "ethers-signers", - "ethers-solc", ] [[package]] @@ -2902,13 +2855,11 @@ dependencies = [ "const-hex", "dunce", "ethers-core", - "ethers-etherscan", "eyre", "prettyplease", "proc-macro2 1.0.79", "quote", "regex", - "reqwest", "serde", "serde_json", "syn 2.0.58", @@ -2962,23 +2913,6 @@ dependencies = [ "unicode-xid 0.2.4", ] -[[package]] -name = "ethers-etherscan" -version = "2.0.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e79e5973c26d4baf0ce55520bd732314328cabe53193286671b47144145b9649" -dependencies = [ - "chrono", - "ethers-core", - "ethers-solc", - "reqwest", - "semver 1.0.22", - "serde", - "serde_json", - "thiserror", - "tracing", -] - [[package]] name = "ethers-middleware" version = "2.0.14" @@ -2989,7 +2923,6 @@ dependencies = [ "auto_impl", "ethers-contract", "ethers-core", - "ethers-etherscan", "ethers-providers", "ethers-signers", "futures-channel", @@ -3063,38 +2996,6 @@ dependencies = [ "tracing", ] -[[package]] -name = "ethers-solc" -version = "2.0.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "66244a771d9163282646dbeffe0e6eca4dda4146b6498644e678ac6089b11edd" -dependencies = [ - "cfg-if", - "const-hex", - "dirs", - "dunce", - "ethers-core", - "glob", - "home", - "md-5", - "num_cpus", - "once_cell", - "path-slash", - "rayon", - "regex", - "semver 1.0.22", - "serde", - "serde_json", - "solang-parser", - "svm-rs", - "thiserror", - "tiny-keccak", - "tokio", - "tracing", - "walkdir", - "yansi 0.5.1", -] - [[package]] name = "ethnum" version = "1.5.0" @@ -3250,16 +3151,6 @@ dependencies = [ "percent-encoding", ] -[[package]] -name = "fs2" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9564fc758e15025b46aa6643b1b77d047d1a56a1aea6e01002ac0c7026876213" -dependencies = [ - "libc", - "winapi", -] - [[package]] name = "funty" version = "2.0.0" @@ -4518,36 +4409,6 @@ dependencies = [ "cpufeatures", ] -[[package]] -name = "lalrpop" -version = "0.20.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55cb077ad656299f160924eb2912aa147d7339ea7d69e1b5517326fdcec3c1ca" -dependencies = [ - "ascii-canvas", - "bit-set", - "ena", - "itertools 0.11.0", - "lalrpop-util", - "petgraph", - "regex", - "regex-syntax 0.8.3", - "string_cache", - "term", - "tiny-keccak", - "unicode-xid 0.2.4", - "walkdir", -] - -[[package]] -name = "lalrpop-util" -version = "0.20.2" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "507460a910eb7b32ee961886ff48539633b788a36b65692b95f225b844c82553" -dependencies = [ - "regex-automata 0.4.6", -] - [[package]] name = "lazy_static" version = "1.4.0" @@ -4725,16 +4586,6 @@ version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94" -[[package]] -name = "md-5" -version = "0.10.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf" -dependencies = [ - "cfg-if", - "digest 0.10.7", -] - [[package]] name = "memchr" version = "2.7.2" @@ -4921,12 +4772,6 @@ dependencies = [ "rand 0.8.5", ] -[[package]] -name = "new_debug_unreachable" -version = "1.0.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "650eef8c711430f1a879fdd01d4745a7deea475becfb90269c06775983bbf086" - [[package]] name = "nmt-rs" version = "0.1.0" @@ -5331,29 +5176,12 @@ dependencies = [ "windows-targets 0.48.5", ] -[[package]] -name = "password-hash" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7676374caaee8a325c9e7a2ae557f216c5563a171d6997b0ef8a65af35147700" -dependencies = [ - "base64ct", - "rand_core 0.6.4", - "subtle", -] - [[package]] name = "paste" version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c" -[[package]] -name = "path-slash" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e91099d4268b0e11973f036e885d652fb0b21fedcf69738c627f94db6a44f42" - [[package]] name = "pbjson" version = "0.6.0" @@ -5398,9 +5226,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "83a0692ec44e4cf1ef28ca317f14f8f07da2d95ec3fa01f86e4467b725e60917" dependencies = [ "digest 0.10.7", - "hmac", - "password-hash", - "sha2 0.10.8", ] [[package]] @@ -5421,7 +5246,7 @@ checksum = "bdeeaa00ce488657faba8ebf44ab9361f9365a97bd39ffb8a60663f57ff4b467" dependencies = [ "inlinable_string", "pear_codegen", - "yansi 1.0.1", + "yansi", ] [[package]] @@ -5798,57 +5623,6 @@ dependencies = [ "rustc_version 0.4.0", ] -[[package]] -name = "phf" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ade2d8b8f33c7333b51bcf0428d37e217e9f32192ae4772156f65063b8ce03dc" -dependencies = [ - "phf_macros", - "phf_shared 0.11.2", -] - -[[package]] -name = "phf_generator" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48e4cc64c2ad9ebe670cb8fd69dd50ae301650392e81c05f9bfcb2d5bdbc24b0" -dependencies = [ - "phf_shared 0.11.2", - "rand 0.8.5", -] - -[[package]] -name = "phf_macros" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3444646e286606587e49f3bcf1679b8cef1dc2c5ecc29ddacaffc305180d464b" -dependencies = [ - "phf_generator", - "phf_shared 0.11.2", - "proc-macro2 1.0.79", - "quote", - "syn 2.0.58", -] - -[[package]] -name = "phf_shared" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6796ad771acdc0123d2a88dc428b5e38ef24456743ddb1744ed628f9815c096" -dependencies = [ - "siphasher", -] - -[[package]] -name = "phf_shared" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"90fcb95eef784c2ac79119d1dd819e162b5da872ce6f3c3abe1e8ca1c082f72b" -dependencies = [ - "siphasher", -] - [[package]] name = "pin-project" version = "1.1.5" @@ -5991,12 +5765,6 @@ version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" -[[package]] -name = "precomputed-hash" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c" - [[package]] name = "predicates" version = "3.1.0" @@ -6141,7 +5909,7 @@ dependencies = [ "quote", "syn 2.0.58", "version_check", - "yansi 1.0.1", + "yansi", ] [[package]] @@ -6361,26 +6129,6 @@ dependencies = [ "bitflags 2.5.0", ] -[[package]] -name = "rayon" -version = "1.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b418a60154510ca1a002a752ca9714984e21e4241e804d32555251faf8b78ffa" -dependencies = [ - "either", - "rayon-core", -] - -[[package]] -name = "rayon-core" -version = "1.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2" -dependencies = [ - "crossbeam-deque", - "crossbeam-utils", -] - [[package]] name = "redox_syscall" version = "0.4.1" @@ -7230,12 +6978,6 @@ dependencies = [ "time", ] -[[package]] -name = "siphasher" -version = "0.3.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" - [[package]] name = "sized-chunks" version = "0.6.5" @@ -7308,20 +7050,6 @@ dependencies = [ "sha-1", ] -[[package]] -name = "solang-parser" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c425ce1c59f4b154717592f0bdf4715c3a1d55058883622d3157e1f0908a5b26" -dependencies = [ - "itertools 0.11.0", - "lalrpop", - "lalrpop-util", - "phf", - "thiserror", - "unicode-xid 0.2.4", -] - [[package]] name = "spin" version = "0.5.2" @@ -7350,19 +7078,6 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" -[[package]] -name = "string_cache" -version = "0.8.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f91138e76242f575eb1d3b38b4f1362f10d3a43f47d182a5b359af488a02293b" -dependencies = [ - "new_debug_unreachable", - "once_cell", - "parking_lot", - "phf_shared 0.10.0", - "precomputed-hash", -] - [[package]] name = "strsim" version = "0.10.0" @@ -7418,26 +7133,6 @@ version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "734676eb262c623cec13c3155096e08d1f8f29adce39ba17948b18dad1e54142" -[[package]] -name = "svm-rs" -version = "0.3.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11297baafe5fa0c99d5722458eac6a5e25c01eb1b8e5cd137f54079093daa7a4" -dependencies = [ - "dirs", - "fs2", - "hex", - "once_cell", - "reqwest", - "semver 1.0.22", - "serde", - "serde_json", - "sha2 0.10.8", - "thiserror", - "url", - "zip", -] - [[package]] name = "syn" version = "1.0.109" @@ -7852,11 +7547,8 @@ checksum = "212d5dcb2a1ce06d81107c3d0ffa3121fe974b73f068c8282cb1c32328113b6c" dependencies = [ "futures-util", "log", - "rustls", "tokio", - "tokio-rustls", "tungstenite", - "webpki-roots", ] [[package]] @@ -8960,12 +8652,6 @@ dependencies = [ "tap", ] -[[package]] -name = "yansi" -version = "0.5.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec" - [[package]] name = "yansi" version = "1.0.1" @@ -9012,45 +8698,6 @@ dependencies = [ "syn 2.0.58", ] -[[package]] -name = "zip" -version = "0.6.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "760394e246e4c28189f19d488c058bf16f564016aefac5d32bb1f3b51d5e9261" -dependencies = [ - "aes", - "byteorder", - "bzip2", - "constant_time_eq 0.1.5", - "crc32fast", - "crossbeam-utils", - "flate2", - "hmac", - "pbkdf2 0.11.0", - "sha1", - "time", - "zstd", -] - -[[package]] -name = "zstd" -version = "0.11.2+zstd.1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "20cc960326ece64f010d2d2107537f26dc589a6573a316bd5b1dba685fa5fde4" -dependencies = [ - "zstd-safe", -] - -[[package]] -name = "zstd-safe" -version = "5.0.2+zstd.1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d2a5585e04f9eea4b2a3d1eca508c4dee9592a89ef6f450c11719da0726f4db" -dependencies = [ - "libc", - "zstd-sys", -] - [[package]] name = "zstd-sys" version = "2.0.10+zstd.1.5.6" diff --git a/Cargo.toml b/Cargo.toml index 72b684ee12..2d6795d817 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,7 +1,8 @@ [workspace] -exclude = ["tools/protobuf-compiler"] +exclude = ["tools/protobuf-compiler", "tools/solidity-compiler"] members = [ + "crates/astria-bridge-contracts", "crates/astria-bridge-withdrawer", "crates/astria-build-info", "crates/astria-cli", @@ -25,6 +26,7 @@ members = [ # Specify default members so that cargo invocations in github actions will # not act on lints default-members = [ + "crates/astria-bridge-contracts", "crates/astria-bridge-withdrawer", "crates/astria-build-info", "crates/astria-cli", @@ -58,7 +60,7 @@ celestia-tendermint = "0.32.1" celestia-types = "0.1.1" clap = "4.5.4" const_format = "0.2.32" -ethers = "2.0.11" +ethers = { version = "2.0.11", default-features = false } futures = "0.3" hex = "0.4" hex-literal = "0.4.1" diff --git a/crates/astria-bridge-contracts/Cargo.toml b/crates/astria-bridge-contracts/Cargo.toml new file mode 100644 index 0000000000..72de607667 --- /dev/null +++ b/crates/astria-bridge-contracts/Cargo.toml @@ -0,0 +1,9 @@ +[package] +name = "astria-bridge-contracts" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +ethers = { workspace = true } diff --git a/crates/astria-bridge-contracts/README.md b/crates/astria-bridge-contracts/README.md new file mode 100644 index 0000000000..bfd9ba5d22 --- /dev/null +++ b/crates/astria-bridge-contracts/README.md @@ -0,0 +1,23 @@ +# Rust bindings for Astria Bridge Contracts + +Rust bindings for Astria's solidity contracts at +[astriaorg/astria-bridge-contracts](https://github.com/astriaorg/astria-bridge-contracts). +The repository is tracked by the +[./astria-bridge-contracts](./astria-bridge-contracts) submodule. + +The bindings are generated using the +[solidity compiler tool](../../tools/solidity-compiler). + +If the upstream repository and its contracts have changed, update the submodule +and re-generate the bindings like so: + +```sh +# inside crates/astria-bridge-contracts +cd ./astria-bridge-contract +git checkout +# navigate to root of repository +cd ../../../ +just compile-solidity-contracts +git add . 
+git commit -m "chore(bridge-contracts)!: bumped bridge contracts +``` diff --git a/crates/astria-bridge-withdrawer/astria-bridge-contracts b/crates/astria-bridge-contracts/astria-bridge-contracts similarity index 100% rename from crates/astria-bridge-withdrawer/astria-bridge-contracts rename to crates/astria-bridge-contracts/astria-bridge-contracts diff --git a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/generated/astria_bridgeable_erc20.rs b/crates/astria-bridge-contracts/src/generated/astria_bridgeable_erc20.rs similarity index 100% rename from crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/generated/astria_bridgeable_erc20.rs rename to crates/astria-bridge-contracts/src/generated/astria_bridgeable_erc20.rs diff --git a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/generated/astria_withdrawer.rs b/crates/astria-bridge-contracts/src/generated/astria_withdrawer.rs similarity index 100% rename from crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/generated/astria_withdrawer.rs rename to crates/astria-bridge-contracts/src/generated/astria_withdrawer.rs diff --git a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/generated/astria_withdrawer_interface.rs b/crates/astria-bridge-contracts/src/generated/i_astria_withdrawer.rs similarity index 100% rename from crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/generated/astria_withdrawer_interface.rs rename to crates/astria-bridge-contracts/src/generated/i_astria_withdrawer.rs diff --git a/crates/astria-bridge-contracts/src/generated/mod.rs b/crates/astria-bridge-contracts/src/generated/mod.rs new file mode 100644 index 0000000000..711dbe126d --- /dev/null +++ b/crates/astria-bridge-contracts/src/generated/mod.rs @@ -0,0 +1,8 @@ +#![allow(clippy::all)] +//! This module contains abigen! generated bindings for solidity contracts. +//! This is autogenerated code. +//! Do not manually edit these files. +//! These files may be overwritten by the codegen system at any time. 
+pub mod astria_bridgeable_erc20; +pub mod astria_withdrawer; +pub mod i_astria_withdrawer; diff --git a/crates/astria-bridge-contracts/src/lib.rs b/crates/astria-bridge-contracts/src/lib.rs new file mode 100644 index 0000000000..7ca33aa159 --- /dev/null +++ b/crates/astria-bridge-contracts/src/lib.rs @@ -0,0 +1,4 @@ +#[rustfmt::skip] +#[allow(clippy::pedantic)] +mod generated; +pub use generated::*; diff --git a/crates/astria-bridge-withdrawer/Cargo.toml b/crates/astria-bridge-withdrawer/Cargo.toml index 8a9cf78792..e09b8d7555 100644 --- a/crates/astria-bridge-withdrawer/Cargo.toml +++ b/crates/astria-bridge-withdrawer/Cargo.toml @@ -17,7 +17,7 @@ http = "0.2.9" axum = { workspace = true } futures = { workspace = true } hex = { workspace = true } -ethers = { workspace = true, features = ["ethers-solc", "ws"] } +ethers = { workspace = true, features = ["ws"] } hyper = { workspace = true } humantime = { workspace = true } ibc-types = { workspace = true } @@ -34,6 +34,7 @@ tryhard = { workspace = true } tokio = { workspace = true, features = ["macros", "rt-multi-thread", "signal"] } tokio-util = { workspace = true } +astria-bridge-contracts = { path = "../astria-bridge-contracts" } astria-build-info = { path = "../astria-build-info", features = ["runtime"] } astria-core = { path = "../astria-core", features = ["serde", "server"] } astria-eyre = { path = "../astria-eyre" } @@ -56,4 +57,3 @@ wiremock = { workspace = true } [build-dependencies] astria-build-info = { path = "../astria-build-info", features = ["build"] } -ethers = { workspace = true } diff --git a/crates/astria-bridge-withdrawer/build.rs b/crates/astria-bridge-withdrawer/build.rs index 9aea5056e2..f35d2acb08 100644 --- a/crates/astria-bridge-withdrawer/build.rs +++ b/crates/astria-bridge-withdrawer/build.rs @@ -1,42 +1,4 @@ -use std::path::Path; - -use ethers::contract::Abigen; - -fn emit_rerun_if_changed(file: &str) { - assert!( - Path::new(file).is_file(), - "rerun-if-changed file does not exist at `{file}`" - ); - println!("cargo:rerun-if-changed={file}"); -} - fn main() -> Result<(), Box<dyn std::error::Error>> { astria_build_info::emit("bridge-withdrawer-v")?; - - emit_rerun_if_changed("astria-bridge-contracts/src/AstriaWithdrawer.sol"); - emit_rerun_if_changed("astria-bridge-contracts/src/IAstriaWithdrawer.sol"); - emit_rerun_if_changed("astria-bridge-contracts/src/AstriaBridgeableERC20.sol"); - - Abigen::new( - "IAstriaWithdrawer", - "astria-bridge-contracts/out/IAstriaWithdrawer.sol/IAstriaWithdrawer.json", - )? - .generate()? - .write_to_file("src/bridge_withdrawer/ethereum/generated/astria_withdrawer_interface.rs")?; - - Abigen::new( - "AstriaWithdrawer", - "astria-bridge-contracts/out/AstriaWithdrawer.sol/AstriaWithdrawer.json", - )? - .generate()? - .write_to_file("src/bridge_withdrawer/ethereum/generated/astria_withdrawer.rs")?; - - Abigen::new( - "AstriaBridgeableERC20", - "astria-bridge-contracts/out/AstriaBridgeableERC20.sol/AstriaBridgeableERC20.json", - )? - .generate()?
- .write_to_file("src/bridge_withdrawer/ethereum/generated/astria_bridgeable_erc20.rs")?; - Ok(()) } diff --git a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/convert.rs b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/convert.rs index b3805f7db5..18d0ce35b4 100644 --- a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/convert.rs +++ b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/convert.rs @@ -1,5 +1,9 @@ use std::time::Duration; +use astria_bridge_contracts::i_astria_withdrawer::{ + Ics20WithdrawalFilter, + SequencerWithdrawalFilter, +}; use astria_core::{ bridge::Ics20WithdrawalFromRollupMemo, primitive::v1::{ @@ -32,11 +36,6 @@ use serde::{ Serialize, }; -use crate::bridge_withdrawer::ethereum::astria_withdrawer_interface::{ - Ics20WithdrawalFilter, - SequencerWithdrawalFilter, -}; - #[derive(Debug, PartialEq, Eq)] pub(crate) enum WithdrawalEvent { Sequencer(SequencerWithdrawalFilter), @@ -197,8 +196,9 @@ fn calculate_packet_timeout_time(timeout_delta: Duration) -> eyre::Result { #[cfg(test)] mod tests { + use astria_bridge_contracts::i_astria_withdrawer::SequencerWithdrawalFilter; + use super::*; - use crate::bridge_withdrawer::ethereum::astria_withdrawer_interface::SequencerWithdrawalFilter; fn default_native_asset() -> asset::Denom { "nria".parse().unwrap() diff --git a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/generated/mod.rs b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/generated/mod.rs deleted file mode 100644 index 8f535ed850..0000000000 --- a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/generated/mod.rs +++ /dev/null @@ -1,15 +0,0 @@ -#![allow( - unreachable_pub, - clippy::module_inception, - clippy::module_name_repetitions, - clippy::too_many_lines, - clippy::useless_conversion, - clippy::pedantic -)] - -pub(crate) mod astria_withdrawer_interface; - -#[cfg(test)] -pub(crate) mod astria_bridgeable_erc20; -#[cfg(test)] -pub(crate) mod astria_withdrawer; diff --git a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/mod.rs b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/mod.rs index 7d10cc52c6..8216a66bc7 100644 --- a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/mod.rs +++ b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/mod.rs @@ -1,9 +1,5 @@ pub(crate) mod convert; pub(crate) mod watcher; -#[rustfmt::skip] -mod generated; -pub(crate) use generated::*; - #[cfg(test)] mod test_utils; diff --git a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/test_utils.rs b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/test_utils.rs index b916d86f0b..e63fb5b4cd 100644 --- a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/test_utils.rs +++ b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/test_utils.rs @@ -3,14 +3,7 @@ use std::{ time::Duration, }; -use ethers::{ - abi::Tokenizable, - core::utils::Anvil, - prelude::*, - utils::AnvilInstance, -}; - -use crate::bridge_withdrawer::ethereum::{ +use astria_bridge_contracts::{ astria_bridgeable_erc20::{ ASTRIABRIDGEABLEERC20_ABI, ASTRIABRIDGEABLEERC20_BYTECODE, @@ -20,6 +13,12 @@ use crate::bridge_withdrawer::ethereum::{ ASTRIAWITHDRAWER_BYTECODE, }, }; +use ethers::{ + abi::Tokenizable, + core::utils::Anvil, + prelude::*, + utils::AnvilInstance, +}; #[allow(clippy::struct_field_names)] pub(crate) struct ConfigureAstriaWithdrawerDeployer { diff --git 
a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/watcher.rs b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/watcher.rs index e39546b2c0..58c2c1cc16 100644 --- a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/watcher.rs +++ b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/watcher.rs @@ -3,6 +3,11 @@ use std::{ time::Duration, }; +use astria_bridge_contracts::i_astria_withdrawer::{ + IAstriaWithdrawer, + Ics20WithdrawalFilter, + SequencerWithdrawalFilter, +}; use astria_core::{ primitive::v1::{ asset::{ @@ -50,17 +55,10 @@ use tracing::{ use crate::bridge_withdrawer::{ batch::Batch, - ethereum::{ - astria_withdrawer_interface::{ - IAstriaWithdrawer, - Ics20WithdrawalFilter, - SequencerWithdrawalFilter, - }, - convert::{ - event_to_action, - EventWithMetadata, - WithdrawalEvent, - }, + ethereum::convert::{ + event_to_action, + EventWithMetadata, + WithdrawalEvent, }, state::State, submitter, @@ -523,6 +521,14 @@ fn address_from_string(s: &str) -> Result { #[cfg(test)] mod tests { + use astria_bridge_contracts::{ + astria_bridgeable_erc20::AstriaBridgeableERC20, + astria_withdrawer::AstriaWithdrawer, + i_astria_withdrawer::{ + Ics20WithdrawalFilter, + SequencerWithdrawalFilter, + }, + }; use astria_core::{ primitive::v1::{ asset, @@ -548,12 +554,6 @@ mod tests { use super::*; use crate::bridge_withdrawer::ethereum::{ - astria_bridgeable_erc20::AstriaBridgeableERC20, - astria_withdrawer::AstriaWithdrawer, - astria_withdrawer_interface::{ - Ics20WithdrawalFilter, - SequencerWithdrawalFilter, - }, convert::EventWithMetadata, test_utils::{ ConfigureAstriaBridgeableERC20Deployer, diff --git a/justfile b/justfile index 307ac0c4b0..b9a98f653a 100644 --- a/justfile +++ b/justfile @@ -17,6 +17,10 @@ install-cli: compile-protos: cargo run --manifest-path tools/protobuf-compiler/Cargo.toml +# Compiles the generated rust code from the solidity contracts which are used in crates. +compile-solidity-contracts: + cargo run --manifest-path tools/solidity-compiler/Cargo.toml + #################################################### ## Scripts related to formatting code and linting ## #################################################### diff --git a/tools/solidity-compiler/Cargo.lock b/tools/solidity-compiler/Cargo.lock new file mode 100644 index 0000000000..770155c006 --- /dev/null +++ b/tools/solidity-compiler/Cargo.lock @@ -0,0 +1,1563 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing.
+version = 3 + +[[package]] +name = "Inflector" +version = "0.11.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe438c63458706e03479442743baae6c88256498e6431708f6dfc520a26515d3" +dependencies = [ + "lazy_static", + "regex", +] + +[[package]] +name = "aho-corasick" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" +dependencies = [ + "memchr", +] + +[[package]] +name = "arrayvec" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711" + +[[package]] +name = "auto_impl" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c87f3f15e7794432337fc718554eaa4dc8f04c9677a950ffe366f20a162ae42" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.68", +] + +[[package]] +name = "autocfg" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" + +[[package]] +name = "base16ct" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf" + +[[package]] +name = "base64ct" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" + +[[package]] +name = "bitflags" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" + +[[package]] +name = "bitvec" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c" +dependencies = [ + "funty", + "radium", + "tap", + "wyz", +] + +[[package]] +name = "block-buffer" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" +dependencies = [ + "generic-array", +] + +[[package]] +name = "byte-slice-cast" +version = "1.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3ac9f8b63eca6fd385229b3675f6cc0dc5c8a5c8a54a59d4f52ffd670d87b0c" + +[[package]] +name = "byteorder" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" + +[[package]] +name = "bytes" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "514de17de45fdb8dc022b1a7975556c53c86f9f0aa5f534b98977b171857c2c9" +dependencies = [ + "serde", +] + +[[package]] +name = "camino" +version = "1.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e0ec6b951b160caa93cc0c7b209e5a3bff7aae9062213451ac99493cd844c239" +dependencies = [ + "serde", +] + +[[package]] +name = "cargo-platform" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24b1f0365a6c6bb4020cd05806fd0d33c44d38046b8bd7f0e40814b9763cabfc" +dependencies = [ + "serde", +] + +[[package]] +name = "cargo_metadata" +version = "0.18.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d886547e41f740c616ae73108f6eb70afe6d940c7bc697cb30f13daec073037" 
+dependencies = [ + "camino", + "cargo-platform", + "semver", + "serde", + "serde_json", + "thiserror", +] + +[[package]] +name = "cc" +version = "1.0.104" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74b6a57f98764a267ff415d50a25e6e166f3831a5071af4995296ea97d210490" +dependencies = [ + "jobserver", + "libc", + "once_cell", +] + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "chrono" +version = "0.4.38" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401" +dependencies = [ + "num-traits", +] + +[[package]] +name = "const-hex" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94fb8a24a26d37e1ffd45343323dc9fe6654ceea44c12f2fcb3d7ac29e610bc6" +dependencies = [ + "cfg-if", + "cpufeatures", + "hex", + "proptest", + "serde", +] + +[[package]] +name = "const-oid" +version = "0.9.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" + +[[package]] +name = "cpufeatures" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53fe5e26ff1b7aef8bca9c6080520cfb8d9333c7568e1829cef191a9723e5504" +dependencies = [ + "libc", +] + +[[package]] +name = "crunchy" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" + +[[package]] +name = "crypto-bigint" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76" +dependencies = [ + "generic-array", + "rand_core", + "subtle", + "zeroize", +] + +[[package]] +name = "crypto-common" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" +dependencies = [ + "generic-array", + "typenum", +] + +[[package]] +name = "der" +version = "0.7.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f55bf8e7b65898637379c1b74eb1551107c8294ed26d855ceb9fd1a09cfc9bc0" +dependencies = [ + "const-oid", + "zeroize", +] + +[[package]] +name = "derive_more" +version = "0.99.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f33878137e4dafd7fa914ad4e259e18a4e8e532b9617a2d0150262bf53abfce" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.68", +] + +[[package]] +name = "digest" +version = "0.10.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" +dependencies = [ + "block-buffer", + "const-oid", + "crypto-common", + "subtle", +] + +[[package]] +name = "dunce" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56ce8c6da7551ec6c462cbaf3bfbc75131ebbfa1c944aeaa9dab51ca1c5f0c3b" + +[[package]] +name = "ecdsa" +version = "0.16.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee27f32b5c5292967d2d4a9d7f1e0b0aed2c15daded5a60300e4abb9d8020bca" +dependencies = [ + "der", + "digest", + "elliptic-curve", + "rfc6979", + "signature", + "spki", +] + +[[package]] +name = 
"elliptic-curve" +version = "0.13.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5e6043086bf7973472e0c7dff2142ea0b680d30e18d9cc40f267efbf222bd47" +dependencies = [ + "base16ct", + "crypto-bigint", + "digest", + "ff", + "generic-array", + "group", + "pkcs8", + "rand_core", + "sec1", + "subtle", + "zeroize", +] + +[[package]] +name = "equivalent" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" + +[[package]] +name = "errno" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba" +dependencies = [ + "libc", + "windows-sys", +] + +[[package]] +name = "ethabi" +version = "18.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7413c5f74cc903ea37386a8965a936cbeb334bd270862fdece542c1b2dcbc898" +dependencies = [ + "ethereum-types", + "hex", + "once_cell", + "regex", + "serde", + "serde_json", + "sha3", + "thiserror", + "uint", +] + +[[package]] +name = "ethbloom" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c22d4b5885b6aa2fe5e8b9329fb8d232bf739e434e6b87347c63bdd00c120f60" +dependencies = [ + "crunchy", + "fixed-hash", + "impl-codec", + "impl-rlp", + "impl-serde", + "scale-info", + "tiny-keccak", +] + +[[package]] +name = "ethereum-types" +version = "0.14.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02d215cbf040552efcbe99a38372fe80ab9d00268e20012b79fcd0f073edd8ee" +dependencies = [ + "ethbloom", + "fixed-hash", + "impl-codec", + "impl-rlp", + "impl-serde", + "primitive-types", + "scale-info", + "uint", +] + +[[package]] +name = "ethers-contract-abigen" +version = "2.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04ba01fbc2331a38c429eb95d4a570166781f14290ef9fdb144278a90b5a739b" +dependencies = [ + "Inflector", + "const-hex", + "dunce", + "ethers-core", + "eyre", + "prettyplease", + "proc-macro2", + "quote", + "regex", + "serde", + "serde_json", + "syn 2.0.68", + "toml", + "walkdir", +] + +[[package]] +name = "ethers-core" +version = "2.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82d80cc6ad30b14a48ab786523af33b37f28a8623fc06afd55324816ef18fb1f" +dependencies = [ + "arrayvec", + "bytes", + "cargo_metadata", + "chrono", + "const-hex", + "elliptic-curve", + "ethabi", + "generic-array", + "k256", + "num_enum", + "once_cell", + "open-fastrlp", + "rand", + "rlp", + "serde", + "serde_json", + "strum", + "syn 2.0.68", + "tempfile", + "thiserror", + "tiny-keccak", + "unicode-xid", +] + +[[package]] +name = "eyre" +version = "0.6.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7cd915d99f24784cdc19fd37ef22b97e3ff0ae756c7e492e9fbfe897d61e2aec" +dependencies = [ + "indenter", + "once_cell", +] + +[[package]] +name = "fastrand" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fc0510504f03c51ada170672ac806f1f105a88aa97a5281117e1ddc3368e51a" + +[[package]] +name = "ff" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ded41244b729663b1e574f1b4fb731469f69f79c17667b5d776b16cda0479449" +dependencies = [ + "rand_core", + "subtle", +] + +[[package]] +name = "fixed-hash" +version = "0.8.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "835c052cb0c08c1acf6ffd71c022172e18723949c8282f2b9f27efbc51e64534" +dependencies = [ + "byteorder", + "rand", + "rustc-hex", + "static_assertions", +] + +[[package]] +name = "form_urlencoded" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "funty" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" + +[[package]] +name = "generic-array" +version = "0.14.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +dependencies = [ + "typenum", + "version_check", + "zeroize", +] + +[[package]] +name = "getrandom" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" +dependencies = [ + "cfg-if", + "libc", + "wasi", +] + +[[package]] +name = "git2" +version = "0.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b903b73e45dc0c6c596f2d37eccece7c1c8bb6e4407b001096387c63d0d93724" +dependencies = [ + "bitflags", + "libc", + "libgit2-sys", + "log", + "openssl-probe", + "openssl-sys", + "url", +] + +[[package]] +name = "group" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0f9ef7462f7c099f518d754361858f86d8a07af53ba9af0fe635bbccb151a63" +dependencies = [ + "ff", + "rand_core", + "subtle", +] + +[[package]] +name = "hashbrown" +version = "0.14.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" + +[[package]] +name = "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + +[[package]] +name = "hex" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" + +[[package]] +name = "hmac" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" +dependencies = [ + "digest", +] + +[[package]] +name = "idna" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "impl-codec" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba6a270039626615617f3f36d15fc827041df3b78c439da2cadfa47455a77f2f" +dependencies = [ + "parity-scale-codec", +] + +[[package]] +name = "impl-rlp" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f28220f89297a075ddc7245cd538076ee98b01f2a9c23a53a4f1105d5a322808" +dependencies = [ + "rlp", +] + +[[package]] +name = "impl-serde" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebc88fc67028ae3db0c853baa36269d398d5f45b6982f95549ff5def78c935cd" +dependencies = [ + "serde", +] + +[[package]] +name = 
"impl-trait-for-tuples" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11d7a9f6330b71fea57921c9b61c47ee6e84f72d394754eff6163ae67e7395eb" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "indenter" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce23b50ad8242c51a442f3ff322d56b02f08852c77e4c0b4d3fd684abc89c683" + +[[package]] +name = "indexmap" +version = "2.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26" +dependencies = [ + "equivalent", + "hashbrown", +] + +[[package]] +name = "itoa" +version = "1.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" + +[[package]] +name = "jobserver" +version = "0.1.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2b099aaa34a9751c5bf0878add70444e1ed2dd73f347be99003d4577277de6e" +dependencies = [ + "libc", +] + +[[package]] +name = "k256" +version = "0.13.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "956ff9b67e26e1a6a866cb758f12c6f8746208489e3e4a4b5580802f2f0a587b" +dependencies = [ + "cfg-if", + "ecdsa", + "elliptic-curve", + "once_cell", + "sha2", +] + +[[package]] +name = "keccak" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ecc2af9a1119c51f12a14607e783cb977bde58bc069ff0c3da1095e635d70654" +dependencies = [ + "cpufeatures", +] + +[[package]] +name = "lazy_static" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" + +[[package]] +name = "libc" +version = "0.2.155" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c" + +[[package]] +name = "libgit2-sys" +version = "0.17.0+1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "10472326a8a6477c3c20a64547b0059e4b0d086869eee31e6d7da728a8eb7224" +dependencies = [ + "cc", + "libc", + "libssh2-sys", + "libz-sys", + "openssl-sys", + "pkg-config", +] + +[[package]] +name = "libm" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058" + +[[package]] +name = "libssh2-sys" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2dc8a030b787e2119a731f1951d6a773e2280c660f8ec4b0f5e1505a386e71ee" +dependencies = [ + "cc", + "libc", + "libz-sys", + "openssl-sys", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "libz-sys" +version = "1.1.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c15da26e5af7e25c90b37a2d75cdbf940cf4a55316de9d84c679c9b8bfabf82e" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "linux-raw-sys" +version = "0.4.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" + +[[package]] +name = "log" +version = "0.4.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" + +[[package]] +name = "memchr" +version = 
"2.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" + +[[package]] +name = "num-traits" +version = "0.2.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" +dependencies = [ + "autocfg", + "libm", +] + +[[package]] +name = "num_enum" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02339744ee7253741199f897151b38e72257d13802d4ee837285cc2990a90845" +dependencies = [ + "num_enum_derive", +] + +[[package]] +name = "num_enum_derive" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "681030a937600a36906c185595136d26abfebb4aa9c65701cefcaf8578bb982b" +dependencies = [ + "proc-macro-crate", + "proc-macro2", + "quote", + "syn 2.0.68", +] + +[[package]] +name = "once_cell" +version = "1.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" + +[[package]] +name = "open-fastrlp" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "786393f80485445794f6043fd3138854dd109cc6c4bd1a6383db304c9ce9b9ce" +dependencies = [ + "arrayvec", + "auto_impl", + "bytes", + "ethereum-types", + "open-fastrlp-derive", +] + +[[package]] +name = "open-fastrlp-derive" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "003b2be5c6c53c1cfeb0a238b8a1c3915cd410feb684457a36c10038f764bb1c" +dependencies = [ + "bytes", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "openssl-probe" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" + +[[package]] +name = "openssl-sys" +version = "0.9.102" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c597637d56fbc83893a35eb0dd04b2b8e7a50c91e64e9493e398b5df4fb45fa2" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "parity-scale-codec" +version = "3.6.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "306800abfa29c7f16596b5970a588435e3d5b3149683d00c12b699cc19f895ee" +dependencies = [ + "arrayvec", + "bitvec", + "byte-slice-cast", + "impl-trait-for-tuples", + "parity-scale-codec-derive", + "serde", +] + +[[package]] +name = "parity-scale-codec-derive" +version = "3.6.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d830939c76d294956402033aee57a6da7b438f2294eb94864c37b0569053a42c" +dependencies = [ + "proc-macro-crate", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "percent-encoding" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" + +[[package]] +name = "pkcs8" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" +dependencies = [ + "der", + "spki", +] + +[[package]] +name = "pkg-config" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec" + +[[package]] +name = "ppv-lite86" +version = "0.2.17" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" + +[[package]] +name = "prettyplease" +version = "0.2.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f12335488a2f3b0a83b14edad48dca9879ce89b2edd10e80237e4e852dd645e" +dependencies = [ + "proc-macro2", + "syn 2.0.68", +] + +[[package]] +name = "primitive-types" +version = "0.12.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b34d9fd68ae0b74a41b21c03c2f62847aa0ffea044eee893b4c140b37e244e2" +dependencies = [ + "fixed-hash", + "impl-codec", + "impl-rlp", + "impl-serde", + "scale-info", + "uint", +] + +[[package]] +name = "proc-macro-crate" +version = "3.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d37c51ca738a55da99dc0c4a34860fd675453b8b36209178c2249bb13651284" +dependencies = [ + "toml_edit 0.21.1", +] + +[[package]] +name = "proc-macro2" +version = "1.0.86" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "proptest" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4c2511913b88df1637da85cc8d96ec8e43a3f8bb8ccb71ee1ac240d6f3df58d" +dependencies = [ + "bitflags", + "lazy_static", + "num-traits", + "rand", + "rand_chacha", + "rand_xorshift", + "regex-syntax", + "unarray", +] + +[[package]] +name = "quote" +version = "1.0.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "radium" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha", + "rand_core", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom", +] + +[[package]] +name = "rand_xorshift" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d25bf25ec5ae4a3f1b92f929810509a2f53d7dca2f50b794ff57e3face536c8f" +dependencies = [ + "rand_core", +] + +[[package]] +name = "regex" +version = "1.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b91213439dad192326a0d7c6ee3955910425f441d7038e0d6933b0aec5c4517f" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata", + "regex-syntax", +] + +[[package]] +name = "regex-automata" +version = "0.4.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38caf58cc5ef2fed281f89292ef23f6365465ed9a41b7a7754eb4e26496c92df" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", +] + +[[package]] +name = "regex-syntax" 
+version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a66a03ae7c801facd77a29370b4faec201768915ac14a721ba36f20bc9c209b" + +[[package]] +name = "rfc6979" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8dd2a808d456c4a54e300a23e9f5a67e122c3024119acbfd73e3bf664491cb2" +dependencies = [ + "hmac", + "subtle", +] + +[[package]] +name = "rlp" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb919243f34364b6bd2fc10ef797edbfa75f33c252e7998527479c6d6b47e1ec" +dependencies = [ + "bytes", + "rlp-derive", + "rustc-hex", +] + +[[package]] +name = "rlp-derive" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e33d7b2abe0c340d8797fe2907d3f20d3b5ea5908683618bfe80df7f621f672a" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "rustc-hex" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3e75f6a532d0fd9f7f13144f392b6ad56a32696bfcd9c78f797f16bbb6f072d6" + +[[package]] +name = "rustix" +version = "0.38.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70dc5ec042f7a43c4a73241207cecc9873a06d45debb38b329f8541d85c2730f" +dependencies = [ + "bitflags", + "errno", + "libc", + "linux-raw-sys", + "windows-sys", +] + +[[package]] +name = "rustversion" +version = "1.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "955d28af4278de8121b7ebeb796b6a45735dc01436d898801014aced2773a3d6" + +[[package]] +name = "ryu" +version = "1.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" + +[[package]] +name = "same-file" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "scale-info" +version = "2.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eca070c12893629e2cc820a9761bedf6ce1dcddc9852984d1dc734b8bd9bd024" +dependencies = [ + "cfg-if", + "derive_more", + "parity-scale-codec", + "scale-info-derive", +] + +[[package]] +name = "scale-info-derive" +version = "2.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d35494501194174bda522a32605929eefc9ecf7e0a326c26db1fdd85881eb62" +dependencies = [ + "proc-macro-crate", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "sec1" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3e97a565f76233a6003f9f5c54be1d9c5bdfa3eccfb189469f11ec4901c47dc" +dependencies = [ + "base16ct", + "der", + "generic-array", + "pkcs8", + "subtle", + "zeroize", +] + +[[package]] +name = "semver" +version = "1.0.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b" +dependencies = [ + "serde", +] + +[[package]] +name = "serde" +version = "1.0.203" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7253ab4de971e72fb7be983802300c30b5a7f0c2e56fab8abfc6a214307c0094" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.203" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"500cbc0ebeb6f46627f50f3f5811ccf6bf00643be300b4c3eabc0ef55dc5b5ba" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.68", +] + +[[package]] +name = "serde_json" +version = "1.0.120" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e0d21c9a8cae1235ad58a00c11cb40d4b1e5c784f1ef2c537876ed6ffd8b7c5" +dependencies = [ + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "serde_spanned" +version = "0.6.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79e674e01f999af37c49f70a6ede167a8a60b2503e56c5599532a65baa5969a0" +dependencies = [ + "serde", +] + +[[package]] +name = "sha2" +version = "0.10.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "sha3" +version = "0.10.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75872d278a8f37ef87fa0ddbda7802605cb18344497949862c0d4dcb291eba60" +dependencies = [ + "digest", + "keccak", +] + +[[package]] +name = "signature" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" +dependencies = [ + "digest", + "rand_core", +] + +[[package]] +name = "solidity-compiler" +version = "0.1.0" +dependencies = [ + "ethers-contract-abigen", + "git2", +] + +[[package]] +name = "spki" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d" +dependencies = [ + "base64ct", + "der", +] + +[[package]] +name = "static_assertions" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" + +[[package]] +name = "strum" +version = "0.26.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fec0f0aef304996cf250b31b5a10dee7980c85da9d759361292b8bca5a18f06" +dependencies = [ + "strum_macros", +] + +[[package]] +name = "strum_macros" +version = "0.26.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c6bee85a5a24955dc440386795aa378cd9cf82acd5f764469152d2270e581be" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "rustversion", + "syn 2.0.68", +] + +[[package]] +name = "subtle" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" + +[[package]] +name = "syn" +version = "1.0.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "syn" +version = "2.0.68" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "901fa70d88b9d6c98022e23b4136f9f3e54e4662c3bc1bd1d84a42a9a0f0c1e9" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "tap" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" + +[[package]] +name = "tempfile" +version = "3.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"85b77fafb263dd9d05cbeac119526425676db3784113aa9295c88498cbf8bff1" +dependencies = [ + "cfg-if", + "fastrand", + "rustix", + "windows-sys", +] + +[[package]] +name = "thiserror" +version = "1.0.61" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c546c80d6be4bc6a00c0f01730c08df82eaa7a7a61f11d656526506112cc1709" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.61" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46c3384250002a6d5af4d114f2845d37b57521033f30d5c3f46c4d70e1197533" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.68", +] + +[[package]] +name = "tiny-keccak" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237" +dependencies = [ + "crunchy", +] + +[[package]] +name = "tinyvec" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c55115c6fbe2d2bef26eb09ad74bde02d8255476fc0c7b515ef09fbb35742d82" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + +[[package]] +name = "toml" +version = "0.8.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f49eb2ab21d2f26bd6db7bf383edc527a7ebaee412d17af4d40fdccd442f335" +dependencies = [ + "serde", + "serde_spanned", + "toml_datetime", + "toml_edit 0.22.14", +] + +[[package]] +name = "toml_datetime" +version = "0.6.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4badfd56924ae69bcc9039335b2e017639ce3f9b001c393c1b2d1ef846ce2cbf" +dependencies = [ + "serde", +] + +[[package]] +name = "toml_edit" +version = "0.21.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a8534fd7f78b5405e860340ad6575217ce99f38d4d5c8f2442cb5ecb50090e1" +dependencies = [ + "indexmap", + "toml_datetime", + "winnow 0.5.40", +] + +[[package]] +name = "toml_edit" +version = "0.22.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f21c7aaf97f1bd9ca9d4f9e73b0a6c74bd5afef56f2bc931943a6e1c37e04e38" +dependencies = [ + "indexmap", + "serde", + "serde_spanned", + "toml_datetime", + "winnow 0.6.13", +] + +[[package]] +name = "typenum" +version = "1.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" + +[[package]] +name = "uint" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76f64bba2c53b04fcab63c01a7d7427eadc821e3bc48c34dc9ba29c501164b52" +dependencies = [ + "byteorder", + "crunchy", + "hex", + "static_assertions", +] + +[[package]] +name = "unarray" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94" + +[[package]] +name = "unicode-bidi" +version = "0.3.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75" + +[[package]] +name = "unicode-ident" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" + +[[package]] +name = "unicode-normalization" 
+version = "0.1.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a56d1686db2308d901306f92a263857ef59ea39678a5458e7cb17f01415101f5" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "unicode-xid" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" + +[[package]] +name = "url" +version = "2.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22784dbdf76fdde8af1aeda5622b546b422b6fc585325248a2bf9f5e41e94d6c" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", +] + +[[package]] +name = "vcpkg" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" + +[[package]] +name = "version_check" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" + +[[package]] +name = "walkdir" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" +dependencies = [ + "same-file", + "winapi-util", +] + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "winapi-util" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d4cc384e1e73b93bafa6fb4f1df8c41695c8a91cf9c4c64358067d15a7b6c6b" +dependencies = [ + "windows-sys", +] + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets", +] + +[[package]] +name = "windows-targets" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" +dependencies = [ + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_gnullvm", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" + +[[package]] +name = "windows_x86_64_gnu" +version = 
"0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" + +[[package]] +name = "winnow" +version = "0.5.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f593a95398737aeed53e489c785df13f3618e41dbcd6718c6addbf1395aa6876" +dependencies = [ + "memchr", +] + +[[package]] +name = "winnow" +version = "0.6.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59b5e5f6c299a3c7890b876a2a587f3115162487e704907d9b6cd29473052ba1" +dependencies = [ + "memchr", +] + +[[package]] +name = "wyz" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed" +dependencies = [ + "tap", +] + +[[package]] +name = "zeroize" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" diff --git a/tools/solidity-compiler/Cargo.toml b/tools/solidity-compiler/Cargo.toml new file mode 100644 index 0000000000..50d04c96f1 --- /dev/null +++ b/tools/solidity-compiler/Cargo.toml @@ -0,0 +1,10 @@ +[package] +name = "solidity-compiler" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +ethers-contract-abigen = "2.0.14" +git2 = "0.19.0" diff --git a/tools/solidity-compiler/README.md b/tools/solidity-compiler/README.md new file mode 100644 index 0000000000..0cd5583a77 --- /dev/null +++ b/tools/solidity-compiler/README.md @@ -0,0 +1,5 @@ +# The Astria Bridge Contracts to Rust compiler tool + +See +[`crates/astria-bridge-contracts/README.md`](../../crates/astria-bridge-contracts/README.md) +for how to use this tool. diff --git a/tools/solidity-compiler/src/main.rs b/tools/solidity-compiler/src/main.rs new file mode 100644 index 0000000000..4260c52296 --- /dev/null +++ b/tools/solidity-compiler/src/main.rs @@ -0,0 +1,43 @@ +use ethers_contract_abigen::MultiAbigen; + +const CRATE_DIR: &str = concat!( + env!("CARGO_MANIFEST_DIR"), + "/../../crates/astria-bridge-contracts", +); +const SUBMODULE_DIR: &str = concat!( + env!("CARGO_MANIFEST_DIR"), + "/../../crates/astria-bridge-contracts/astria-bridge-contracts", +); +const SUBMODULE_NAME: &str = "crates/astria-bridge-contracts/astria-bridge-contracts"; + +fn init_and_update(submodule_name: &str) -> Result<(), git2::Error> { + println!("updating and initializing contracts submodule `{submodule_name}`"); + let repo = git2::Repository::open_from_env()?; + let mut submodule = repo.find_submodule(submodule_name)?; + submodule.update(true, None)?; + Ok(()) +} + +fn generate_contract_abi(src: &str, dst: &str) -> Result<(), Box> { + println!( + "generating Rust bindings from solidity JSON ABI files\n\tsources: {src}\n\tdestination: \ + {dst}" + ); + + MultiAbigen::from_json_files(src)? + .build()? 
+ .write_to_module(dst, false)?; + + Ok(()) +} + +fn main() -> Result<(), Box> { + init_and_update(SUBMODULE_NAME)?; + + generate_contract_abi( + &format!("{SUBMODULE_DIR}/out"), + &format!("{CRATE_DIR}/src/generated"), + )?; + + Ok(()) +} From 30c562a46462ffaefd534a8fabd75beb90c74e4c Mon Sep 17 00:00:00 2001 From: Lily Johnson <35852084+Lilyjjo@users.noreply.github.com> Date: Mon, 8 Jul 2024 13:56:59 +0200 Subject: [PATCH 02/24] refactor(sequencer): fix prepare proposal metrics (#1211) ## Summary The sequencer's `prepare_proposal()` metrics got shifted to `process_proposal()` across some refactors. ## Changes Put metrics back in proper places and removed unneeded metric. ## Testing Ran locally ## Metrics Removed the `PREPARE_PROPOSAL_EXCLUDED_TRANSACTIONS_DECODE_FAILURE` metric as the mempool should be filtering these transactions out. --- crates/astria-sequencer/src/app/mod.rs | 19 +++++++++++-------- crates/astria-sequencer/src/metrics.rs | 6 ++++-- 2 files changed, 15 insertions(+), 10 deletions(-) diff --git a/crates/astria-sequencer/src/app/mod.rs b/crates/astria-sequencer/src/app/mod.rs index b87d7b8d7d..e634d71e7b 100644 --- a/crates/astria-sequencer/src/app/mod.rs +++ b/crates/astria-sequencer/src/app/mod.rs @@ -516,6 +516,8 @@ impl App { .fold(0usize, |acc, seq| acc.saturating_add(seq.data.len())); if !block_size_constraints.sequencer_has_space(tx_sequence_data_bytes) { + self.metrics + .increment_prepare_proposal_excluded_transactions_sequencer_space(); debug!( transaction_hash = %tx_hash_base64, block_size_constraints = %json(&block_size_constraints), @@ -545,13 +547,13 @@ impl App { included_signed_txs.push((*tx).clone()); } Err(e) => { - self.metrics.increment_check_tx_removed_failed_execution(); + self.metrics + .increment_prepare_proposal_excluded_transactions_failed_execution(); debug!( transaction_hash = %tx_hash_base64, error = AsRef::::as_ref(&e), "failed to execute transaction, not including in block" ); - failed_tx_count = failed_tx_count.saturating_add(1); if e.downcast_ref::().is_some() { // we re-insert the tx into the mempool if it failed to execute @@ -560,6 +562,8 @@ impl App { // removed from the mempool in `update_mempool_after_finalization`. 
txs_to_readd_to_mempool.push((enqueued_tx, priority)); } else { + failed_tx_count = failed_tx_count.saturating_add(1); + // the transaction should be removed from the cometbft mempool self.mempool .track_removal_comet_bft( @@ -579,6 +583,11 @@ impl App { "excluded transactions from block due to execution failure" ); } + self.metrics.set_prepare_proposal_excluded_transactions( + txs_to_readd_to_mempool + .len() + .saturating_add(failed_tx_count), + ); self.mempool.insert_all(txs_to_readd_to_mempool).await; let mempool_len = self.mempool.len().await; @@ -626,8 +635,6 @@ impl App { .fold(0usize, |acc, seq| acc.saturating_add(seq.data.len())); if !block_size_constraints.sequencer_has_space(tx_sequence_data_bytes) { - self.metrics - .increment_prepare_proposal_excluded_transactions_sequencer_space(); debug!( transaction_hash = %telemetry::display::base64(&tx_hash), block_size_constraints = %json(&block_size_constraints), @@ -653,8 +660,6 @@ impl App { .context("error growing cometBFT block size")?; } Err(e) => { - self.metrics - .increment_prepare_proposal_excluded_transactions_failed_execution(); debug!( transaction_hash = %telemetry::display::base64(&tx_hash), error = AsRef::::as_ref(&e), @@ -666,8 +671,6 @@ impl App { } if excluded_tx_count > 0.0 { - self.metrics - .set_prepare_proposal_excluded_transactions(excluded_tx_count); info!( excluded_tx_count = excluded_tx_count, included_tx_count = execution_results.len(), diff --git a/crates/astria-sequencer/src/metrics.rs b/crates/astria-sequencer/src/metrics.rs index 1efdeb6f28..7623223f00 100644 --- a/crates/astria-sequencer/src/metrics.rs +++ b/crates/astria-sequencer/src/metrics.rs @@ -167,8 +167,10 @@ impl Metrics { .increment(1); } - pub(crate) fn set_prepare_proposal_excluded_transactions(&self, count: f64) { - self.prepare_proposal_excluded_transactions.set(count); + pub(crate) fn set_prepare_proposal_excluded_transactions(&self, count: usize) { + #[allow(clippy::cast_precision_loss)] + self.prepare_proposal_excluded_transactions + .set(count as f64); } pub(crate) fn record_proposal_deposits(&self, count: usize) { From 38a034b00fd51a3bebef968a76da77f931994a16 Mon Sep 17 00:00:00 2001 From: noot <36753753+noot@users.noreply.github.com> Date: Mon, 8 Jul 2024 12:29:22 -0400 Subject: [PATCH 03/24] fix(sequencer)!: store native asset ibc->trace mapping in init_chain (#1242) ## Summary we need to store the native asset ibc to "trace" mapping in the state, otherwise queries for the native asset using the ID will fail. for example `get_bridge_account_info` where the asset is the native asset fails right now ## Changes - store native asset ibc->trace mapping in `init_chain` - also enforce that the native asset is is "trace" form, as otherwise, we won't be able to map from ibc->trace form for the asset as we don't know the trace form. ## Breaking changes - this is unfortunately breaking since the ibc->trace mapping is stored in app state. 
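A minimal sketch of the distinction this check relies on, assuming the `Denom` parsing API used elsewhere in this series (the helper below is illustrative only and not part of the change):

```rust
// Hedged sketch. A trace-form denom such as "nria" or "transfer/channel-0/utia" parses
// to the trace-prefixed variant, so its `ibc/<hash>` form can be derived and the
// ibc->trace mapping stored at genesis. An "ibc/..." string parses to the ibc-prefixed
// variant, whose trace form cannot be recovered; that is the case `init_chain` now
// rejects with `bail!`.
use astria_core::primitive::v1::asset::Denom;

fn is_trace_form(denom: &str) -> bool {
    denom
        .parse::<Denom>()
        .map(|d| d.as_trace_prefixed().is_some())
        .unwrap_or(false)
}
```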
--- crates/astria-sequencer/src/app/mod.rs | 11 ++++ ...ransaction_with_every_action_snapshot.snap | 60 +++++++++--------- ..._changes__app_finalize_block_snapshot.snap | 60 +++++++++--------- ...reaking_changes__app_genesis_snapshot.snap | 62 +++++++++---------- 4 files changed, 102 insertions(+), 91 deletions(-) diff --git a/crates/astria-sequencer/src/app/mod.rs b/crates/astria-sequencer/src/app/mod.rs index e634d71e7b..377ccaf4e2 100644 --- a/crates/astria-sequencer/src/app/mod.rs +++ b/crates/astria-sequencer/src/app/mod.rs @@ -14,6 +14,7 @@ use std::{ use anyhow::{ anyhow, + bail, ensure, Context, }; @@ -69,6 +70,7 @@ use crate::{ }, address::StateWriteExt as _, api_state_ext::StateWriteExt as _, + asset::state_ext::StateWriteExt as _, authority::{ component::{ AuthorityComponent, @@ -220,6 +222,15 @@ impl App { state_tx.put_base_prefix(&genesis_state.address_prefixes().base); crate::asset::initialize_native_asset(genesis_state.native_asset_base_denomination()); + let native_asset = crate::asset::get_native_asset(); + if let Some(trace_native_asset) = native_asset.as_trace_prefixed() { + state_tx + .put_ibc_asset(trace_native_asset) + .context("failed to put native asset")?; + } else { + bail!("native asset must not be in ibc/ form") + } + state_tx.put_native_asset_denom(genesis_state.native_asset_base_denomination()); state_tx.put_chain_id_and_revision_number(chain_id.try_into().context("invalid chain ID")?); state_tx.put_block_height(0); diff --git a/crates/astria-sequencer/src/app/snapshots/astria_sequencer__app__tests_breaking_changes__app_execute_transaction_with_every_action_snapshot.snap b/crates/astria-sequencer/src/app/snapshots/astria_sequencer__app__tests_breaking_changes__app_execute_transaction_with_every_action_snapshot.snap index edafad96cf..ce51df7d23 100644 --- a/crates/astria-sequencer/src/app/snapshots/astria_sequencer__app__tests_breaking_changes__app_execute_transaction_with_every_action_snapshot.snap +++ b/crates/astria-sequencer/src/app/snapshots/astria_sequencer__app__tests_breaking_changes__app_execute_transaction_with_every_action_snapshot.snap @@ -3,36 +3,36 @@ source: crates/astria-sequencer/src/app/tests_breaking_changes.rs expression: app.app_hash.as_bytes() --- [ - 220, - 23, - 112, - 98, - 180, - 104, - 39, - 254, - 107, + 126, + 205, + 191, + 0, 65, - 159, - 49, - 59, - 7, - 177, - 110, - 184, - 141, - 73, - 165, - 204, - 144, - 102, - 28, - 247, - 177, - 145, - 116, - 114, + 191, + 147, + 26, + 88, 230, - 27, - 186 + 39, + 60, + 192, + 5, + 177, + 11, + 248, + 83, + 115, + 225, + 203, + 17, + 58, + 245, + 71, + 172, + 232, + 75, + 203, + 163, + 32, + 75 ] diff --git a/crates/astria-sequencer/src/app/snapshots/astria_sequencer__app__tests_breaking_changes__app_finalize_block_snapshot.snap b/crates/astria-sequencer/src/app/snapshots/astria_sequencer__app__tests_breaking_changes__app_finalize_block_snapshot.snap index 10baa0c334..7d4b7f970c 100644 --- a/crates/astria-sequencer/src/app/snapshots/astria_sequencer__app__tests_breaking_changes__app_finalize_block_snapshot.snap +++ b/crates/astria-sequencer/src/app/snapshots/astria_sequencer__app__tests_breaking_changes__app_finalize_block_snapshot.snap @@ -3,36 +3,36 @@ source: crates/astria-sequencer/src/app/tests_breaking_changes.rs expression: app.app_hash.as_bytes() --- [ - 168, - 40, - 26, - 85, - 142, - 62, - 98, - 97, - 65, - 205, - 249, - 210, - 237, - 174, - 132, - 98, - 101, - 27, - 175, - 0, - 25, - 191, - 31, - 107, - 175, - 139, - 95, + 72, + 87, + 29, + 138, + 91, 221, + 71, + 66, + 219, + 
212, + 171, + 126, 223, - 122, - 255, - 48 + 176, + 198, + 154, + 10, + 234, + 253, + 99, + 213, + 78, + 92, + 228, + 89, + 204, + 197, + 193, + 90, + 57, + 205, + 2 ] diff --git a/crates/astria-sequencer/src/app/snapshots/astria_sequencer__app__tests_breaking_changes__app_genesis_snapshot.snap b/crates/astria-sequencer/src/app/snapshots/astria_sequencer__app__tests_breaking_changes__app_genesis_snapshot.snap index 41ab56cbc9..8d4956d126 100644 --- a/crates/astria-sequencer/src/app/snapshots/astria_sequencer__app__tests_breaking_changes__app_genesis_snapshot.snap +++ b/crates/astria-sequencer/src/app/snapshots/astria_sequencer__app__tests_breaking_changes__app_genesis_snapshot.snap @@ -3,36 +3,36 @@ source: crates/astria-sequencer/src/app/tests_breaking_changes.rs expression: app.app_hash.as_bytes() --- [ - 65, - 219, - 201, - 82, - 255, - 77, - 62, - 112, - 134, - 219, - 249, - 17, - 143, - 126, - 194, - 58, - 86, - 113, - 222, - 141, - 69, - 59, - 118, - 127, + 107, + 187, + 174, + 122, + 174, + 31, + 236, + 104, + 223, 121, - 44, - 27, - 128, - 120, - 30, - 110, - 160 + 241, + 170, + 212, + 166, + 199, + 70, + 87, + 126, + 205, + 179, + 18, + 34, + 54, + 131, + 22, + 122, + 240, + 109, + 24, + 15, + 129, + 135 ] From 06ff55e074a2d8271a660f14b97023147e19118b Mon Sep 17 00:00:00 2001 From: Richard Janis Goldschmidt Date: Tue, 9 Jul 2024 10:58:07 +0200 Subject: [PATCH 04/24] refactor(core, bridge-withdrawer)!: move bridge-unlock memo to core (#1245) ## Summary Moves the bridge-unlock memo out of `astria-bridge-withdrawer` and into `astria-core`. ## Background This type needs to be publicly readable for `astria-cli` and other consumers. ## Changes - Move the `BridgeUnlockMemo` type to `astria_core::bridge` - Change the public memo fields from ethers types to native Rust types - Encode the contained transaction hash as base64 - Provide snapshot tests ## Testing bridge-withdrawer tests still pass. ## Breaking Changelist The transaction hash field in the memo being encoded as `base64` is a breaking change for bridge-withdrawer. 
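A minimal sketch of the new wire format, assuming `astria-core` is built with its `serde` feature enabled; the values mirror the snapshot added in this patch:

```rust
use astria_core::bridge;

fn unlock_memo_json() -> serde_json::Result<String> {
    let memo = bridge::UnlockMemo {
        block_number: 42,
        transaction_hash: [88; 32],
    };
    // The transaction hash is now a plain `[u8; 32]` rendered as base64 instead of an
    // ethers `TxHash`, so consumers parsing the old format must be updated:
    // {"block_number":42,"transaction_hash":"WFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFg="}
    serde_json::to_string(&memo)
}
```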
--- .../src/bridge_withdrawer/ethereum/convert.rs | 36 +++++++++---------- .../src/bridge_withdrawer/submitter/mod.rs | 14 ++++---- .../src/bridge_withdrawer/submitter/tests.rs | 12 ++++--- crates/astria-core/src/bridge.rs | 28 +++++++++++++++ ...ge__test__bridge_unlock_memo_snapshot.snap | 8 +++++ 5 files changed, 66 insertions(+), 32 deletions(-) create mode 100644 crates/astria-core/src/snapshots/astria_core__bridge__test__bridge_unlock_memo_snapshot.snap diff --git a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/convert.rs b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/convert.rs index 18d0ce35b4..fd849f91bb 100644 --- a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/convert.rs +++ b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/convert.rs @@ -5,7 +5,10 @@ use astria_bridge_contracts::i_astria_withdrawer::{ SequencerWithdrawalFilter, }; use astria_core::{ - bridge::Ics20WithdrawalFromRollupMemo, + bridge::{ + self, + Ics20WithdrawalFromRollupMemo, + }, primitive::v1::{ asset::{ self, @@ -31,10 +34,6 @@ use ethers::types::{ U64, }; use ibc_types::core::client::Height as IbcHeight; -use serde::{ - Deserialize, - Serialize, -}; #[derive(Debug, PartialEq, Eq)] pub(crate) enum WithdrawalEvent { @@ -83,12 +82,6 @@ pub(crate) fn event_to_action( Ok(action) } -#[derive(Debug, Serialize, Deserialize)] -pub(crate) struct BridgeUnlockMemo { - pub(crate) block_number: U64, - pub(crate) transaction_hash: TxHash, -} - fn event_to_bridge_unlock( event: &SequencerWithdrawalFilter, block_number: U64, @@ -96,9 +89,12 @@ fn event_to_bridge_unlock( fee_asset: asset::Denom, asset_withdrawal_divisor: u128, ) -> eyre::Result { - let memo = BridgeUnlockMemo { - block_number, - transaction_hash, + let memo = bridge::UnlockMemo { + // XXX: The documentation mentions that the ethers U64 type will panic if it cannot be + // converted to u64. However, this is part of a catch-all documentation that does not apply + // to U64. 
+ block_number: block_number.as_u64(), + transaction_hash: transaction_hash.into(), }; let action = BridgeUnlockAction { to: event @@ -232,9 +228,9 @@ mod tests { let expected_action = BridgeUnlockAction { to: crate::astria_address([1u8; 20]), amount: 99, - memo: serde_json::to_vec(&BridgeUnlockMemo { - block_number: 1.into(), - transaction_hash: [2u8; 32].into(), + memo: serde_json::to_vec(&bridge::UnlockMemo { + block_number: 1, + transaction_hash: [2u8; 32], }) .unwrap(), fee_asset: denom, @@ -273,9 +269,9 @@ mod tests { let expected_action = BridgeUnlockAction { to: crate::astria_address([1u8; 20]), amount: 99, - memo: serde_json::to_vec(&BridgeUnlockMemo { - block_number: 1.into(), - transaction_hash: [2u8; 32].into(), + memo: serde_json::to_vec(&bridge::UnlockMemo { + block_number: 1, + transaction_hash: [2u8; 32], }) .unwrap(), fee_asset: denom, diff --git a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/submitter/mod.rs b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/submitter/mod.rs index b996374be2..4e37e04910 100644 --- a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/submitter/mod.rs +++ b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/submitter/mod.rs @@ -4,7 +4,10 @@ use std::{ }; use astria_core::{ - bridge::Ics20WithdrawalFromRollupMemo, + bridge::{ + self, + Ics20WithdrawalFromRollupMemo, + }, primitive::v1::asset, protocol::{ asset::v1alpha1::AllowedFeeAssetsResponse, @@ -69,10 +72,7 @@ use super::{ state, SequencerStartupInfo, }; -use crate::{ - bridge_withdrawer::ethereum::convert::BridgeUnlockMemo, - metrics::Metrics, -}; +use crate::metrics::Metrics; mod builder; mod signer; @@ -715,9 +715,9 @@ fn rollup_height_from_signed_transaction( let last_batch_rollup_height = match withdrawal_action { Action::BridgeUnlock(action) => { - let memo: BridgeUnlockMemo = serde_json::from_slice(&action.memo) + let memo: bridge::UnlockMemo = serde_json::from_slice(&action.memo) .wrap_err("failed to parse memo from last transaction by the bridge account")?; - Some(memo.block_number.as_u64()) + Some(memo.block_number) } Action::Ics20Withdrawal(action) => { let memo: Ics20WithdrawalFromRollupMemo = serde_json::from_str(&action.memo) diff --git a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/submitter/tests.rs b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/submitter/tests.rs index 6df792ab6a..eb99bc63b1 100644 --- a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/submitter/tests.rs +++ b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/submitter/tests.rs @@ -7,7 +7,10 @@ use std::{ }; use astria_core::{ - bridge::Ics20WithdrawalFromRollupMemo, + bridge::{ + self, + Ics20WithdrawalFromRollupMemo, + }, crypto::SigningKey, generated::protocol::account::v1alpha1::NonceResponse, primitive::v1::asset, @@ -76,7 +79,6 @@ use super::Submitter; use crate::{ bridge_withdrawer::{ batch::Batch, - ethereum::convert::BridgeUnlockMemo, state, submitter, }, @@ -302,9 +304,9 @@ fn make_bridge_unlock_action() -> Action { let inner = BridgeUnlockAction { to: crate::astria_address([0u8; 20]), amount: 99, - memo: serde_json::to_vec(&BridgeUnlockMemo { - block_number: DEFAULT_LAST_ROLLUP_HEIGHT.into(), - transaction_hash: [1u8; 32].into(), + memo: serde_json::to_vec(&bridge::UnlockMemo { + block_number: DEFAULT_LAST_ROLLUP_HEIGHT, + transaction_hash: [1u8; 32], }) .unwrap(), fee_asset: denom, diff --git a/crates/astria-core/src/bridge.rs b/crates/astria-core/src/bridge.rs index 822fde35a7..a8eedce914 100644 --- a/crates/astria-core/src/bridge.rs +++ 
b/crates/astria-core/src/bridge.rs @@ -1,5 +1,23 @@ use crate::primitive::v1::Address; +#[derive(Clone, Debug)] +#[cfg_attr( + feature = "serde", + derive(serde::Serialize), + derive(serde::Deserialize) +)] +pub struct UnlockMemo { + pub block_number: u64, + #[cfg_attr( + feature = "serde", + serde( + serialize_with = "crate::serde::base64_serialize", + deserialize_with = "crate::serde::base64_deserialize_array" + ) + )] + pub transaction_hash: [u8; 32], +} + /// Memo format for a ICS20 withdrawal from the rollup which is sent to /// an external IBC-enabled chain. #[derive(Debug, Clone)] @@ -40,6 +58,16 @@ pub struct Ics20TransferDepositMemo { mod test { use super::*; + #[test] + fn bridge_unlock_memo_snapshot() { + let memo = UnlockMemo { + block_number: 42, + transaction_hash: [88; 32], + }; + + insta::assert_json_snapshot!(memo); + } + #[test] fn ics20_withdrawal_from_rollup_memo_snapshot() { let memo = Ics20WithdrawalFromRollupMemo { diff --git a/crates/astria-core/src/snapshots/astria_core__bridge__test__bridge_unlock_memo_snapshot.snap b/crates/astria-core/src/snapshots/astria_core__bridge__test__bridge_unlock_memo_snapshot.snap new file mode 100644 index 0000000000..f0f7700ccb --- /dev/null +++ b/crates/astria-core/src/snapshots/astria_core__bridge__test__bridge_unlock_memo_snapshot.snap @@ -0,0 +1,8 @@ +--- +source: crates/astria-core/src/bridge.rs +expression: memo +--- +{ + "block_number": 42, + "transaction_hash": "WFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFg=" +} From 460fc507f3a0f67229918aed2bb1834b1751aa25 Mon Sep 17 00:00:00 2001 From: quasystaty Date: Tue, 9 Jul 2024 14:52:11 +0300 Subject: [PATCH 05/24] fix: rollup archive node configurations (#1249) ## Summary `geth v1.14.0` introduced changes to archive node configurations, now need to set `--state.scheme=hash` to enable archive node --- charts/evm-rollup/Chart.yaml | 2 +- charts/evm-rollup/templates/statefulsets.yaml | 3 +++ 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/charts/evm-rollup/Chart.yaml b/charts/evm-rollup/Chart.yaml index a2581202a2..c0d1b8ccbb 100644 --- a/charts/evm-rollup/Chart.yaml +++ b/charts/evm-rollup/Chart.yaml @@ -15,7 +15,7 @@ type: application # This is the chart version. This version number should be incremented each time you make changes # to the chart and its templates, including the app version. # Versions are expected to follow Semantic Versioning (https://semver.org/) -version: 0.23.0 +version: 0.23.1 # This is the version number of the application being deployed. This version number should be # incremented each time you make changes to the application. 
Versions are not expected to diff --git a/charts/evm-rollup/templates/statefulsets.yaml b/charts/evm-rollup/templates/statefulsets.yaml index f588696830..4681fce47c 100644 --- a/charts/evm-rollup/templates/statefulsets.yaml +++ b/charts/evm-rollup/templates/statefulsets.yaml @@ -64,9 +64,12 @@ spec: - --maxpeers=0 {{- if .Values.config.rollup.archiveNode }} - --gcmode=archive + - --state.scheme=hash - --history.transactions=0 + - --history.state=0 {{- else }} - --state.scheme=path + - --history.state=540000 {{- end }} {{ if .Values.config.rollup.metrics.enabled }} - --metrics From df768695b026999ba7af38ae7a98b4da4f54a2d3 Mon Sep 17 00:00:00 2001 From: Itamar Reif <9663129+itamarreif@users.noreply.github.com> Date: Tue, 9 Jul 2024 08:00:52 -0400 Subject: [PATCH 06/24] refactor(bridge-withdrawer)!: refactor startup to a separate subtask and remove balance check from startup (#1190) ## Summary This refactors some of the startup logic into a separate startup subtask. It also removes the minimum expected balance check from the startup logic, as that is not necessary. ## Background The original startup logic had the `Submitter` running parts of the startup routine that weren't relevant to it. this moves that logic out to the `Startup` task. The balance check is removed because if a sequencer transaction with insufficient balance is submitted it will just fail execution and the service will restart. Requiring the check in the startup logic results in clunky operation of the service, since launching it before funding it will cause it to crash. ## Changes - Introduce `Startup` object and task - `Watcher` and `Submitter` now wait for the `Startup` to update the global state object with startup info before running their startup logic, whereas before we just had the `Watcher` wait on the `Submitter` to send a message on the `oneshot` channel. - Cleaned up some of the tryhard configs with helper funcs to be less confusing ## Breaking Changes - Remove the balance check from startup and the associated configs. issue linked below ## Testing - Updated some of the `Submitter`-specific tests to reflect changes to the startup logic. 
closes #1229 --- charts/deploy.just | 6 +- charts/evm-bridge-withdrawer/Chart.yaml | 2 +- .../templates/configmaps.yaml | 2 +- .../local.env.example | 3 - .../src/bridge_withdrawer/ethereum/watcher.rs | 164 +++--- .../src/bridge_withdrawer/mod.rs | 163 ++++-- .../src/bridge_withdrawer/startup.rs | 488 ++++++++++++++++++ .../src/bridge_withdrawer/state.rs | 18 + .../bridge_withdrawer/submitter/builder.rs | 39 +- .../src/bridge_withdrawer/submitter/mod.rs | 470 +---------------- .../src/bridge_withdrawer/submitter/tests.rs | 316 +----------- crates/astria-bridge-withdrawer/src/config.rs | 2 - 12 files changed, 750 insertions(+), 923 deletions(-) create mode 100644 crates/astria-bridge-withdrawer/src/bridge_withdrawer/startup.rs diff --git a/charts/deploy.just b/charts/deploy.just index b502c56ce3..513fc92ae3 100644 --- a/charts/deploy.just +++ b/charts/deploy.just @@ -272,7 +272,7 @@ run-smoke-test: sleep 1 fi done - if [ $CHECKS -eq $MAX_CHECKS ]; then + if [ $CHECKS -gt $MAX_CHECKS ]; then echo "Bridge Out Sequencer failure" exit 1 fi @@ -293,8 +293,8 @@ run-smoke-test: sleep 1 fi done - if [ $CHECKS -eq $MAX_CHECKS ]; then - echo "Bridge Out Sequencer failure" + if [ $CHECKS -gt $MAX_CHECKS ]; then + echo "Finalization failure" exit 1 fi exit 0 diff --git a/charts/evm-bridge-withdrawer/Chart.yaml b/charts/evm-bridge-withdrawer/Chart.yaml index 8c87fbe42d..10054ccec2 100644 --- a/charts/evm-bridge-withdrawer/Chart.yaml +++ b/charts/evm-bridge-withdrawer/Chart.yaml @@ -15,7 +15,7 @@ type: application # This is the chart version. This version number should be incremented each time you make changes # to the chart and its templates, including the app version. # Versions are expected to follow Semantic Versioning (https://semver.org/) -version: 0.0.1 +version: 0.0.2 # This is the version number of the application being deployed. This version number should be # incremented each time you make changes to the application. Versions are not expected to diff --git a/charts/evm-bridge-withdrawer/templates/configmaps.yaml b/charts/evm-bridge-withdrawer/templates/configmaps.yaml index c0b0e41901..d0e7c8c0e2 100644 --- a/charts/evm-bridge-withdrawer/templates/configmaps.yaml +++ b/charts/evm-bridge-withdrawer/templates/configmaps.yaml @@ -13,7 +13,6 @@ data: ASTRIA_BRIDGE_WITHDRAWER_SEQUENCER_BRIDGE_ADDRESS: "{{ .Values.config.sequencerBridgeAddress }}" ASTRIA_BRIDGE_WITHDRAWER_FEE_ASSET_DENOMINATION: "{{ .Values.config.feeAssetDenom }}" ASTRIA_BRIDGE_WITHDRAWER_ROLLUP_ASSET_DENOMINATION: "{{ .Values.config.rollupAssetDenom }}" - ASTRIA_BRIDGE_WITHDRAWER_MIN_EXPECTED_FEE_ASSET_BALANCE: "{{ .Values.config.minExpectedFeeAssetBalance }}" ASTRIA_BRIDGE_WITHDRAWER_ETHEREUM_CONTRACT_ADDRESS: "{{ .Values.config.evmContractAddress }}" ASTRIA_BRIDGE_WITHDRAWER_ETHEREUM_RPC_ENDPOINT: "{{ .Values.config.evmRpcEndpoint }}" ASTRIA_BRIDGE_WITHDRAWER_NO_METRICS: "{{ not .Values.metrics.enabled }}" @@ -30,6 +29,7 @@ data: OTEL_EXPORTER_OTLP_TRACE_HEADERS: "{{ .Values.otel.traceHeaders }}" OTEL_SERVICE_NAME: "{{ tpl .Values.otel.serviceName . 
}}" {{- if not .Values.global.dev }} + ASTRIA_BRIDGE_WITHDRAWER_MIN_EXPECTED_FEE_ASSET_BALANCE: "{{ .Values.config.minExpectedFeeAssetBalance }}" {{- else }} {{- end }} --- diff --git a/crates/astria-bridge-withdrawer/local.env.example b/crates/astria-bridge-withdrawer/local.env.example index b01f3ce07a..e9f0e77145 100644 --- a/crates/astria-bridge-withdrawer/local.env.example +++ b/crates/astria-bridge-withdrawer/local.env.example @@ -37,9 +37,6 @@ ASTRIA_BRIDGE_WITHDRAWER_SEQUENCER_ADDRESS_PREFIX=astria # The fee asset denomination to use for the bridge account's transactions. ASTRIA_BRIDGE_WITHDRAWER_FEE_ASSET_DENOMINATION="nria" -# The minimum expected balance of the fee asset in the bridge account. -ASTRIA_BRIDGE_WITHDRAWER_MIN_EXPECTED_FEE_ASSET_BALANCE=1000000 - # The asset denomination being withdrawn from the rollup. ASTRIA_BRIDGE_WITHDRAWER_ROLLUP_ASSET_DENOMINATION="nria" diff --git a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/watcher.rs b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/watcher.rs index 58c2c1cc16..adb7de7457 100644 --- a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/watcher.rs +++ b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/watcher.rs @@ -50,6 +50,7 @@ use tokio_util::sync::CancellationToken; use tracing::{ debug, info, + trace, warn, }; @@ -60,19 +61,20 @@ use crate::bridge_withdrawer::{ EventWithMetadata, WithdrawalEvent, }, + startup, state::State, submitter, - SequencerStartupInfo, }; pub(crate) struct Builder { + pub(crate) shutdown_token: CancellationToken, + pub(crate) startup_handle: startup::InfoHandle, pub(crate) ethereum_contract_address: String, pub(crate) ethereum_rpc_endpoint: String, - pub(crate) submitter_handle: submitter::Handle, - pub(crate) shutdown_token: CancellationToken, pub(crate) state: Arc, pub(crate) rollup_asset_denom: Denom, pub(crate) bridge_address: Address, + pub(crate) submitter_handle: submitter::Handle, pub(crate) sequencer_address_prefix: String, } @@ -81,11 +83,12 @@ impl Builder { let Builder { ethereum_contract_address, ethereum_rpc_endpoint, - submitter_handle, shutdown_token, + startup_handle, state, rollup_asset_denom, bridge_address, + submitter_handle, sequencer_address_prefix, } = self; @@ -105,11 +108,12 @@ impl Builder { Ok(Watcher { contract_address, ethereum_rpc_endpoint: ethereum_rpc_endpoint.to_string(), - submitter_handle, rollup_asset_denom, bridge_address, state, shutdown_token: shutdown_token.clone(), + startup_handle, + submitter_handle, sequencer_address_prefix, }) } @@ -117,13 +121,14 @@ impl Builder { /// Watches for withdrawal events emitted by the `AstriaWithdrawer` contract. pub(crate) struct Watcher { + shutdown_token: CancellationToken, + startup_handle: startup::InfoHandle, + submitter_handle: submitter::Handle, contract_address: ethers::types::Address, ethereum_rpc_endpoint: String, - submitter_handle: submitter::Handle, rollup_asset_denom: Denom, bridge_address: Address, state: Arc, - shutdown_token: CancellationToken, sequencer_address_prefix: String, } @@ -135,14 +140,13 @@ impl Watcher { .wrap_err("watcher failed to start up")?; let Self { - contract_address: _contract_address, - ethereum_rpc_endpoint: _ethereum_rps_endpoint, - submitter_handle, rollup_asset_denom, bridge_address, state, shutdown_token, + submitter_handle, sequencer_address_prefix, + .. 
} = self; let converter = EventToActionConvertConfig { @@ -192,15 +196,19 @@ impl Watcher { u128, u64, )> { - // wait for submitter to be ready - let SequencerStartupInfo { + let startup::Info { fee_asset, - next_batch_rollup_height, - } = self - .submitter_handle - .recv_startup_info() - .await - .wrap_err("failed to get sequencer startup info")?; + starting_rollup_height, + .. + } = select! { + () = self.shutdown_token.cancelled() => { + return Err(eyre!("watcher received shutdown signal while waiting for startup")); + } + + startup_info = self.startup_handle.get_info() => { + startup_info.wrap_err("failed to receive startup info")? + } + }; // connect to eth node let retry_config = tryhard::RetryFutureConfig::new(1024) @@ -255,7 +263,7 @@ impl Watcher { contract, fee_asset, asset_withdrawal_divisor, - next_batch_rollup_height, + starting_rollup_height, )) } } @@ -405,7 +413,7 @@ async fn get_and_send_events_at_block( } if batch.actions.is_empty() { - debug!("no actions to send at block {block_number}"); + trace!("no actions to send at block {block_number}"); } else { let actions_len = batch.actions.len(); submitter_handle @@ -546,10 +554,9 @@ mod tests { }, utils::hex, }; - use tokio::sync::{ - mpsc, - mpsc::error::TryRecvError::Empty, - oneshot, + use tokio::sync::mpsc::{ + self, + error::TryRecvError, }; use super::*; @@ -641,23 +648,23 @@ mod tests { let value = 1_000_000_000.into(); let recipient = crate::astria_address([1u8; 20]); - let bridge_address = crate::astria_address([1u8; 20]); - let denom = default_native_asset(); - + let denom = "nria".parse::().unwrap(); + + let state = Arc::new(State::new()); + let startup_handle = startup::InfoHandle::new(state.subscribe()); + state.set_startup_info(startup::Info { + starting_rollup_height: 1, + fee_asset: denom.clone(), + chain_id: "astria".to_string(), + }); let (batch_tx, mut batch_rx) = mpsc::channel(100); - let (startup_tx, startup_rx) = oneshot::channel(); - let submitter_handle = submitter::Handle::new(startup_rx, batch_tx); - startup_tx - .send(SequencerStartupInfo { - fee_asset: "nria".parse().unwrap(), - next_batch_rollup_height: 1, - }) - .unwrap(); + let submitter_handle = submitter::Handle::new(batch_tx); let watcher = Builder { ethereum_contract_address: hex::encode(contract_address), ethereum_rpc_endpoint: anvil.ws_endpoint(), + startup_handle, submitter_handle, shutdown_token: CancellationToken::new(), state: Arc::new(State::new()), @@ -669,7 +676,6 @@ mod tests { .unwrap(); tokio::task::spawn(watcher.run()); - let receipt = send_sequencer_withdraw_transaction(&contract, value, recipient).await; let expected_event = EventWithMetadata { event: WithdrawalEvent::Sequencer(SequencerWithdrawalFilter { @@ -702,7 +708,7 @@ mod tests { ); }; assert_eq!(action, &expected_action); - assert_eq!(batch_rx.try_recv().unwrap_err(), Empty); + assert_eq!(batch_rx.try_recv().unwrap_err(), TryRecvError::Empty); } #[tokio::test] @@ -744,24 +750,24 @@ mod tests { panic!("expected action to be BridgeUnlock, got {expected_action:?}"); }; + let state = Arc::new(State::new()); + let startup_handle = startup::InfoHandle::new(state.subscribe()); + state.set_startup_info(startup::Info { + starting_rollup_height: 1, + fee_asset: denom.clone(), + chain_id: "astria".to_string(), + }); let (batch_tx, mut batch_rx) = mpsc::channel(100); - let (startup_tx, startup_rx) = oneshot::channel(); - let submitter_handle = submitter::Handle::new(startup_rx, batch_tx); - startup_tx - .send(SequencerStartupInfo { - fee_asset: denom.clone(), - 
next_batch_rollup_height: 1, - }) - .unwrap(); let watcher = Builder { ethereum_contract_address: hex::encode(contract_address), ethereum_rpc_endpoint: anvil.ws_endpoint(), - submitter_handle, + startup_handle, shutdown_token: CancellationToken::new(), state: Arc::new(State::new()), rollup_asset_denom: denom.clone(), bridge_address, + submitter_handle: submitter::Handle::new(batch_tx), sequencer_address_prefix: crate::ASTRIA_ADDRESS_PREFIX.into(), } .build() @@ -821,27 +827,28 @@ mod tests { let value = 1_000_000_000.into(); let recipient = "somebech32address".to_string(); + let bridge_address = crate::astria_address([1u8; 20]); let denom = "transfer/channel-0/utia".parse::().unwrap(); + let state = Arc::new(State::new()); + let startup_handle = startup::InfoHandle::new(state.subscribe()); + state.set_startup_info(startup::Info { + starting_rollup_height: 1, + fee_asset: denom.clone(), + chain_id: "astria".to_string(), + }); let (batch_tx, mut batch_rx) = mpsc::channel(100); - let (startup_tx, startup_rx) = oneshot::channel(); - let submitter_handle = submitter::Handle::new(startup_rx, batch_tx); - startup_tx - .send(SequencerStartupInfo { - fee_asset: denom.clone(), - next_batch_rollup_height: 1, - }) - .unwrap(); let watcher = Builder { ethereum_contract_address: hex::encode(contract_address), ethereum_rpc_endpoint: anvil.ws_endpoint(), - submitter_handle, + startup_handle, shutdown_token: CancellationToken::new(), state: Arc::new(State::new()), rollup_asset_denom: denom.clone(), bridge_address, + submitter_handle: submitter::Handle::new(batch_tx), sequencer_address_prefix: crate::ASTRIA_ADDRESS_PREFIX.into(), } .build() @@ -860,6 +867,7 @@ mod tests { block_number: receipt.block_number.unwrap(), transaction_hash: receipt.transaction_hash, }; + let Action::Ics20Withdrawal(mut expected_action) = event_to_action( expected_event, denom.clone(), @@ -883,7 +891,7 @@ mod tests { }; action.timeout_time = 0; // zero this for testing assert_eq!(action, &expected_action); - assert_eq!(batch_rx.try_recv().unwrap_err(), Empty); + assert_eq!(batch_rx.try_recv().unwrap_err(), TryRecvError::Empty); } async fn mint_tokens( @@ -947,27 +955,27 @@ mod tests { let value = 1_000_000_000.into(); let recipient = crate::astria_address([1u8; 20]); - let denom = default_native_asset(); let bridge_address = crate::astria_address([1u8; 20]); + let denom = default_native_asset(); + let state = Arc::new(State::new()); + let startup_handle = startup::InfoHandle::new(state.subscribe()); + state.set_startup_info(startup::Info { + starting_rollup_height: 1, + fee_asset: denom.clone(), + chain_id: "astria".to_string(), + }); let (batch_tx, mut batch_rx) = mpsc::channel(100); - let (startup_tx, startup_rx) = oneshot::channel(); - let submitter_handle = submitter::Handle::new(startup_rx, batch_tx); - startup_tx - .send(SequencerStartupInfo { - fee_asset: "nria".parse().unwrap(), - next_batch_rollup_height: 1, - }) - .unwrap(); let watcher = Builder { ethereum_contract_address: hex::encode(contract_address), ethereum_rpc_endpoint: anvil.ws_endpoint(), - submitter_handle, + startup_handle, shutdown_token: CancellationToken::new(), state: Arc::new(State::new()), rollup_asset_denom: denom.clone(), bridge_address, + submitter_handle: submitter::Handle::new(batch_tx), sequencer_address_prefix: crate::ASTRIA_ADDRESS_PREFIX.into(), } .build() @@ -1007,7 +1015,7 @@ mod tests { ); }; assert_eq!(action, &expected_action); - assert_eq!(batch_rx.try_recv().unwrap_err(), Empty); + assert_eq!(batch_rx.try_recv().unwrap_err(), 
TryRecvError::Empty); } async fn send_ics20_withdraw_transaction_astria_bridgeable_erc20( @@ -1049,27 +1057,27 @@ mod tests { let value = 1_000_000_000.into(); let recipient = "somebech32address".to_string(); - let denom = "transfer/channel-0/utia".parse::().unwrap(); let bridge_address = crate::astria_address([1u8; 20]); + let denom = "transfer/channel-0/utia".parse::().unwrap(); + let state = Arc::new(State::new()); + let startup_handle = startup::InfoHandle::new(state.subscribe()); + state.set_startup_info(startup::Info { + starting_rollup_height: 1, + fee_asset: denom.clone(), + chain_id: "astria".to_string(), + }); let (batch_tx, mut batch_rx) = mpsc::channel(100); - let (startup_tx, startup_rx) = oneshot::channel(); - let submitter_handle = submitter::Handle::new(startup_rx, batch_tx); - startup_tx - .send(SequencerStartupInfo { - fee_asset: "transfer/channel-0/utia".parse().unwrap(), - next_batch_rollup_height: 1, - }) - .unwrap(); let watcher = Builder { ethereum_contract_address: hex::encode(contract_address), ethereum_rpc_endpoint: anvil.ws_endpoint(), - submitter_handle, + startup_handle, shutdown_token: CancellationToken::new(), state: Arc::new(State::new()), rollup_asset_denom: denom.clone(), bridge_address, + submitter_handle: submitter::Handle::new(batch_tx), sequencer_address_prefix: crate::ASTRIA_ADDRESS_PREFIX.into(), } .build() @@ -1116,6 +1124,6 @@ mod tests { }; action.timeout_time = 0; // zero this for testing assert_eq!(action, &expected_action); - assert_eq!(batch_rx.try_recv().unwrap_err(), Empty); + assert_eq!(batch_rx.try_recv().unwrap_err(), TryRecvError::Empty); } } diff --git a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/mod.rs b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/mod.rs index 4265fb5eb7..d655f9c645 100644 --- a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/mod.rs +++ b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/mod.rs @@ -7,10 +7,7 @@ use std::{ time::Duration, }; -use astria_core::primitive::v1::asset::{ - self, - Denom, -}; +use astria_core::primitive::v1::asset::Denom; use astria_eyre::eyre::{ self, WrapErr as _, @@ -44,6 +41,7 @@ use crate::{ mod batch; mod ethereum; +mod startup; mod state; mod submitter; @@ -53,6 +51,7 @@ pub struct BridgeWithdrawer { api_server: api::ApiServer, submitter: Submitter, ethereum_watcher: watcher::Watcher, + startup: startup::Startup, state: Arc, } @@ -77,42 +76,54 @@ impl BridgeWithdrawer { ethereum_contract_address, ethereum_rpc_endpoint, rollup_asset_denomination, - min_expected_fee_asset_balance, + sequencer_bridge_address, .. 
} = cfg; let state = Arc::new(State::new()); + let sequencer_bridge_address = sequencer_bridge_address + .parse() + .wrap_err("failed to parse sequencer bridge address")?; + + // make startup object + let startup = startup::Builder { + shutdown_token: shutdown_handle.token(), + state: state.clone(), + sequencer_chain_id, + sequencer_cometbft_endpoint: sequencer_cometbft_endpoint.clone(), + sequencer_bridge_address, + expected_fee_asset: fee_asset_denomination, + } + .build() + .wrap_err("failed to initialize startup")?; + + let startup_handle = startup::InfoHandle::new(state.subscribe()); + // make submitter object let (submitter, submitter_handle) = submitter::Builder { shutdown_token: shutdown_handle.token(), + startup_handle: startup_handle.clone(), sequencer_cometbft_endpoint, - sequencer_chain_id, sequencer_key_path, sequencer_address_prefix: sequencer_address_prefix.clone(), state: state.clone(), - expected_fee_asset: fee_asset_denomination, - min_expected_fee_asset_balance: u128::from(min_expected_fee_asset_balance), metrics, } .build() .wrap_err("failed to initialize submitter")?; - let sequencer_bridge_address = cfg - .sequencer_bridge_address - .parse() - .wrap_err("failed to parse sequencer bridge address")?; - let ethereum_watcher = watcher::Builder { ethereum_contract_address, ethereum_rpc_endpoint, - submitter_handle, + startup_handle, shutdown_token: shutdown_handle.token(), state: state.clone(), rollup_asset_denom: rollup_asset_denomination .parse::() .wrap_err("failed to parse ROLLUP_ASSET_DENOMINATION as Denom")?, bridge_address: sequencer_bridge_address, + submitter_handle, sequencer_address_prefix: sequencer_address_prefix.clone(), } .build() @@ -130,18 +141,22 @@ impl BridgeWithdrawer { api_server, submitter, ethereum_watcher, + startup, state, }; Ok((service, shutdown_handle)) } + // Panic won't happen because `startup_task` is unwraped lazily after checking if it's `Some`. 
+ #[allow(clippy::missing_panics_doc)] pub async fn run(self) { let Self { shutdown_token, api_server, submitter, ethereum_watcher, + startup, state: _state, } = self; @@ -158,54 +173,74 @@ impl BridgeWithdrawer { }); info!("spawned API server"); + let mut startup_task = Some(tokio::spawn(startup.run())); + info!("spawned startup task"); + let mut submitter_task = tokio::spawn(submitter.run()); info!("spawned submitter task"); let mut ethereum_watcher_task = tokio::spawn(ethereum_watcher.run()); info!("spawned ethereum watcher task"); - let shutdown = select!( - o = &mut api_task => { - report_exit("api server", o); - Shutdown { - api_task: None, - submitter_task: Some(submitter_task), - ethereum_watcher_task: Some(ethereum_watcher_task), - api_shutdown_signal, - token: shutdown_token + let shutdown = loop { + select!( + o = async { startup_task.as_mut().unwrap().await }, if startup_task.is_none() => { + match o { + Ok(_) => { + info!(task = "startup", "task has exited"); + startup_task = None; + }, + Err(error) => { + error!(task = "startup", %error, "task returned with error"); + break Shutdown { + api_task: Some(api_task), + submitter_task: Some(submitter_task), + ethereum_watcher_task: Some(ethereum_watcher_task), + startup_task: None, + api_shutdown_signal, + token: shutdown_token, + }; + } + } } - } - o = &mut submitter_task => { - report_exit("submitter", o); - Shutdown { - api_task: Some(api_task), - submitter_task: None, - ethereum_watcher_task:Some(ethereum_watcher_task), - api_shutdown_signal, - token: shutdown_token + o = &mut api_task => { + report_exit("api server", o); + break Shutdown { + api_task: None, + submitter_task: Some(submitter_task), + ethereum_watcher_task: Some(ethereum_watcher_task), + startup_task, + api_shutdown_signal, + token: shutdown_token + } } - } - o = &mut ethereum_watcher_task => { - report_exit("ethereum watcher", o); - Shutdown { - api_task: Some(api_task), - submitter_task: Some(submitter_task), - ethereum_watcher_task: None, - api_shutdown_signal, - token: shutdown_token + o = &mut submitter_task => { + report_exit("submitter", o); + break Shutdown { + api_task: Some(api_task), + submitter_task: None, + ethereum_watcher_task:Some(ethereum_watcher_task), + startup_task, + api_shutdown_signal, + token: shutdown_token + } } - } - - ); + o = &mut ethereum_watcher_task => { + report_exit("ethereum watcher", o); + break Shutdown { + api_task: Some(api_task), + submitter_task: Some(submitter_task), + ethereum_watcher_task: None, + startup_task, + api_shutdown_signal, + token: shutdown_token + } + } + ); + }; shutdown.run().await; } } -#[derive(Debug)] -pub struct SequencerStartupInfo { - pub fee_asset: asset::Denom, - pub next_batch_rollup_height: u64, -} - /// A handle for instructing the [`Service`] to shut down. /// /// It is returned along with its related `Service` from [`Service::new`]. 
The @@ -264,6 +299,7 @@ struct Shutdown { api_task: Option>>, submitter_task: Option>>, ethereum_watcher_task: Option>>, + startup_task: Option>>, api_shutdown_signal: oneshot::Sender<()>, token: CancellationToken, } @@ -271,19 +307,38 @@ struct Shutdown { impl Shutdown { const API_SHUTDOWN_TIMEOUT_SECONDS: u64 = 4; const ETHEREUM_WATCHER_SHUTDOWN_TIMEOUT_SECONDS: u64 = 5; - const SUBMITTER_SHUTDOWN_TIMEOUT_SECONDS: u64 = 20; + const STARTUP_SHUTDOWN_TIMEOUT_SECONDS: u64 = 1; + const SUBMITTER_SHUTDOWN_TIMEOUT_SECONDS: u64 = 19; async fn run(self) { let Self { api_task, submitter_task, ethereum_watcher_task, + startup_task, api_shutdown_signal, token, } = self; token.cancel(); + // Giving startup 1 second to shutdown because it should be very quick. + if let Some(mut startup_task) = startup_task { + info!("waiting for startup task to shut down"); + let limit = Duration::from_secs(Self::STARTUP_SHUTDOWN_TIMEOUT_SECONDS); + match timeout(limit, &mut startup_task).await.map(flatten_result) { + Ok(Ok(())) => info!("startup exited gracefully"), + Ok(Err(error)) => error!(%error, "startup exited with an error"), + Err(_) => { + error!( + timeout_secs = limit.as_secs(), + "startup did not shut down within timeout; killing it" + ); + startup_task.abort(); + } + } + } + // Giving submitter 20 seconds to shutdown because Kubernetes issues a SIGKILL after 30. if let Some(mut submitter_task) = submitter_task { info!("waiting for submitter task to shut down"); @@ -302,8 +357,6 @@ impl Shutdown { submitter_task.abort(); } } - } else { - info!("submitter task was already dead"); } // Giving ethereum watcher 5 seconds to shutdown because Kubernetes issues a SIGKILL after @@ -325,8 +378,6 @@ impl Shutdown { ethereum_watcher_task.abort(); } } - } else { - info!("watcher task was already dead"); } // Giving the API task 4 seconds. 
5s for watcher + 20 for submitter + 4s = 29s (out of 30s @@ -346,8 +397,6 @@ impl Shutdown { api_task.abort(); } } - } else { - info!("API server was already dead"); } } } diff --git a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/startup.rs b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/startup.rs new file mode 100644 index 0000000000..d68e7392bc --- /dev/null +++ b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/startup.rs @@ -0,0 +1,488 @@ +use std::{ + sync::Arc, + time::Duration, +}; + +use astria_core::{ + bridge::{ + self, + Ics20WithdrawalFromRollupMemo, + }, + primitive::v1::asset, + protocol::{ + asset::v1alpha1::AllowedFeeAssetsResponse, + bridge::v1alpha1::BridgeAccountLastTxHashResponse, + transaction::v1alpha1::Action, + }, +}; +use astria_eyre::eyre::{ + self, + bail, + ensure, + OptionExt as _, + WrapErr as _, +}; +use prost::{ + Message as _, + Name as _, +}; +use sequencer_client::{ + tendermint_rpc, + Address, + SequencerClientExt as _, + SignedTransaction, +}; +use tendermint_rpc::{ + endpoint::tx, + Client as _, +}; +use tokio::sync::watch; +use tokio_util::sync::CancellationToken; +use tracing::{ + info, + instrument, + warn, +}; +use tryhard::backoff_strategies::ExponentialBackoff; + +use super::state::{ + self, + State, +}; + +pub(super) struct Builder { + pub(super) shutdown_token: CancellationToken, + pub(super) state: Arc, + pub(super) sequencer_chain_id: String, + pub(super) sequencer_cometbft_endpoint: String, + pub(super) sequencer_bridge_address: Address, + pub(super) expected_fee_asset: asset::Denom, +} + +impl Builder { + pub(super) fn build(self) -> eyre::Result { + let Self { + shutdown_token, + state, + sequencer_chain_id, + sequencer_cometbft_endpoint, + sequencer_bridge_address, + expected_fee_asset, + } = self; + + let sequencer_cometbft_client = + sequencer_client::HttpClient::new(&*sequencer_cometbft_endpoint) + .wrap_err("failed constructing cometbft http client")?; + + Ok(Startup { + shutdown_token, + state, + sequencer_chain_id, + sequencer_cometbft_client, + sequencer_bridge_address, + expected_fee_asset, + }) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, serde::Serialize)] +pub(super) struct Info { + pub(super) starting_rollup_height: u64, + pub(super) fee_asset: asset::Denom, + pub(super) chain_id: String, +} + +#[derive(Debug, Clone)] +pub(super) struct InfoHandle { + rx: watch::Receiver, +} + +impl InfoHandle { + pub(super) fn new(rx: watch::Receiver) -> Self { + Self { + rx, + } + } + + pub(super) async fn get_info(&mut self) -> eyre::Result { + let state = self + .rx + .wait_for(|state| state.get_startup_info().is_some()) + .await + .wrap_err("failed to get startup info")?; + + Ok(state + .get_startup_info() + .expect("the previous line guarantes that the state is intialized") + .clone()) + } +} + +pub(super) struct Startup { + shutdown_token: CancellationToken, + state: Arc, + sequencer_chain_id: String, + sequencer_cometbft_client: sequencer_client::HttpClient, + sequencer_bridge_address: Address, + expected_fee_asset: asset::Denom, +} + +impl Startup { + pub(super) async fn run(mut self) -> eyre::Result<()> { + let shutdown_token = self.shutdown_token.clone(); + + let state = self.state.clone(); + let startup_task = async move { + self.confirm_sequencer_config() + .await + .wrap_err("failed to confirm sequencer config")?; + let starting_rollup_height = self + .get_starting_rollup_height() + .await + .wrap_err("failed to get next rollup block height")?; + + // send the startup info to the submitter + let info = 
Info { + chain_id: self.sequencer_chain_id.clone(), + fee_asset: self.expected_fee_asset, + starting_rollup_height, + }; + + state.set_startup_info(info); + + Ok(()) + }; + + tokio::select!( + () = shutdown_token.cancelled() => { + bail!("startup was cancelled"); + } + res = startup_task => { + res + } + ) + } + + /// Confirms configuration values against the sequencer node. Values checked: + /// + /// - `self.sequencer_chain_id` matches the value returned from the sequencer node's genesis + /// - `self.fee_asset_id` is a valid fee asset on the sequencer node + /// - `self.sequencer_key.address` has a sufficient balance of `self.fee_asset_id` + /// + /// # Errors + /// + /// - `self.chain_id` does not match the value returned from the sequencer node + /// - `self.fee_asset_id` is not a valid fee asset on the sequencer node + /// - `self.sequencer_key.address` does not have a sufficient balance of `self.fee_asset_id`. + async fn confirm_sequencer_config(&self) -> eyre::Result<()> { + // confirm the sequencer chain id + let actual_chain_id = + get_sequencer_chain_id(self.sequencer_cometbft_client.clone(), self.state.clone()) + .await + .wrap_err("failed to get chain id from sequencer")?; + ensure!( + self.sequencer_chain_id == actual_chain_id.to_string(), + "sequencer_chain_id provided in config does not match chain_id returned from sequencer" + ); + + // confirm that the fee asset ID is valid + let allowed_fee_asset_ids_resp = + get_allowed_fee_asset_ids(self.sequencer_cometbft_client.clone(), self.state.clone()) + .await + .wrap_err("failed to get allowed fee asset ids from sequencer")?; + let expected_fee_asset_ibc = self.expected_fee_asset.to_ibc_prefixed(); + ensure!( + allowed_fee_asset_ids_resp + .fee_assets + .iter() + .any(|asset| asset.to_ibc_prefixed() == expected_fee_asset_ibc), + "fee_asset provided in config is not a valid fee asset on the sequencer" + ); + + Ok(()) + } + + /// Gets the last transaction by the bridge account on the sequencer. This is used to + /// determine the starting rollup height for syncing to the latest on-chain state. + /// + /// # Returns + /// The last transaction by the bridge account on the sequencer, if it exists. + /// + /// # Errors + /// + /// 1. Failing to fetch the last transaction hash by the bridge account. + /// 2. Failing to convert the last transaction hash to a tendermint hash. + /// 3. Failing to fetch the last transaction by the bridge account. + /// 4. The last transaction by the bridge account failed to execute (this should not happen + /// in the sequencer logic). + /// 5. Failing to convert the transaction data from bytes to proto. + /// 6. Failing to convert the transaction data from proto to `SignedTransaction`. 
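As an aside, the `InfoHandle::get_info` above leans on `tokio::sync::watch::Receiver::wait_for`, which lets any number of consumers block until the startup info has been published. A minimal sketch of that handoff follows; the `Snapshot` type and the `String` payload are hypothetical stand-ins for the withdrawer's `State` snapshot and `startup::Info`.

```rust
use tokio::sync::watch;

// Hypothetical stand-in for the shared state snapshot.
#[derive(Clone, Debug, Default)]
struct Snapshot {
    startup_info: Option<String>,
}

#[tokio::main]
async fn main() -> Result<(), watch::error::RecvError> {
    let (tx, rx) = watch::channel(Snapshot::default());

    // Any number of consumers can clone the receiver and wait for the info;
    // `wait_for` resolves as soon as the predicate is true, even if the value
    // was published before the consumer started waiting.
    let mut consumer = rx.clone();
    let waiter = tokio::spawn(async move {
        let snapshot = consumer
            .wait_for(|snapshot| snapshot.startup_info.is_some())
            .await?;
        Ok::<_, watch::error::RecvError>(
            snapshot
                .startup_info
                .clone()
                .expect("the predicate above guarantees the info is set"),
        )
    });

    // The startup task publishes the info exactly once.
    tx.send_modify(|snapshot| snapshot.startup_info = Some("test_sequencer-1000".to_string()));

    let info = waiter.await.expect("waiter task panicked")?;
    println!("startup info received: {info}");
    Ok(())
}
```

Unlike the earlier `oneshot`-based handoff, a `watch`-backed snapshot can be observed by several tasks (here both the submitter and the ethereum watcher take a handle) and re-read after the fact, which appears to be why the startup info is stored on the shared state rather than sent point-to-point.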
+ async fn get_last_transaction(&self) -> eyre::Result> { + // get last transaction hash by the bridge account, if it exists + let last_transaction_hash_resp = get_bridge_account_last_transaction_hash( + self.sequencer_cometbft_client.clone(), + self.state.clone(), + self.sequencer_bridge_address, + ) + .await + .wrap_err("failed to fetch last transaction hash by the bridge account")?; + + let Some(tx_hash) = last_transaction_hash_resp.tx_hash else { + return Ok(None); + }; + + let tx_hash = tendermint::Hash::try_from(tx_hash.to_vec()) + .wrap_err("failed to convert last transaction hash to tendermint hash")?; + + // get the corresponding transaction + let last_transaction = get_sequencer_transaction_at_hash( + self.sequencer_cometbft_client.clone(), + self.state.clone(), + tx_hash, + ) + .await + .wrap_err("failed to fetch last transaction by the bridge account")?; + + // check that the transaction actually executed + ensure!( + last_transaction.tx_result.code == tendermint::abci::Code::Ok, + "last transaction by the bridge account failed to execute. this should not happen in \ + the sequencer logic." + ); + + let proto_tx = + astria_core::generated::protocol::transaction::v1alpha1::SignedTransaction::decode( + &*last_transaction.tx, + ) + .wrap_err_with(|| format!( + "failed to decode data in Sequencer CometBFT transaction as `{}`", + astria_core::generated::protocol::transaction::v1alpha1::SignedTransaction::full_name(), + ))?; + + let tx = SignedTransaction::try_from_raw(proto_tx) + .wrap_err_with(|| format!("failed to verify {}", astria_core::generated::protocol::transaction::v1alpha1::SignedTransaction::full_name()))?; + + info!( + last_bridge_account_tx.hash = %telemetry::display::hex(&tx_hash), + last_bridge_account_tx.height = %last_transaction.height, + "fetched last transaction by the bridge account" + ); + + Ok(Some(tx)) + } + + /// Gets the data necessary for syncing to the latest on-chain state from the sequencer. + /// Since we batch all events from a given rollup block into a single sequencer + /// transaction, we get the last tx finalized by the bridge account on the sequencer + /// and extract the rollup height from it. + /// + /// The rollup height is extracted from the block height value in the memo of one of the + /// actions in the batch. + /// + /// # Returns + /// The next batch rollup height to process. + /// + /// # Errors + /// + /// 1. Failing to get and deserialize a valid last transaction by the bridge account from the + /// sequencer. + /// 2. The last transaction by the bridge account failed to execute (this should not happen in + /// the sequencer logic) + /// 3. The last transaction by the bridge account did not contain a withdrawal action + /// 4. The memo of the last transaction by the bridge account could not be parsed + async fn get_starting_rollup_height(&mut self) -> eyre::Result { + let signed_transaction = self + .get_last_transaction() + .await + .wrap_err("failed to get the bridge account's last sequencer transaction")?; + let starting_rollup_height = if let Some(signed_transaction) = signed_transaction { + rollup_height_from_signed_transaction(&signed_transaction) + .wrap_err( + "failed to extract rollup height from last transaction by the bridge account", + )? + .checked_add(1) + .ok_or_eyre("failed to increment rollup height by 1")? + } else { + 1 + }; + Ok(starting_rollup_height) + } +} + +/// Extracts the rollup height from the last transaction by the bridge account on the sequencer. 
+/// Since all the withdrawals from a rollup block are batched into a single sequencer transaction, +/// he rollup height can be extracted from the memo of any withdrawal action in the batch. +/// +/// # Returns +/// +/// The rollup height of the last batch of withdrawals. +/// +/// # Errors +/// +/// 1. The last transaction by the bridge account did not contain a withdrawal action. +/// 2. The memo of the last transaction by the bridge account could not be parsed. +/// 3. The block number in the memo of the last transaction by the bridge account could not be +/// converted to a u64. +fn rollup_height_from_signed_transaction( + signed_transaction: &SignedTransaction, +) -> eyre::Result { + // find the last batch's rollup block height + let withdrawal_action = signed_transaction + .actions() + .iter() + .find(|action| matches!(action, Action::BridgeUnlock(_) | Action::Ics20Withdrawal(_))) + .ok_or_eyre("last transaction by the bridge account did not contain a withdrawal action")?; + + let last_batch_rollup_height = match withdrawal_action { + Action::BridgeUnlock(action) => { + let memo: bridge::UnlockMemo = serde_json::from_slice(&action.memo) + .wrap_err("failed to parse memo from last transaction by the bridge account")?; + Some(memo.block_number) + } + Action::Ics20Withdrawal(action) => { + let memo: Ics20WithdrawalFromRollupMemo = serde_json::from_str(&action.memo) + .wrap_err("failed to parse memo from last transaction by the bridge account")?; + Some(memo.block_number) + } + _ => None, + } + .expect("action is already checked to be either BridgeUnlock or Ics20Withdrawal"); + + info!( + last_batch.tx_hash = %telemetry::display::hex(&signed_transaction.sha256_of_proto_encoding()), + last_batch.rollup_height = last_batch_rollup_height, + "extracted rollup height from last batch of withdrawals", + ); + + Ok(last_batch_rollup_height) +} + +#[instrument(skip_all)] +async fn get_bridge_account_last_transaction_hash( + client: sequencer_client::HttpClient, + state: Arc, + address: Address, +) -> eyre::Result { + let res = tryhard::retry_fn(|| client.get_bridge_account_last_transaction_hash(address)) + .with_config(make_sequencer_retry_config( + "attempt to fetch last bridge account's transaction hash from Sequencer; retrying \ + after backoff", + )) + .await + .wrap_err( + "failed to fetch last bridge account's transaction hash from Sequencer after a lot of \ + attempts", + ); + + state.set_sequencer_connected(res.is_ok()); + + res +} + +#[instrument(skip_all)] +async fn get_sequencer_transaction_at_hash( + client: sequencer_client::HttpClient, + state: Arc, + tx_hash: tendermint::Hash, +) -> eyre::Result { + let res = tryhard::retry_fn(|| client.tx(tx_hash, false)) + .with_config(make_cometbft_retry_config( + "attempt to get transaction from CometBFT; retrying after backoff", + )) + .await + .wrap_err("failed to get transaction from Sequencer after a lot of attempts"); + + state.set_sequencer_connected(res.is_ok()); + + res +} + +#[instrument(skip_all)] +async fn get_sequencer_chain_id( + client: sequencer_client::HttpClient, + state: Arc, +) -> eyre::Result { + let genesis: tendermint::Genesis = tryhard::retry_fn(|| client.genesis()) + .with_config(make_cometbft_retry_config( + "attempt to get genesis from CometBFT; retrying after backoff", + )) + .await + .wrap_err("failed to get genesis info from Sequencer after a lot of attempts")?; + + state.set_sequencer_connected(true); + + Ok(genesis.chain_id) +} + +#[instrument(skip_all)] +async fn get_allowed_fee_asset_ids( + client: 
sequencer_client::HttpClient, + state: Arc, +) -> eyre::Result { + let res = tryhard::retry_fn(|| client.get_allowed_fee_assets()) + .with_config(make_sequencer_retry_config( + "attempt to get allowed fee assets from Sequencer; retrying after backoff", + )) + .await + .wrap_err("failed to get allowed fee asset ids from Sequencer after a lot of attempts"); + + state.set_sequencer_connected(res.is_ok()); + + res +} + +fn make_cometbft_retry_config( + retry_message: &'static str, +) -> tryhard::RetryFutureConfig< + ExponentialBackoff, + impl Fn(u32, Option, &tendermint_rpc::Error) -> futures::future::Ready<()>, +> { + tryhard::RetryFutureConfig::new(u32::MAX) + .exponential_backoff(Duration::from_millis(100)) + .max_delay(Duration::from_secs(20)) + .on_retry( + move |attempt: u32, next_delay: Option, error: &tendermint_rpc::Error| { + let wait_duration = next_delay + .map(humantime::format_duration) + .map(tracing::field::display); + warn!( + attempt, + wait_duration, + error = error as &dyn std::error::Error, + retry_message, + ); + futures::future::ready(()) + }, + ) +} + +fn make_sequencer_retry_config( + retry_message: &'static str, +) -> tryhard::RetryFutureConfig< + ExponentialBackoff, + impl Fn( + u32, + Option, + &sequencer_client::extension_trait::Error, + ) -> futures::future::Ready<()>, +> { + tryhard::RetryFutureConfig::new(u32::MAX) + .exponential_backoff(Duration::from_millis(100)) + .max_delay(Duration::from_secs(20)) + .on_retry( + move |attempt: u32, + next_delay: Option, + error: &sequencer_client::extension_trait::Error| { + let wait_duration = next_delay + .map(humantime::format_duration) + .map(tracing::field::display); + warn!( + attempt, + wait_duration, + error = error as &dyn std::error::Error, + retry_message, + ); + futures::future::ready(()) + }, + ) +} diff --git a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/state.rs b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/state.rs index 647bb489b5..bbdc4c561b 100644 --- a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/state.rs +++ b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/state.rs @@ -1,5 +1,7 @@ use tokio::sync::watch; +use super::startup; + pub(crate) struct State { inner: tokio::sync::watch::Sender, } @@ -39,6 +41,7 @@ macro_rules! 
forward_setter { } forward_setter!( + [set_startup_info <- startup::Info], [set_sequencer_connected <- bool], [set_last_rollup_height_submitted <- u64], [set_last_sequencer_height <- u64], @@ -47,6 +50,8 @@ forward_setter!( #[derive(Clone, Debug, Default, PartialEq, Eq, serde::Serialize)] pub(crate) struct StateSnapshot { + startup_info: Option, + watcher_ready: bool, submitter_ready: bool, @@ -58,6 +63,19 @@ pub(crate) struct StateSnapshot { } impl StateSnapshot { + pub(super) fn get_startup_info(&self) -> Option { + self.startup_info.clone() + } + + pub(super) fn set_startup_info(&mut self, startup_info: startup::Info) -> bool { + if self.startup_info.is_none() { + self.startup_info = Some(startup_info); + true + } else { + false + } + } + pub(crate) fn set_watcher_ready(&mut self) { self.watcher_ready = true; } diff --git a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/submitter/builder.rs b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/submitter/builder.rs index 5332c96129..8e61704e53 100644 --- a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/submitter/builder.rs +++ b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/submitter/builder.rs @@ -1,22 +1,18 @@ use std::sync::Arc; -use astria_core::primitive::v1::asset; use astria_eyre::eyre::{ self, Context as _, }; -use tokio::sync::{ - mpsc, - oneshot, -}; +use tokio::sync::mpsc; use tokio_util::sync::CancellationToken; use tracing::info; use super::state::State; use crate::{ bridge_withdrawer::{ + startup, submitter::Batch, - SequencerStartupInfo, }, metrics::Metrics, }; @@ -24,29 +20,16 @@ use crate::{ const BATCH_QUEUE_SIZE: usize = 256; pub(crate) struct Handle { - startup_info_rx: Option>, batches_tx: mpsc::Sender, } impl Handle { - pub(crate) fn new( - startup_info_rx: oneshot::Receiver, - batches_tx: mpsc::Sender, - ) -> Self { + pub(crate) fn new(batches_tx: mpsc::Sender) -> Self { Self { - startup_info_rx: Some(startup_info_rx), batches_tx, } } - pub(crate) async fn recv_startup_info(&mut self) -> eyre::Result { - self.startup_info_rx - .take() - .expect("startup info should only be taken once - this is a bug") - .await - .wrap_err("failed to get startup info from submitter. 
channel was dropped.") - } - pub(crate) async fn send_batch(&self, batch: Batch) -> eyre::Result<()> { self.batches_tx .send(batch) @@ -57,13 +40,11 @@ impl Handle { pub(crate) struct Builder { pub(crate) shutdown_token: CancellationToken, + pub(crate) startup_handle: startup::InfoHandle, pub(crate) sequencer_key_path: String, pub(crate) sequencer_address_prefix: String, - pub(crate) sequencer_chain_id: String, pub(crate) sequencer_cometbft_endpoint: String, pub(crate) state: Arc, - pub(crate) expected_fee_asset: asset::Denom, - pub(crate) min_expected_fee_asset_balance: u128, pub(crate) metrics: &'static Metrics, } @@ -72,13 +53,11 @@ impl Builder { pub(crate) fn build(self) -> eyre::Result<(super::Submitter, Handle)> { let Self { shutdown_token, + startup_handle, sequencer_key_path, sequencer_address_prefix, - sequencer_chain_id, sequencer_cometbft_endpoint, state, - expected_fee_asset, - min_expected_fee_asset_balance, metrics, } = self; @@ -94,20 +73,16 @@ impl Builder { .wrap_err("failed constructing cometbft http client")?; let (batches_tx, batches_rx) = tokio::sync::mpsc::channel(BATCH_QUEUE_SIZE); - let (startup_tx, startup_rx) = tokio::sync::oneshot::channel(); - let handle = Handle::new(startup_rx, batches_tx); + let handle = Handle::new(batches_tx); Ok(( super::Submitter { shutdown_token, + startup_handle, state, batches_rx, sequencer_cometbft_client, signer, - sequencer_chain_id, - startup_tx, - expected_fee_asset, - min_expected_fee_asset_balance, metrics, }, handle, diff --git a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/submitter/mod.rs b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/submitter/mod.rs index 4e37e04910..b869d7c7f3 100644 --- a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/submitter/mod.rs +++ b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/submitter/mod.rs @@ -3,56 +3,29 @@ use std::{ time::Duration, }; -use astria_core::{ - bridge::{ - self, - Ics20WithdrawalFromRollupMemo, - }, - primitive::v1::asset, - protocol::{ - asset::v1alpha1::AllowedFeeAssetsResponse, - bridge::v1alpha1::BridgeAccountLastTxHashResponse, - transaction::v1alpha1::{ - Action, - TransactionParams, - UnsignedTransaction, - }, - }, +use astria_core::protocol::transaction::v1alpha1::{ + Action, + TransactionParams, + UnsignedTransaction, }; use astria_eyre::eyre::{ self, - ensure, eyre, Context, - OptionExt, }; pub(crate) use builder::Builder; pub(super) use builder::Handle; -use prost::Message as _; use sequencer_client::{ - tendermint_rpc::{ - self, - endpoint::broadcast::tx_commit, - }, + tendermint_rpc::endpoint::broadcast::tx_commit, Address, - BalanceResponse, SequencerClientExt, SignedTransaction, }; use signer::SequencerKey; use state::State; -use tendermint_rpc::{ - endpoint::tx, - Client, -}; use tokio::{ select, - sync::{ - mpsc, - oneshot::{ - self, - }, - }, + sync::mpsc, time::Instant, }; use tokio_util::sync::CancellationToken; @@ -69,39 +42,40 @@ use tracing::{ use super::{ batch::Batch, + startup, state, - SequencerStartupInfo, }; use crate::metrics::Metrics; mod builder; -mod signer; +pub(crate) mod signer; #[cfg(test)] mod tests; pub(super) struct Submitter { shutdown_token: CancellationToken, + startup_handle: startup::InfoHandle, state: Arc, batches_rx: mpsc::Receiver, sequencer_cometbft_client: sequencer_client::HttpClient, signer: SequencerKey, - sequencer_chain_id: String, - startup_tx: oneshot::Sender, - expected_fee_asset: asset::Denom, - min_expected_fee_asset_balance: u128, metrics: &'static Metrics, } impl Submitter { 
pub(super) async fn run(mut self) -> eyre::Result<()> { - // call startup - let startup = self - .startup() - .await - .wrap_err("submitter failed to start up")?; - self.startup_tx - .send(startup) - .map_err(|_startup| eyre!("failed to send startup info to watcher"))?; + let sequencer_chain_id = select! { + () = self.shutdown_token.cancelled() => { + info!("submitter received shutdown signal while waiting for startup"); + return Ok(()); + } + + startup_info = self.startup_handle.get_info() => { + let startup::Info { chain_id, .. } = startup_info.wrap_err("submitter failed to get startup info")?; + self.state.set_submitter_ready(); + chain_id + } + }; let reason = loop { select!( @@ -122,10 +96,10 @@ impl Submitter { self.sequencer_cometbft_client.clone(), &self.signer, self.state.clone(), - &self.sequencer_chain_id, + &sequencer_chain_id, actions, rollup_height, - self.metrics + self.metrics, ).await { break Err(e); } @@ -148,175 +122,6 @@ impl Submitter { Ok(()) } - - /// Confirms configuration values against the sequencer node and then syncs the next sequencer - /// nonce and rollup block according to the latest on-chain state. - /// - /// Configuration values checked: - /// - `self.chain_id` matches the value returned from the sequencer node's genesis - /// - `self.fee_asset_id` is a valid fee asset on the sequencer node - /// - `self.sequencer_key.address` has a sufficient balance of `self.fee_asset_id` - /// - /// Sync process: - /// - Fetch the last transaction hash by the bridge account from the sequencer - /// - Fetch the corresponding transaction - /// - Extract the last nonce used from the transaction - /// - Extract the rollup block height from the memo of one of the withdraw actions in the - /// transaction - /// - /// # Returns - /// A struct with the information collected and validated during startup: - /// - `fee_asset_id` - /// - `next_batch_rollup_height` - /// - /// # Errors - /// - /// - `self.chain_id` does not match the value returned from the sequencer node - /// - `self.fee_asset_id` is not a valid fee asset on the sequencer node - /// - `self.sequencer_key.address` does not have a sufficient balance of `self.fee_asset_id`. 
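For reference, the `tryhard` retry helpers in this patch (both the new `startup.rs` helpers above and the submitter helpers removed below) share one pattern: effectively unbounded retries, exponential backoff starting at 100 ms, delays capped at 20 s, and a logging `on_retry` hook. A minimal, self-contained sketch of that pattern follows; `flaky_rpc`, the atomic counter, and the string errors are hypothetical stand-ins for the real CometBFT/Sequencer calls.

```rust
use std::{
    sync::atomic::{AtomicU32, Ordering},
    time::Duration,
};

use tryhard::RetryFutureConfig;

// Hypothetical fallible operation standing in for a CometBFT/Sequencer RPC:
// fails twice, then succeeds.
async fn flaky_rpc(attempts_left: &AtomicU32) -> Result<&'static str, &'static str> {
    if attempts_left.fetch_sub(1, Ordering::SeqCst) > 1 {
        Err("connection refused")
    } else {
        Ok("genesis")
    }
}

#[tokio::main]
async fn main() -> Result<(), &'static str> {
    let remaining = AtomicU32::new(3);

    // Same shape as the retry configs in this patch: effectively unbounded
    // retries, exponential backoff from 100ms, per-attempt delay capped at 20s.
    let config = RetryFutureConfig::new(u32::MAX)
        .exponential_backoff(Duration::from_millis(100))
        .max_delay(Duration::from_secs(20))
        .on_retry(|attempt: u32, next_delay: Option<Duration>, error: &&'static str| {
            // The real helpers emit a `tracing::warn!` event here instead.
            eprintln!("attempt {attempt} failed ({error}); retrying in {next_delay:?}");
            futures::future::ready(())
        });

    let value = tryhard::retry_fn(|| flaky_rpc(&remaining))
        .with_config(config)
        .await?;
    println!("rpc succeeded with: {value}");
    Ok(())
}
```

In the real helpers the call sites additionally flip `State::set_sequencer_connected` based on whether the retried call ultimately succeeded.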
- async fn startup(&mut self) -> eyre::Result { - let actual_chain_id = - get_sequencer_chain_id(self.sequencer_cometbft_client.clone(), self.state.clone()) - .await - .wrap_err("failed to get chain id from sequencer")?; - ensure!( - self.sequencer_chain_id == actual_chain_id.to_string(), - "sequencer_chain_id provided in config does not match chain_id returned from sequencer" - ); - - let expected_fee_asset_ibc = self.expected_fee_asset.to_ibc_prefixed(); - // confirm that the fee asset ID is valid - let allowed_fee_assets_resp = - get_allowed_fee_assets(self.sequencer_cometbft_client.clone(), self.state.clone()) - .await - .wrap_err("failed to get allowed fee asset ids from sequencer")?; - ensure!( - allowed_fee_assets_resp - .fee_assets - .iter() - .any(|asset| asset.to_ibc_prefixed() == expected_fee_asset_ibc), - "fee_asset_id provided in config is not a valid fee asset on the sequencer" - ); - - // confirm that the sequencer key has a sufficient balance of the fee asset - let fee_asset_balances = get_latest_balance( - self.sequencer_cometbft_client.clone(), - self.state.clone(), - *self.signer.address(), - ) - .await - .wrap_err("failed to get latest balance")?; - let fee_asset_balance = fee_asset_balances - .balances - .into_iter() - .find(|balance| balance.denom.to_ibc_prefixed() == expected_fee_asset_ibc) - .ok_or_eyre("withdrawer's account does not have the minimum balance of the fee asset")? - .balance; - ensure!( - fee_asset_balance >= self.min_expected_fee_asset_balance, - "sequencer key does not have a sufficient balance of the fee asset" - ); - - // sync to latest on-chain state - let next_batch_rollup_height = self - .get_next_rollup_height() - .await - .wrap_err("failed to get next rollup block height")?; - - self.state.set_submitter_ready(); - - // send startup info to watcher - let startup = SequencerStartupInfo { - fee_asset: self.expected_fee_asset.clone(), - next_batch_rollup_height, - }; - Ok(startup) - } - - /// Gets the data necessary for syncing to the latest on-chain state from the sequencer. Since - /// we batch all events from a given rollup block into a single sequencer transaction, we - /// get the last tx finalized by the bridge account on the sequencer and extract the rollup - /// height from it. - /// - /// The rollup height is extracted from the block height value in the memo of one of the actions - /// in the batch. - /// - /// # Returns - /// The next batch rollup height to process. - /// - /// # Errors - /// - /// 1. Failing to get and deserialize a valid last transaction by the bridge account from the - /// sequencer. - /// 2. The last transaction by the bridge account failed to execute (this should not happen in - /// the sequencer logic) - /// 3. The last transaction by the bridge account did not contain a withdrawal action - /// 4. The memo of the last transaction by the bridge account could not be parsed - async fn get_next_rollup_height(&mut self) -> eyre::Result { - let signed_transaction = self - .get_last_transaction() - .await - .wrap_err("failed to get the bridge account's last sequencer transaction")?; - let next_batch_rollup_height = if let Some(signed_transaction) = signed_transaction { - rollup_height_from_signed_transaction(&signed_transaction).wrap_err( - "failed to extract rollup height from last transaction by the bridge account", - )? 
- } else { - 1 - }; - Ok(next_batch_rollup_height) - } - - async fn get_last_transaction(&self) -> eyre::Result> { - // get last transaction hash by the bridge account, if it exists - let last_transaction_hash_resp = get_bridge_account_last_transaction_hash( - self.sequencer_cometbft_client.clone(), - self.state.clone(), - *self.signer.address(), - ) - .await - .wrap_err("failed to fetch last transaction hash by the bridge account")?; - - let Some(tx_hash) = last_transaction_hash_resp.tx_hash else { - return Ok(None); - }; - - let tx_hash = tendermint::Hash::try_from(tx_hash.to_vec()) - .wrap_err("failed to convert last transaction hash to tendermint hash")?; - - // get the corresponding transaction - let last_transaction = get_tx( - self.sequencer_cometbft_client.clone(), - self.state.clone(), - tx_hash, - ) - .await - .wrap_err("failed to fetch last transaction by the bridge account")?; - - // check that the transaction actually executed - ensure!( - last_transaction.tx_result.code == tendermint::abci::Code::Ok, - "last transaction by the bridge account failed to execute. this should not happen in \ - the sequencer logic." - ); - - let proto_tx = - astria_core::generated::protocol::transaction::v1alpha1::SignedTransaction::decode( - &*last_transaction.tx, - ) - .wrap_err("failed to convert transaction data from CometBFT to proto")?; - - let tx = SignedTransaction::try_from_raw(proto_tx) - .wrap_err("failed to convert transaction data from proto to SignedTransaction")?; - - info!( - last_bridge_account_tx.hash = %telemetry::display::hex(&tx_hash), - last_bridge_account_tx.height = i64::from(last_transaction.height), - "fetched last transaction by the bridge account" - ); - - Ok(Some(tx)) - } } async fn process_batch( @@ -507,232 +312,3 @@ async fn submit_tx( res } - -#[instrument(skip_all)] -async fn get_sequencer_chain_id( - client: sequencer_client::HttpClient, - state: Arc, -) -> eyre::Result { - use sequencer_client::Client as _; - - let retry_config = tryhard::RetryFutureConfig::new(u32::MAX) - .exponential_backoff(Duration::from_millis(100)) - .max_delay(Duration::from_secs(20)) - .on_retry( - |attempt: u32, next_delay: Option, error: &tendermint_rpc::Error| { - let state = Arc::clone(&state); - state.set_sequencer_connected(false); - - let wait_duration = next_delay - .map(humantime::format_duration) - .map(tracing::field::display); - warn!( - attempt, - wait_duration, - error = error as &dyn std::error::Error, - "attempt to fetch sequencer genesis info; retrying after backoff", - ); - futures::future::ready(()) - }, - ); - - let genesis: tendermint::Genesis = tryhard::retry_fn(|| client.genesis()) - .with_config(retry_config) - .await - .wrap_err("failed to get genesis info from Sequencer after a lot of attempts")?; - - state.set_sequencer_connected(true); - - Ok(genesis.chain_id) -} - -#[instrument(skip_all)] -async fn get_allowed_fee_assets( - client: sequencer_client::HttpClient, - state: Arc, -) -> eyre::Result { - let retry_config = tryhard::RetryFutureConfig::new(u32::MAX) - .exponential_backoff(Duration::from_millis(100)) - .max_delay(Duration::from_secs(20)) - .on_retry( - |attempt: u32, - next_delay: Option, - error: &sequencer_client::extension_trait::Error| { - let state = Arc::clone(&state); - state.set_sequencer_connected(false); - - let wait_duration = next_delay - .map(humantime::format_duration) - .map(tracing::field::display); - warn!( - attempt, - wait_duration, - error = error as &dyn std::error::Error, - "attempt to fetch sequencer allowed fee asset ids; 
retrying after backoff", - ); - futures::future::ready(()) - }, - ); - - let res = tryhard::retry_fn(|| client.get_allowed_fee_assets()) - .with_config(retry_config) - .await - .wrap_err("failed to get allowed fee asset ids from Sequencer after a lot of attempts"); - - state.set_sequencer_connected(res.is_ok()); - - res -} - -#[instrument(skip_all)] -async fn get_latest_balance( - client: sequencer_client::HttpClient, - state: Arc, - address: Address, -) -> eyre::Result { - let retry_config = tryhard::RetryFutureConfig::new(u32::MAX) - .exponential_backoff(Duration::from_millis(100)) - .max_delay(Duration::from_secs(20)) - .on_retry( - |attempt: u32, - next_delay: Option, - error: &sequencer_client::extension_trait::Error| { - let state = Arc::clone(&state); - state.set_sequencer_connected(false); - - let wait_duration = next_delay - .map(humantime::format_duration) - .map(tracing::field::display); - warn!( - attempt, - wait_duration, - error = error as &dyn std::error::Error, - "attempt to get latest balance; retrying after backoff", - ); - futures::future::ready(()) - }, - ); - - let res = tryhard::retry_fn(|| client.get_latest_balance(address)) - .with_config(retry_config) - .await - .wrap_err("failed to get latest balance from Sequencer after a lot of attempts"); - - state.set_sequencer_connected(res.is_ok()); - - res -} - -#[instrument(skip_all)] -async fn get_bridge_account_last_transaction_hash( - client: sequencer_client::HttpClient, - state: Arc, - address: Address, -) -> eyre::Result { - let retry_config = tryhard::RetryFutureConfig::new(u32::MAX) - .exponential_backoff(Duration::from_millis(100)) - .max_delay(Duration::from_secs(20)) - .on_retry( - |attempt: u32, - next_delay: Option, - error: &sequencer_client::extension_trait::Error| { - let state = Arc::clone(&state); - state.set_sequencer_connected(false); - - let wait_duration = next_delay - .map(humantime::format_duration) - .map(tracing::field::display); - warn!( - attempt, - wait_duration, - error = error as &dyn std::error::Error, - "attempt to fetch last bridge account's transaction hash; retrying after \ - backoff", - ); - futures::future::ready(()) - }, - ); - - let res = tryhard::retry_fn(|| client.get_bridge_account_last_transaction_hash(address)) - .with_config(retry_config) - .await - .wrap_err( - "failed to fetch last bridge account's transaction hash from Sequencer after a lot of \ - attempts", - ); - - state.set_sequencer_connected(res.is_ok()); - - res -} - -#[instrument(skip_all)] -async fn get_tx( - client: sequencer_client::HttpClient, - state: Arc, - tx_hash: tendermint::Hash, -) -> eyre::Result { - let retry_config = tryhard::RetryFutureConfig::new(u32::MAX) - .exponential_backoff(Duration::from_millis(100)) - .max_delay(Duration::from_secs(20)) - .on_retry( - |attempt: u32, next_delay: Option, error: &tendermint_rpc::Error| { - let state = Arc::clone(&state); - state.set_sequencer_connected(false); - - let wait_duration = next_delay - .map(humantime::format_duration) - .map(tracing::field::display); - warn!( - attempt, - wait_duration, - error = error as &dyn std::error::Error, - "attempt to get transaction from Sequencer; retrying after backoff", - ); - futures::future::ready(()) - }, - ); - - let res = tryhard::retry_fn(|| client.tx(tx_hash, false)) - .with_config(retry_config) - .await - .wrap_err("failed to get transaction from Sequencer after a lot of attempts"); - - state.set_sequencer_connected(res.is_ok()); - - res -} - -fn rollup_height_from_signed_transaction( - signed_transaction: 
&SignedTransaction, -) -> eyre::Result { - // find the last batch's rollup block height - let withdrawal_action = signed_transaction - .actions() - .iter() - .find(|action| matches!(action, Action::BridgeUnlock(_) | Action::Ics20Withdrawal(_))) - .ok_or_eyre("last transaction by the bridge account did not contain a withdrawal action")?; - - let last_batch_rollup_height = match withdrawal_action { - Action::BridgeUnlock(action) => { - let memo: bridge::UnlockMemo = serde_json::from_slice(&action.memo) - .wrap_err("failed to parse memo from last transaction by the bridge account")?; - Some(memo.block_number) - } - Action::Ics20Withdrawal(action) => { - let memo: Ics20WithdrawalFromRollupMemo = serde_json::from_str(&action.memo) - .wrap_err("failed to parse memo from last transaction by the bridge account")?; - Some(memo.block_number) - } - _ => None, - } - .expect("action is already checked to be either BridgeUnlock or Ics20Withdrawal"); - - info!( - last_batch.tx_hash = %telemetry::display::hex(&signed_transaction.sha256_of_proto_encoding()), - last_batch.rollup_height = last_batch_rollup_height, - "extracted rollup height from last batch of withdrawals", - ); - - Ok(last_batch_rollup_height) -} diff --git a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/submitter/tests.rs b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/submitter/tests.rs index eb99bc63b1..edc2615b6c 100644 --- a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/submitter/tests.rs +++ b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/submitter/tests.rs @@ -1,5 +1,4 @@ use std::{ - collections::HashMap, io::Write as _, sync::Arc, time::Duration, @@ -11,26 +10,18 @@ use astria_core::{ self, Ics20WithdrawalFromRollupMemo, }, - crypto::SigningKey, generated::protocol::account::v1alpha1::NonceResponse, primitive::v1::asset, - protocol::{ - account::v1alpha1::AssetBalance, - bridge::v1alpha1::BridgeAccountLastTxHashResponse, - transaction::v1alpha1::{ - action::{ - BridgeUnlockAction, - Ics20Withdrawal, - }, - Action, - TransactionParams, - UnsignedTransaction, + protocol::transaction::v1alpha1::{ + action::{ + BridgeUnlockAction, + Ics20Withdrawal, }, + Action, }, }; use astria_eyre::eyre::{ self, - Context, }; use ibc_types::core::client::Height as IbcHeight; use once_cell::sync::Lazy; @@ -46,18 +37,13 @@ use serde_json::json; use tempfile::NamedTempFile; use tendermint::{ abci::{ - self, response::CheckTx, types::ExecTxResult, }, block::Height, - chain, }; use tendermint_rpc::{ - endpoint::{ - broadcast::tx_sync, - tx, - }, + endpoint::broadcast::tx_sync, request, }; use tokio::task::JoinHandle; @@ -79,6 +65,7 @@ use super::Submitter; use crate::{ bridge_withdrawer::{ batch::Batch, + startup, state, submitter, }, @@ -87,8 +74,6 @@ use crate::{ const SEQUENCER_CHAIN_ID: &str = "test_sequencer-1000"; const DEFAULT_LAST_ROLLUP_HEIGHT: u64 = 1; -const DEFAULT_LAST_SEQUENCER_HEIGHT: u64 = 0; -const DEFAULT_SEQUENCER_NONCE: u32 = 0; const DEFAULT_IBC_DENOM: &str = "transfer/channel-0/utia"; fn default_native_asset() -> asset::Denom { @@ -141,22 +126,19 @@ impl TestSubmitter { let cometbft_mock = MockServer::start().await; let sequencer_cometbft_endpoint = format!("http://{}", cometbft_mock.address()); - // withdrawer state let state = Arc::new(state::State::new()); - // not testing watcher here so just set it to ready + let startup_handle = startup::InfoHandle::new(state.subscribe()); state.set_watcher_ready(); let metrics = Box::leak(Box::new(Metrics::new())); let (submitter, submitter_handle) = 
submitter::Builder { shutdown_token: shutdown_token.clone(), + startup_handle, sequencer_key_path, sequencer_address_prefix: "astria".into(), - sequencer_chain_id: SEQUENCER_CHAIN_ID.to_string(), sequencer_cometbft_endpoint, state, - expected_fee_asset: default_native_asset(), - min_expected_fee_asset_balance: 1_000_000, metrics, } .build() @@ -170,23 +152,18 @@ impl TestSubmitter { } } - async fn startup_and_spawn_with_guards(&mut self, startup_guards: HashMap) { + async fn startup(&mut self) { let submitter = self.submitter.take().unwrap(); let mut state = submitter.state.subscribe(); - self.submitter_task_handle = Some(tokio::spawn(submitter.run())); - - // wait for all startup guards to be satisfied - for (name, guard) in startup_guards { - tokio::time::timeout(Duration::from_millis(100), guard.wait_until_satisfied()) - .await - .wrap_err(format!("{name} guard not satisfied in time.")) - .unwrap(); - } + submitter.state.set_startup_info(startup::Info { + fee_asset: "fee-asset".parse::().unwrap(), + starting_rollup_height: 1, + chain_id: SEQUENCER_CHAIN_ID.to_string(), + }); - // consume the startup info in place of the watcher - self.submitter_handle.recv_startup_info().await.unwrap(); + self.submitter_task_handle = Some(tokio::spawn(submitter.run())); // wait for the submitter to be ready state @@ -195,85 +172,13 @@ impl TestSubmitter { .unwrap(); } - async fn startup_and_spawn(&mut self) { - let startup_guards = register_startup_guards(&self.cometbft_mock).await; - let sync_guards = register_sync_guards(&self.cometbft_mock).await; - self.startup_and_spawn_with_guards( - startup_guards - .into_iter() - .chain(sync_guards.into_iter()) - .collect(), - ) - .await; - } - async fn spawn() -> Self { let mut submitter = Self::setup().await; - submitter.startup_and_spawn().await; + submitter.startup().await; submitter } } -async fn register_default_chain_id_guard(cometbft_mock: &MockServer) -> MockGuard { - register_genesis_chain_id_response(SEQUENCER_CHAIN_ID, cometbft_mock).await -} - -async fn register_default_fee_assets_guard(cometbft_mock: &MockServer) -> MockGuard { - let fee_assets = vec![default_native_asset()]; - register_allowed_fee_assets_response(fee_assets, cometbft_mock).await -} - -async fn register_default_min_expected_fee_asset_balance_guard( - cometbft_mock: &MockServer, -) -> MockGuard { - register_get_latest_balance( - vec![AssetBalance { - denom: default_native_asset(), - balance: 1_000_000u128, - }], - cometbft_mock, - ) - .await -} - -async fn register_default_last_bridge_tx_hash_guard(cometbft_mock: &MockServer) -> MockGuard { - register_last_bridge_tx_hash_guard(cometbft_mock, make_last_bridge_tx_hash_response()).await -} - -async fn register_default_last_bridge_tx_guard(cometbft_mock: &MockServer) -> MockGuard { - register_tx_guard(cometbft_mock, make_tx_response()).await -} - -async fn register_startup_guards(cometbft_mock: &MockServer) -> HashMap { - HashMap::from([ - ( - "chain_id".to_string(), - register_default_chain_id_guard(cometbft_mock).await, - ), - ( - "fee_assets".to_string(), - register_default_fee_assets_guard(cometbft_mock).await, - ), - ( - "min_expected_fee_asset_balance".to_string(), - register_default_min_expected_fee_asset_balance_guard(cometbft_mock).await, - ), - ]) -} - -async fn register_sync_guards(cometbft_mock: &MockServer) -> HashMap { - HashMap::from([ - ( - "tx_hash".to_string(), - register_default_last_bridge_tx_hash_guard(cometbft_mock).await, - ), - ( - "last_bridge_tx".to_string(), - 
register_default_last_bridge_tx_guard(cometbft_mock).await, - ), - ]) -} - fn make_ics20_withdrawal_action() -> Action { let denom = DEFAULT_IBC_DENOM.parse::().unwrap(); let destination_chain_address = "address".to_string(); @@ -355,47 +260,6 @@ fn make_tx_commit_deliver_tx_failure_response() -> tx_commit::Response { } } -fn make_last_bridge_tx_hash_response() -> BridgeAccountLastTxHashResponse { - BridgeAccountLastTxHashResponse { - height: DEFAULT_LAST_ROLLUP_HEIGHT, - tx_hash: Some([0u8; 32]), - } -} - -fn make_signed_bridge_transaction() -> SignedTransaction { - let alice_secret_bytes: [u8; 32] = - hex::decode("2bd806c97f0e00af1a1fc3328fa763a9269723c8db8fac4f93af71db186d6e90") - .unwrap() - .try_into() - .unwrap(); - let alice_key = SigningKey::from(alice_secret_bytes); - - let actions = vec![make_bridge_unlock_action(), make_ics20_withdrawal_action()]; - UnsignedTransaction { - params: TransactionParams::builder() - .nonce(DEFAULT_SEQUENCER_NONCE) - .chain_id(SEQUENCER_CHAIN_ID) - .build(), - actions, - } - .into_signed(&alice_key) -} - -fn make_tx_response() -> tx::Response { - let tx = make_signed_bridge_transaction(); - tx::Response { - hash: tx.sha256_of_proto_encoding().to_vec().try_into().unwrap(), - height: DEFAULT_LAST_SEQUENCER_HEIGHT.try_into().unwrap(), - index: 0, - tx_result: ExecTxResult { - code: abci::Code::Ok, - ..ExecTxResult::default() - }, - tx: tx.into_raw().encode_to_vec(), - proof: None, - } -} - /// Convert a `Request` object to a `SignedTransaction` fn signed_tx_from_request(request: &Request) -> SignedTransaction { use astria_core::generated::protocol::transaction::v1alpha1::SignedTransaction as RawSignedTransaction; @@ -413,139 +277,6 @@ fn signed_tx_from_request(request: &Request) -> SignedTransaction { signed_tx } -async fn register_genesis_chain_id_response(chain_id: &str, server: &MockServer) -> MockGuard { - use tendermint::{ - consensus::{ - params::{ - AbciParams, - ValidatorParams, - }, - Params, - }, - genesis::Genesis, - time::Time, - }; - let response = tendermint_rpc::endpoint::genesis::Response:: { - genesis: Genesis { - genesis_time: Time::from_unix_timestamp(1, 1).unwrap(), - chain_id: chain::Id::try_from(chain_id).unwrap(), - initial_height: 1, - consensus_params: Params { - block: tendermint::block::Size { - max_bytes: 1024, - max_gas: 1024, - time_iota_ms: 1000, - }, - evidence: tendermint::evidence::Params { - max_age_num_blocks: 1000, - max_age_duration: tendermint::evidence::Duration(Duration::from_secs(3600)), - max_bytes: 1_048_576, - }, - validator: ValidatorParams { - pub_key_types: vec![tendermint::public_key::Algorithm::Ed25519], - }, - version: None, - abci: AbciParams::default(), - }, - validators: vec![], - app_hash: tendermint::hash::AppHash::default(), - app_state: serde_json::Value::Null, - }, - }; - - let wrapper = response::Wrapper::new_with_id(tendermint_rpc::Id::Num(1), Some(response), None); - Mock::given(body_partial_json(json!({"method": "genesis"}))) - .respond_with( - ResponseTemplate::new(200) - .set_body_json(&wrapper) - .append_header("Content-Type", "application/json"), - ) - .up_to_n_times(1) - .expect(1) - .mount_as_scoped(server) - .await -} - -async fn register_allowed_fee_assets_response( - fee_assets: Vec, - cometbft_mock: &MockServer, -) -> MockGuard { - let response = tendermint_rpc::endpoint::abci_query::Response { - response: tendermint_rpc::endpoint::abci_query::AbciQuery { - value: astria_core::protocol::asset::v1alpha1::AllowedFeeAssetsResponse { - fee_assets, - height: 1, - } - .into_raw() - 
.encode_to_vec(), - ..Default::default() - }, - }; - let wrapper = response::Wrapper::new_with_id(tendermint_rpc::Id::Num(1), Some(response), None); - Mock::given(body_partial_json(json!({"method": "abci_query"}))) - .and(body_string_contains("asset/allowed_fee_assets")) - .respond_with( - ResponseTemplate::new(200) - .set_body_json(&wrapper) - .append_header("Content-Type", "application/json"), - ) - .expect(1) - .mount_as_scoped(cometbft_mock) - .await -} - -async fn register_get_latest_balance( - balances: Vec, - server: &MockServer, -) -> MockGuard { - let response = tendermint_rpc::endpoint::abci_query::Response { - response: tendermint_rpc::endpoint::abci_query::AbciQuery { - value: astria_core::protocol::account::v1alpha1::BalanceResponse { - balances, - height: 1, - } - .into_raw() - .encode_to_vec(), - ..Default::default() - }, - }; - - let wrapper = response::Wrapper::new_with_id(tendermint_rpc::Id::Num(1), Some(response), None); - Mock::given(body_partial_json(json!({"method": "abci_query"}))) - .and(body_string_contains("accounts/balance")) - .respond_with( - ResponseTemplate::new(200) - .set_body_json(&wrapper) - .append_header("Content-Type", "application/json"), - ) - .expect(1) - .mount_as_scoped(server) - .await -} - -async fn register_last_bridge_tx_hash_guard( - server: &MockServer, - response: BridgeAccountLastTxHashResponse, -) -> MockGuard { - let response = tendermint_rpc::endpoint::abci_query::Response { - response: tendermint_rpc::endpoint::abci_query::AbciQuery { - value: response.into_raw().encode_to_vec(), - ..Default::default() - }, - }; - let wrapper = response::Wrapper::new_with_id(tendermint_rpc::Id::Num(1), Some(response), None); - Mock::given(body_partial_json(json!({"method": "abci_query"}))) - .and(body_string_contains("bridge/account_last_tx_hash")) - .respond_with( - ResponseTemplate::new(200) - .set_body_json(&wrapper) - .append_header("Content-Type", "application/json"), - ) - .expect(1) - .mount_as_scoped(server) - .await -} - async fn register_get_nonce_response(server: &MockServer, response: NonceResponse) -> MockGuard { let response = tendermint_rpc::endpoint::abci_query::Response { response: tendermint_rpc::endpoint::abci_query::AbciQuery { @@ -566,19 +297,6 @@ async fn register_get_nonce_response(server: &MockServer, response: NonceRespons .await } -async fn register_tx_guard(server: &MockServer, response: tx::Response) -> MockGuard { - let wrapper = response::Wrapper::new_with_id(tendermint_rpc::Id::Num(1), Some(response), None); - Mock::given(body_partial_json(json!({"method": "tx"}))) - .respond_with( - ResponseTemplate::new(200) - .set_body_json(&wrapper) - .append_header("Content-Type", "application/json"), - ) - .expect(1) - .mount_as_scoped(server) - .await -} - async fn register_broadcast_tx_commit_response( server: &MockServer, response: tx_commit::Response, diff --git a/crates/astria-bridge-withdrawer/src/config.rs b/crates/astria-bridge-withdrawer/src/config.rs index ad9ed3d1ad..7a2f33f754 100644 --- a/crates/astria-bridge-withdrawer/src/config.rs +++ b/crates/astria-bridge-withdrawer/src/config.rs @@ -18,8 +18,6 @@ pub struct Config { pub sequencer_key_path: String, // The fee asset denomination to use for the bridge account's transactions. pub fee_asset_denomination: asset::Denom, - // The minimum expected balance of the fee asset in the bridge account. - pub min_expected_fee_asset_balance: u64, // The asset denomination being withdrawn from the rollup. 
pub rollup_asset_denomination: String, // The bridge address corresponding to the bridged rollup asset on the sequencer. From f69306f3e92513cd925cc25b21cc0192c4bc7528 Mon Sep 17 00:00:00 2001 From: Richard Janis Goldschmidt Date: Tue, 9 Jul 2024 14:34:22 +0200 Subject: [PATCH 07/24] feat(core)!: lowerCamelCase for protobuf json mapping (#1250) ## Summary Use `lowerCamelCase` for protobuf message field names mapped to JSON object keys. ## Background The Protobuf to JSON mapping prescribes that protobuf message fields must use `lowerCamelCase` when mapped to JSON object keys. The protobuf-compiler tool was erroneously preserving the Rust field names which re `snake_case`. ## Changes - Remove `pbson_build::Builder::preserve_proto_field_names` from its build chain - Regenerate serde `Serialize` and `Deserialize` impls for all protobuf Rust types - Fix Conductor test mocks to use the correct case ## Testing All tests still run. ## Breaking Changelist This is neither network breaking nor service breaking because the Astria stack only uses protobuf (not its JSON counterparts) on the wire. However, this is still marked as breaking due to external consumers being affected. --- .../tests/blackbox/helpers/macros.rs | 4 +- .../astria.execution.v1alpha2.serde.rs | 18 +++---- .../generated/astria.primitive.v1.serde.rs | 8 +-- .../astria.sequencerblock.v1alpha1.serde.rs | 50 +++++++++---------- .../src/generated/celestia.blob.v1.serde.rs | 10 ++-- .../generated/cosmos.auth.v1beta1.serde.rs | 14 +++--- .../cosmos.base.abci.v1beta1.serde.rs | 8 +-- .../cosmos.base.node.v1beta1.serde.rs | 2 +- .../cosmos.base.tendermint.v1beta1.serde.rs | 16 +++--- .../cosmos.crypto.multisig.v1beta1.serde.rs | 2 +- .../src/generated/cosmos.tx.v1beta1.serde.rs | 32 ++++++------ .../src/generated/tendermint.p2p.serde.rs | 10 ++-- .../src/generated/tendermint.types.serde.rs | 8 +-- tools/protobuf-compiler/src/main.rs | 1 - 14 files changed, 91 insertions(+), 92 deletions(-) diff --git a/crates/astria-conductor/tests/blackbox/helpers/macros.rs b/crates/astria-conductor/tests/blackbox/helpers/macros.rs index 4f6779c7d0..5191e6b878 100644 --- a/crates/astria-conductor/tests/blackbox/helpers/macros.rs +++ b/crates/astria-conductor/tests/blackbox/helpers/macros.rs @@ -241,8 +241,8 @@ macro_rules! 
mount_executed_block { $test_env.mount_execute_block( $mock_name.into(), ::serde_json::json!({ - "prev_block_hash": BASE64_STANDARD.encode($parent), - "transactions": [{"sequenced_data": BASE64_STANDARD.encode($crate::helpers::data())}], + "prevBlockHash": BASE64_STANDARD.encode($parent), + "transactions": [{"sequencedData": BASE64_STANDARD.encode($crate::helpers::data())}], }), $crate::block!( number: $number, diff --git a/crates/astria-core/src/generated/astria.execution.v1alpha2.serde.rs b/crates/astria-core/src/generated/astria.execution.v1alpha2.serde.rs index 90671b8292..9fd7ef9815 100644 --- a/crates/astria-core/src/generated/astria.execution.v1alpha2.serde.rs +++ b/crates/astria-core/src/generated/astria.execution.v1alpha2.serde.rs @@ -210,7 +210,7 @@ impl serde::Serialize for Block { } if !self.parent_block_hash.is_empty() { #[allow(clippy::needless_borrow)] - struct_ser.serialize_field("parent_block_hash", pbjson::private::base64::encode(&self.parent_block_hash).as_str())?; + struct_ser.serialize_field("parentBlockHash", pbjson::private::base64::encode(&self.parent_block_hash).as_str())?; } if let Some(v) = self.timestamp.as_ref() { struct_ser.serialize_field("timestamp", v)?; @@ -346,11 +346,11 @@ impl serde::Serialize for BlockIdentifier { if let Some(v) = self.identifier.as_ref() { match v { block_identifier::Identifier::BlockNumber(v) => { - struct_ser.serialize_field("block_number", v)?; + struct_ser.serialize_field("blockNumber", v)?; } block_identifier::Identifier::BlockHash(v) => { #[allow(clippy::needless_borrow)] - struct_ser.serialize_field("block_hash", pbjson::private::base64::encode(&v).as_str())?; + struct_ser.serialize_field("blockHash", pbjson::private::base64::encode(&v).as_str())?; } } } @@ -467,7 +467,7 @@ impl serde::Serialize for CommitmentState { } if self.base_celestia_height != 0 { #[allow(clippy::needless_borrow)] - struct_ser.serialize_field("base_celestia_height", ToString::to_string(&self.base_celestia_height).as_str())?; + struct_ser.serialize_field("baseCelestiaHeight", ToString::to_string(&self.base_celestia_height).as_str())?; } struct_ser.end() } @@ -590,7 +590,7 @@ impl serde::Serialize for ExecuteBlockRequest { let mut struct_ser = serializer.serialize_struct("astria.execution.v1alpha2.ExecuteBlockRequest", len)?; if !self.prev_block_hash.is_empty() { #[allow(clippy::needless_borrow)] - struct_ser.serialize_field("prev_block_hash", pbjson::private::base64::encode(&self.prev_block_hash).as_str())?; + struct_ser.serialize_field("prevBlockHash", pbjson::private::base64::encode(&self.prev_block_hash).as_str())?; } if !self.transactions.is_empty() { struct_ser.serialize_field("transactions", &self.transactions)?; @@ -719,14 +719,14 @@ impl serde::Serialize for GenesisInfo { let mut struct_ser = serializer.serialize_struct("astria.execution.v1alpha2.GenesisInfo", len)?; if !self.rollup_id.is_empty() { #[allow(clippy::needless_borrow)] - struct_ser.serialize_field("rollup_id", pbjson::private::base64::encode(&self.rollup_id).as_str())?; + struct_ser.serialize_field("rollupId", pbjson::private::base64::encode(&self.rollup_id).as_str())?; } if self.sequencer_genesis_block_height != 0 { - struct_ser.serialize_field("sequencer_genesis_block_height", &self.sequencer_genesis_block_height)?; + struct_ser.serialize_field("sequencerGenesisBlockHeight", &self.sequencer_genesis_block_height)?; } if self.celestia_block_variance != 0 { #[allow(clippy::needless_borrow)] - struct_ser.serialize_field("celestia_block_variance", 
ToString::to_string(&self.celestia_block_variance).as_str())?; + struct_ser.serialize_field("celestiaBlockVariance", ToString::to_string(&self.celestia_block_variance).as_str())?; } struct_ser.end() } @@ -1081,7 +1081,7 @@ impl serde::Serialize for UpdateCommitmentStateRequest { } let mut struct_ser = serializer.serialize_struct("astria.execution.v1alpha2.UpdateCommitmentStateRequest", len)?; if let Some(v) = self.commitment_state.as_ref() { - struct_ser.serialize_field("commitment_state", v)?; + struct_ser.serialize_field("commitmentState", v)?; } struct_ser.end() } diff --git a/crates/astria-core/src/generated/astria.primitive.v1.serde.rs b/crates/astria-core/src/generated/astria.primitive.v1.serde.rs index 0f75ad9029..99febc360d 100644 --- a/crates/astria-core/src/generated/astria.primitive.v1.serde.rs +++ b/crates/astria-core/src/generated/astria.primitive.v1.serde.rs @@ -109,7 +109,7 @@ impl serde::Serialize for Denom { struct_ser.serialize_field("id", pbjson::private::base64::encode(&self.id).as_str())?; } if !self.base_denom.is_empty() { - struct_ser.serialize_field("base_denom", &self.base_denom)?; + struct_ser.serialize_field("baseDenom", &self.base_denom)?; } struct_ser.end() } @@ -221,15 +221,15 @@ impl serde::Serialize for Proof { let mut struct_ser = serializer.serialize_struct("astria.primitive.v1.Proof", len)?; if !self.audit_path.is_empty() { #[allow(clippy::needless_borrow)] - struct_ser.serialize_field("audit_path", pbjson::private::base64::encode(&self.audit_path).as_str())?; + struct_ser.serialize_field("auditPath", pbjson::private::base64::encode(&self.audit_path).as_str())?; } if self.leaf_index != 0 { #[allow(clippy::needless_borrow)] - struct_ser.serialize_field("leaf_index", ToString::to_string(&self.leaf_index).as_str())?; + struct_ser.serialize_field("leafIndex", ToString::to_string(&self.leaf_index).as_str())?; } if self.tree_size != 0 { #[allow(clippy::needless_borrow)] - struct_ser.serialize_field("tree_size", ToString::to_string(&self.tree_size).as_str())?; + struct_ser.serialize_field("treeSize", ToString::to_string(&self.tree_size).as_str())?; } struct_ser.end() } diff --git a/crates/astria-core/src/generated/astria.sequencerblock.v1alpha1.serde.rs b/crates/astria-core/src/generated/astria.sequencerblock.v1alpha1.serde.rs index 4a4e811243..f59a624817 100644 --- a/crates/astria-core/src/generated/astria.sequencerblock.v1alpha1.serde.rs +++ b/crates/astria-core/src/generated/astria.sequencerblock.v1alpha1.serde.rs @@ -23,10 +23,10 @@ impl serde::Serialize for Deposit { } let mut struct_ser = serializer.serialize_struct("astria.sequencerblock.v1alpha1.Deposit", len)?; if let Some(v) = self.bridge_address.as_ref() { - struct_ser.serialize_field("bridge_address", v)?; + struct_ser.serialize_field("bridgeAddress", v)?; } if let Some(v) = self.rollup_id.as_ref() { - struct_ser.serialize_field("rollup_id", v)?; + struct_ser.serialize_field("rollupId", v)?; } if let Some(v) = self.amount.as_ref() { struct_ser.serialize_field("amount", v)?; @@ -35,7 +35,7 @@ impl serde::Serialize for Deposit { struct_ser.serialize_field("asset", &self.asset)?; } if !self.destination_chain_address.is_empty() { - struct_ser.serialize_field("destination_chain_address", &self.destination_chain_address)?; + struct_ser.serialize_field("destinationChainAddress", &self.destination_chain_address)?; } struct_ser.end() } @@ -189,22 +189,22 @@ impl serde::Serialize for FilteredSequencerBlock { let mut struct_ser = 
serializer.serialize_struct("astria.sequencerblock.v1alpha1.FilteredSequencerBlock", len)?; if !self.block_hash.is_empty() { #[allow(clippy::needless_borrow)] - struct_ser.serialize_field("block_hash", pbjson::private::base64::encode(&self.block_hash).as_str())?; + struct_ser.serialize_field("blockHash", pbjson::private::base64::encode(&self.block_hash).as_str())?; } if let Some(v) = self.header.as_ref() { struct_ser.serialize_field("header", v)?; } if !self.rollup_transactions.is_empty() { - struct_ser.serialize_field("rollup_transactions", &self.rollup_transactions)?; + struct_ser.serialize_field("rollupTransactions", &self.rollup_transactions)?; } if let Some(v) = self.rollup_transactions_proof.as_ref() { - struct_ser.serialize_field("rollup_transactions_proof", v)?; + struct_ser.serialize_field("rollupTransactionsProof", v)?; } if !self.all_rollup_ids.is_empty() { - struct_ser.serialize_field("all_rollup_ids", &self.all_rollup_ids.iter().map(pbjson::private::base64::encode).collect::>())?; + struct_ser.serialize_field("allRollupIds", &self.all_rollup_ids.iter().map(pbjson::private::base64::encode).collect::>())?; } if let Some(v) = self.rollup_ids_proof.as_ref() { - struct_ser.serialize_field("rollup_ids_proof", v)?; + struct_ser.serialize_field("rollupIdsProof", v)?; } struct_ser.end() } @@ -367,7 +367,7 @@ impl serde::Serialize for GetFilteredSequencerBlockRequest { struct_ser.serialize_field("height", ToString::to_string(&self.height).as_str())?; } if !self.rollup_ids.is_empty() { - struct_ser.serialize_field("rollup_ids", &self.rollup_ids)?; + struct_ser.serialize_field("rollupIds", &self.rollup_ids)?; } struct_ser.end() } @@ -753,7 +753,7 @@ impl serde::Serialize for RollupData { match v { rollup_data::Value::SequencedData(v) => { #[allow(clippy::needless_borrow)] - struct_ser.serialize_field("sequenced_data", pbjson::private::base64::encode(&v).as_str())?; + struct_ser.serialize_field("sequencedData", pbjson::private::base64::encode(&v).as_str())?; } rollup_data::Value::Deposit(v) => { struct_ser.serialize_field("deposit", v)?; @@ -866,7 +866,7 @@ impl serde::Serialize for RollupTransactions { } let mut struct_ser = serializer.serialize_struct("astria.sequencerblock.v1alpha1.RollupTransactions", len)?; if let Some(v) = self.rollup_id.as_ref() { - struct_ser.serialize_field("rollup_id", v)?; + struct_ser.serialize_field("rollupId", v)?; } if !self.transactions.is_empty() { struct_ser.serialize_field("transactions", &self.transactions.iter().map(pbjson::private::base64::encode).collect::>())?; @@ -1004,17 +1004,17 @@ impl serde::Serialize for SequencerBlock { struct_ser.serialize_field("header", v)?; } if !self.rollup_transactions.is_empty() { - struct_ser.serialize_field("rollup_transactions", &self.rollup_transactions)?; + struct_ser.serialize_field("rollupTransactions", &self.rollup_transactions)?; } if let Some(v) = self.rollup_transactions_proof.as_ref() { - struct_ser.serialize_field("rollup_transactions_proof", v)?; + struct_ser.serialize_field("rollupTransactionsProof", v)?; } if let Some(v) = self.rollup_ids_proof.as_ref() { - struct_ser.serialize_field("rollup_ids_proof", v)?; + struct_ser.serialize_field("rollupIdsProof", v)?; } if !self.block_hash.is_empty() { #[allow(clippy::needless_borrow)] - struct_ser.serialize_field("block_hash", pbjson::private::base64::encode(&self.block_hash).as_str())?; + struct_ser.serialize_field("blockHash", pbjson::private::base64::encode(&self.block_hash).as_str())?; } struct_ser.end() } @@ -1170,7 +1170,7 @@ impl serde::Serialize for 
SequencerBlockHeader { } let mut struct_ser = serializer.serialize_struct("astria.sequencerblock.v1alpha1.SequencerBlockHeader", len)?; if !self.chain_id.is_empty() { - struct_ser.serialize_field("chain_id", &self.chain_id)?; + struct_ser.serialize_field("chainId", &self.chain_id)?; } if self.height != 0 { #[allow(clippy::needless_borrow)] @@ -1181,15 +1181,15 @@ impl serde::Serialize for SequencerBlockHeader { } if !self.data_hash.is_empty() { #[allow(clippy::needless_borrow)] - struct_ser.serialize_field("data_hash", pbjson::private::base64::encode(&self.data_hash).as_str())?; + struct_ser.serialize_field("dataHash", pbjson::private::base64::encode(&self.data_hash).as_str())?; } if !self.proposer_address.is_empty() { #[allow(clippy::needless_borrow)] - struct_ser.serialize_field("proposer_address", pbjson::private::base64::encode(&self.proposer_address).as_str())?; + struct_ser.serialize_field("proposerAddress", pbjson::private::base64::encode(&self.proposer_address).as_str())?; } if !self.rollup_transactions_root.is_empty() { #[allow(clippy::needless_borrow)] - struct_ser.serialize_field("rollup_transactions_root", pbjson::private::base64::encode(&self.rollup_transactions_root).as_str())?; + struct_ser.serialize_field("rollupTransactionsRoot", pbjson::private::base64::encode(&self.rollup_transactions_root).as_str())?; } struct_ser.end() } @@ -1360,19 +1360,19 @@ impl serde::Serialize for SubmittedMetadata { let mut struct_ser = serializer.serialize_struct("astria.sequencerblock.v1alpha1.SubmittedMetadata", len)?; if !self.block_hash.is_empty() { #[allow(clippy::needless_borrow)] - struct_ser.serialize_field("block_hash", pbjson::private::base64::encode(&self.block_hash).as_str())?; + struct_ser.serialize_field("blockHash", pbjson::private::base64::encode(&self.block_hash).as_str())?; } if let Some(v) = self.header.as_ref() { struct_ser.serialize_field("header", v)?; } if !self.rollup_ids.is_empty() { - struct_ser.serialize_field("rollup_ids", &self.rollup_ids)?; + struct_ser.serialize_field("rollupIds", &self.rollup_ids)?; } if let Some(v) = self.rollup_transactions_proof.as_ref() { - struct_ser.serialize_field("rollup_transactions_proof", v)?; + struct_ser.serialize_field("rollupTransactionsProof", v)?; } if let Some(v) = self.rollup_ids_proof.as_ref() { - struct_ser.serialize_field("rollup_ids_proof", v)?; + struct_ser.serialize_field("rollupIdsProof", v)?; } struct_ser.end() } @@ -1614,10 +1614,10 @@ impl serde::Serialize for SubmittedRollupData { let mut struct_ser = serializer.serialize_struct("astria.sequencerblock.v1alpha1.SubmittedRollupData", len)?; if !self.sequencer_block_hash.is_empty() { #[allow(clippy::needless_borrow)] - struct_ser.serialize_field("sequencer_block_hash", pbjson::private::base64::encode(&self.sequencer_block_hash).as_str())?; + struct_ser.serialize_field("sequencerBlockHash", pbjson::private::base64::encode(&self.sequencer_block_hash).as_str())?; } if let Some(v) = self.rollup_id.as_ref() { - struct_ser.serialize_field("rollup_id", v)?; + struct_ser.serialize_field("rollupId", v)?; } if !self.transactions.is_empty() { struct_ser.serialize_field("transactions", &self.transactions.iter().map(pbjson::private::base64::encode).collect::>())?; diff --git a/crates/astria-core/src/generated/celestia.blob.v1.serde.rs b/crates/astria-core/src/generated/celestia.blob.v1.serde.rs index 954bd364db..8caa2e9da2 100644 --- a/crates/astria-core/src/generated/celestia.blob.v1.serde.rs +++ b/crates/astria-core/src/generated/celestia.blob.v1.serde.rs @@ -29,13 +29,13 @@ impl 
serde::Serialize for MsgPayForBlobs { struct_ser.serialize_field("namespaces", &self.namespaces.iter().map(pbjson::private::base64::encode).collect::>())?; } if !self.blob_sizes.is_empty() { - struct_ser.serialize_field("blob_sizes", &self.blob_sizes)?; + struct_ser.serialize_field("blobSizes", &self.blob_sizes)?; } if !self.share_commitments.is_empty() { - struct_ser.serialize_field("share_commitments", &self.share_commitments.iter().map(pbjson::private::base64::encode).collect::>())?; + struct_ser.serialize_field("shareCommitments", &self.share_commitments.iter().map(pbjson::private::base64::encode).collect::>())?; } if !self.share_versions.is_empty() { - struct_ser.serialize_field("share_versions", &self.share_versions)?; + struct_ser.serialize_field("shareVersions", &self.share_versions)?; } struct_ser.end() } @@ -188,11 +188,11 @@ impl serde::Serialize for Params { } let mut struct_ser = serializer.serialize_struct("celestia.blob.v1.Params", len)?; if self.gas_per_blob_byte != 0 { - struct_ser.serialize_field("gas_per_blob_byte", &self.gas_per_blob_byte)?; + struct_ser.serialize_field("gasPerBlobByte", &self.gas_per_blob_byte)?; } if self.gov_max_square_size != 0 { #[allow(clippy::needless_borrow)] - struct_ser.serialize_field("gov_max_square_size", ToString::to_string(&self.gov_max_square_size).as_str())?; + struct_ser.serialize_field("govMaxSquareSize", ToString::to_string(&self.gov_max_square_size).as_str())?; } struct_ser.end() } diff --git a/crates/astria-core/src/generated/cosmos.auth.v1beta1.serde.rs b/crates/astria-core/src/generated/cosmos.auth.v1beta1.serde.rs index 7d488b65ee..d0c61a3e98 100644 --- a/crates/astria-core/src/generated/cosmos.auth.v1beta1.serde.rs +++ b/crates/astria-core/src/generated/cosmos.auth.v1beta1.serde.rs @@ -23,11 +23,11 @@ impl serde::Serialize for BaseAccount { struct_ser.serialize_field("address", &self.address)?; } if let Some(v) = self.pub_key.as_ref() { - struct_ser.serialize_field("pub_key", v)?; + struct_ser.serialize_field("pubKey", v)?; } if self.account_number != 0 { #[allow(clippy::needless_borrow)] - struct_ser.serialize_field("account_number", ToString::to_string(&self.account_number).as_str())?; + struct_ser.serialize_field("accountNumber", ToString::to_string(&self.account_number).as_str())?; } if self.sequence != 0 { #[allow(clippy::needless_borrow)] @@ -174,23 +174,23 @@ impl serde::Serialize for Params { let mut struct_ser = serializer.serialize_struct("cosmos.auth.v1beta1.Params", len)?; if self.max_memo_characters != 0 { #[allow(clippy::needless_borrow)] - struct_ser.serialize_field("max_memo_characters", ToString::to_string(&self.max_memo_characters).as_str())?; + struct_ser.serialize_field("maxMemoCharacters", ToString::to_string(&self.max_memo_characters).as_str())?; } if self.tx_sig_limit != 0 { #[allow(clippy::needless_borrow)] - struct_ser.serialize_field("tx_sig_limit", ToString::to_string(&self.tx_sig_limit).as_str())?; + struct_ser.serialize_field("txSigLimit", ToString::to_string(&self.tx_sig_limit).as_str())?; } if self.tx_size_cost_per_byte != 0 { #[allow(clippy::needless_borrow)] - struct_ser.serialize_field("tx_size_cost_per_byte", ToString::to_string(&self.tx_size_cost_per_byte).as_str())?; + struct_ser.serialize_field("txSizeCostPerByte", ToString::to_string(&self.tx_size_cost_per_byte).as_str())?; } if self.sig_verify_cost_ed25519 != 0 { #[allow(clippy::needless_borrow)] - struct_ser.serialize_field("sig_verify_cost_ed25519", ToString::to_string(&self.sig_verify_cost_ed25519).as_str())?; + 
struct_ser.serialize_field("sigVerifyCostEd25519", ToString::to_string(&self.sig_verify_cost_ed25519).as_str())?; } if self.sig_verify_cost_secp256k1 != 0 { #[allow(clippy::needless_borrow)] - struct_ser.serialize_field("sig_verify_cost_secp256k1", ToString::to_string(&self.sig_verify_cost_secp256k1).as_str())?; + struct_ser.serialize_field("sigVerifyCostSecp256k1", ToString::to_string(&self.sig_verify_cost_secp256k1).as_str())?; } struct_ser.end() } diff --git a/crates/astria-core/src/generated/cosmos.base.abci.v1beta1.serde.rs b/crates/astria-core/src/generated/cosmos.base.abci.v1beta1.serde.rs index 4bf2cc8df6..81b0d79afd 100644 --- a/crates/astria-core/src/generated/cosmos.base.abci.v1beta1.serde.rs +++ b/crates/astria-core/src/generated/cosmos.base.abci.v1beta1.serde.rs @@ -17,7 +17,7 @@ impl serde::Serialize for AbciMessageLog { } let mut struct_ser = serializer.serialize_struct("cosmos.base.abci.v1beta1.ABCIMessageLog", len)?; if self.msg_index != 0 { - struct_ser.serialize_field("msg_index", &self.msg_index)?; + struct_ser.serialize_field("msgIndex", &self.msg_index)?; } if !self.log.is_empty() { struct_ser.serialize_field("log", &self.log)?; @@ -407,7 +407,7 @@ impl serde::Serialize for TxResponse { struct_ser.serialize_field("data", &self.data)?; } if !self.raw_log.is_empty() { - struct_ser.serialize_field("raw_log", &self.raw_log)?; + struct_ser.serialize_field("rawLog", &self.raw_log)?; } if !self.logs.is_empty() { struct_ser.serialize_field("logs", &self.logs)?; @@ -417,11 +417,11 @@ impl serde::Serialize for TxResponse { } if self.gas_wanted != 0 { #[allow(clippy::needless_borrow)] - struct_ser.serialize_field("gas_wanted", ToString::to_string(&self.gas_wanted).as_str())?; + struct_ser.serialize_field("gasWanted", ToString::to_string(&self.gas_wanted).as_str())?; } if self.gas_used != 0 { #[allow(clippy::needless_borrow)] - struct_ser.serialize_field("gas_used", ToString::to_string(&self.gas_used).as_str())?; + struct_ser.serialize_field("gasUsed", ToString::to_string(&self.gas_used).as_str())?; } if let Some(v) = self.tx.as_ref() { struct_ser.serialize_field("tx", v)?; diff --git a/crates/astria-core/src/generated/cosmos.base.node.v1beta1.serde.rs b/crates/astria-core/src/generated/cosmos.base.node.v1beta1.serde.rs index e94f573ea3..acfb44f514 100644 --- a/crates/astria-core/src/generated/cosmos.base.node.v1beta1.serde.rs +++ b/crates/astria-core/src/generated/cosmos.base.node.v1beta1.serde.rs @@ -82,7 +82,7 @@ impl serde::Serialize for ConfigResponse { } let mut struct_ser = serializer.serialize_struct("cosmos.base.node.v1beta1.ConfigResponse", len)?; if !self.minimum_gas_price.is_empty() { - struct_ser.serialize_field("minimum_gas_price", &self.minimum_gas_price)?; + struct_ser.serialize_field("minimumGasPrice", &self.minimum_gas_price)?; } struct_ser.end() } diff --git a/crates/astria-core/src/generated/cosmos.base.tendermint.v1beta1.serde.rs b/crates/astria-core/src/generated/cosmos.base.tendermint.v1beta1.serde.rs index 1c8572b0c8..64729253f5 100644 --- a/crates/astria-core/src/generated/cosmos.base.tendermint.v1beta1.serde.rs +++ b/crates/astria-core/src/generated/cosmos.base.tendermint.v1beta1.serde.rs @@ -85,10 +85,10 @@ impl serde::Serialize for GetNodeInfoResponse { } let mut struct_ser = serializer.serialize_struct("cosmos.base.tendermint.v1beta1.GetNodeInfoResponse", len)?; if let Some(v) = self.default_node_info.as_ref() { - struct_ser.serialize_field("default_node_info", v)?; + struct_ser.serialize_field("defaultNodeInfo", v)?; } if let Some(v) = 
self.application_version.as_ref() { - struct_ser.serialize_field("application_version", v)?; + struct_ser.serialize_field("applicationVersion", v)?; } struct_ser.end() } @@ -341,25 +341,25 @@ impl serde::Serialize for VersionInfo { struct_ser.serialize_field("name", &self.name)?; } if !self.app_name.is_empty() { - struct_ser.serialize_field("app_name", &self.app_name)?; + struct_ser.serialize_field("appName", &self.app_name)?; } if !self.version.is_empty() { struct_ser.serialize_field("version", &self.version)?; } if !self.git_commit.is_empty() { - struct_ser.serialize_field("git_commit", &self.git_commit)?; + struct_ser.serialize_field("gitCommit", &self.git_commit)?; } if !self.build_tags.is_empty() { - struct_ser.serialize_field("build_tags", &self.build_tags)?; + struct_ser.serialize_field("buildTags", &self.build_tags)?; } if !self.go_version.is_empty() { - struct_ser.serialize_field("go_version", &self.go_version)?; + struct_ser.serialize_field("goVersion", &self.go_version)?; } if !self.build_deps.is_empty() { - struct_ser.serialize_field("build_deps", &self.build_deps)?; + struct_ser.serialize_field("buildDeps", &self.build_deps)?; } if !self.cosmos_sdk_version.is_empty() { - struct_ser.serialize_field("cosmos_sdk_version", &self.cosmos_sdk_version)?; + struct_ser.serialize_field("cosmosSdkVersion", &self.cosmos_sdk_version)?; } struct_ser.end() } diff --git a/crates/astria-core/src/generated/cosmos.crypto.multisig.v1beta1.serde.rs b/crates/astria-core/src/generated/cosmos.crypto.multisig.v1beta1.serde.rs index e7b08aac86..5021a9d568 100644 --- a/crates/astria-core/src/generated/cosmos.crypto.multisig.v1beta1.serde.rs +++ b/crates/astria-core/src/generated/cosmos.crypto.multisig.v1beta1.serde.rs @@ -14,7 +14,7 @@ impl serde::Serialize for CompactBitArray { } let mut struct_ser = serializer.serialize_struct("cosmos.crypto.multisig.v1beta1.CompactBitArray", len)?; if self.extra_bits_stored != 0 { - struct_ser.serialize_field("extra_bits_stored", &self.extra_bits_stored)?; + struct_ser.serialize_field("extraBitsStored", &self.extra_bits_stored)?; } if !self.elems.is_empty() { #[allow(clippy::needless_borrow)] diff --git a/crates/astria-core/src/generated/cosmos.tx.v1beta1.serde.rs b/crates/astria-core/src/generated/cosmos.tx.v1beta1.serde.rs index febf724454..a9bcd1bd78 100644 --- a/crates/astria-core/src/generated/cosmos.tx.v1beta1.serde.rs +++ b/crates/astria-core/src/generated/cosmos.tx.v1beta1.serde.rs @@ -17,7 +17,7 @@ impl serde::Serialize for AuthInfo { } let mut struct_ser = serializer.serialize_struct("cosmos.tx.v1beta1.AuthInfo", len)?; if !self.signer_infos.is_empty() { - struct_ser.serialize_field("signer_infos", &self.signer_infos)?; + struct_ser.serialize_field("signerInfos", &self.signer_infos)?; } if let Some(v) = self.fee.as_ref() { struct_ser.serialize_field("fee", v)?; @@ -218,7 +218,7 @@ impl serde::Serialize for BroadcastTxRequest { let mut struct_ser = serializer.serialize_struct("cosmos.tx.v1beta1.BroadcastTxRequest", len)?; if !self.tx_bytes.is_empty() { #[allow(clippy::needless_borrow)] - struct_ser.serialize_field("tx_bytes", pbjson::private::base64::encode(&self.tx_bytes).as_str())?; + struct_ser.serialize_field("txBytes", pbjson::private::base64::encode(&self.tx_bytes).as_str())?; } if self.mode != 0 { let v = BroadcastMode::try_from(self.mode) @@ -328,7 +328,7 @@ impl serde::Serialize for BroadcastTxResponse { } let mut struct_ser = serializer.serialize_struct("cosmos.tx.v1beta1.BroadcastTxResponse", len)?; if let Some(v) = self.tx_response.as_ref() { - 
struct_ser.serialize_field("tx_response", v)?; + struct_ser.serialize_field("txResponse", v)?; } struct_ser.end() } @@ -433,7 +433,7 @@ impl serde::Serialize for Fee { } if self.gas_limit != 0 { #[allow(clippy::needless_borrow)] - struct_ser.serialize_field("gas_limit", ToString::to_string(&self.gas_limit).as_str())?; + struct_ser.serialize_field("gasLimit", ToString::to_string(&self.gas_limit).as_str())?; } if !self.payer.is_empty() { struct_ser.serialize_field("payer", &self.payer)?; @@ -663,7 +663,7 @@ impl serde::Serialize for GetTxResponse { struct_ser.serialize_field("tx", v)?; } if let Some(v) = self.tx_response.as_ref() { - struct_ser.serialize_field("tx_response", v)?; + struct_ser.serialize_field("txResponse", v)?; } struct_ser.end() } @@ -881,7 +881,7 @@ impl serde::Serialize for mode_info::Multi { struct_ser.serialize_field("bitarray", v)?; } if !self.mode_infos.is_empty() { - struct_ser.serialize_field("mode_infos", &self.mode_infos)?; + struct_ser.serialize_field("modeInfos", &self.mode_infos)?; } struct_ser.end() } @@ -1087,18 +1087,18 @@ impl serde::Serialize for SignDoc { let mut struct_ser = serializer.serialize_struct("cosmos.tx.v1beta1.SignDoc", len)?; if !self.body_bytes.is_empty() { #[allow(clippy::needless_borrow)] - struct_ser.serialize_field("body_bytes", pbjson::private::base64::encode(&self.body_bytes).as_str())?; + struct_ser.serialize_field("bodyBytes", pbjson::private::base64::encode(&self.body_bytes).as_str())?; } if !self.auth_info_bytes.is_empty() { #[allow(clippy::needless_borrow)] - struct_ser.serialize_field("auth_info_bytes", pbjson::private::base64::encode(&self.auth_info_bytes).as_str())?; + struct_ser.serialize_field("authInfoBytes", pbjson::private::base64::encode(&self.auth_info_bytes).as_str())?; } if !self.chain_id.is_empty() { - struct_ser.serialize_field("chain_id", &self.chain_id)?; + struct_ser.serialize_field("chainId", &self.chain_id)?; } if self.account_number != 0 { #[allow(clippy::needless_borrow)] - struct_ser.serialize_field("account_number", ToString::to_string(&self.account_number).as_str())?; + struct_ser.serialize_field("accountNumber", ToString::to_string(&self.account_number).as_str())?; } struct_ser.end() } @@ -1238,10 +1238,10 @@ impl serde::Serialize for SignerInfo { } let mut struct_ser = serializer.serialize_struct("cosmos.tx.v1beta1.SignerInfo", len)?; if let Some(v) = self.public_key.as_ref() { - struct_ser.serialize_field("public_key", v)?; + struct_ser.serialize_field("publicKey", v)?; } if let Some(v) = self.mode_info.as_ref() { - struct_ser.serialize_field("mode_info", v)?; + struct_ser.serialize_field("modeInfo", v)?; } if self.sequence != 0 { #[allow(clippy::needless_borrow)] @@ -1479,7 +1479,7 @@ impl serde::Serialize for Tx { struct_ser.serialize_field("body", v)?; } if let Some(v) = self.auth_info.as_ref() { - struct_ser.serialize_field("auth_info", v)?; + struct_ser.serialize_field("authInfo", v)?; } if !self.signatures.is_empty() { struct_ser.serialize_field("signatures", &self.signatures.iter().map(pbjson::private::base64::encode).collect::>())?; @@ -1618,13 +1618,13 @@ impl serde::Serialize for TxBody { } if self.timeout_height != 0 { #[allow(clippy::needless_borrow)] - struct_ser.serialize_field("timeout_height", ToString::to_string(&self.timeout_height).as_str())?; + struct_ser.serialize_field("timeoutHeight", ToString::to_string(&self.timeout_height).as_str())?; } if !self.extension_options.is_empty() { - struct_ser.serialize_field("extension_options", &self.extension_options)?; + 
struct_ser.serialize_field("extensionOptions", &self.extension_options)?; } if !self.non_critical_extension_options.is_empty() { - struct_ser.serialize_field("non_critical_extension_options", &self.non_critical_extension_options)?; + struct_ser.serialize_field("nonCriticalExtensionOptions", &self.non_critical_extension_options)?; } struct_ser.end() } diff --git a/crates/astria-core/src/generated/tendermint.p2p.serde.rs b/crates/astria-core/src/generated/tendermint.p2p.serde.rs index 4cabad651f..c77f90712e 100644 --- a/crates/astria-core/src/generated/tendermint.p2p.serde.rs +++ b/crates/astria-core/src/generated/tendermint.p2p.serde.rs @@ -32,13 +32,13 @@ impl serde::Serialize for DefaultNodeInfo { } let mut struct_ser = serializer.serialize_struct("tendermint.p2p.DefaultNodeInfo", len)?; if let Some(v) = self.protocol_version.as_ref() { - struct_ser.serialize_field("protocol_version", v)?; + struct_ser.serialize_field("protocolVersion", v)?; } if !self.default_node_id.is_empty() { - struct_ser.serialize_field("default_node_id", &self.default_node_id)?; + struct_ser.serialize_field("defaultNodeId", &self.default_node_id)?; } if !self.listen_addr.is_empty() { - struct_ser.serialize_field("listen_addr", &self.listen_addr)?; + struct_ser.serialize_field("listenAddr", &self.listen_addr)?; } if !self.network.is_empty() { struct_ser.serialize_field("network", &self.network)?; @@ -230,10 +230,10 @@ impl serde::Serialize for DefaultNodeInfoOther { } let mut struct_ser = serializer.serialize_struct("tendermint.p2p.DefaultNodeInfoOther", len)?; if !self.tx_index.is_empty() { - struct_ser.serialize_field("tx_index", &self.tx_index)?; + struct_ser.serialize_field("txIndex", &self.tx_index)?; } if !self.rpc_address.is_empty() { - struct_ser.serialize_field("rpc_address", &self.rpc_address)?; + struct_ser.serialize_field("rpcAddress", &self.rpc_address)?; } struct_ser.end() } diff --git a/crates/astria-core/src/generated/tendermint.types.serde.rs b/crates/astria-core/src/generated/tendermint.types.serde.rs index 8f122c8e62..2e22d6d05c 100644 --- a/crates/astria-core/src/generated/tendermint.types.serde.rs +++ b/crates/astria-core/src/generated/tendermint.types.serde.rs @@ -21,17 +21,17 @@ impl serde::Serialize for Blob { let mut struct_ser = serializer.serialize_struct("tendermint.types.Blob", len)?; if !self.namespace_id.is_empty() { #[allow(clippy::needless_borrow)] - struct_ser.serialize_field("namespace_id", pbjson::private::base64::encode(&self.namespace_id).as_str())?; + struct_ser.serialize_field("namespaceId", pbjson::private::base64::encode(&self.namespace_id).as_str())?; } if !self.data.is_empty() { #[allow(clippy::needless_borrow)] struct_ser.serialize_field("data", pbjson::private::base64::encode(&self.data).as_str())?; } if self.share_version != 0 { - struct_ser.serialize_field("share_version", &self.share_version)?; + struct_ser.serialize_field("shareVersion", &self.share_version)?; } if self.namespace_version != 0 { - struct_ser.serialize_field("namespace_version", &self.namespace_version)?; + struct_ser.serialize_field("namespaceVersion", &self.namespace_version)?; } struct_ser.end() } @@ -179,7 +179,7 @@ impl serde::Serialize for BlobTx { struct_ser.serialize_field("blobs", &self.blobs)?; } if !self.type_id.is_empty() { - struct_ser.serialize_field("type_id", &self.type_id)?; + struct_ser.serialize_field("typeId", &self.type_id)?; } struct_ser.end() } diff --git a/tools/protobuf-compiler/src/main.rs b/tools/protobuf-compiler/src/main.rs index 9cd50655c8..a83cb25e1e 100644 --- 
a/tools/protobuf-compiler/src/main.rs +++ b/tools/protobuf-compiler/src/main.rs @@ -107,7 +107,6 @@ fn main() { pbjson_build::Builder::new() .register_descriptors(&descriptor_set) .unwrap() - .preserve_proto_field_names() .out_dir(&out_dir) .build(&[ ".astria.execution.v1alpha2", From c7f209e9474773d36bc6ac15183fc06e19c9dd21 Mon Sep 17 00:00:00 2001 From: Richard Janis Goldschmidt Date: Tue, 9 Jul 2024 15:47:07 +0200 Subject: [PATCH 08/24] fix(conductor): don't panic during panic (#1252) ## Summary Avoid panicking if a test is already panicking. ## Background The Conductor test environment is checking if it shut down cleanly for every test. If not, it's panicking to signal that the test should fail. However, if a test was already failing (panicking) due to another bad condition, this lead to the test being immediately aborted without printing any useful diagnostics. This patch fixes this by only panicking if the test is not already panicking, and issuing a debug level message otherwise. ## Changes - Change the `Drop` impl for the conductor test environment to only panic if the test is not already panicking. ## Testing This is to fix bad tests not providing useful information, which was encountered while writing another PR. --- .../tests/blackbox/helpers/mod.rs | 21 +++++++++++++++---- 1 file changed, 17 insertions(+), 4 deletions(-) diff --git a/crates/astria-conductor/tests/blackbox/helpers/mod.rs b/crates/astria-conductor/tests/blackbox/helpers/mod.rs index 66f415c2b3..6ea3c75e72 100644 --- a/crates/astria-conductor/tests/blackbox/helpers/mod.rs +++ b/crates/astria-conductor/tests/blackbox/helpers/mod.rs @@ -37,6 +37,7 @@ mod mock_grpc; use astria_eyre; pub use mock_grpc::MockGrpc; use serde_json::json; +use tracing::debug; pub const CELESTIA_BEARER_TOKEN: &str = "ABCDEFGH"; @@ -113,10 +114,22 @@ pub struct TestConductor { impl Drop for TestConductor { fn drop(&mut self) { futures::executor::block_on(async { - tokio::time::timeout(Duration::from_secs(2), self.conductor.shutdown()) - .await - .expect("timed out waiting for conductor to shut down") - .expect("conductor shut down with an error"); + let err_msg = + match tokio::time::timeout(Duration::from_secs(2), self.conductor.shutdown()).await + { + Ok(Ok(())) => None, + Ok(Err(conductor_err)) => Some(format!( + "conductor shut down with an error:\n{conductor_err:?}" + )), + Err(_timeout) => Some("timed out waiting for conductor to shut down".into()), + }; + if let Some(err_msg) = err_msg { + if std::thread::panicking() { + debug!("{err_msg}"); + } else { + panic!("{err_msg}"); + } + } }); } } From afd8bcb3da295dc7206da05c5a8e37fd7d15a029 Mon Sep 17 00:00:00 2001 From: Richard Janis Goldschmidt Date: Tue, 9 Jul 2024 16:53:11 +0200 Subject: [PATCH 09/24] feat(core, proto)!: make bridge unlock memo string (#1244) ## Summary Changes the memo field of bridge unlock actions from `bytes` to `string`. ## Background IBC convention is to format all memos as json and encode as `string` rather than `bytes` when sending them over the wire. The `astria.protocol.transactions.v1alpha1.Ics20Withdrawal.memo` field is also `string`, so this change brings `astria.protocol.transactions.v1alpha1.BridgeUnlockAction.memo` in line. ## Changes - Change `astria.protocol.transactions.v1alpha1.BridgeUnlockAction.memo` from `bytes` to `string` ## Testing All tests are updated and pass. 
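To make the new memo handling concrete, here is a minimal, hypothetical sketch of the JSON round-trip the `string` memo enables; the `UnlockMemo` struct below is an illustrative stand-in that only mirrors the field names of the withdrawer's memo type shown in the diffs below, not the real definition:

```rust
use serde::{Deserialize, Serialize};

// Illustrative stand-in for the withdrawer's unlock memo; only the field names
// mirror the type used in the diffs below.
#[derive(Debug, PartialEq, Serialize, Deserialize)]
struct UnlockMemo {
    block_number: u64,
    transaction_hash: [u8; 32],
}

fn main() {
    let memo = UnlockMemo {
        block_number: 1,
        transaction_hash: [2u8; 32],
    };
    // The action now carries the memo as a JSON string rather than raw bytes ...
    let memo_json: String = serde_json::to_string(&memo).expect("memo serializes to JSON");
    // ... and readers parse it back with `from_str` instead of `from_slice`.
    let parsed: UnlockMemo = serde_json::from_str(&memo_json).expect("memo parses from JSON");
    assert_eq!(memo, parsed);
}
```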
## Breaking Changelist This constitutes a breaking protobuf-change (as flagged by `buf`) but very likely neither a deployment nor network breaking change: + `bytes` and `string` are wire-compatible (from a protobuf point of view) + `sequencer` only passes the memo field through + `bridge-withdrawer` will be able to parse both formats as a Rust `String` is just `Vec` with extra checks for utf8 under the hood. This change is only cosmetic, but marked as breaking due to `buf`. --- .../src/bridge_withdrawer/ethereum/convert.rs | 6 +++--- .../src/bridge_withdrawer/startup.rs | 2 +- .../src/bridge_withdrawer/submitter/tests.rs | 2 +- .../astria.protocol.transactions.v1alpha1.rs | 4 ++-- .../src/protocol/transaction/v1alpha1/action.rs | 2 +- .../src/app/tests_breaking_changes.rs | 2 +- .../src/app/tests_execute_transaction.rs | 2 +- .../src/bridge/bridge_unlock_action.rs | 14 +++++++------- .../protocol/transactions/v1alpha1/types.proto | 2 +- 9 files changed, 18 insertions(+), 18 deletions(-) diff --git a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/convert.rs b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/convert.rs index fd849f91bb..da9c51938d 100644 --- a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/convert.rs +++ b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/convert.rs @@ -108,7 +108,7 @@ fn event_to_bridge_unlock( .ok_or(eyre::eyre!( "failed to divide amount by asset withdrawal multiplier" ))?, - memo: serde_json::to_vec(&memo).wrap_err("failed to serialize memo to json")?, + memo: serde_json::to_string(&memo).wrap_err("failed to serialize memo to json")?, fee_asset, bridge_address: None, }; @@ -228,7 +228,7 @@ mod tests { let expected_action = BridgeUnlockAction { to: crate::astria_address([1u8; 20]), amount: 99, - memo: serde_json::to_vec(&bridge::UnlockMemo { + memo: serde_json::to_string(&bridge::UnlockMemo { block_number: 1, transaction_hash: [2u8; 32], }) @@ -269,7 +269,7 @@ mod tests { let expected_action = BridgeUnlockAction { to: crate::astria_address([1u8; 20]), amount: 99, - memo: serde_json::to_vec(&bridge::UnlockMemo { + memo: serde_json::to_string(&bridge::UnlockMemo { block_number: 1, transaction_hash: [2u8; 32], }) diff --git a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/startup.rs b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/startup.rs index d68e7392bc..2959ccd324 100644 --- a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/startup.rs +++ b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/startup.rs @@ -335,7 +335,7 @@ fn rollup_height_from_signed_transaction( let last_batch_rollup_height = match withdrawal_action { Action::BridgeUnlock(action) => { - let memo: bridge::UnlockMemo = serde_json::from_slice(&action.memo) + let memo: bridge::UnlockMemo = serde_json::from_str(&action.memo) .wrap_err("failed to parse memo from last transaction by the bridge account")?; Some(memo.block_number) } diff --git a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/submitter/tests.rs b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/submitter/tests.rs index edc2615b6c..b0e681897f 100644 --- a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/submitter/tests.rs +++ b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/submitter/tests.rs @@ -209,7 +209,7 @@ fn make_bridge_unlock_action() -> Action { let inner = BridgeUnlockAction { to: crate::astria_address([0u8; 20]), amount: 99, - memo: serde_json::to_vec(&bridge::UnlockMemo { + memo: 
serde_json::to_string(&bridge::UnlockMemo { block_number: DEFAULT_LAST_ROLLUP_HEIGHT, transaction_hash: [1u8; 32], }) diff --git a/crates/astria-core/src/generated/astria.protocol.transactions.v1alpha1.rs b/crates/astria-core/src/generated/astria.protocol.transactions.v1alpha1.rs index e6f250ebec..3c0f529e9f 100644 --- a/crates/astria-core/src/generated/astria.protocol.transactions.v1alpha1.rs +++ b/crates/astria-core/src/generated/astria.protocol.transactions.v1alpha1.rs @@ -385,8 +385,8 @@ pub struct BridgeUnlockAction { #[prost(string, tag = "3")] pub fee_asset: ::prost::alloc::string::String, /// memo for double spend prevention - #[prost(bytes = "vec", tag = "4")] - pub memo: ::prost::alloc::vec::Vec, + #[prost(string, tag = "4")] + pub memo: ::prost::alloc::string::String, /// the address of the bridge account to transfer from, /// if the bridge account's withdrawer address is not the same as the bridge address. /// if unset, the signer of the transaction is used. diff --git a/crates/astria-core/src/protocol/transaction/v1alpha1/action.rs b/crates/astria-core/src/protocol/transaction/v1alpha1/action.rs index 6a06ba6359..7e41a97de2 100644 --- a/crates/astria-core/src/protocol/transaction/v1alpha1/action.rs +++ b/crates/astria-core/src/protocol/transaction/v1alpha1/action.rs @@ -1337,7 +1337,7 @@ pub struct BridgeUnlockAction { // asset to use for fee payment. pub fee_asset: asset::Denom, // memo for double spend protection. - pub memo: Vec, + pub memo: String, // the address of the bridge account to transfer from, // if the bridge account's withdrawer address is not the same as the bridge address. // if unset, the signer of the transaction is used. diff --git a/crates/astria-sequencer/src/app/tests_breaking_changes.rs b/crates/astria-sequencer/src/app/tests_breaking_changes.rs index 088a4899b8..2ca7d70d52 100644 --- a/crates/astria-sequencer/src/app/tests_breaking_changes.rs +++ b/crates/astria-sequencer/src/app/tests_breaking_changes.rs @@ -281,7 +281,7 @@ async fn app_execute_transaction_with_every_action_snapshot() { to: bob_address, amount: 10, fee_asset: asset.clone(), - memo: vec![0u8; 32], + memo: "{}".into(), bridge_address: None, } .into(), diff --git a/crates/astria-sequencer/src/app/tests_execute_transaction.rs b/crates/astria-sequencer/src/app/tests_execute_transaction.rs index 3d641d1274..93e8d0d2d6 100644 --- a/crates/astria-sequencer/src/app/tests_execute_transaction.rs +++ b/crates/astria-sequencer/src/app/tests_execute_transaction.rs @@ -1014,7 +1014,7 @@ async fn app_execute_transaction_bridge_lock_unlock_action_ok() { to: alice_address, amount, fee_asset: asset.clone(), - memo: b"lilywashere".to_vec(), + memo: "{ \"msg\": \"lilywashere\" }".into(), bridge_address: None, }; diff --git a/crates/astria-sequencer/src/bridge/bridge_unlock_action.rs b/crates/astria-sequencer/src/bridge/bridge_unlock_action.rs index 16688d51d5..d29dbbff54 100644 --- a/crates/astria-sequencer/src/bridge/bridge_unlock_action.rs +++ b/crates/astria-sequencer/src/bridge/bridge_unlock_action.rs @@ -137,7 +137,7 @@ mod test { to: to_address, amount: transfer_amount, fee_asset: asset, - memo: vec![0u8; 32], + memo: "{}".into(), bridge_address: None, }; @@ -174,7 +174,7 @@ mod test { to: to_address, amount: transfer_amount, fee_asset: asset.clone(), - memo: vec![0u8; 32], + memo: "{}".into(), bridge_address: Some(bridge_address), }; @@ -212,7 +212,7 @@ mod test { to: to_address, amount: transfer_amount, fee_asset: asset, - memo: vec![0u8; 32], + memo: "{}".into(), bridge_address: 
Some(bridge_address), }; @@ -253,7 +253,7 @@ mod test { to: to_address, amount: transfer_amount, fee_asset: asset.clone(), - memo: vec![0u8; 32], + memo: "{}".into(), bridge_address: None, }; @@ -308,7 +308,7 @@ mod test { to: to_address, amount: transfer_amount, fee_asset: asset.clone(), - memo: vec![0u8; 32], + memo: "{}".into(), bridge_address: Some(bridge_address), }; @@ -360,7 +360,7 @@ mod test { to: to_address, amount: transfer_amount, fee_asset: asset.clone(), - memo: vec![0u8; 32], + memo: "{}".into(), bridge_address: None, }; @@ -412,7 +412,7 @@ mod test { to: to_address, amount: transfer_amount, fee_asset: asset.clone(), - memo: vec![0u8; 32], + memo: "{}".into(), bridge_address: Some(bridge_address), }; diff --git a/proto/protocolapis/astria/protocol/transactions/v1alpha1/types.proto b/proto/protocolapis/astria/protocol/transactions/v1alpha1/types.proto index 406d5424b8..6d52627815 100644 --- a/proto/protocolapis/astria/protocol/transactions/v1alpha1/types.proto +++ b/proto/protocolapis/astria/protocol/transactions/v1alpha1/types.proto @@ -210,7 +210,7 @@ message BridgeUnlockAction { // the asset used to pay the transaction fee string fee_asset = 3; // memo for double spend prevention - bytes memo = 4; + string memo = 4; // the address of the bridge account to transfer from, // if the bridge account's withdrawer address is not the same as the bridge address. // if unset, the signer of the transaction is used. From da40cc226ab782f5768fcbdb98c7ef16f6035706 Mon Sep 17 00:00:00 2001 From: Fraser Hutchison <190532+Fraser999@users.noreply.github.com> Date: Tue, 9 Jul 2024 16:27:15 +0100 Subject: [PATCH 10/24] fix(sequencer-utils): fixes issue in `parse_blob` tests (#1243) ## Summary This PR fixes a bug in the `parse_blob` tests. ## Background The existing tests use `colour::force_no_colour()` to avoid `sequencer-utils` from generating coloured output in order to make the snapshots more legible. However, setting that actually has no effect (under the hood, `colour` was deciding to not use coloured output due to `io::stdout().is_terminal()` returning `false`). By updating to `colour` v2.1.0 this becomes apparent, as that version also takes the `TERM` env var into account. `TERM` is set to a value other than `dumb`, so coloured output is emitted. ## Changes - The fix is to set the `NO_COLOR` env var for the subprocess, as this has highest priority in disabling coloured output. ## Testing The tests broken after updating `colour` now pass. 
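For reference, the same `NO_COLOR` approach works outside of `assert_cmd` with a plain `std::process::Command`; a small sketch follows, where the binary name and argument are assumed for illustration:

```rust
use std::process::Command;

fn main() -> std::io::Result<()> {
    // Run the CLI with coloured output disabled; per this patch, NO_COLOR has the
    // highest priority, overriding the TERM-based detection added in colour v2.1.0.
    let output = Command::new("astria-sequencer-utils")
        .arg("parse-blob")
        .env("NO_COLOR", "1")
        .output()?;
    println!("{}", String::from_utf8_lossy(&output.stdout));
    Ok(())
}
```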
--- Cargo.lock | 7 +++++-- crates/astria-sequencer-utils/Cargo.toml | 2 +- crates/astria-sequencer-utils/tests/parse_blob.rs | 5 ++--- 3 files changed, 8 insertions(+), 6 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index cacf34f979..6893001c60 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1898,9 +1898,12 @@ checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7" [[package]] name = "colour" -version = "2.0.0" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "692e39ee1f14432931eb080a5e8e8930a7eff3d19e638cd04091534ebc40b928" +checksum = "b536eebcabe54980476d120a182f7da2268fe02d22575cca99cee5fdda178280" +dependencies = [ + "winapi", +] [[package]] name = "compiletest_rs" diff --git a/crates/astria-sequencer-utils/Cargo.toml b/crates/astria-sequencer-utils/Cargo.toml index 1317e08d62..9a06b1cd04 100644 --- a/crates/astria-sequencer-utils/Cargo.toml +++ b/crates/astria-sequencer-utils/Cargo.toml @@ -16,7 +16,7 @@ clap = { workspace = true, features = [ "derive", "wrap_help", ] } -colour = "2.0.0" +colour = "2.1.0" ethers-core = "2.0.14" hex = { workspace = true } indenter = "0.3.3" diff --git a/crates/astria-sequencer-utils/tests/parse_blob.rs b/crates/astria-sequencer-utils/tests/parse_blob.rs index c6064d7482..1f5ddc37a4 100644 --- a/crates/astria-sequencer-utils/tests/parse_blob.rs +++ b/crates/astria-sequencer-utils/tests/parse_blob.rs @@ -26,8 +26,6 @@ impl Resources { /// Reads the contents of the files in the `tests/resources/parse_blob/` folder to /// the respective fields of `Self`. fn new(test_case: &str) -> Result { - // Disable colored output to make the snapshots more legible. - colour::force_no_colour().unwrap(); let dir = Path::new(env!("CARGO_MANIFEST_DIR")) .join("tests") .join("resources") @@ -103,7 +101,8 @@ impl Resources { fn new_command() -> Result { let mut cmd = Command::cargo_bin(env!("CARGO_PKG_NAME"))?; - cmd.arg("parse-blob"); + // Disable colored output to make the snapshots more legible. + cmd.arg("parse-blob").env("NO_COLOR", "1"); Ok(cmd) } From 0fe870c71ed5694212ed669b4e19d029f0f5b375 Mon Sep 17 00:00:00 2001 From: Fraser Hutchison <190532+Fraser999@users.noreply.github.com> Date: Tue, 9 Jul 2024 16:37:43 +0100 Subject: [PATCH 11/24] chore(sequencer)!: add metrics (#1248) ## Summary This adds further metrics to the sequencer. ## Background This should help diagnose block production slowdown when the sequencer is stress-tested. ## Changes - Added metrics (see below for list). - Enabled `cnidarium` metrics. Note that all histograms are still rendered as Prometheus summaries for now. I have [an open PR](https://github.com/astriaorg/astria/pull/1192) which will make it simple to provide buckets for histograms, after which they will be rendered as true histograms. ## Testing Testing will likewise be relatively simple once #1192 is merged. In the meantime, I ran the smoke test using the code in this PR and manually checked the new metrics are available and appear sane. 
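As background for the histograms listed in the Metrics section below, here is a minimal sketch of the labelled-histogram pattern this patch uses (the constant values here are illustrative, not the crate's real metric names): one metric name, a `stage` label per phase, and a `Duration` recorded against a handle captured up front.

```rust
use std::time::Instant;

use metrics::{describe_histogram, histogram, Histogram, Unit};

// Illustrative stand-ins for the sequencer's metric name constants.
const CHECK_TX_DURATION_SECONDS: &str = "astria_sequencer_check_tx_duration_seconds";
const CHECK_TX_STAGE: &str = "stage";

fn main() {
    describe_histogram!(
        CHECK_TX_DURATION_SECONDS,
        Unit::Seconds,
        "The amount of time taken in seconds to complete the various stages of check_tx"
    );
    // One histogram name, one handle per stage label; handles are cheap and can be
    // stored in a struct, as the Metrics type in this patch does.
    let nonce_check: Histogram = histogram!(
        CHECK_TX_DURATION_SECONDS,
        CHECK_TX_STAGE => "nonce check"
    );

    let start = Instant::now();
    // ... run the nonce check here ...
    nonce_check.record(start.elapsed()); // a `Duration` is recorded as seconds
}
```

Because every stage shares the one metric name, dashboards can aggregate latency across stages or break it down by the `stage` label.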
## Metrics - Added `astria_sequencer_check_tx_duration_seconds` histograms with the following labels: - `length check and parse raw tx` - `stateless check` - `nonce check` - `chain id check` - `balance check` - `check for removal` - `insert to app mempool` - Added `astria_sequencer_actions_per_transaction_in_mempool` histogram - Added `astria_sequencer_transaction_in_mempool_size_bytes` histogram - Added `astria_sequencer_transactions_in_mempool_total` gauge - Enabled `cnidarium_get_raw_duration_seconds` histogram - Enabled `cnidarium_nonverifiable_get_raw_duration_seconds` histogram ## Related Issues Closes #1247. --- crates/astria-sequencer/Cargo.toml | 4 +- crates/astria-sequencer/src/app/mod.rs | 1 + crates/astria-sequencer/src/metrics.rs | 154 ++++++++++++++++++ crates/astria-sequencer/src/sequencer.rs | 3 + .../astria-sequencer/src/service/mempool.rs | 49 +++++- 5 files changed, 208 insertions(+), 3 deletions(-) diff --git a/crates/astria-sequencer/Cargo.toml b/crates/astria-sequencer/Cargo.toml index 47de9ce8de..d8f719eb3b 100644 --- a/crates/astria-sequencer/Cargo.toml +++ b/crates/astria-sequencer/Cargo.toml @@ -30,7 +30,9 @@ priority-queue = "2.0.2" tower = "0.4" tower-abci = "0.12.0" tower-actor = "0.1.0" -cnidarium = { git = "https://github.com/penumbra-zone/penumbra.git", tag = "v0.78.0" } +cnidarium = { git = "https://github.com/penumbra-zone/penumbra.git", tag = "v0.78.0", features = [ + "metrics", +] } cnidarium-component = { git = "https://github.com/penumbra-zone/penumbra.git", tag = "v0.78.0" } async-trait = { workspace = true } diff --git a/crates/astria-sequencer/src/app/mod.rs b/crates/astria-sequencer/src/app/mod.rs index 377ccaf4e2..b892037ac0 100644 --- a/crates/astria-sequencer/src/app/mod.rs +++ b/crates/astria-sequencer/src/app/mod.rs @@ -603,6 +603,7 @@ impl App { self.mempool.insert_all(txs_to_readd_to_mempool).await; let mempool_len = self.mempool.len().await; debug!(mempool_len, "finished executing transactions from mempool"); + self.metrics.set_transactions_in_mempool_total(mempool_len); self.execution_results = Some(execution_results); Ok((validated_txs, included_signed_txs)) diff --git a/crates/astria-sequencer/src/metrics.rs b/crates/astria-sequencer/src/metrics.rs index 7623223f00..eadd2365f8 100644 --- a/crates/astria-sequencer/src/metrics.rs +++ b/crates/astria-sequencer/src/metrics.rs @@ -1,3 +1,5 @@ +use std::time::Duration; + use metrics::{ counter, describe_counter, @@ -12,6 +14,8 @@ use metrics::{ }; use telemetry::metric_names; +const CHECK_TX_STAGE: &str = "stage"; + pub(crate) struct Metrics { prepare_proposal_excluded_transactions_cometbft_space: Counter, prepare_proposal_excluded_transactions_sequencer_space: Counter, @@ -26,6 +30,16 @@ pub(crate) struct Metrics { check_tx_removed_failed_stateless: Counter, check_tx_removed_stale_nonce: Counter, check_tx_removed_account_balance: Counter, + check_tx_duration_seconds_parse_tx: Histogram, + check_tx_duration_seconds_check_stateless: Histogram, + check_tx_duration_seconds_check_nonce: Histogram, + check_tx_duration_seconds_check_chain_id: Histogram, + check_tx_duration_seconds_check_balance: Histogram, + check_tx_duration_seconds_check_removed: Histogram, + check_tx_duration_seconds_insert_to_app_mempool: Histogram, + actions_per_transaction_in_mempool: Histogram, + transaction_in_mempool_size_bytes: Histogram, + transactions_in_mempool_total: Gauge, } impl Metrics { @@ -135,6 +149,62 @@ impl Metrics { ); let check_tx_removed_expired = counter!(CHECK_TX_REMOVED_EXPIRED); + describe_histogram!( + 
CHECK_TX_DURATION_SECONDS, + Unit::Seconds, + "The amount of time taken in seconds to successfully complete the various stages of \ + check_tx" + ); + let check_tx_duration_seconds_parse_tx = histogram!( + CHECK_TX_DURATION_SECONDS, + CHECK_TX_STAGE => "length check and parse raw tx" + ); + let check_tx_duration_seconds_check_stateless = histogram!( + CHECK_TX_DURATION_SECONDS, + CHECK_TX_STAGE => "stateless check" + ); + let check_tx_duration_seconds_check_nonce = histogram!( + CHECK_TX_DURATION_SECONDS, + CHECK_TX_STAGE => "nonce check" + ); + let check_tx_duration_seconds_check_chain_id = histogram!( + CHECK_TX_DURATION_SECONDS, + CHECK_TX_STAGE => "chain id check" + ); + let check_tx_duration_seconds_check_balance = histogram!( + CHECK_TX_DURATION_SECONDS, + CHECK_TX_STAGE => "balance check" + ); + let check_tx_duration_seconds_check_removed = histogram!( + CHECK_TX_DURATION_SECONDS, + CHECK_TX_STAGE => "check for removal" + ); + let check_tx_duration_seconds_insert_to_app_mempool = histogram!( + CHECK_TX_DURATION_SECONDS, + CHECK_TX_STAGE => "insert to app mempool" + ); + + describe_histogram!( + ACTIONS_PER_TRANSACTION_IN_MEMPOOL, + Unit::Count, + "The number of actions in a transaction added to the app mempool" + ); + let actions_per_transaction_in_mempool = histogram!(ACTIONS_PER_TRANSACTION_IN_MEMPOOL); + + describe_histogram!( + TRANSACTION_IN_MEMPOOL_SIZE_BYTES, + Unit::Bytes, + "The number of bytes in a transaction added to the app mempool" + ); + let transaction_in_mempool_size_bytes = histogram!(TRANSACTION_IN_MEMPOOL_SIZE_BYTES); + + describe_gauge!( + TRANSACTIONS_IN_MEMPOOL_TOTAL, + Unit::Count, + "The number of transactions in the app mempool" + ); + let transactions_in_mempool_total = gauge!(TRANSACTIONS_IN_MEMPOOL_TOTAL); + Self { prepare_proposal_excluded_transactions_cometbft_space, prepare_proposal_excluded_transactions_sequencer_space, @@ -149,6 +219,16 @@ impl Metrics { check_tx_removed_failed_stateless, check_tx_removed_stale_nonce, check_tx_removed_account_balance, + check_tx_duration_seconds_parse_tx, + check_tx_duration_seconds_check_stateless, + check_tx_duration_seconds_check_nonce, + check_tx_duration_seconds_check_chain_id, + check_tx_duration_seconds_check_balance, + check_tx_duration_seconds_check_removed, + check_tx_duration_seconds_insert_to_app_mempool, + actions_per_transaction_in_mempool, + transaction_in_mempool_size_bytes, + transactions_in_mempool_total, } } @@ -212,6 +292,59 @@ impl Metrics { pub(crate) fn increment_check_tx_removed_account_balance(&self) { self.check_tx_removed_account_balance.increment(1); } + + pub(crate) fn record_check_tx_duration_seconds_parse_tx(&self, duration: Duration) { + self.check_tx_duration_seconds_parse_tx.record(duration); + } + + pub(crate) fn record_check_tx_duration_seconds_check_stateless(&self, duration: Duration) { + self.check_tx_duration_seconds_check_stateless + .record(duration); + } + + pub(crate) fn record_check_tx_duration_seconds_check_nonce(&self, duration: Duration) { + self.check_tx_duration_seconds_check_nonce.record(duration); + } + + pub(crate) fn record_check_tx_duration_seconds_check_chain_id(&self, duration: Duration) { + self.check_tx_duration_seconds_check_chain_id + .record(duration); + } + + pub(crate) fn record_check_tx_duration_seconds_check_balance(&self, duration: Duration) { + self.check_tx_duration_seconds_check_balance + .record(duration); + } + + pub(crate) fn record_check_tx_duration_seconds_check_removed(&self, duration: Duration) { + 
self.check_tx_duration_seconds_check_removed + .record(duration); + } + + pub(crate) fn record_check_tx_duration_seconds_insert_to_app_mempool( + &self, + duration: Duration, + ) { + self.check_tx_duration_seconds_insert_to_app_mempool + .record(duration); + } + + pub(crate) fn record_actions_per_transaction_in_mempool(&self, count: usize) { + // allow: precision loss is unlikely (values too small) but also unimportant in histograms. + #[allow(clippy::cast_precision_loss)] + self.actions_per_transaction_in_mempool.record(count as f64); + } + + pub(crate) fn record_transaction_in_mempool_size_bytes(&self, count: usize) { + // allow: precision loss is unlikely (values too small) but also unimportant in histograms. + #[allow(clippy::cast_precision_loss)] + self.transaction_in_mempool_size_bytes.record(count as f64); + } + + pub(crate) fn set_transactions_in_mempool_total(&self, count: usize) { + #[allow(clippy::cast_precision_loss)] + self.transactions_in_mempool_total.set(count as f64); + } } metric_names!(pub const METRICS_NAMES: @@ -228,11 +361,17 @@ metric_names!(pub const METRICS_NAMES: CHECK_TX_REMOVED_FAILED_STATELESS, CHECK_TX_REMOVED_STALE_NONCE, CHECK_TX_REMOVED_ACCOUNT_BALANCE, + CHECK_TX_DURATION_SECONDS, + ACTIONS_PER_TRANSACTION_IN_MEMPOOL, + TRANSACTION_IN_MEMPOOL_SIZE_BYTES, + TRANSACTIONS_IN_MEMPOOL_TOTAL ); #[cfg(test)] mod tests { use super::{ + ACTIONS_PER_TRANSACTION_IN_MEMPOOL, + CHECK_TX_DURATION_SECONDS, CHECK_TX_REMOVED_ACCOUNT_BALANCE, CHECK_TX_REMOVED_EXPIRED, CHECK_TX_REMOVED_FAILED_EXECUTION, @@ -246,6 +385,8 @@ mod tests { PROCESS_PROPOSAL_SKIPPED_PROPOSAL, PROPOSAL_DEPOSITS, PROPOSAL_TRANSACTIONS, + TRANSACTIONS_IN_MEMPOOL_TOTAL, + TRANSACTION_IN_MEMPOOL_SIZE_BYTES, }; #[track_caller] @@ -295,5 +436,18 @@ mod tests { CHECK_TX_REMOVED_ACCOUNT_BALANCE, "check_tx_removed_account_balance", ); + assert_const(CHECK_TX_DURATION_SECONDS, "check_tx_duration_seconds"); + assert_const( + ACTIONS_PER_TRANSACTION_IN_MEMPOOL, + "actions_per_transaction_in_mempool", + ); + assert_const( + TRANSACTION_IN_MEMPOOL_SIZE_BYTES, + "transaction_in_mempool_size_bytes", + ); + assert_const( + TRANSACTIONS_IN_MEMPOOL_TOTAL, + "transactions_in_mempool_total", + ); } } diff --git a/crates/astria-sequencer/src/sequencer.rs b/crates/astria-sequencer/src/sequencer.rs index e78b7585d8..cfcfb39123 100644 --- a/crates/astria-sequencer/src/sequencer.rs +++ b/crates/astria-sequencer/src/sequencer.rs @@ -49,6 +49,9 @@ impl Sequencer { pub async fn run_until_stopped(config: Config) -> Result<()> { static METRICS: OnceLock = OnceLock::new(); let metrics = METRICS.get_or_init(Metrics::new); + cnidarium::register_metrics(); + metrics::histogram!("cnidarium_get_raw_duration_seconds"); + metrics::histogram!("cnidarium_nonverifiable_get_raw_duration_seconds"); if config .db_filepath diff --git a/crates/astria-sequencer/src/service/mempool.rs b/crates/astria-sequencer/src/service/mempool.rs index 0469819b2a..dc93c3e991 100644 --- a/crates/astria-sequencer/src/service/mempool.rs +++ b/crates/astria-sequencer/src/service/mempool.rs @@ -4,6 +4,7 @@ use std::{ Context, Poll, }, + time::Instant, }; use astria_core::{ @@ -105,12 +106,16 @@ async fn handle_check_tx( ) -> response::CheckTx { use sha2::Digest as _; - let tx_hash = sha2::Sha256::digest(&req.tx).into(); + let start_parsing = Instant::now(); let request::CheckTx { tx, .. 
} = req; - if tx.len() > MAX_TX_SIZE { + + let tx_hash = sha2::Sha256::digest(&tx).into(); + let tx_len = tx.len(); + + if tx_len > MAX_TX_SIZE { mempool.remove(tx_hash).await; metrics.increment_check_tx_removed_too_large(); return response::CheckTx { @@ -151,6 +156,11 @@ async fn handle_check_tx( } }; + let finished_parsing = Instant::now(); + metrics.record_check_tx_duration_seconds_parse_tx( + finished_parsing.saturating_duration_since(start_parsing), + ); + if let Err(e) = transaction::check_stateless(&signed_tx).await { mempool.remove(tx_hash).await; metrics.increment_check_tx_removed_failed_stateless(); @@ -162,6 +172,11 @@ async fn handle_check_tx( }; }; + let finished_check_stateless = Instant::now(); + metrics.record_check_tx_duration_seconds_check_stateless( + finished_check_stateless.saturating_duration_since(finished_parsing), + ); + if let Err(e) = transaction::check_nonce_mempool(&signed_tx, &state).await { mempool.remove(tx_hash).await; metrics.increment_check_tx_removed_stale_nonce(); @@ -173,6 +188,11 @@ async fn handle_check_tx( }; }; + let finished_check_nonce = Instant::now(); + metrics.record_check_tx_duration_seconds_check_nonce( + finished_check_nonce.saturating_duration_since(finished_check_stateless), + ); + if let Err(e) = transaction::check_chain_id_mempool(&signed_tx, &state).await { mempool.remove(tx_hash).await; return response::CheckTx { @@ -183,6 +203,11 @@ async fn handle_check_tx( }; } + let finished_check_chain_id = Instant::now(); + metrics.record_check_tx_duration_seconds_check_chain_id( + finished_check_chain_id.saturating_duration_since(finished_check_nonce), + ); + if let Err(e) = transaction::check_balance_mempool(&signed_tx, &state).await { mempool.remove(tx_hash).await; metrics.increment_check_tx_removed_account_balance(); @@ -194,6 +219,11 @@ async fn handle_check_tx( }; }; + let finished_check_balance = Instant::now(); + metrics.record_check_tx_duration_seconds_check_balance( + finished_check_balance.saturating_duration_since(finished_check_chain_id), + ); + if let Some(removal_reason) = mempool.check_removed_comet_bft(tx_hash).await { mempool.remove(tx_hash).await; @@ -219,6 +249,11 @@ async fn handle_check_tx( } }; + let finished_check_removed = Instant::now(); + metrics.record_check_tx_duration_seconds_check_removed( + finished_check_removed.saturating_duration_since(finished_check_balance), + ); + // tx is valid, push to mempool let current_account_nonce = state .get_account_nonce(crate::address::base_prefixed( @@ -227,6 +262,8 @@ async fn handle_check_tx( .await .expect("can fetch account nonce"); + let actions_count = signed_tx.actions().len(); + mempool .insert(signed_tx, current_account_nonce) .await @@ -234,5 +271,13 @@ async fn handle_check_tx( "tx nonce is greater than or equal to current account nonce; this was checked in \ check_nonce_mempool", ); + let mempool_len = mempool.len().await; + + metrics + .record_check_tx_duration_seconds_insert_to_app_mempool(finished_check_removed.elapsed()); + metrics.record_actions_per_transaction_in_mempool(actions_count); + metrics.record_transaction_in_mempool_size_bytes(tx_len); + metrics.set_transactions_in_mempool_total(mempool_len); + response::CheckTx::default() } From 69db1bd9b887251ee4229e6af02d9d60917f20b8 Mon Sep 17 00:00:00 2001 From: Jordan Oroshiba Date: Tue, 9 Jul 2024 21:49:22 +0200 Subject: [PATCH 12/24] fix(charts): add resources for sequencer/cometbft (#1254) ## Summary Adds resource request and limit defaults + lower for local dev values. 
## Background Want to ensure sequencer and cometbft get adequate resources --- charts/sequencer/Chart.yaml | 2 +- charts/sequencer/templates/statefulsets.yaml | 14 ++++++++++++++ charts/sequencer/values.yaml | 16 ++++++++++++++++ dev/values/validators/all.yml | 16 ++++++++++++++++ 4 files changed, 47 insertions(+), 1 deletion(-) diff --git a/charts/sequencer/Chart.yaml b/charts/sequencer/Chart.yaml index f127768d48..64f5b4cb6d 100644 --- a/charts/sequencer/Chart.yaml +++ b/charts/sequencer/Chart.yaml @@ -15,7 +15,7 @@ type: application # This is the chart version. This version number should be incremented each time you make changes # to the chart and its templates, including the app version. # Versions are expected to follow Semantic Versioning (https://semver.org/) -version: 0.18.0 +version: 0.18.1 # This is the version number of the application being deployed. This version number should be # incremented each time you make changes to the application. Versions are not expected to # follow Semantic Versioning. They should reflect the version the application is using. diff --git a/charts/sequencer/templates/statefulsets.yaml b/charts/sequencer/templates/statefulsets.yaml index 588f75a5d7..5e87fbcf21 100644 --- a/charts/sequencer/templates/statefulsets.yaml +++ b/charts/sequencer/templates/statefulsets.yaml @@ -52,6 +52,13 @@ spec: - containerPort: {{ .Values.ports.sequencerMetrics }} name: seq-metric {{- end }} + resources: + requests: + cpu: {{ .Values.resources.sequencer.requests.cpu }} + memory: {{ .Values.resources.sequencer.requests.memory }} + limits: + cpu: {{ .Values.resources.sequencer.limits.cpu }} + memory: {{ .Values.resources.sequencer.limits.memory }} - name: cometbft imagePullPolicy: IfNotPresent command: [ "cometbft", "start" ] @@ -78,6 +85,13 @@ spec: - containerPort: {{ .Values.ports.cometbftMetrics }} name: cometbft-metric {{- end }} + resources: + requests: + cpu: {{ .Values.resources.cometbft.requests.cpu }} + memory: {{ .Values.resources.cometbft.requests.memory }} + limits: + cpu: {{ .Values.resources.cometbft.limits.cpu }} + memory: {{ .Values.resources.cometbft.limits.memory }} volumes: - name: cometbft-config-volume configMap: diff --git a/charts/sequencer/values.yaml b/charts/sequencer/values.yaml index ef5b620cb1..62478cda7c 100644 --- a/charts/sequencer/values.yaml +++ b/charts/sequencer/values.yaml @@ -179,6 +179,22 @@ cometbft: secret: resourceName: "projects/$PROJECT_ID/secrets/privValidatorKey/versions/latest" +resources: + cometbft: + requests: + cpu: 4000m + memory: 4Gi + limits: + cpu: 4000m + memory: 4Gi + sequencer: + requests: + cpu: 4000m + memory: 4Gi + limits: + cpu: 4000m + memory: 4Gi + sequencer-relayer: enabled: false diff --git a/dev/values/validators/all.yml b/dev/values/validators/all.yml index 7e09b12ef6..621ce1d11e 100644 --- a/dev/values/validators/all.yml +++ b/dev/values/validators/all.yml @@ -34,5 +34,21 @@ genesis: - address: astria13ahqz4pjqfmynk9ylrqv4fwe4957x2p0h5782u balance: "48" +resources: + cometbft: + requests: + cpu: 1000m + memory: 500Mi + limits: + cpu: 1000m + memory: 500Mi + sequencer: + requests: + cpu: 1000m + memory: 500Mi + limits: + cpu: 1000m + memory: 500Mi + storage: enabled: false From 5757558e04250965772e07477586ffc6207502fc Mon Sep 17 00:00:00 2001 From: Richard Janis Goldschmidt Date: Thu, 11 Jul 2024 07:48:20 +0200 Subject: [PATCH 13/24] (core, sequencer)!: generate serde traits impls for all protocol protobufs (#1260) ## Summary This patch ensures extends our generated serde `Deserialize` and `Serialize` 
trait implementations to all protobuf messages defined in `astria.protocol`. ## Background So far the protocol specs did not have serde traits generated, but these are necessary to write out bridge-withdrawer actions as JSON. A side-effect of this patch is that all of `proto/protocolapis/astria_vendored` is now also committed to the repository (they were previously filtered). This is very likely not desired and should be addressed in a follow-up. It is not clear to the author of this patch to what extent these types are necessary. ## Changes - Generate serde `Deserialize` and `Serialize` impls for Rust types generated from `astria.protocol` and `astria_vendored` protobufs - Use a vendored `astria_vendored::tendermint::abci::ValidatorUpdate` instead of `tendermint_proto`: `tendermint_proto@v0.34` does not yet have serde impls for its types, which blocks us from having an easy way to use `pbjson` generated serde impls - Introduce a new `ValidatorUpdate` action to replace `tendermint::validator::Update` to transform to/from the vendored protobuf type (again necessary to implement serde and foreign type restrictions) - Update sequencer to use the new `ValidatorUpdate` type ## Testing All tests have been upgraded and still pass. Specifically, ingestion of validator updates happens through cometbft, which was already tested end-to-end. ## Breaking Changelist - Marked as breaking even though none of the breaking change tests are affected: the serialization shape of the validator updates has changed. As they are committed to state as JSON, this should be breaking. --- Cargo.lock | 1 + crates/astria-cli/src/commands/sequencer.rs | 17 +- crates/astria-core/src/crypto.rs | 2 +- .../astria.composer.v1alpha1.serde.rs | 186 ++ .../astria.execution.v1alpha1.serde.rs | 559 ++++ ...astria.protocol.accounts.v1alpha1.serde.rs | 332 +++ .../astria.protocol.asset.v1alpha1.serde.rs | 223 ++ .../astria.protocol.bridge.v1alpha1.serde.rs | 280 ++ .../astria.protocol.transactions.v1alpha1.rs | 4 +- ...ia.protocol.transactions.v1alpha1.serde.rs | 2598 +++++++++++++++++ ...ed.penumbra.core.component.ibc.v1.serde.rs | 92 + .../astria_vendored.tendermint.abci.rs | 15 + .../astria_vendored.tendermint.abci.serde.rs | 112 + .../astria_vendored.tendermint.crypto.rs | 24 + ...astria_vendored.tendermint.crypto.serde.rs | 109 + .../astria_vendored.tendermint.types.rs | 74 + .../astria_vendored.tendermint.types.serde.rs | 578 ++++ .../astria_vendored.tendermint.version.rs | 15 + ...stria_vendored.tendermint.version.serde.rs | 114 + crates/astria-core/src/generated/mod.rs | 49 +- .../protocol/transaction/v1alpha1/action.rs | 177 +- crates/astria-sequencer/Cargo.toml | 1 + crates/astria-sequencer/src/app/mod.rs | 7 +- crates/astria-sequencer/src/app/test_utils.rs | 9 +- crates/astria-sequencer/src/app/tests_app.rs | 83 +- .../src/app/tests_breaking_changes.rs | 8 +- .../src/app/tests_execute_transaction.rs | 13 +- .../astria-sequencer/src/authority/action.rs | 8 +- .../src/authority/component.rs | 24 +- .../src/authority/state_ext.rs | 184 +- crates/astria-sequencer/src/lib.rs | 2 + .../astria-sequencer/src/service/consensus.rs | 10 +- crates/astria-sequencer/src/test_utils.rs | 11 + crates/astria-sequencer/src/utils.rs | 74 + tools/protobuf-compiler/src/main.rs | 24 +- 35 files changed, 5804 insertions(+), 215 deletions(-) create mode 100644 crates/astria-core/src/generated/astria.composer.v1alpha1.serde.rs create mode 100644 crates/astria-core/src/generated/astria.execution.v1alpha1.serde.rs create mode 100644
crates/astria-core/src/generated/astria.protocol.accounts.v1alpha1.serde.rs create mode 100644 crates/astria-core/src/generated/astria.protocol.asset.v1alpha1.serde.rs create mode 100644 crates/astria-core/src/generated/astria.protocol.bridge.v1alpha1.serde.rs create mode 100644 crates/astria-core/src/generated/astria.protocol.transactions.v1alpha1.serde.rs create mode 100644 crates/astria-core/src/generated/astria_vendored.penumbra.core.component.ibc.v1.serde.rs create mode 100644 crates/astria-core/src/generated/astria_vendored.tendermint.abci.rs create mode 100644 crates/astria-core/src/generated/astria_vendored.tendermint.abci.serde.rs create mode 100644 crates/astria-core/src/generated/astria_vendored.tendermint.crypto.rs create mode 100644 crates/astria-core/src/generated/astria_vendored.tendermint.crypto.serde.rs create mode 100644 crates/astria-core/src/generated/astria_vendored.tendermint.types.rs create mode 100644 crates/astria-core/src/generated/astria_vendored.tendermint.types.serde.rs create mode 100644 crates/astria-core/src/generated/astria_vendored.tendermint.version.rs create mode 100644 crates/astria-core/src/generated/astria_vendored.tendermint.version.serde.rs create mode 100644 crates/astria-sequencer/src/test_utils.rs diff --git a/Cargo.lock b/Cargo.lock index 6893001c60..21684370bd 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -784,6 +784,7 @@ dependencies = [ "priority-queue", "prost", "rand 0.8.5", + "rand_chacha 0.3.1", "regex", "serde", "serde_json", diff --git a/crates/astria-cli/src/commands/sequencer.rs b/crates/astria-cli/src/commands/sequencer.rs index feacf4cd28..63311c7df5 100644 --- a/crates/astria-cli/src/commands/sequencer.rs +++ b/crates/astria-cli/src/commands/sequencer.rs @@ -13,13 +13,13 @@ use astria_core::{ InitBridgeAccountAction, SudoAddressChangeAction, TransferAction, + ValidatorUpdate, }, TransactionParams, UnsignedTransaction, }, }; use astria_sequencer_client::{ - tendermint, tendermint_rpc::endpoint, Client, HttpClient, @@ -421,13 +421,14 @@ pub(crate) async fn sudo_address_change(args: &SudoAddressChangeArgs) -> eyre::R /// * If the http client cannot be created /// * If the transaction failed to be submitted pub(crate) async fn validator_update(args: &ValidatorUpdateArgs) -> eyre::Result<()> { - let public_key_raw = hex::decode(args.validator_public_key.as_str()) - .wrap_err("failed to decode public key into bytes")?; - let pub_key = tendermint::PublicKey::from_raw_ed25519(&public_key_raw) - .expect("failed to parse public key from parsed bytes"); - let validator_update = tendermint::validator::Update { - pub_key, - power: args.power.into(), + let verification_key = astria_core::crypto::VerificationKey::try_from( + &*hex::decode(&args.validator_public_key) + .wrap_err("failed to decode public key bytes from argument")?, + ) + .wrap_err("failed to construct public key from bytes")?; + let validator_update = ValidatorUpdate { + power: args.power, + verification_key, }; let res = submit_transaction( diff --git a/crates/astria-core/src/crypto.rs b/crates/astria-core/src/crypto.rs index 6150751913..4146650de0 100644 --- a/crates/astria-core/src/crypto.rs +++ b/crates/astria-core/src/crypto.rs @@ -114,7 +114,7 @@ impl From<[u8; 32]> for SigningKey { } /// An Ed25519 verification key. 
-#[derive(Clone)] +#[derive(Clone, Copy)] pub struct VerificationKey { key: Ed25519VerificationKey, } diff --git a/crates/astria-core/src/generated/astria.composer.v1alpha1.serde.rs b/crates/astria-core/src/generated/astria.composer.v1alpha1.serde.rs new file mode 100644 index 0000000000..91e8502f9a --- /dev/null +++ b/crates/astria-core/src/generated/astria.composer.v1alpha1.serde.rs @@ -0,0 +1,186 @@ +impl serde::Serialize for SubmitRollupTransactionRequest { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.rollup_id.is_empty() { + len += 1; + } + if !self.data.is_empty() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("astria.composer.v1alpha1.SubmitRollupTransactionRequest", len)?; + if !self.rollup_id.is_empty() { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("rollupId", pbjson::private::base64::encode(&self.rollup_id).as_str())?; + } + if !self.data.is_empty() { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("data", pbjson::private::base64::encode(&self.data).as_str())?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for SubmitRollupTransactionRequest { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "rollup_id", + "rollupId", + "data", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + RollupId, + Data, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "rollupId" | "rollup_id" => Ok(GeneratedField::RollupId), + "data" => Ok(GeneratedField::Data), + _ => Err(serde::de::Error::unknown_field(value, FIELDS)), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = SubmitRollupTransactionRequest; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct astria.composer.v1alpha1.SubmitRollupTransactionRequest") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut rollup_id__ = None; + let mut data__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::RollupId => { + if rollup_id__.is_some() { + return Err(serde::de::Error::duplicate_field("rollupId")); + } + rollup_id__ = + Some(map_.next_value::<::pbjson::private::BytesDeserialize<_>>()?.0) + ; + } + GeneratedField::Data => { + if data__.is_some() { + return Err(serde::de::Error::duplicate_field("data")); + } + data__ = + Some(map_.next_value::<::pbjson::private::BytesDeserialize<_>>()?.0) + ; + } + } + } + Ok(SubmitRollupTransactionRequest { + rollup_id: rollup_id__.unwrap_or_default(), + data: data__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("astria.composer.v1alpha1.SubmitRollupTransactionRequest", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for SubmitRollupTransactionResponse { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let len = 0; + let struct_ser = serializer.serialize_struct("astria.composer.v1alpha1.SubmitRollupTransactionResponse", len)?; + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for SubmitRollupTransactionResponse { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + Err(serde::de::Error::unknown_field(value, FIELDS)) + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = SubmitRollupTransactionResponse; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct astria.composer.v1alpha1.SubmitRollupTransactionResponse") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + while map_.next_key::()?.is_some() { + let _ = map_.next_value::()?; + } + Ok(SubmitRollupTransactionResponse { + }) + } + } + deserializer.deserialize_struct("astria.composer.v1alpha1.SubmitRollupTransactionResponse", FIELDS, GeneratedVisitor) + } +} diff --git a/crates/astria-core/src/generated/astria.execution.v1alpha1.serde.rs b/crates/astria-core/src/generated/astria.execution.v1alpha1.serde.rs new file mode 100644 index 0000000000..b47490c3c4 --- /dev/null +++ b/crates/astria-core/src/generated/astria.execution.v1alpha1.serde.rs @@ -0,0 +1,559 @@ +impl serde::Serialize for DoBlockRequest { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.prev_block_hash.is_empty() { + len += 1; + } + if !self.transactions.is_empty() { + len += 1; + } + if self.timestamp.is_some() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("astria.execution.v1alpha1.DoBlockRequest", len)?; + if !self.prev_block_hash.is_empty() { + 
#[allow(clippy::needless_borrow)] + struct_ser.serialize_field("prevBlockHash", pbjson::private::base64::encode(&self.prev_block_hash).as_str())?; + } + if !self.transactions.is_empty() { + struct_ser.serialize_field("transactions", &self.transactions.iter().map(pbjson::private::base64::encode).collect::>())?; + } + if let Some(v) = self.timestamp.as_ref() { + struct_ser.serialize_field("timestamp", v)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for DoBlockRequest { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "prev_block_hash", + "prevBlockHash", + "transactions", + "timestamp", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + PrevBlockHash, + Transactions, + Timestamp, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "prevBlockHash" | "prev_block_hash" => Ok(GeneratedField::PrevBlockHash), + "transactions" => Ok(GeneratedField::Transactions), + "timestamp" => Ok(GeneratedField::Timestamp), + _ => Err(serde::de::Error::unknown_field(value, FIELDS)), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = DoBlockRequest; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct astria.execution.v1alpha1.DoBlockRequest") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut prev_block_hash__ = None; + let mut transactions__ = None; + let mut timestamp__ = None; + while let Some(k) = map_.next_key()? { + match k { + GeneratedField::PrevBlockHash => { + if prev_block_hash__.is_some() { + return Err(serde::de::Error::duplicate_field("prevBlockHash")); + } + prev_block_hash__ = + Some(map_.next_value::<::pbjson::private::BytesDeserialize<_>>()?.0) + ; + } + GeneratedField::Transactions => { + if transactions__.is_some() { + return Err(serde::de::Error::duplicate_field("transactions")); + } + transactions__ = + Some(map_.next_value::>>()? 
+ .into_iter().map(|x| x.0).collect()) + ; + } + GeneratedField::Timestamp => { + if timestamp__.is_some() { + return Err(serde::de::Error::duplicate_field("timestamp")); + } + timestamp__ = map_.next_value()?; + } + } + } + Ok(DoBlockRequest { + prev_block_hash: prev_block_hash__.unwrap_or_default(), + transactions: transactions__.unwrap_or_default(), + timestamp: timestamp__, + }) + } + } + deserializer.deserialize_struct("astria.execution.v1alpha1.DoBlockRequest", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for DoBlockResponse { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.block_hash.is_empty() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("astria.execution.v1alpha1.DoBlockResponse", len)?; + if !self.block_hash.is_empty() { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("blockHash", pbjson::private::base64::encode(&self.block_hash).as_str())?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for DoBlockResponse { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "block_hash", + "blockHash", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + BlockHash, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "blockHash" | "block_hash" => Ok(GeneratedField::BlockHash), + _ => Err(serde::de::Error::unknown_field(value, FIELDS)), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = DoBlockResponse; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct astria.execution.v1alpha1.DoBlockResponse") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut block_hash__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::BlockHash => { + if block_hash__.is_some() { + return Err(serde::de::Error::duplicate_field("blockHash")); + } + block_hash__ = + Some(map_.next_value::<::pbjson::private::BytesDeserialize<_>>()?.0) + ; + } + } + } + Ok(DoBlockResponse { + block_hash: block_hash__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("astria.execution.v1alpha1.DoBlockResponse", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for FinalizeBlockRequest { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.block_hash.is_empty() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("astria.execution.v1alpha1.FinalizeBlockRequest", len)?; + if !self.block_hash.is_empty() { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("blockHash", pbjson::private::base64::encode(&self.block_hash).as_str())?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for FinalizeBlockRequest { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "block_hash", + "blockHash", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + BlockHash, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "blockHash" | "block_hash" => Ok(GeneratedField::BlockHash), + _ => Err(serde::de::Error::unknown_field(value, FIELDS)), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = FinalizeBlockRequest; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct astria.execution.v1alpha1.FinalizeBlockRequest") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut block_hash__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::BlockHash => { + if block_hash__.is_some() { + return Err(serde::de::Error::duplicate_field("blockHash")); + } + block_hash__ = + Some(map_.next_value::<::pbjson::private::BytesDeserialize<_>>()?.0) + ; + } + } + } + Ok(FinalizeBlockRequest { + block_hash: block_hash__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("astria.execution.v1alpha1.FinalizeBlockRequest", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for FinalizeBlockResponse { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let len = 0; + let struct_ser = serializer.serialize_struct("astria.execution.v1alpha1.FinalizeBlockResponse", len)?; + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for FinalizeBlockResponse { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + Err(serde::de::Error::unknown_field(value, FIELDS)) + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = FinalizeBlockResponse; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct astria.execution.v1alpha1.FinalizeBlockResponse") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + while map_.next_key::()?.is_some() { + let _ = map_.next_value::()?; + } + Ok(FinalizeBlockResponse { + }) + } + } + deserializer.deserialize_struct("astria.execution.v1alpha1.FinalizeBlockResponse", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for InitStateRequest { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let len = 0; + let struct_ser = serializer.serialize_struct("astria.execution.v1alpha1.InitStateRequest", len)?; + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for InitStateRequest { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: 
serde::de::Error, + { + Err(serde::de::Error::unknown_field(value, FIELDS)) + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = InitStateRequest; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct astria.execution.v1alpha1.InitStateRequest") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + while map_.next_key::()?.is_some() { + let _ = map_.next_value::()?; + } + Ok(InitStateRequest { + }) + } + } + deserializer.deserialize_struct("astria.execution.v1alpha1.InitStateRequest", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for InitStateResponse { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.block_hash.is_empty() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("astria.execution.v1alpha1.InitStateResponse", len)?; + if !self.block_hash.is_empty() { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("blockHash", pbjson::private::base64::encode(&self.block_hash).as_str())?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for InitStateResponse { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "block_hash", + "blockHash", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + BlockHash, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "blockHash" | "block_hash" => Ok(GeneratedField::BlockHash), + _ => Err(serde::de::Error::unknown_field(value, FIELDS)), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = InitStateResponse; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct astria.execution.v1alpha1.InitStateResponse") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut block_hash__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::BlockHash => { + if block_hash__.is_some() { + return Err(serde::de::Error::duplicate_field("blockHash")); + } + block_hash__ = + Some(map_.next_value::<::pbjson::private::BytesDeserialize<_>>()?.0) + ; + } + } + } + Ok(InitStateResponse { + block_hash: block_hash__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("astria.execution.v1alpha1.InitStateResponse", FIELDS, GeneratedVisitor) + } +} diff --git a/crates/astria-core/src/generated/astria.protocol.accounts.v1alpha1.serde.rs b/crates/astria-core/src/generated/astria.protocol.accounts.v1alpha1.serde.rs new file mode 100644 index 0000000000..06bf3dfbd2 --- /dev/null +++ b/crates/astria-core/src/generated/astria.protocol.accounts.v1alpha1.serde.rs @@ -0,0 +1,332 @@ +impl serde::Serialize for AssetBalance { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.denom.is_empty() { + len += 1; + } + if self.balance.is_some() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("astria.protocol.accounts.v1alpha1.AssetBalance", len)?; + if !self.denom.is_empty() { + struct_ser.serialize_field("denom", &self.denom)?; + } + if let Some(v) = self.balance.as_ref() { + struct_ser.serialize_field("balance", v)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for AssetBalance { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "denom", + "balance", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Denom, + Balance, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "denom" => Ok(GeneratedField::Denom), + "balance" => Ok(GeneratedField::Balance), + _ => Err(serde::de::Error::unknown_field(value, FIELDS)), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = AssetBalance; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct astria.protocol.accounts.v1alpha1.AssetBalance") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut denom__ = None; + let mut balance__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::Denom => { + if denom__.is_some() { + return Err(serde::de::Error::duplicate_field("denom")); + } + denom__ = Some(map_.next_value()?); + } + GeneratedField::Balance => { + if balance__.is_some() { + return Err(serde::de::Error::duplicate_field("balance")); + } + balance__ = map_.next_value()?; + } + } + } + Ok(AssetBalance { + denom: denom__.unwrap_or_default(), + balance: balance__, + }) + } + } + deserializer.deserialize_struct("astria.protocol.accounts.v1alpha1.AssetBalance", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for BalanceResponse { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if self.height != 0 { + len += 1; + } + if !self.balances.is_empty() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("astria.protocol.accounts.v1alpha1.BalanceResponse", len)?; + if self.height != 0 { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("height", ToString::to_string(&self.height).as_str())?; + } + if !self.balances.is_empty() { + struct_ser.serialize_field("balances", &self.balances)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for BalanceResponse { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "height", + "balances", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Height, + Balances, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "height" => Ok(GeneratedField::Height), + "balances" => Ok(GeneratedField::Balances), + _ => Err(serde::de::Error::unknown_field(value, FIELDS)), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = BalanceResponse; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct astria.protocol.accounts.v1alpha1.BalanceResponse") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut height__ = None; + let mut balances__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::Height => { + if height__.is_some() { + return Err(serde::de::Error::duplicate_field("height")); + } + height__ = + Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) + ; + } + GeneratedField::Balances => { + if balances__.is_some() { + return Err(serde::de::Error::duplicate_field("balances")); + } + balances__ = Some(map_.next_value()?); + } + } + } + Ok(BalanceResponse { + height: height__.unwrap_or_default(), + balances: balances__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("astria.protocol.accounts.v1alpha1.BalanceResponse", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for NonceResponse { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if self.height != 0 { + len += 1; + } + if self.nonce != 0 { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("astria.protocol.accounts.v1alpha1.NonceResponse", len)?; + if self.height != 0 { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("height", ToString::to_string(&self.height).as_str())?; + } + if self.nonce != 0 { + struct_ser.serialize_field("nonce", &self.nonce)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for NonceResponse { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "height", + "nonce", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Height, + Nonce, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "height" => Ok(GeneratedField::Height), + "nonce" => Ok(GeneratedField::Nonce), + _ => Err(serde::de::Error::unknown_field(value, FIELDS)), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = NonceResponse; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct astria.protocol.accounts.v1alpha1.NonceResponse") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut height__ = None; + let mut nonce__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::Height => { + if height__.is_some() { + return Err(serde::de::Error::duplicate_field("height")); + } + height__ = + Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) + ; + } + GeneratedField::Nonce => { + if nonce__.is_some() { + return Err(serde::de::Error::duplicate_field("nonce")); + } + nonce__ = + Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) + ; + } + } + } + Ok(NonceResponse { + height: height__.unwrap_or_default(), + nonce: nonce__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("astria.protocol.accounts.v1alpha1.NonceResponse", FIELDS, GeneratedVisitor) + } +} diff --git a/crates/astria-core/src/generated/astria.protocol.asset.v1alpha1.serde.rs b/crates/astria-core/src/generated/astria.protocol.asset.v1alpha1.serde.rs new file mode 100644 index 0000000000..ed97eb6bae --- /dev/null +++ b/crates/astria-core/src/generated/astria.protocol.asset.v1alpha1.serde.rs @@ -0,0 +1,223 @@ +impl serde::Serialize for AllowedFeeAssetsResponse { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if self.height != 0 { + len += 1; + } + if !self.fee_assets.is_empty() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("astria.protocol.asset.v1alpha1.AllowedFeeAssetsResponse", len)?; + if self.height != 0 { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("height", ToString::to_string(&self.height).as_str())?; + } + if !self.fee_assets.is_empty() { + struct_ser.serialize_field("feeAssets", &self.fee_assets)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for AllowedFeeAssetsResponse { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "height", + "fee_assets", + "feeAssets", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Height, + FeeAssets, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "height" => Ok(GeneratedField::Height), + "feeAssets" | "fee_assets" => Ok(GeneratedField::FeeAssets), + _ => Err(serde::de::Error::unknown_field(value, FIELDS)), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = AllowedFeeAssetsResponse; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct astria.protocol.asset.v1alpha1.AllowedFeeAssetsResponse") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut height__ = None; + let mut fee_assets__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::Height => { + if height__.is_some() { + return Err(serde::de::Error::duplicate_field("height")); + } + height__ = + Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) + ; + } + GeneratedField::FeeAssets => { + if fee_assets__.is_some() { + return Err(serde::de::Error::duplicate_field("feeAssets")); + } + fee_assets__ = Some(map_.next_value()?); + } + } + } + Ok(AllowedFeeAssetsResponse { + height: height__.unwrap_or_default(), + fee_assets: fee_assets__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("astria.protocol.asset.v1alpha1.AllowedFeeAssetsResponse", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for DenomResponse { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if self.height != 0 { + len += 1; + } + if !self.denom.is_empty() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("astria.protocol.asset.v1alpha1.DenomResponse", len)?; + if self.height != 0 { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("height", ToString::to_string(&self.height).as_str())?; + } + if !self.denom.is_empty() { + struct_ser.serialize_field("denom", &self.denom)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for DenomResponse { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "height", + "denom", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Height, + Denom, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "height" => Ok(GeneratedField::Height), + "denom" => Ok(GeneratedField::Denom), + _ => Err(serde::de::Error::unknown_field(value, FIELDS)), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = DenomResponse; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct astria.protocol.asset.v1alpha1.DenomResponse") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut height__ = None; + let mut denom__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::Height => { + if height__.is_some() { + return Err(serde::de::Error::duplicate_field("height")); + } + height__ = + Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) + ; + } + GeneratedField::Denom => { + if denom__.is_some() { + return Err(serde::de::Error::duplicate_field("denom")); + } + denom__ = Some(map_.next_value()?); + } + } + } + Ok(DenomResponse { + height: height__.unwrap_or_default(), + denom: denom__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("astria.protocol.asset.v1alpha1.DenomResponse", FIELDS, GeneratedVisitor) + } +} diff --git a/crates/astria-core/src/generated/astria.protocol.bridge.v1alpha1.serde.rs b/crates/astria-core/src/generated/astria.protocol.bridge.v1alpha1.serde.rs new file mode 100644 index 0000000000..8fd3b68c1c --- /dev/null +++ b/crates/astria-core/src/generated/astria.protocol.bridge.v1alpha1.serde.rs @@ -0,0 +1,280 @@ +impl serde::Serialize for BridgeAccountInfoResponse { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if self.height != 0 { + len += 1; + } + if self.rollup_id.is_some() { + len += 1; + } + if self.asset.is_some() { + len += 1; + } + if self.sudo_address.is_some() { + len += 1; + } + if self.withdrawer_address.is_some() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("astria.protocol.bridge.v1alpha1.BridgeAccountInfoResponse", len)?; + if self.height != 0 { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("height", ToString::to_string(&self.height).as_str())?; + } + if let Some(v) = self.rollup_id.as_ref() { + struct_ser.serialize_field("rollupId", v)?; + } + if let Some(v) = self.asset.as_ref() { + struct_ser.serialize_field("asset", v)?; + } + if let Some(v) = self.sudo_address.as_ref() { + struct_ser.serialize_field("sudoAddress", v)?; + } + if let Some(v) = self.withdrawer_address.as_ref() { + struct_ser.serialize_field("withdrawerAddress", v)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for BridgeAccountInfoResponse { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "height", + "rollup_id", + "rollupId", + "asset", + "sudo_address", + "sudoAddress", + "withdrawer_address", + "withdrawerAddress", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Height, + RollupId, + Asset, + SudoAddress, + WithdrawerAddress, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "height" => Ok(GeneratedField::Height), + "rollupId" | "rollup_id" => Ok(GeneratedField::RollupId), + "asset" => Ok(GeneratedField::Asset), + "sudoAddress" | "sudo_address" => Ok(GeneratedField::SudoAddress), + "withdrawerAddress" | "withdrawer_address" => Ok(GeneratedField::WithdrawerAddress), + _ => Err(serde::de::Error::unknown_field(value, FIELDS)), + } + } + } + 
deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = BridgeAccountInfoResponse; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct astria.protocol.bridge.v1alpha1.BridgeAccountInfoResponse") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut height__ = None; + let mut rollup_id__ = None; + let mut asset__ = None; + let mut sudo_address__ = None; + let mut withdrawer_address__ = None; + while let Some(k) = map_.next_key()? { + match k { + GeneratedField::Height => { + if height__.is_some() { + return Err(serde::de::Error::duplicate_field("height")); + } + height__ = + Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) + ; + } + GeneratedField::RollupId => { + if rollup_id__.is_some() { + return Err(serde::de::Error::duplicate_field("rollupId")); + } + rollup_id__ = map_.next_value()?; + } + GeneratedField::Asset => { + if asset__.is_some() { + return Err(serde::de::Error::duplicate_field("asset")); + } + asset__ = map_.next_value()?; + } + GeneratedField::SudoAddress => { + if sudo_address__.is_some() { + return Err(serde::de::Error::duplicate_field("sudoAddress")); + } + sudo_address__ = map_.next_value()?; + } + GeneratedField::WithdrawerAddress => { + if withdrawer_address__.is_some() { + return Err(serde::de::Error::duplicate_field("withdrawerAddress")); + } + withdrawer_address__ = map_.next_value()?; + } + } + } + Ok(BridgeAccountInfoResponse { + height: height__.unwrap_or_default(), + rollup_id: rollup_id__, + asset: asset__, + sudo_address: sudo_address__, + withdrawer_address: withdrawer_address__, + }) + } + } + deserializer.deserialize_struct("astria.protocol.bridge.v1alpha1.BridgeAccountInfoResponse", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for BridgeAccountLastTxHashResponse { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if self.height != 0 { + len += 1; + } + if self.tx_hash.is_some() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("astria.protocol.bridge.v1alpha1.BridgeAccountLastTxHashResponse", len)?; + if self.height != 0 { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("height", ToString::to_string(&self.height).as_str())?; + } + if let Some(v) = self.tx_hash.as_ref() { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("txHash", pbjson::private::base64::encode(&v).as_str())?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for BridgeAccountLastTxHashResponse { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "height", + "tx_hash", + "txHash", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Height, + TxHash, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, 
value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "height" => Ok(GeneratedField::Height), + "txHash" | "tx_hash" => Ok(GeneratedField::TxHash), + _ => Err(serde::de::Error::unknown_field(value, FIELDS)), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = BridgeAccountLastTxHashResponse; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct astria.protocol.bridge.v1alpha1.BridgeAccountLastTxHashResponse") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut height__ = None; + let mut tx_hash__ = None; + while let Some(k) = map_.next_key()? { + match k { + GeneratedField::Height => { + if height__.is_some() { + return Err(serde::de::Error::duplicate_field("height")); + } + height__ = + Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) + ; + } + GeneratedField::TxHash => { + if tx_hash__.is_some() { + return Err(serde::de::Error::duplicate_field("txHash")); + } + tx_hash__ = + map_.next_value::<::std::option::Option<::pbjson::private::BytesDeserialize<_>>>()?.map(|x| x.0) + ; + } + } + } + Ok(BridgeAccountLastTxHashResponse { + height: height__.unwrap_or_default(), + tx_hash: tx_hash__, + }) + } + } + deserializer.deserialize_struct("astria.protocol.bridge.v1alpha1.BridgeAccountLastTxHashResponse", FIELDS, GeneratedVisitor) + } +} diff --git a/crates/astria-core/src/generated/astria.protocol.transactions.v1alpha1.rs b/crates/astria-core/src/generated/astria.protocol.transactions.v1alpha1.rs index 3c0f529e9f..f9ad97a170 100644 --- a/crates/astria-core/src/generated/astria.protocol.transactions.v1alpha1.rs +++ b/crates/astria-core/src/generated/astria.protocol.transactions.v1alpha1.rs @@ -91,7 +91,9 @@ pub mod action { #[prost(message, tag = "50")] SudoAddressChangeAction(super::SudoAddressChangeAction), #[prost(message, tag = "51")] - ValidatorUpdateAction(::tendermint_proto::abci::ValidatorUpdate), + ValidatorUpdateAction( + crate::generated::astria_vendored::tendermint::abci::ValidatorUpdate, + ), #[prost(message, tag = "52")] IbcRelayerChangeAction(super::IbcRelayerChangeAction), #[prost(message, tag = "53")] diff --git a/crates/astria-core/src/generated/astria.protocol.transactions.v1alpha1.serde.rs b/crates/astria-core/src/generated/astria.protocol.transactions.v1alpha1.serde.rs new file mode 100644 index 0000000000..9c89346b65 --- /dev/null +++ b/crates/astria-core/src/generated/astria.protocol.transactions.v1alpha1.serde.rs @@ -0,0 +1,2598 @@ +impl serde::Serialize for Action { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if self.value.is_some() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("astria.protocol.transactions.v1alpha1.Action", len)?; + if let Some(v) = self.value.as_ref() { + match v { + action::Value::TransferAction(v) => { + struct_ser.serialize_field("transferAction", v)?; + } + action::Value::SequenceAction(v) => { + struct_ser.serialize_field("sequenceAction", v)?; + } + action::Value::InitBridgeAccountAction(v) => { + struct_ser.serialize_field("initBridgeAccountAction", v)?; + } + action::Value::BridgeLockAction(v) => { + struct_ser.serialize_field("bridgeLockAction", v)?; + } + 
action::Value::BridgeUnlockAction(v) => { + struct_ser.serialize_field("bridgeUnlockAction", v)?; + } + action::Value::BridgeSudoChangeAction(v) => { + struct_ser.serialize_field("bridgeSudoChangeAction", v)?; + } + action::Value::IbcAction(v) => { + struct_ser.serialize_field("ibcAction", v)?; + } + action::Value::Ics20Withdrawal(v) => { + struct_ser.serialize_field("ics20Withdrawal", v)?; + } + action::Value::SudoAddressChangeAction(v) => { + struct_ser.serialize_field("sudoAddressChangeAction", v)?; + } + action::Value::ValidatorUpdateAction(v) => { + struct_ser.serialize_field("validatorUpdateAction", v)?; + } + action::Value::IbcRelayerChangeAction(v) => { + struct_ser.serialize_field("ibcRelayerChangeAction", v)?; + } + action::Value::FeeAssetChangeAction(v) => { + struct_ser.serialize_field("feeAssetChangeAction", v)?; + } + action::Value::FeeChangeAction(v) => { + struct_ser.serialize_field("feeChangeAction", v)?; + } + } + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for Action { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "transfer_action", + "transferAction", + "sequence_action", + "sequenceAction", + "init_bridge_account_action", + "initBridgeAccountAction", + "bridge_lock_action", + "bridgeLockAction", + "bridge_unlock_action", + "bridgeUnlockAction", + "bridge_sudo_change_action", + "bridgeSudoChangeAction", + "ibc_action", + "ibcAction", + "ics20_withdrawal", + "ics20Withdrawal", + "sudo_address_change_action", + "sudoAddressChangeAction", + "validator_update_action", + "validatorUpdateAction", + "ibc_relayer_change_action", + "ibcRelayerChangeAction", + "fee_asset_change_action", + "feeAssetChangeAction", + "fee_change_action", + "feeChangeAction", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + TransferAction, + SequenceAction, + InitBridgeAccountAction, + BridgeLockAction, + BridgeUnlockAction, + BridgeSudoChangeAction, + IbcAction, + Ics20Withdrawal, + SudoAddressChangeAction, + ValidatorUpdateAction, + IbcRelayerChangeAction, + FeeAssetChangeAction, + FeeChangeAction, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "transferAction" | "transfer_action" => Ok(GeneratedField::TransferAction), + "sequenceAction" | "sequence_action" => Ok(GeneratedField::SequenceAction), + "initBridgeAccountAction" | "init_bridge_account_action" => Ok(GeneratedField::InitBridgeAccountAction), + "bridgeLockAction" | "bridge_lock_action" => Ok(GeneratedField::BridgeLockAction), + "bridgeUnlockAction" | "bridge_unlock_action" => Ok(GeneratedField::BridgeUnlockAction), + "bridgeSudoChangeAction" | "bridge_sudo_change_action" => Ok(GeneratedField::BridgeSudoChangeAction), + "ibcAction" | "ibc_action" => Ok(GeneratedField::IbcAction), + "ics20Withdrawal" | "ics20_withdrawal" => Ok(GeneratedField::Ics20Withdrawal), + "sudoAddressChangeAction" | "sudo_address_change_action" => Ok(GeneratedField::SudoAddressChangeAction), + 
"validatorUpdateAction" | "validator_update_action" => Ok(GeneratedField::ValidatorUpdateAction), + "ibcRelayerChangeAction" | "ibc_relayer_change_action" => Ok(GeneratedField::IbcRelayerChangeAction), + "feeAssetChangeAction" | "fee_asset_change_action" => Ok(GeneratedField::FeeAssetChangeAction), + "feeChangeAction" | "fee_change_action" => Ok(GeneratedField::FeeChangeAction), + _ => Err(serde::de::Error::unknown_field(value, FIELDS)), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = Action; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct astria.protocol.transactions.v1alpha1.Action") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut value__ = None; + while let Some(k) = map_.next_key()? { + match k { + GeneratedField::TransferAction => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("transferAction")); + } + value__ = map_.next_value::<::std::option::Option<_>>()?.map(action::Value::TransferAction) +; + } + GeneratedField::SequenceAction => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("sequenceAction")); + } + value__ = map_.next_value::<::std::option::Option<_>>()?.map(action::Value::SequenceAction) +; + } + GeneratedField::InitBridgeAccountAction => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("initBridgeAccountAction")); + } + value__ = map_.next_value::<::std::option::Option<_>>()?.map(action::Value::InitBridgeAccountAction) +; + } + GeneratedField::BridgeLockAction => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("bridgeLockAction")); + } + value__ = map_.next_value::<::std::option::Option<_>>()?.map(action::Value::BridgeLockAction) +; + } + GeneratedField::BridgeUnlockAction => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("bridgeUnlockAction")); + } + value__ = map_.next_value::<::std::option::Option<_>>()?.map(action::Value::BridgeUnlockAction) +; + } + GeneratedField::BridgeSudoChangeAction => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("bridgeSudoChangeAction")); + } + value__ = map_.next_value::<::std::option::Option<_>>()?.map(action::Value::BridgeSudoChangeAction) +; + } + GeneratedField::IbcAction => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("ibcAction")); + } + value__ = map_.next_value::<::std::option::Option<_>>()?.map(action::Value::IbcAction) +; + } + GeneratedField::Ics20Withdrawal => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("ics20Withdrawal")); + } + value__ = map_.next_value::<::std::option::Option<_>>()?.map(action::Value::Ics20Withdrawal) +; + } + GeneratedField::SudoAddressChangeAction => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("sudoAddressChangeAction")); + } + value__ = map_.next_value::<::std::option::Option<_>>()?.map(action::Value::SudoAddressChangeAction) +; + } + GeneratedField::ValidatorUpdateAction => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("validatorUpdateAction")); + } + value__ = map_.next_value::<::std::option::Option<_>>()?.map(action::Value::ValidatorUpdateAction) +; + } + GeneratedField::IbcRelayerChangeAction => { + if value__.is_some() { + return 
Err(serde::de::Error::duplicate_field("ibcRelayerChangeAction")); + } + value__ = map_.next_value::<::std::option::Option<_>>()?.map(action::Value::IbcRelayerChangeAction) +; + } + GeneratedField::FeeAssetChangeAction => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("feeAssetChangeAction")); + } + value__ = map_.next_value::<::std::option::Option<_>>()?.map(action::Value::FeeAssetChangeAction) +; + } + GeneratedField::FeeChangeAction => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("feeChangeAction")); + } + value__ = map_.next_value::<::std::option::Option<_>>()?.map(action::Value::FeeChangeAction) +; + } + } + } + Ok(Action { + value: value__, + }) + } + } + deserializer.deserialize_struct("astria.protocol.transactions.v1alpha1.Action", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for BridgeLockAction { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if self.to.is_some() { + len += 1; + } + if self.amount.is_some() { + len += 1; + } + if !self.asset.is_empty() { + len += 1; + } + if !self.fee_asset.is_empty() { + len += 1; + } + if !self.destination_chain_address.is_empty() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("astria.protocol.transactions.v1alpha1.BridgeLockAction", len)?; + if let Some(v) = self.to.as_ref() { + struct_ser.serialize_field("to", v)?; + } + if let Some(v) = self.amount.as_ref() { + struct_ser.serialize_field("amount", v)?; + } + if !self.asset.is_empty() { + struct_ser.serialize_field("asset", &self.asset)?; + } + if !self.fee_asset.is_empty() { + struct_ser.serialize_field("feeAsset", &self.fee_asset)?; + } + if !self.destination_chain_address.is_empty() { + struct_ser.serialize_field("destinationChainAddress", &self.destination_chain_address)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for BridgeLockAction { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "to", + "amount", + "asset", + "fee_asset", + "feeAsset", + "destination_chain_address", + "destinationChainAddress", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + To, + Amount, + Asset, + FeeAsset, + DestinationChainAddress, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "to" => Ok(GeneratedField::To), + "amount" => Ok(GeneratedField::Amount), + "asset" => Ok(GeneratedField::Asset), + "feeAsset" | "fee_asset" => Ok(GeneratedField::FeeAsset), + "destinationChainAddress" | "destination_chain_address" => Ok(GeneratedField::DestinationChainAddress), + _ => Err(serde::de::Error::unknown_field(value, FIELDS)), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = BridgeLockAction; + + fn expecting(&self, formatter: &mut 
std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct astria.protocol.transactions.v1alpha1.BridgeLockAction") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut to__ = None; + let mut amount__ = None; + let mut asset__ = None; + let mut fee_asset__ = None; + let mut destination_chain_address__ = None; + while let Some(k) = map_.next_key()? { + match k { + GeneratedField::To => { + if to__.is_some() { + return Err(serde::de::Error::duplicate_field("to")); + } + to__ = map_.next_value()?; + } + GeneratedField::Amount => { + if amount__.is_some() { + return Err(serde::de::Error::duplicate_field("amount")); + } + amount__ = map_.next_value()?; + } + GeneratedField::Asset => { + if asset__.is_some() { + return Err(serde::de::Error::duplicate_field("asset")); + } + asset__ = Some(map_.next_value()?); + } + GeneratedField::FeeAsset => { + if fee_asset__.is_some() { + return Err(serde::de::Error::duplicate_field("feeAsset")); + } + fee_asset__ = Some(map_.next_value()?); + } + GeneratedField::DestinationChainAddress => { + if destination_chain_address__.is_some() { + return Err(serde::de::Error::duplicate_field("destinationChainAddress")); + } + destination_chain_address__ = Some(map_.next_value()?); + } + } + } + Ok(BridgeLockAction { + to: to__, + amount: amount__, + asset: asset__.unwrap_or_default(), + fee_asset: fee_asset__.unwrap_or_default(), + destination_chain_address: destination_chain_address__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("astria.protocol.transactions.v1alpha1.BridgeLockAction", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for BridgeSudoChangeAction { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if self.bridge_address.is_some() { + len += 1; + } + if self.new_sudo_address.is_some() { + len += 1; + } + if self.new_withdrawer_address.is_some() { + len += 1; + } + if !self.fee_asset.is_empty() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("astria.protocol.transactions.v1alpha1.BridgeSudoChangeAction", len)?; + if let Some(v) = self.bridge_address.as_ref() { + struct_ser.serialize_field("bridgeAddress", v)?; + } + if let Some(v) = self.new_sudo_address.as_ref() { + struct_ser.serialize_field("newSudoAddress", v)?; + } + if let Some(v) = self.new_withdrawer_address.as_ref() { + struct_ser.serialize_field("newWithdrawerAddress", v)?; + } + if !self.fee_asset.is_empty() { + struct_ser.serialize_field("feeAsset", &self.fee_asset)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for BridgeSudoChangeAction { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "bridge_address", + "bridgeAddress", + "new_sudo_address", + "newSudoAddress", + "new_withdrawer_address", + "newWithdrawerAddress", + "fee_asset", + "feeAsset", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + BridgeAddress, + NewSudoAddress, + NewWithdrawerAddress, + FeeAsset, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut 
std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "bridgeAddress" | "bridge_address" => Ok(GeneratedField::BridgeAddress), + "newSudoAddress" | "new_sudo_address" => Ok(GeneratedField::NewSudoAddress), + "newWithdrawerAddress" | "new_withdrawer_address" => Ok(GeneratedField::NewWithdrawerAddress), + "feeAsset" | "fee_asset" => Ok(GeneratedField::FeeAsset), + _ => Err(serde::de::Error::unknown_field(value, FIELDS)), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = BridgeSudoChangeAction; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct astria.protocol.transactions.v1alpha1.BridgeSudoChangeAction") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut bridge_address__ = None; + let mut new_sudo_address__ = None; + let mut new_withdrawer_address__ = None; + let mut fee_asset__ = None; + while let Some(k) = map_.next_key()? { + match k { + GeneratedField::BridgeAddress => { + if bridge_address__.is_some() { + return Err(serde::de::Error::duplicate_field("bridgeAddress")); + } + bridge_address__ = map_.next_value()?; + } + GeneratedField::NewSudoAddress => { + if new_sudo_address__.is_some() { + return Err(serde::de::Error::duplicate_field("newSudoAddress")); + } + new_sudo_address__ = map_.next_value()?; + } + GeneratedField::NewWithdrawerAddress => { + if new_withdrawer_address__.is_some() { + return Err(serde::de::Error::duplicate_field("newWithdrawerAddress")); + } + new_withdrawer_address__ = map_.next_value()?; + } + GeneratedField::FeeAsset => { + if fee_asset__.is_some() { + return Err(serde::de::Error::duplicate_field("feeAsset")); + } + fee_asset__ = Some(map_.next_value()?); + } + } + } + Ok(BridgeSudoChangeAction { + bridge_address: bridge_address__, + new_sudo_address: new_sudo_address__, + new_withdrawer_address: new_withdrawer_address__, + fee_asset: fee_asset__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("astria.protocol.transactions.v1alpha1.BridgeSudoChangeAction", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for BridgeUnlockAction { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if self.to.is_some() { + len += 1; + } + if self.amount.is_some() { + len += 1; + } + if !self.fee_asset.is_empty() { + len += 1; + } + if !self.memo.is_empty() { + len += 1; + } + if self.bridge_address.is_some() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("astria.protocol.transactions.v1alpha1.BridgeUnlockAction", len)?; + if let Some(v) = self.to.as_ref() { + struct_ser.serialize_field("to", v)?; + } + if let Some(v) = self.amount.as_ref() { + struct_ser.serialize_field("amount", v)?; + } + if !self.fee_asset.is_empty() { + struct_ser.serialize_field("feeAsset", &self.fee_asset)?; + } + if !self.memo.is_empty() { + struct_ser.serialize_field("memo", &self.memo)?; + } + if let Some(v) = self.bridge_address.as_ref() { + struct_ser.serialize_field("bridgeAddress", v)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for BridgeUnlockAction { + 
#[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "to", + "amount", + "fee_asset", + "feeAsset", + "memo", + "bridge_address", + "bridgeAddress", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + To, + Amount, + FeeAsset, + Memo, + BridgeAddress, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "to" => Ok(GeneratedField::To), + "amount" => Ok(GeneratedField::Amount), + "feeAsset" | "fee_asset" => Ok(GeneratedField::FeeAsset), + "memo" => Ok(GeneratedField::Memo), + "bridgeAddress" | "bridge_address" => Ok(GeneratedField::BridgeAddress), + _ => Err(serde::de::Error::unknown_field(value, FIELDS)), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = BridgeUnlockAction; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct astria.protocol.transactions.v1alpha1.BridgeUnlockAction") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut to__ = None; + let mut amount__ = None; + let mut fee_asset__ = None; + let mut memo__ = None; + let mut bridge_address__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::To => { + if to__.is_some() { + return Err(serde::de::Error::duplicate_field("to")); + } + to__ = map_.next_value()?; + } + GeneratedField::Amount => { + if amount__.is_some() { + return Err(serde::de::Error::duplicate_field("amount")); + } + amount__ = map_.next_value()?; + } + GeneratedField::FeeAsset => { + if fee_asset__.is_some() { + return Err(serde::de::Error::duplicate_field("feeAsset")); + } + fee_asset__ = Some(map_.next_value()?); + } + GeneratedField::Memo => { + if memo__.is_some() { + return Err(serde::de::Error::duplicate_field("memo")); + } + memo__ = Some(map_.next_value()?); + } + GeneratedField::BridgeAddress => { + if bridge_address__.is_some() { + return Err(serde::de::Error::duplicate_field("bridgeAddress")); + } + bridge_address__ = map_.next_value()?; + } + } + } + Ok(BridgeUnlockAction { + to: to__, + amount: amount__, + fee_asset: fee_asset__.unwrap_or_default(), + memo: memo__.unwrap_or_default(), + bridge_address: bridge_address__, + }) + } + } + deserializer.deserialize_struct("astria.protocol.transactions.v1alpha1.BridgeUnlockAction", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for FeeAssetChangeAction { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if self.value.is_some() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("astria.protocol.transactions.v1alpha1.FeeAssetChangeAction", len)?; + if let Some(v) = self.value.as_ref() { + match v { + fee_asset_change_action::Value::Addition(v) => { + struct_ser.serialize_field("addition", v)?; + } + fee_asset_change_action::Value::Removal(v) => { + struct_ser.serialize_field("removal", v)?; + } + } + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for FeeAssetChangeAction { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "addition", + "removal", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Addition, + Removal, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "addition" => Ok(GeneratedField::Addition), + "removal" => Ok(GeneratedField::Removal), + _ => Err(serde::de::Error::unknown_field(value, FIELDS)), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = FeeAssetChangeAction; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct astria.protocol.transactions.v1alpha1.FeeAssetChangeAction") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut value__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::Addition => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("addition")); + } + value__ = map_.next_value::<::std::option::Option<_>>()?.map(fee_asset_change_action::Value::Addition); + } + GeneratedField::Removal => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("removal")); + } + value__ = map_.next_value::<::std::option::Option<_>>()?.map(fee_asset_change_action::Value::Removal); + } + } + } + Ok(FeeAssetChangeAction { + value: value__, + }) + } + } + deserializer.deserialize_struct("astria.protocol.transactions.v1alpha1.FeeAssetChangeAction", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for FeeChangeAction { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if self.value.is_some() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("astria.protocol.transactions.v1alpha1.FeeChangeAction", len)?; + if let Some(v) = self.value.as_ref() { + match v { + fee_change_action::Value::TransferBaseFee(v) => { + struct_ser.serialize_field("transferBaseFee", v)?; + } + fee_change_action::Value::SequenceBaseFee(v) => { + struct_ser.serialize_field("sequenceBaseFee", v)?; + } + fee_change_action::Value::SequenceByteCostMultiplier(v) => { + struct_ser.serialize_field("sequenceByteCostMultiplier", v)?; + } + fee_change_action::Value::InitBridgeAccountBaseFee(v) => { + struct_ser.serialize_field("initBridgeAccountBaseFee", v)?; + } + fee_change_action::Value::BridgeLockByteCostMultiplier(v) => { + struct_ser.serialize_field("bridgeLockByteCostMultiplier", v)?; + } + fee_change_action::Value::BridgeSudoChangeBaseFee(v) => { + struct_ser.serialize_field("bridgeSudoChangeBaseFee", v)?; + } + fee_change_action::Value::Ics20WithdrawalBaseFee(v) => { + struct_ser.serialize_field("ics20WithdrawalBaseFee", v)?; + } + } + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for FeeChangeAction { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "transfer_base_fee", + "transferBaseFee", + "sequence_base_fee", + "sequenceBaseFee", + "sequence_byte_cost_multiplier", + "sequenceByteCostMultiplier", + "init_bridge_account_base_fee", + "initBridgeAccountBaseFee", + "bridge_lock_byte_cost_multiplier", + "bridgeLockByteCostMultiplier", + "bridge_sudo_change_base_fee", + "bridgeSudoChangeBaseFee", + "ics20_withdrawal_base_fee", + "ics20WithdrawalBaseFee", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + TransferBaseFee, + SequenceBaseFee, + SequenceByteCostMultiplier, + InitBridgeAccountBaseFee, + BridgeLockByteCostMultiplier, + BridgeSudoChangeBaseFee, + Ics20WithdrawalBaseFee, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "transferBaseFee" | "transfer_base_fee" => Ok(GeneratedField::TransferBaseFee), + "sequenceBaseFee" | "sequence_base_fee" 
=> Ok(GeneratedField::SequenceBaseFee), + "sequenceByteCostMultiplier" | "sequence_byte_cost_multiplier" => Ok(GeneratedField::SequenceByteCostMultiplier), + "initBridgeAccountBaseFee" | "init_bridge_account_base_fee" => Ok(GeneratedField::InitBridgeAccountBaseFee), + "bridgeLockByteCostMultiplier" | "bridge_lock_byte_cost_multiplier" => Ok(GeneratedField::BridgeLockByteCostMultiplier), + "bridgeSudoChangeBaseFee" | "bridge_sudo_change_base_fee" => Ok(GeneratedField::BridgeSudoChangeBaseFee), + "ics20WithdrawalBaseFee" | "ics20_withdrawal_base_fee" => Ok(GeneratedField::Ics20WithdrawalBaseFee), + _ => Err(serde::de::Error::unknown_field(value, FIELDS)), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = FeeChangeAction; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct astria.protocol.transactions.v1alpha1.FeeChangeAction") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut value__ = None; + while let Some(k) = map_.next_key()? { + match k { + GeneratedField::TransferBaseFee => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("transferBaseFee")); + } + value__ = map_.next_value::<::std::option::Option<_>>()?.map(fee_change_action::Value::TransferBaseFee) +; + } + GeneratedField::SequenceBaseFee => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("sequenceBaseFee")); + } + value__ = map_.next_value::<::std::option::Option<_>>()?.map(fee_change_action::Value::SequenceBaseFee) +; + } + GeneratedField::SequenceByteCostMultiplier => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("sequenceByteCostMultiplier")); + } + value__ = map_.next_value::<::std::option::Option<_>>()?.map(fee_change_action::Value::SequenceByteCostMultiplier) +; + } + GeneratedField::InitBridgeAccountBaseFee => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("initBridgeAccountBaseFee")); + } + value__ = map_.next_value::<::std::option::Option<_>>()?.map(fee_change_action::Value::InitBridgeAccountBaseFee) +; + } + GeneratedField::BridgeLockByteCostMultiplier => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("bridgeLockByteCostMultiplier")); + } + value__ = map_.next_value::<::std::option::Option<_>>()?.map(fee_change_action::Value::BridgeLockByteCostMultiplier) +; + } + GeneratedField::BridgeSudoChangeBaseFee => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("bridgeSudoChangeBaseFee")); + } + value__ = map_.next_value::<::std::option::Option<_>>()?.map(fee_change_action::Value::BridgeSudoChangeBaseFee) +; + } + GeneratedField::Ics20WithdrawalBaseFee => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("ics20WithdrawalBaseFee")); + } + value__ = map_.next_value::<::std::option::Option<_>>()?.map(fee_change_action::Value::Ics20WithdrawalBaseFee) +; + } + } + } + Ok(FeeChangeAction { + value: value__, + }) + } + } + deserializer.deserialize_struct("astria.protocol.transactions.v1alpha1.FeeChangeAction", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for IbcHeight { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if self.revision_number != 0 { + len += 1; + } + if 
self.revision_height != 0 { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("astria.protocol.transactions.v1alpha1.IbcHeight", len)?; + if self.revision_number != 0 { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("revisionNumber", ToString::to_string(&self.revision_number).as_str())?; + } + if self.revision_height != 0 { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("revisionHeight", ToString::to_string(&self.revision_height).as_str())?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for IbcHeight { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "revision_number", + "revisionNumber", + "revision_height", + "revisionHeight", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + RevisionNumber, + RevisionHeight, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "revisionNumber" | "revision_number" => Ok(GeneratedField::RevisionNumber), + "revisionHeight" | "revision_height" => Ok(GeneratedField::RevisionHeight), + _ => Err(serde::de::Error::unknown_field(value, FIELDS)), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = IbcHeight; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct astria.protocol.transactions.v1alpha1.IbcHeight") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut revision_number__ = None; + let mut revision_height__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::RevisionNumber => { + if revision_number__.is_some() { + return Err(serde::de::Error::duplicate_field("revisionNumber")); + } + revision_number__ = + Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) + ; + } + GeneratedField::RevisionHeight => { + if revision_height__.is_some() { + return Err(serde::de::Error::duplicate_field("revisionHeight")); + } + revision_height__ = + Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) + ; + } + } + } + Ok(IbcHeight { + revision_number: revision_number__.unwrap_or_default(), + revision_height: revision_height__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("astria.protocol.transactions.v1alpha1.IbcHeight", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for IbcRelayerChangeAction { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if self.value.is_some() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("astria.protocol.transactions.v1alpha1.IbcRelayerChangeAction", len)?; + if let Some(v) = self.value.as_ref() { + match v { + ibc_relayer_change_action::Value::Addition(v) => { + struct_ser.serialize_field("addition", v)?; + } + ibc_relayer_change_action::Value::Removal(v) => { + struct_ser.serialize_field("removal", v)?; + } + } + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for IbcRelayerChangeAction { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "addition", + "removal", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Addition, + Removal, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "addition" => Ok(GeneratedField::Addition), + "removal" => Ok(GeneratedField::Removal), + _ => Err(serde::de::Error::unknown_field(value, FIELDS)), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = IbcRelayerChangeAction; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct astria.protocol.transactions.v1alpha1.IbcRelayerChangeAction") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut value__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::Addition => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("addition")); + } + value__ = map_.next_value::<::std::option::Option<_>>()?.map(ibc_relayer_change_action::Value::Addition) +; + } + GeneratedField::Removal => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("removal")); + } + value__ = map_.next_value::<::std::option::Option<_>>()?.map(ibc_relayer_change_action::Value::Removal) +; + } + } + } + Ok(IbcRelayerChangeAction { + value: value__, + }) + } + } + deserializer.deserialize_struct("astria.protocol.transactions.v1alpha1.IbcRelayerChangeAction", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for Ics20Withdrawal { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if self.amount.is_some() { + len += 1; + } + if !self.denom.is_empty() { + len += 1; + } + if !self.destination_chain_address.is_empty() { + len += 1; + } + if self.return_address.is_some() { + len += 1; + } + if self.timeout_height.is_some() { + len += 1; + } + if self.timeout_time != 0 { + len += 1; + } + if !self.source_channel.is_empty() { + len += 1; + } + if !self.fee_asset.is_empty() { + len += 1; + } + if !self.memo.is_empty() { + len += 1; + } + if self.bridge_address.is_some() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("astria.protocol.transactions.v1alpha1.Ics20Withdrawal", len)?; + if let Some(v) = self.amount.as_ref() { + struct_ser.serialize_field("amount", v)?; + } + if !self.denom.is_empty() { + struct_ser.serialize_field("denom", &self.denom)?; + } + if !self.destination_chain_address.is_empty() { + struct_ser.serialize_field("destinationChainAddress", &self.destination_chain_address)?; + } + if let Some(v) = self.return_address.as_ref() { + struct_ser.serialize_field("returnAddress", v)?; + } + if let Some(v) = self.timeout_height.as_ref() { + struct_ser.serialize_field("timeoutHeight", v)?; + } + if self.timeout_time != 0 { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("timeoutTime", ToString::to_string(&self.timeout_time).as_str())?; + } + if !self.source_channel.is_empty() { + struct_ser.serialize_field("sourceChannel", &self.source_channel)?; + } + if !self.fee_asset.is_empty() { + struct_ser.serialize_field("feeAsset", &self.fee_asset)?; + } + if !self.memo.is_empty() { + struct_ser.serialize_field("memo", &self.memo)?; + } + if let Some(v) = self.bridge_address.as_ref() { + struct_ser.serialize_field("bridgeAddress", v)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for Ics20Withdrawal { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "amount", + "denom", + "destination_chain_address", + "destinationChainAddress", + "return_address", + "returnAddress", + "timeout_height", + "timeoutHeight", + "timeout_time", + "timeoutTime", + "source_channel", + "sourceChannel", + "fee_asset", + "feeAsset", + "memo", + "bridge_address", + "bridgeAddress", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Amount, + Denom, + DestinationChainAddress, + ReturnAddress, + TimeoutHeight, + TimeoutTime, + SourceChannel, + FeeAsset, + Memo, + BridgeAddress, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { 
+ struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "amount" => Ok(GeneratedField::Amount), + "denom" => Ok(GeneratedField::Denom), + "destinationChainAddress" | "destination_chain_address" => Ok(GeneratedField::DestinationChainAddress), + "returnAddress" | "return_address" => Ok(GeneratedField::ReturnAddress), + "timeoutHeight" | "timeout_height" => Ok(GeneratedField::TimeoutHeight), + "timeoutTime" | "timeout_time" => Ok(GeneratedField::TimeoutTime), + "sourceChannel" | "source_channel" => Ok(GeneratedField::SourceChannel), + "feeAsset" | "fee_asset" => Ok(GeneratedField::FeeAsset), + "memo" => Ok(GeneratedField::Memo), + "bridgeAddress" | "bridge_address" => Ok(GeneratedField::BridgeAddress), + _ => Err(serde::de::Error::unknown_field(value, FIELDS)), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = Ics20Withdrawal; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct astria.protocol.transactions.v1alpha1.Ics20Withdrawal") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut amount__ = None; + let mut denom__ = None; + let mut destination_chain_address__ = None; + let mut return_address__ = None; + let mut timeout_height__ = None; + let mut timeout_time__ = None; + let mut source_channel__ = None; + let mut fee_asset__ = None; + let mut memo__ = None; + let mut bridge_address__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::Amount => { + if amount__.is_some() { + return Err(serde::de::Error::duplicate_field("amount")); + } + amount__ = map_.next_value()?; + } + GeneratedField::Denom => { + if denom__.is_some() { + return Err(serde::de::Error::duplicate_field("denom")); + } + denom__ = Some(map_.next_value()?); + } + GeneratedField::DestinationChainAddress => { + if destination_chain_address__.is_some() { + return Err(serde::de::Error::duplicate_field("destinationChainAddress")); + } + destination_chain_address__ = Some(map_.next_value()?); + } + GeneratedField::ReturnAddress => { + if return_address__.is_some() { + return Err(serde::de::Error::duplicate_field("returnAddress")); + } + return_address__ = map_.next_value()?; + } + GeneratedField::TimeoutHeight => { + if timeout_height__.is_some() { + return Err(serde::de::Error::duplicate_field("timeoutHeight")); + } + timeout_height__ = map_.next_value()?; + } + GeneratedField::TimeoutTime => { + if timeout_time__.is_some() { + return Err(serde::de::Error::duplicate_field("timeoutTime")); + } + timeout_time__ = + Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) + ; + } + GeneratedField::SourceChannel => { + if source_channel__.is_some() { + return Err(serde::de::Error::duplicate_field("sourceChannel")); + } + source_channel__ = Some(map_.next_value()?); + } + GeneratedField::FeeAsset => { + if fee_asset__.is_some() { + return Err(serde::de::Error::duplicate_field("feeAsset")); + } + fee_asset__ = Some(map_.next_value()?); + } + GeneratedField::Memo => { + if memo__.is_some() { + return Err(serde::de::Error::duplicate_field("memo")); + } + memo__ = Some(map_.next_value()?); + } + GeneratedField::BridgeAddress => { + if bridge_address__.is_some() { + return Err(serde::de::Error::duplicate_field("bridgeAddress")); + } + bridge_address__ = map_.next_value()?; + } + } + } + Ok(Ics20Withdrawal { + amount: amount__, + denom: denom__.unwrap_or_default(), + destination_chain_address: destination_chain_address__.unwrap_or_default(), + return_address: return_address__, + timeout_height: timeout_height__, + timeout_time: timeout_time__.unwrap_or_default(), + source_channel: source_channel__.unwrap_or_default(), + fee_asset: fee_asset__.unwrap_or_default(), + memo: memo__.unwrap_or_default(), + bridge_address: bridge_address__, + }) + } + } + deserializer.deserialize_struct("astria.protocol.transactions.v1alpha1.Ics20Withdrawal", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for InitBridgeAccountAction { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if self.rollup_id.is_some() { + len += 1; + } + if !self.asset.is_empty() { + len += 1; + } + if !self.fee_asset.is_empty() { + len += 1; + } + if self.sudo_address.is_some() { + len += 1; + } + if self.withdrawer_address.is_some() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("astria.protocol.transactions.v1alpha1.InitBridgeAccountAction", len)?; + if let Some(v) = self.rollup_id.as_ref() { + struct_ser.serialize_field("rollupId", v)?; + } + if !self.asset.is_empty() { + struct_ser.serialize_field("asset", &self.asset)?; + } + if !self.fee_asset.is_empty() { + struct_ser.serialize_field("feeAsset", &self.fee_asset)?; + } + if let Some(v) = self.sudo_address.as_ref() { + struct_ser.serialize_field("sudoAddress", v)?; + } + if let Some(v) = self.withdrawer_address.as_ref() { + 
struct_ser.serialize_field("withdrawerAddress", v)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for InitBridgeAccountAction { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "rollup_id", + "rollupId", + "asset", + "fee_asset", + "feeAsset", + "sudo_address", + "sudoAddress", + "withdrawer_address", + "withdrawerAddress", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + RollupId, + Asset, + FeeAsset, + SudoAddress, + WithdrawerAddress, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "rollupId" | "rollup_id" => Ok(GeneratedField::RollupId), + "asset" => Ok(GeneratedField::Asset), + "feeAsset" | "fee_asset" => Ok(GeneratedField::FeeAsset), + "sudoAddress" | "sudo_address" => Ok(GeneratedField::SudoAddress), + "withdrawerAddress" | "withdrawer_address" => Ok(GeneratedField::WithdrawerAddress), + _ => Err(serde::de::Error::unknown_field(value, FIELDS)), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = InitBridgeAccountAction; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct astria.protocol.transactions.v1alpha1.InitBridgeAccountAction") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut rollup_id__ = None; + let mut asset__ = None; + let mut fee_asset__ = None; + let mut sudo_address__ = None; + let mut withdrawer_address__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::RollupId => { + if rollup_id__.is_some() { + return Err(serde::de::Error::duplicate_field("rollupId")); + } + rollup_id__ = map_.next_value()?; + } + GeneratedField::Asset => { + if asset__.is_some() { + return Err(serde::de::Error::duplicate_field("asset")); + } + asset__ = Some(map_.next_value()?); + } + GeneratedField::FeeAsset => { + if fee_asset__.is_some() { + return Err(serde::de::Error::duplicate_field("feeAsset")); + } + fee_asset__ = Some(map_.next_value()?); + } + GeneratedField::SudoAddress => { + if sudo_address__.is_some() { + return Err(serde::de::Error::duplicate_field("sudoAddress")); + } + sudo_address__ = map_.next_value()?; + } + GeneratedField::WithdrawerAddress => { + if withdrawer_address__.is_some() { + return Err(serde::de::Error::duplicate_field("withdrawerAddress")); + } + withdrawer_address__ = map_.next_value()?; + } + } + } + Ok(InitBridgeAccountAction { + rollup_id: rollup_id__, + asset: asset__.unwrap_or_default(), + fee_asset: fee_asset__.unwrap_or_default(), + sudo_address: sudo_address__, + withdrawer_address: withdrawer_address__, + }) + } + } + deserializer.deserialize_struct("astria.protocol.transactions.v1alpha1.InitBridgeAccountAction", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for SequenceAction { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if self.rollup_id.is_some() { + len += 1; + } + if !self.data.is_empty() { + len += 1; + } + if !self.fee_asset.is_empty() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("astria.protocol.transactions.v1alpha1.SequenceAction", len)?; + if let Some(v) = self.rollup_id.as_ref() { + struct_ser.serialize_field("rollupId", v)?; + } + if !self.data.is_empty() { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("data", pbjson::private::base64::encode(&self.data).as_str())?; + } + if !self.fee_asset.is_empty() { + struct_ser.serialize_field("feeAsset", &self.fee_asset)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for SequenceAction { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "rollup_id", + "rollupId", + "data", + "fee_asset", + "feeAsset", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + RollupId, + Data, + FeeAsset, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "rollupId" | "rollup_id" => Ok(GeneratedField::RollupId), + "data" => Ok(GeneratedField::Data), + "feeAsset" | "fee_asset" => Ok(GeneratedField::FeeAsset), + _ => Err(serde::de::Error::unknown_field(value, FIELDS)), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = SequenceAction; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 
+ formatter.write_str("struct astria.protocol.transactions.v1alpha1.SequenceAction") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut rollup_id__ = None; + let mut data__ = None; + let mut fee_asset__ = None; + while let Some(k) = map_.next_key()? { + match k { + GeneratedField::RollupId => { + if rollup_id__.is_some() { + return Err(serde::de::Error::duplicate_field("rollupId")); + } + rollup_id__ = map_.next_value()?; + } + GeneratedField::Data => { + if data__.is_some() { + return Err(serde::de::Error::duplicate_field("data")); + } + data__ = + Some(map_.next_value::<::pbjson::private::BytesDeserialize<_>>()?.0) + ; + } + GeneratedField::FeeAsset => { + if fee_asset__.is_some() { + return Err(serde::de::Error::duplicate_field("feeAsset")); + } + fee_asset__ = Some(map_.next_value()?); + } + } + } + Ok(SequenceAction { + rollup_id: rollup_id__, + data: data__.unwrap_or_default(), + fee_asset: fee_asset__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("astria.protocol.transactions.v1alpha1.SequenceAction", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for SignedTransaction { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.signature.is_empty() { + len += 1; + } + if !self.public_key.is_empty() { + len += 1; + } + if self.transaction.is_some() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("astria.protocol.transactions.v1alpha1.SignedTransaction", len)?; + if !self.signature.is_empty() { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("signature", pbjson::private::base64::encode(&self.signature).as_str())?; + } + if !self.public_key.is_empty() { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("publicKey", pbjson::private::base64::encode(&self.public_key).as_str())?; + } + if let Some(v) = self.transaction.as_ref() { + struct_ser.serialize_field("transaction", v)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for SignedTransaction { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "signature", + "public_key", + "publicKey", + "transaction", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Signature, + PublicKey, + Transaction, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "signature" => Ok(GeneratedField::Signature), + "publicKey" | "public_key" => Ok(GeneratedField::PublicKey), + "transaction" => Ok(GeneratedField::Transaction), + _ => Err(serde::de::Error::unknown_field(value, FIELDS)), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = SignedTransaction; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> 
std::fmt::Result { + formatter.write_str("struct astria.protocol.transactions.v1alpha1.SignedTransaction") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut signature__ = None; + let mut public_key__ = None; + let mut transaction__ = None; + while let Some(k) = map_.next_key()? { + match k { + GeneratedField::Signature => { + if signature__.is_some() { + return Err(serde::de::Error::duplicate_field("signature")); + } + signature__ = + Some(map_.next_value::<::pbjson::private::BytesDeserialize<_>>()?.0) + ; + } + GeneratedField::PublicKey => { + if public_key__.is_some() { + return Err(serde::de::Error::duplicate_field("publicKey")); + } + public_key__ = + Some(map_.next_value::<::pbjson::private::BytesDeserialize<_>>()?.0) + ; + } + GeneratedField::Transaction => { + if transaction__.is_some() { + return Err(serde::de::Error::duplicate_field("transaction")); + } + transaction__ = map_.next_value()?; + } + } + } + Ok(SignedTransaction { + signature: signature__.unwrap_or_default(), + public_key: public_key__.unwrap_or_default(), + transaction: transaction__, + }) + } + } + deserializer.deserialize_struct("astria.protocol.transactions.v1alpha1.SignedTransaction", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for SudoAddressChangeAction { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if self.new_address.is_some() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("astria.protocol.transactions.v1alpha1.SudoAddressChangeAction", len)?; + if let Some(v) = self.new_address.as_ref() { + struct_ser.serialize_field("newAddress", v)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for SudoAddressChangeAction { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "new_address", + "newAddress", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + NewAddress, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "newAddress" | "new_address" => Ok(GeneratedField::NewAddress), + _ => Err(serde::de::Error::unknown_field(value, FIELDS)), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = SudoAddressChangeAction; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct astria.protocol.transactions.v1alpha1.SudoAddressChangeAction") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut new_address__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::NewAddress => { + if new_address__.is_some() { + return Err(serde::de::Error::duplicate_field("newAddress")); + } + new_address__ = map_.next_value()?; + } + } + } + Ok(SudoAddressChangeAction { + new_address: new_address__, + }) + } + } + deserializer.deserialize_struct("astria.protocol.transactions.v1alpha1.SudoAddressChangeAction", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for TransactionFee { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.asset.is_empty() { + len += 1; + } + if self.fee.is_some() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("astria.protocol.transactions.v1alpha1.TransactionFee", len)?; + if !self.asset.is_empty() { + struct_ser.serialize_field("asset", &self.asset)?; + } + if let Some(v) = self.fee.as_ref() { + struct_ser.serialize_field("fee", v)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for TransactionFee { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "asset", + "fee", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Asset, + Fee, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "asset" => Ok(GeneratedField::Asset), + "fee" => Ok(GeneratedField::Fee), + _ => Err(serde::de::Error::unknown_field(value, FIELDS)), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = TransactionFee; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct astria.protocol.transactions.v1alpha1.TransactionFee") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut asset__ = None; + let mut fee__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::Asset => { + if asset__.is_some() { + return Err(serde::de::Error::duplicate_field("asset")); + } + asset__ = Some(map_.next_value()?); + } + GeneratedField::Fee => { + if fee__.is_some() { + return Err(serde::de::Error::duplicate_field("fee")); + } + fee__ = map_.next_value()?; + } + } + } + Ok(TransactionFee { + asset: asset__.unwrap_or_default(), + fee: fee__, + }) + } + } + deserializer.deserialize_struct("astria.protocol.transactions.v1alpha1.TransactionFee", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for TransactionFeeResponse { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if self.height != 0 { + len += 1; + } + if !self.fees.is_empty() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("astria.protocol.transactions.v1alpha1.TransactionFeeResponse", len)?; + if self.height != 0 { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("height", ToString::to_string(&self.height).as_str())?; + } + if !self.fees.is_empty() { + struct_ser.serialize_field("fees", &self.fees)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for TransactionFeeResponse { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "height", + "fees", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Height, + Fees, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "height" => Ok(GeneratedField::Height), + "fees" => Ok(GeneratedField::Fees), + _ => Err(serde::de::Error::unknown_field(value, FIELDS)), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = TransactionFeeResponse; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct astria.protocol.transactions.v1alpha1.TransactionFeeResponse") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut height__ = None; + let mut fees__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::Height => { + if height__.is_some() { + return Err(serde::de::Error::duplicate_field("height")); + } + height__ = + Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) + ; + } + GeneratedField::Fees => { + if fees__.is_some() { + return Err(serde::de::Error::duplicate_field("fees")); + } + fees__ = Some(map_.next_value()?); + } + } + } + Ok(TransactionFeeResponse { + height: height__.unwrap_or_default(), + fees: fees__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("astria.protocol.transactions.v1alpha1.TransactionFeeResponse", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for TransactionParams { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if self.nonce != 0 { + len += 1; + } + if !self.chain_id.is_empty() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("astria.protocol.transactions.v1alpha1.TransactionParams", len)?; + if self.nonce != 0 { + struct_ser.serialize_field("nonce", &self.nonce)?; + } + if !self.chain_id.is_empty() { + struct_ser.serialize_field("chainId", &self.chain_id)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for TransactionParams { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "nonce", + "chain_id", + "chainId", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Nonce, + ChainId, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "nonce" => Ok(GeneratedField::Nonce), + "chainId" | "chain_id" => Ok(GeneratedField::ChainId), + _ => Err(serde::de::Error::unknown_field(value, FIELDS)), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = TransactionParams; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct astria.protocol.transactions.v1alpha1.TransactionParams") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut nonce__ = None; + let mut chain_id__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::Nonce => { + if nonce__.is_some() { + return Err(serde::de::Error::duplicate_field("nonce")); + } + nonce__ = + Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) + ; + } + GeneratedField::ChainId => { + if chain_id__.is_some() { + return Err(serde::de::Error::duplicate_field("chainId")); + } + chain_id__ = Some(map_.next_value()?); + } + } + } + Ok(TransactionParams { + nonce: nonce__.unwrap_or_default(), + chain_id: chain_id__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("astria.protocol.transactions.v1alpha1.TransactionParams", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for TransferAction { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if self.to.is_some() { + len += 1; + } + if self.amount.is_some() { + len += 1; + } + if !self.asset.is_empty() { + len += 1; + } + if !self.fee_asset.is_empty() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("astria.protocol.transactions.v1alpha1.TransferAction", len)?; + if let Some(v) = self.to.as_ref() { + struct_ser.serialize_field("to", v)?; + } + if let Some(v) = self.amount.as_ref() { + struct_ser.serialize_field("amount", v)?; + } + if !self.asset.is_empty() { + struct_ser.serialize_field("asset", &self.asset)?; + } + if !self.fee_asset.is_empty() { + struct_ser.serialize_field("feeAsset", &self.fee_asset)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for TransferAction { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "to", + "amount", + "asset", + "fee_asset", + "feeAsset", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + To, + Amount, + Asset, + FeeAsset, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "to" => Ok(GeneratedField::To), + "amount" => Ok(GeneratedField::Amount), + "asset" => Ok(GeneratedField::Asset), + "feeAsset" | "fee_asset" => Ok(GeneratedField::FeeAsset), + _ => Err(serde::de::Error::unknown_field(value, FIELDS)), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = TransferAction; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct astria.protocol.transactions.v1alpha1.TransferAction") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut to__ = None; + let mut amount__ = None; + let mut asset__ = None; + let mut fee_asset__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::To => { + if to__.is_some() { + return Err(serde::de::Error::duplicate_field("to")); + } + to__ = map_.next_value()?; + } + GeneratedField::Amount => { + if amount__.is_some() { + return Err(serde::de::Error::duplicate_field("amount")); + } + amount__ = map_.next_value()?; + } + GeneratedField::Asset => { + if asset__.is_some() { + return Err(serde::de::Error::duplicate_field("asset")); + } + asset__ = Some(map_.next_value()?); + } + GeneratedField::FeeAsset => { + if fee_asset__.is_some() { + return Err(serde::de::Error::duplicate_field("feeAsset")); + } + fee_asset__ = Some(map_.next_value()?); + } + } + } + Ok(TransferAction { + to: to__, + amount: amount__, + asset: asset__.unwrap_or_default(), + fee_asset: fee_asset__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("astria.protocol.transactions.v1alpha1.TransferAction", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for UnsignedTransaction { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.actions.is_empty() { + len += 1; + } + if self.params.is_some() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("astria.protocol.transactions.v1alpha1.UnsignedTransaction", len)?; + if !self.actions.is_empty() { + struct_ser.serialize_field("actions", &self.actions)?; + } + if let Some(v) = self.params.as_ref() { + struct_ser.serialize_field("params", v)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for UnsignedTransaction { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "actions", + "params", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Actions, + Params, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "actions" => Ok(GeneratedField::Actions), + "params" => Ok(GeneratedField::Params), + _ => Err(serde::de::Error::unknown_field(value, FIELDS)), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = UnsignedTransaction; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct astria.protocol.transactions.v1alpha1.UnsignedTransaction") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut actions__ = None; + let mut params__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::Actions => { + if actions__.is_some() { + return Err(serde::de::Error::duplicate_field("actions")); + } + actions__ = Some(map_.next_value()?); + } + GeneratedField::Params => { + if params__.is_some() { + return Err(serde::de::Error::duplicate_field("params")); + } + params__ = map_.next_value()?; + } + } + } + Ok(UnsignedTransaction { + actions: actions__.unwrap_or_default(), + params: params__, + }) + } + } + deserializer.deserialize_struct("astria.protocol.transactions.v1alpha1.UnsignedTransaction", FIELDS, GeneratedVisitor) + } +} diff --git a/crates/astria-core/src/generated/astria_vendored.penumbra.core.component.ibc.v1.serde.rs b/crates/astria-core/src/generated/astria_vendored.penumbra.core.component.ibc.v1.serde.rs new file mode 100644 index 0000000000..7bb062d615 --- /dev/null +++ b/crates/astria-core/src/generated/astria_vendored.penumbra.core.component.ibc.v1.serde.rs @@ -0,0 +1,92 @@ +impl serde::Serialize for IbcRelay { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if self.raw_action.is_some() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("astria_vendored.penumbra.core.component.ibc.v1.IbcRelay", len)?; + if let Some(v) = self.raw_action.as_ref() { + struct_ser.serialize_field("rawAction", v)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for IbcRelay { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "raw_action", + "rawAction", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + RawAction, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "rawAction" | "raw_action" => Ok(GeneratedField::RawAction), + _ => Err(serde::de::Error::unknown_field(value, FIELDS)), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = IbcRelay; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct astria_vendored.penumbra.core.component.ibc.v1.IbcRelay") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut raw_action__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::RawAction => { + if raw_action__.is_some() { + return Err(serde::de::Error::duplicate_field("rawAction")); + } + raw_action__ = map_.next_value()?; + } + } + } + Ok(IbcRelay { + raw_action: raw_action__, + }) + } + } + deserializer.deserialize_struct("astria_vendored.penumbra.core.component.ibc.v1.IbcRelay", FIELDS, GeneratedVisitor) + } +} diff --git a/crates/astria-core/src/generated/astria_vendored.tendermint.abci.rs b/crates/astria-core/src/generated/astria_vendored.tendermint.abci.rs new file mode 100644 index 0000000000..7f31da1e6c --- /dev/null +++ b/crates/astria-core/src/generated/astria_vendored.tendermint.abci.rs @@ -0,0 +1,15 @@ +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ValidatorUpdate { + #[prost(message, optional, tag = "1")] + pub pub_key: ::core::option::Option, + #[prost(int64, tag = "2")] + pub power: i64, +} +impl ::prost::Name for ValidatorUpdate { + const NAME: &'static str = "ValidatorUpdate"; + const PACKAGE: &'static str = "astria_vendored.tendermint.abci"; + fn full_name() -> ::prost::alloc::string::String { + ::prost::alloc::format!("astria_vendored.tendermint.abci.{}", Self::NAME) + } +} diff --git a/crates/astria-core/src/generated/astria_vendored.tendermint.abci.serde.rs b/crates/astria-core/src/generated/astria_vendored.tendermint.abci.serde.rs new file mode 100644 index 0000000000..d61ad419fa --- /dev/null +++ b/crates/astria-core/src/generated/astria_vendored.tendermint.abci.serde.rs @@ -0,0 +1,112 @@ +impl serde::Serialize for ValidatorUpdate { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if self.pub_key.is_some() { + len += 1; + } + if self.power != 0 { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("astria_vendored.tendermint.abci.ValidatorUpdate", len)?; + if let Some(v) = self.pub_key.as_ref() { + struct_ser.serialize_field("pubKey", v)?; + } + if self.power != 0 { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("power", ToString::to_string(&self.power).as_str())?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for ValidatorUpdate { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "pub_key", + "pubKey", + "power", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + PubKey, + Power, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "pubKey" | "pub_key" => Ok(GeneratedField::PubKey), + "power" => Ok(GeneratedField::Power), + _ => Err(serde::de::Error::unknown_field(value, FIELDS)), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = ValidatorUpdate; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> 
std::fmt::Result { + formatter.write_str("struct astria_vendored.tendermint.abci.ValidatorUpdate") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut pub_key__ = None; + let mut power__ = None; + while let Some(k) = map_.next_key()? { + match k { + GeneratedField::PubKey => { + if pub_key__.is_some() { + return Err(serde::de::Error::duplicate_field("pubKey")); + } + pub_key__ = map_.next_value()?; + } + GeneratedField::Power => { + if power__.is_some() { + return Err(serde::de::Error::duplicate_field("power")); + } + power__ = + Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) + ; + } + } + } + Ok(ValidatorUpdate { + pub_key: pub_key__, + power: power__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("astria_vendored.tendermint.abci.ValidatorUpdate", FIELDS, GeneratedVisitor) + } +} diff --git a/crates/astria-core/src/generated/astria_vendored.tendermint.crypto.rs b/crates/astria-core/src/generated/astria_vendored.tendermint.crypto.rs new file mode 100644 index 0000000000..c80f97a22c --- /dev/null +++ b/crates/astria-core/src/generated/astria_vendored.tendermint.crypto.rs @@ -0,0 +1,24 @@ +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct PublicKey { + #[prost(oneof = "public_key::Sum", tags = "1, 2")] + pub sum: ::core::option::Option, +} +/// Nested message and enum types in `PublicKey`. +pub mod public_key { + #[allow(clippy::derive_partial_eq_without_eq)] + #[derive(Clone, PartialEq, ::prost::Oneof)] + pub enum Sum { + #[prost(bytes, tag = "1")] + Ed25519(::prost::alloc::vec::Vec), + #[prost(bytes, tag = "2")] + Secp256k1(::prost::alloc::vec::Vec), + } +} +impl ::prost::Name for PublicKey { + const NAME: &'static str = "PublicKey"; + const PACKAGE: &'static str = "astria_vendored.tendermint.crypto"; + fn full_name() -> ::prost::alloc::string::String { + ::prost::alloc::format!("astria_vendored.tendermint.crypto.{}", Self::NAME) + } +} diff --git a/crates/astria-core/src/generated/astria_vendored.tendermint.crypto.serde.rs b/crates/astria-core/src/generated/astria_vendored.tendermint.crypto.serde.rs new file mode 100644 index 0000000000..0b3d669249 --- /dev/null +++ b/crates/astria-core/src/generated/astria_vendored.tendermint.crypto.serde.rs @@ -0,0 +1,109 @@ +impl serde::Serialize for PublicKey { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if self.sum.is_some() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("astria_vendored.tendermint.crypto.PublicKey", len)?; + if let Some(v) = self.sum.as_ref() { + match v { + public_key::Sum::Ed25519(v) => { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("ed25519", pbjson::private::base64::encode(&v).as_str())?; + } + public_key::Sum::Secp256k1(v) => { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("secp256k1", pbjson::private::base64::encode(&v).as_str())?; + } + } + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for PublicKey { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "ed25519", + "secp256k1", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Ed25519, + Secp256k1, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn 
deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "ed25519" => Ok(GeneratedField::Ed25519), + "secp256k1" => Ok(GeneratedField::Secp256k1), + _ => Err(serde::de::Error::unknown_field(value, FIELDS)), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = PublicKey; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct astria_vendored.tendermint.crypto.PublicKey") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut sum__ = None; + while let Some(k) = map_.next_key()? { + match k { + GeneratedField::Ed25519 => { + if sum__.is_some() { + return Err(serde::de::Error::duplicate_field("ed25519")); + } + sum__ = map_.next_value::<::std::option::Option<::pbjson::private::BytesDeserialize<_>>>()?.map(|x| public_key::Sum::Ed25519(x.0)); + } + GeneratedField::Secp256k1 => { + if sum__.is_some() { + return Err(serde::de::Error::duplicate_field("secp256k1")); + } + sum__ = map_.next_value::<::std::option::Option<::pbjson::private::BytesDeserialize<_>>>()?.map(|x| public_key::Sum::Secp256k1(x.0)); + } + } + } + Ok(PublicKey { + sum: sum__, + }) + } + } + deserializer.deserialize_struct("astria_vendored.tendermint.crypto.PublicKey", FIELDS, GeneratedVisitor) + } +} diff --git a/crates/astria-core/src/generated/astria_vendored.tendermint.types.rs b/crates/astria-core/src/generated/astria_vendored.tendermint.types.rs new file mode 100644 index 0000000000..3824a9824d --- /dev/null +++ b/crates/astria-core/src/generated/astria_vendored.tendermint.types.rs @@ -0,0 +1,74 @@ +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct BlockId { + #[prost(bytes = "vec", tag = "1")] + pub hash: ::prost::alloc::vec::Vec, + #[prost(message, optional, tag = "2")] + pub part_set_header: ::core::option::Option, +} +impl ::prost::Name for BlockId { + const NAME: &'static str = "BlockID"; + const PACKAGE: &'static str = "astria_vendored.tendermint.types"; + fn full_name() -> ::prost::alloc::string::String { + ::prost::alloc::format!("astria_vendored.tendermint.types.{}", Self::NAME) + } +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct PartSetHeader { + #[prost(uint32, tag = "1")] + pub total: u32, + #[prost(bytes = "vec", tag = "2")] + pub hash: ::prost::alloc::vec::Vec, +} +impl ::prost::Name for PartSetHeader { + const NAME: &'static str = "PartSetHeader"; + const PACKAGE: &'static str = "astria_vendored.tendermint.types"; + fn full_name() -> ::prost::alloc::string::String { + ::prost::alloc::format!("astria_vendored.tendermint.types.{}", Self::NAME) + } +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct Header { + /// basic block info + #[prost(message, optional, tag = "1")] + pub version: ::core::option::Option, + #[prost(string, tag = "2")] + pub chain_id: 
::prost::alloc::string::String, + #[prost(int64, tag = "3")] + pub height: i64, + #[prost(message, optional, tag = "4")] + pub time: ::core::option::Option<::pbjson_types::Timestamp>, + /// prev block info + #[prost(message, optional, tag = "5")] + pub last_block_id: ::core::option::Option, + /// hashes of block data + #[prost(bytes = "vec", tag = "6")] + pub last_commit_hash: ::prost::alloc::vec::Vec, + #[prost(bytes = "vec", tag = "7")] + pub data_hash: ::prost::alloc::vec::Vec, + /// hashes from the app output from the prev block + #[prost(bytes = "vec", tag = "8")] + pub validators_hash: ::prost::alloc::vec::Vec, + #[prost(bytes = "vec", tag = "9")] + pub next_validators_hash: ::prost::alloc::vec::Vec, + #[prost(bytes = "vec", tag = "10")] + pub consensus_hash: ::prost::alloc::vec::Vec, + #[prost(bytes = "vec", tag = "11")] + pub app_hash: ::prost::alloc::vec::Vec, + #[prost(bytes = "vec", tag = "12")] + pub last_results_hash: ::prost::alloc::vec::Vec, + /// consensus info + #[prost(bytes = "vec", tag = "13")] + pub evidence_hash: ::prost::alloc::vec::Vec, + #[prost(bytes = "vec", tag = "14")] + pub proposer_address: ::prost::alloc::vec::Vec, +} +impl ::prost::Name for Header { + const NAME: &'static str = "Header"; + const PACKAGE: &'static str = "astria_vendored.tendermint.types"; + fn full_name() -> ::prost::alloc::string::String { + ::prost::alloc::format!("astria_vendored.tendermint.types.{}", Self::NAME) + } +} diff --git a/crates/astria-core/src/generated/astria_vendored.tendermint.types.serde.rs b/crates/astria-core/src/generated/astria_vendored.tendermint.types.serde.rs new file mode 100644 index 0000000000..316c2efc06 --- /dev/null +++ b/crates/astria-core/src/generated/astria_vendored.tendermint.types.serde.rs @@ -0,0 +1,578 @@ +impl serde::Serialize for BlockId { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.hash.is_empty() { + len += 1; + } + if self.part_set_header.is_some() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("astria_vendored.tendermint.types.BlockID", len)?; + if !self.hash.is_empty() { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("hash", pbjson::private::base64::encode(&self.hash).as_str())?; + } + if let Some(v) = self.part_set_header.as_ref() { + struct_ser.serialize_field("partSetHeader", v)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for BlockId { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "hash", + "part_set_header", + "partSetHeader", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Hash, + PartSetHeader, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "hash" => Ok(GeneratedField::Hash), + "partSetHeader" | "part_set_header" => Ok(GeneratedField::PartSetHeader), + _ => Err(serde::de::Error::unknown_field(value, 
FIELDS)), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = BlockId; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct astria_vendored.tendermint.types.BlockID") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut hash__ = None; + let mut part_set_header__ = None; + while let Some(k) = map_.next_key()? { + match k { + GeneratedField::Hash => { + if hash__.is_some() { + return Err(serde::de::Error::duplicate_field("hash")); + } + hash__ = + Some(map_.next_value::<::pbjson::private::BytesDeserialize<_>>()?.0) + ; + } + GeneratedField::PartSetHeader => { + if part_set_header__.is_some() { + return Err(serde::de::Error::duplicate_field("partSetHeader")); + } + part_set_header__ = map_.next_value()?; + } + } + } + Ok(BlockId { + hash: hash__.unwrap_or_default(), + part_set_header: part_set_header__, + }) + } + } + deserializer.deserialize_struct("astria_vendored.tendermint.types.BlockID", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for Header { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if self.version.is_some() { + len += 1; + } + if !self.chain_id.is_empty() { + len += 1; + } + if self.height != 0 { + len += 1; + } + if self.time.is_some() { + len += 1; + } + if self.last_block_id.is_some() { + len += 1; + } + if !self.last_commit_hash.is_empty() { + len += 1; + } + if !self.data_hash.is_empty() { + len += 1; + } + if !self.validators_hash.is_empty() { + len += 1; + } + if !self.next_validators_hash.is_empty() { + len += 1; + } + if !self.consensus_hash.is_empty() { + len += 1; + } + if !self.app_hash.is_empty() { + len += 1; + } + if !self.last_results_hash.is_empty() { + len += 1; + } + if !self.evidence_hash.is_empty() { + len += 1; + } + if !self.proposer_address.is_empty() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("astria_vendored.tendermint.types.Header", len)?; + if let Some(v) = self.version.as_ref() { + struct_ser.serialize_field("version", v)?; + } + if !self.chain_id.is_empty() { + struct_ser.serialize_field("chainId", &self.chain_id)?; + } + if self.height != 0 { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("height", ToString::to_string(&self.height).as_str())?; + } + if let Some(v) = self.time.as_ref() { + struct_ser.serialize_field("time", v)?; + } + if let Some(v) = self.last_block_id.as_ref() { + struct_ser.serialize_field("lastBlockId", v)?; + } + if !self.last_commit_hash.is_empty() { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("lastCommitHash", pbjson::private::base64::encode(&self.last_commit_hash).as_str())?; + } + if !self.data_hash.is_empty() { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("dataHash", pbjson::private::base64::encode(&self.data_hash).as_str())?; + } + if !self.validators_hash.is_empty() { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("validatorsHash", pbjson::private::base64::encode(&self.validators_hash).as_str())?; + } + if !self.next_validators_hash.is_empty() { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("nextValidatorsHash", pbjson::private::base64::encode(&self.next_validators_hash).as_str())?; + } + if 
!self.consensus_hash.is_empty() { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("consensusHash", pbjson::private::base64::encode(&self.consensus_hash).as_str())?; + } + if !self.app_hash.is_empty() { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("appHash", pbjson::private::base64::encode(&self.app_hash).as_str())?; + } + if !self.last_results_hash.is_empty() { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("lastResultsHash", pbjson::private::base64::encode(&self.last_results_hash).as_str())?; + } + if !self.evidence_hash.is_empty() { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("evidenceHash", pbjson::private::base64::encode(&self.evidence_hash).as_str())?; + } + if !self.proposer_address.is_empty() { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("proposerAddress", pbjson::private::base64::encode(&self.proposer_address).as_str())?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for Header { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "version", + "chain_id", + "chainId", + "height", + "time", + "last_block_id", + "lastBlockId", + "last_commit_hash", + "lastCommitHash", + "data_hash", + "dataHash", + "validators_hash", + "validatorsHash", + "next_validators_hash", + "nextValidatorsHash", + "consensus_hash", + "consensusHash", + "app_hash", + "appHash", + "last_results_hash", + "lastResultsHash", + "evidence_hash", + "evidenceHash", + "proposer_address", + "proposerAddress", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Version, + ChainId, + Height, + Time, + LastBlockId, + LastCommitHash, + DataHash, + ValidatorsHash, + NextValidatorsHash, + ConsensusHash, + AppHash, + LastResultsHash, + EvidenceHash, + ProposerAddress, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "version" => Ok(GeneratedField::Version), + "chainId" | "chain_id" => Ok(GeneratedField::ChainId), + "height" => Ok(GeneratedField::Height), + "time" => Ok(GeneratedField::Time), + "lastBlockId" | "last_block_id" => Ok(GeneratedField::LastBlockId), + "lastCommitHash" | "last_commit_hash" => Ok(GeneratedField::LastCommitHash), + "dataHash" | "data_hash" => Ok(GeneratedField::DataHash), + "validatorsHash" | "validators_hash" => Ok(GeneratedField::ValidatorsHash), + "nextValidatorsHash" | "next_validators_hash" => Ok(GeneratedField::NextValidatorsHash), + "consensusHash" | "consensus_hash" => Ok(GeneratedField::ConsensusHash), + "appHash" | "app_hash" => Ok(GeneratedField::AppHash), + "lastResultsHash" | "last_results_hash" => Ok(GeneratedField::LastResultsHash), + "evidenceHash" | "evidence_hash" => Ok(GeneratedField::EvidenceHash), + "proposerAddress" | "proposer_address" => Ok(GeneratedField::ProposerAddress), + _ => Err(serde::de::Error::unknown_field(value, FIELDS)), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + 
impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = Header; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct astria_vendored.tendermint.types.Header") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut version__ = None; + let mut chain_id__ = None; + let mut height__ = None; + let mut time__ = None; + let mut last_block_id__ = None; + let mut last_commit_hash__ = None; + let mut data_hash__ = None; + let mut validators_hash__ = None; + let mut next_validators_hash__ = None; + let mut consensus_hash__ = None; + let mut app_hash__ = None; + let mut last_results_hash__ = None; + let mut evidence_hash__ = None; + let mut proposer_address__ = None; + while let Some(k) = map_.next_key()? { + match k { + GeneratedField::Version => { + if version__.is_some() { + return Err(serde::de::Error::duplicate_field("version")); + } + version__ = map_.next_value()?; + } + GeneratedField::ChainId => { + if chain_id__.is_some() { + return Err(serde::de::Error::duplicate_field("chainId")); + } + chain_id__ = Some(map_.next_value()?); + } + GeneratedField::Height => { + if height__.is_some() { + return Err(serde::de::Error::duplicate_field("height")); + } + height__ = + Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) + ; + } + GeneratedField::Time => { + if time__.is_some() { + return Err(serde::de::Error::duplicate_field("time")); + } + time__ = map_.next_value()?; + } + GeneratedField::LastBlockId => { + if last_block_id__.is_some() { + return Err(serde::de::Error::duplicate_field("lastBlockId")); + } + last_block_id__ = map_.next_value()?; + } + GeneratedField::LastCommitHash => { + if last_commit_hash__.is_some() { + return Err(serde::de::Error::duplicate_field("lastCommitHash")); + } + last_commit_hash__ = + Some(map_.next_value::<::pbjson::private::BytesDeserialize<_>>()?.0) + ; + } + GeneratedField::DataHash => { + if data_hash__.is_some() { + return Err(serde::de::Error::duplicate_field("dataHash")); + } + data_hash__ = + Some(map_.next_value::<::pbjson::private::BytesDeserialize<_>>()?.0) + ; + } + GeneratedField::ValidatorsHash => { + if validators_hash__.is_some() { + return Err(serde::de::Error::duplicate_field("validatorsHash")); + } + validators_hash__ = + Some(map_.next_value::<::pbjson::private::BytesDeserialize<_>>()?.0) + ; + } + GeneratedField::NextValidatorsHash => { + if next_validators_hash__.is_some() { + return Err(serde::de::Error::duplicate_field("nextValidatorsHash")); + } + next_validators_hash__ = + Some(map_.next_value::<::pbjson::private::BytesDeserialize<_>>()?.0) + ; + } + GeneratedField::ConsensusHash => { + if consensus_hash__.is_some() { + return Err(serde::de::Error::duplicate_field("consensusHash")); + } + consensus_hash__ = + Some(map_.next_value::<::pbjson::private::BytesDeserialize<_>>()?.0) + ; + } + GeneratedField::AppHash => { + if app_hash__.is_some() { + return Err(serde::de::Error::duplicate_field("appHash")); + } + app_hash__ = + Some(map_.next_value::<::pbjson::private::BytesDeserialize<_>>()?.0) + ; + } + GeneratedField::LastResultsHash => { + if last_results_hash__.is_some() { + return Err(serde::de::Error::duplicate_field("lastResultsHash")); + } + last_results_hash__ = + Some(map_.next_value::<::pbjson::private::BytesDeserialize<_>>()?.0) + ; + } + GeneratedField::EvidenceHash => { + if evidence_hash__.is_some() { + return Err(serde::de::Error::duplicate_field("evidenceHash")); + } + 
evidence_hash__ = + Some(map_.next_value::<::pbjson::private::BytesDeserialize<_>>()?.0) + ; + } + GeneratedField::ProposerAddress => { + if proposer_address__.is_some() { + return Err(serde::de::Error::duplicate_field("proposerAddress")); + } + proposer_address__ = + Some(map_.next_value::<::pbjson::private::BytesDeserialize<_>>()?.0) + ; + } + } + } + Ok(Header { + version: version__, + chain_id: chain_id__.unwrap_or_default(), + height: height__.unwrap_or_default(), + time: time__, + last_block_id: last_block_id__, + last_commit_hash: last_commit_hash__.unwrap_or_default(), + data_hash: data_hash__.unwrap_or_default(), + validators_hash: validators_hash__.unwrap_or_default(), + next_validators_hash: next_validators_hash__.unwrap_or_default(), + consensus_hash: consensus_hash__.unwrap_or_default(), + app_hash: app_hash__.unwrap_or_default(), + last_results_hash: last_results_hash__.unwrap_or_default(), + evidence_hash: evidence_hash__.unwrap_or_default(), + proposer_address: proposer_address__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("astria_vendored.tendermint.types.Header", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for PartSetHeader { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if self.total != 0 { + len += 1; + } + if !self.hash.is_empty() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("astria_vendored.tendermint.types.PartSetHeader", len)?; + if self.total != 0 { + struct_ser.serialize_field("total", &self.total)?; + } + if !self.hash.is_empty() { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("hash", pbjson::private::base64::encode(&self.hash).as_str())?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for PartSetHeader { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "total", + "hash", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Total, + Hash, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "total" => Ok(GeneratedField::Total), + "hash" => Ok(GeneratedField::Hash), + _ => Err(serde::de::Error::unknown_field(value, FIELDS)), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = PartSetHeader; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct astria_vendored.tendermint.types.PartSetHeader") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut total__ = None; + let mut hash__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::Total => { + if total__.is_some() { + return Err(serde::de::Error::duplicate_field("total")); + } + total__ = + Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) + ; + } + GeneratedField::Hash => { + if hash__.is_some() { + return Err(serde::de::Error::duplicate_field("hash")); + } + hash__ = + Some(map_.next_value::<::pbjson::private::BytesDeserialize<_>>()?.0) + ; + } + } + } + Ok(PartSetHeader { + total: total__.unwrap_or_default(), + hash: hash__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("astria_vendored.tendermint.types.PartSetHeader", FIELDS, GeneratedVisitor) + } +} diff --git a/crates/astria-core/src/generated/astria_vendored.tendermint.version.rs b/crates/astria-core/src/generated/astria_vendored.tendermint.version.rs new file mode 100644 index 0000000000..11a4add837 --- /dev/null +++ b/crates/astria-core/src/generated/astria_vendored.tendermint.version.rs @@ -0,0 +1,15 @@ +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct Consensus { + #[prost(uint64, tag = "1")] + pub block: u64, + #[prost(uint64, tag = "2")] + pub app: u64, +} +impl ::prost::Name for Consensus { + const NAME: &'static str = "Consensus"; + const PACKAGE: &'static str = "astria_vendored.tendermint.version"; + fn full_name() -> ::prost::alloc::string::String { + ::prost::alloc::format!("astria_vendored.tendermint.version.{}", Self::NAME) + } +} diff --git a/crates/astria-core/src/generated/astria_vendored.tendermint.version.serde.rs b/crates/astria-core/src/generated/astria_vendored.tendermint.version.serde.rs new file mode 100644 index 0000000000..7f696124d0 --- /dev/null +++ b/crates/astria-core/src/generated/astria_vendored.tendermint.version.serde.rs @@ -0,0 +1,114 @@ +impl serde::Serialize for Consensus { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if self.block != 0 { + len += 1; + } + if self.app != 0 { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("astria_vendored.tendermint.version.Consensus", len)?; + if self.block != 0 { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("block", ToString::to_string(&self.block).as_str())?; + } + if self.app != 0 { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("app", ToString::to_string(&self.app).as_str())?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for Consensus { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "block", + "app", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Block, + App, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "block" => Ok(GeneratedField::Block), + "app" => Ok(GeneratedField::App), + _ => Err(serde::de::Error::unknown_field(value, FIELDS)), + } + } + } + 
deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = Consensus; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct astria_vendored.tendermint.version.Consensus") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut block__ = None; + let mut app__ = None; + while let Some(k) = map_.next_key()? { + match k { + GeneratedField::Block => { + if block__.is_some() { + return Err(serde::de::Error::duplicate_field("block")); + } + block__ = + Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) + ; + } + GeneratedField::App => { + if app__.is_some() { + return Err(serde::de::Error::duplicate_field("app")); + } + app__ = + Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) + ; + } + } + } + Ok(Consensus { + block: block__.unwrap_or_default(), + app: app__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("astria_vendored.tendermint.version.Consensus", FIELDS, GeneratedVisitor) + } +} diff --git a/crates/astria-core/src/generated/mod.rs b/crates/astria-core/src/generated/mod.rs index b9df631015..283c2a7f30 100644 --- a/crates/astria-core/src/generated/mod.rs +++ b/crates/astria-core/src/generated/mod.rs @@ -4,13 +4,39 @@ clippy::needless_borrows_for_generic_args, clippy::arithmetic_side_effects )] +//! Files generated using [`tonic-build`] and [`buf`] via the [`tools/protobuf-compiler`] +//! build tool. +//! +//! [`tonic-build`]: https://docs.rs/tonic-build +//! [`buf`]: https://buf.build +//! [`tools/protobuf-compiler`]: ../../../../tools/protobuf-compiler + +#[path = ""] +pub mod astria_vendored { + #[path = ""] + pub mod tendermint { + pub mod abci { + include!("astria_vendored.tendermint.abci.rs"); + + #[cfg(feature = "serde")] + mod _serde_impl { + use super::*; + include!("astria_vendored.tendermint.abci.serde.rs"); + } + } + + pub mod crypto { + include!("astria_vendored.tendermint.crypto.rs"); + + #[cfg(feature = "serde")] + mod _serde_impl { + use super::*; + include!("astria_vendored.tendermint.crypto.serde.rs"); + } + } + } +} -/// Files generated using [`tonic-build`] and [`buf`] via the [`tools/protobuf-compiler`] -/// build tool. 
-/// -/// [`tonic-build`]: https://docs.rs/tonic-build -/// [`buf`]: https://buf.build -/// [`tools/protobuf-compiler`]: ../../../../tools/protobuf-compiler #[path = ""] pub mod execution { #[path = "astria.execution.v1alpha1.rs"] @@ -59,8 +85,15 @@ pub mod protocol { } #[path = ""] pub mod transaction { - #[path = "astria.protocol.transactions.v1alpha1.rs"] - pub mod v1alpha1; + pub mod v1alpha1 { + include!("astria.protocol.transactions.v1alpha1.rs"); + + #[cfg(feature = "serde")] + mod _serde_impl { + use super::*; + include!("astria.protocol.transactions.v1alpha1.serde.rs"); + } + } } } diff --git a/crates/astria-core/src/protocol/transaction/v1alpha1/action.rs b/crates/astria-core/src/protocol/transaction/v1alpha1/action.rs index 7e41a97de2..f4ef672d28 100644 --- a/crates/astria-core/src/protocol/transaction/v1alpha1/action.rs +++ b/crates/astria-core/src/protocol/transaction/v1alpha1/action.rs @@ -24,10 +24,15 @@ use crate::{ }; #[derive(Clone, Debug)] +#[cfg_attr( + feature = "serde", + derive(::serde::Deserialize, ::serde::Serialize), + serde(into = "raw::Action", try_from = "raw::Action") +)] pub enum Action { Sequence(SequenceAction), Transfer(TransferAction), - ValidatorUpdate(tendermint::validator::Update), + ValidatorUpdate(ValidatorUpdate), SudoAddressChange(SudoAddressChangeAction), Ibc(IbcRelay), Ics20Withdrawal(Ics20Withdrawal), @@ -47,7 +52,7 @@ impl Action { let kind = match self { Action::Sequence(act) => Value::SequenceAction(act.into_raw()), Action::Transfer(act) => Value::TransferAction(act.into_raw()), - Action::ValidatorUpdate(act) => Value::ValidatorUpdateAction(act.into()), + Action::ValidatorUpdate(act) => Value::ValidatorUpdateAction(act.into_raw()), Action::SudoAddressChange(act) => Value::SudoAddressChangeAction(act.into_raw()), Action::Ibc(act) => Value::IbcAction(act.into()), Action::Ics20Withdrawal(act) => Value::Ics20Withdrawal(act.into_raw()), @@ -70,7 +75,7 @@ impl Action { let kind = match self { Action::Sequence(act) => Value::SequenceAction(act.to_raw()), Action::Transfer(act) => Value::TransferAction(act.to_raw()), - Action::ValidatorUpdate(act) => Value::ValidatorUpdateAction(act.clone().into()), + Action::ValidatorUpdate(act) => Value::ValidatorUpdateAction(act.to_raw()), Action::SudoAddressChange(act) => { Value::SudoAddressChangeAction(act.clone().into_raw()) } @@ -110,9 +115,9 @@ impl Action { Value::TransferAction(act) => { Self::Transfer(TransferAction::try_from_raw(act).map_err(ActionError::transfer)?) } - Value::ValidatorUpdateAction(act) => { - Self::ValidatorUpdate(act.try_into().map_err(ActionError::validator_update)?) 
- } + Value::ValidatorUpdateAction(act) => Self::ValidatorUpdate( + ValidatorUpdate::try_from_raw(act).map_err(ActionError::validator_update)?, + ), Value::SudoAddressChangeAction(act) => Self::SudoAddressChange( SudoAddressChangeAction::try_from_raw(act) .map_err(ActionError::sudo_address_change)?, @@ -240,6 +245,20 @@ impl From for Action { } } +impl From for raw::Action { + fn from(value: Action) -> Self { + value.into_raw() + } +} + +impl TryFrom for Action { + type Error = ActionError; + + fn try_from(value: raw::Action) -> Result { + Self::try_from_raw(value) + } +} + #[allow(clippy::module_name_repetitions)] #[derive(Debug, thiserror::Error)] #[error(transparent)] @@ -258,7 +277,7 @@ impl ActionError { Self(ActionErrorKind::Transfer(inner)) } - fn validator_update(inner: tendermint::error::Error) -> Self { + fn validator_update(inner: ValidatorUpdateError) -> Self { Self(ActionErrorKind::ValidatorUpdate(inner)) } @@ -312,7 +331,7 @@ enum ActionErrorKind { #[error("transfer action was not valid")] Transfer(#[source] TransferActionError), #[error("validator update action was not valid")] - ValidatorUpdate(#[source] tendermint::error::Error), + ValidatorUpdate(#[source] ValidatorUpdateError), #[error("sudo address change action was not valid")] SudoAddressChange(#[source] SudoAddressChangeActionError), #[error("ibc action was not valid")] @@ -533,6 +552,148 @@ enum TransferActionErrorKind { FeeAsset(#[source] asset::ParseDenomError), } +#[derive(Debug, thiserror::Error)] +#[error(transparent)] +pub struct ValidatorUpdateError(ValidatorUpdateErrorKind); + +impl ValidatorUpdateError { + fn negative_power(power: i64) -> Self { + Self(ValidatorUpdateErrorKind::NegativePower { + power, + }) + } + + fn public_key_not_set() -> Self { + Self(ValidatorUpdateErrorKind::PublicKeyNotSet) + } + + fn secp256k1_not_supported() -> Self { + Self(ValidatorUpdateErrorKind::Secp256k1NotSupported) + } + + fn verification_key(source: crate::crypto::Error) -> Self { + Self(ValidatorUpdateErrorKind::VerificationKey { + source, + }) + } +} + +#[derive(Debug, thiserror::Error)] +enum ValidatorUpdateErrorKind { + #[error("field .power had negative value `{power}`, which is not permitted")] + NegativePower { power: i64 }, + #[error(".pub_key field was not set")] + PublicKeyNotSet, + #[error(".pub_key field was set to secp256k1, but only ed25519 keys are supported")] + Secp256k1NotSupported, + #[error("bytes stored in the .pub_key field could not be read as an ed25519 verification key")] + VerificationKey { source: crate::crypto::Error }, +} + +#[derive(Clone, Debug, PartialEq, Eq)] +#[cfg_attr( + feature = "serde", + derive(::serde::Deserialize, ::serde::Serialize), + serde( + into = "crate::generated::astria_vendored::tendermint::abci::ValidatorUpdate", + try_from = "crate::generated::astria_vendored::tendermint::abci::ValidatorUpdate", + ) +)] +pub struct ValidatorUpdate { + pub power: u32, + pub verification_key: crate::crypto::VerificationKey, +} + +impl ValidatorUpdate { + /// Create a validator update by verifying a raw protobuf-decoded + /// [`crate::generated::astria_vendored::tendermint::abci::ValidatorUpdate`]. + /// + /// # Errors + /// Returns an error if the `.power` field is negative, if `.pub_key` + /// is not set, or if `.pub_key` contains a non-ed25519 variant, or + /// if the ed25519 has invalid bytes (that is, bytes from which an + /// ed25519 public key cannot be constructed). 
+ pub fn try_from_raw( + value: crate::generated::astria_vendored::tendermint::abci::ValidatorUpdate, + ) -> Result { + use crate::generated::astria_vendored::tendermint::crypto::{ + public_key, + PublicKey, + }; + let crate::generated::astria_vendored::tendermint::abci::ValidatorUpdate { + pub_key, + power, + } = value; + let power = power + .try_into() + .map_err(|_| ValidatorUpdateError::negative_power(power))?; + let verification_key = match pub_key { + None + | Some(PublicKey { + sum: None, + }) => Err(ValidatorUpdateError::public_key_not_set()), + Some(PublicKey { + sum: Some(public_key::Sum::Secp256k1(..)), + }) => Err(ValidatorUpdateError::secp256k1_not_supported()), + + Some(PublicKey { + sum: Some(public_key::Sum::Ed25519(bytes)), + }) => crate::crypto::VerificationKey::try_from(&*bytes) + .map_err(ValidatorUpdateError::verification_key), + }?; + Ok(Self { + power, + verification_key, + }) + } + + #[must_use] + pub fn into_raw(self) -> crate::generated::astria_vendored::tendermint::abci::ValidatorUpdate { + self.to_raw() + } + + #[must_use] + pub fn to_raw(&self) -> crate::generated::astria_vendored::tendermint::abci::ValidatorUpdate { + use crate::generated::astria_vendored::tendermint::crypto::{ + public_key, + PublicKey, + }; + let Self { + power, + verification_key, + } = self; + + crate::generated::astria_vendored::tendermint::abci::ValidatorUpdate { + power: (*power).into(), + pub_key: Some(PublicKey { + sum: Some(public_key::Sum::Ed25519( + verification_key.to_bytes().to_vec(), + )), + }), + } + } +} + +impl From + for crate::generated::astria_vendored::tendermint::abci::ValidatorUpdate +{ + fn from(value: ValidatorUpdate) -> Self { + value.into_raw() + } +} + +impl TryFrom + for ValidatorUpdate +{ + type Error = ValidatorUpdateError; + + fn try_from( + value: crate::generated::astria_vendored::tendermint::abci::ValidatorUpdate, + ) -> Result { + Self::try_from_raw(value) + } +} + #[derive(Clone, Debug)] #[allow(clippy::module_name_repetitions)] pub struct SudoAddressChangeAction { diff --git a/crates/astria-sequencer/Cargo.toml b/crates/astria-sequencer/Cargo.toml index d8f719eb3b..a85fb864d5 100644 --- a/crates/astria-sequencer/Cargo.toml +++ b/crates/astria-sequencer/Cargo.toml @@ -70,6 +70,7 @@ config = { package = "astria-config", path = "../astria-config", features = [ "tests", ] } insta = { workspace = true, features = ["json"] } +rand_chacha = "0.3.1" tokio = { workspace = true, features = ["test-util"] } [build-dependencies] diff --git a/crates/astria-sequencer/src/app/mod.rs b/crates/astria-sequencer/src/app/mod.rs index b892037ac0..5ce65cd86c 100644 --- a/crates/astria-sequencer/src/app/mod.rs +++ b/crates/astria-sequencer/src/app/mod.rs @@ -24,6 +24,7 @@ use astria_core::{ protocol::{ abci::AbciErrorCode, transaction::v1alpha1::{ + action::ValidatorUpdate, Action, SignedTransaction, }, @@ -209,7 +210,7 @@ impl App { &mut self, storage: Storage, genesis_state: astria_core::sequencer::GenesisState, - genesis_validators: Vec, + genesis_validators: Vec, chain_id: String, ) -> anyhow::Result { let mut state_tx = self @@ -1087,7 +1088,9 @@ impl App { let events = self.apply(state_tx); Ok(abci::response::EndBlock { - validator_updates: validator_updates.into_tendermint_validator_updates(), + validator_updates: validator_updates + .try_into_cometbft() + .context("failed converting astria validators to cometbft compatible type")?, events, ..Default::default() }) diff --git a/crates/astria-sequencer/src/app/test_utils.rs b/crates/astria-sequencer/src/app/test_utils.rs 
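A minimal sketch (not part of the patch) of the `ValidatorUpdate` conversions introduced above. It assumes `astria_core` exposes the generated module and the action type at the paths used in this diff, and it exercises only the documented error paths of `try_from_raw`, which do not require constructing a valid ed25519 key:

use astria_core::{
    generated::astria_vendored::tendermint::abci::ValidatorUpdate as RawValidatorUpdate,
    protocol::transaction::v1alpha1::action::ValidatorUpdate,
};

fn main() {
    // `.pub_key` left unset: try_from_raw reports that the public key was not set.
    let missing_key = RawValidatorUpdate { pub_key: None, power: 10 };
    assert!(ValidatorUpdate::try_from_raw(missing_key).is_err());

    // Negative `.power`: rejected before the key is even inspected.
    let negative_power = RawValidatorUpdate { pub_key: None, power: -1 };
    assert!(ValidatorUpdate::try_from_raw(negative_power).is_err());
}

The same conversions back the `serde(into = "...", try_from = "...")` attributes added in this patch, so serializing a `ValidatorUpdate` goes through the raw protobuf type shown here.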
index 38e72a5772..d942e99ec5 100644 --- a/crates/astria-sequencer/src/app/test_utils.rs +++ b/crates/astria-sequencer/src/app/test_utils.rs @@ -6,7 +6,10 @@ use astria_core::{ ADDRESS_LEN, }, protocol::transaction::v1alpha1::{ - action::SequenceAction, + action::{ + SequenceAction, + ValidatorUpdate, + }, SignedTransaction, TransactionParams, UnsignedTransaction, @@ -116,7 +119,7 @@ pub(crate) fn genesis_state() -> GenesisState { pub(crate) async fn initialize_app_with_storage( genesis_state: Option, - genesis_validators: Vec, + genesis_validators: Vec, ) -> (App, Storage) { let storage = cnidarium::TempStorage::new() .await @@ -143,7 +146,7 @@ pub(crate) async fn initialize_app_with_storage( pub(crate) async fn initialize_app( genesis_state: Option, - genesis_validators: Vec, + genesis_validators: Vec, ) -> App { let (app, _storage) = initialize_app_with_storage(genesis_state, genesis_validators).await; app diff --git a/crates/astria-sequencer/src/app/tests_app.rs b/crates/astria-sequencer/src/app/tests_app.rs index 0fcbf541a0..9b89547c05 100644 --- a/crates/astria-sequencer/src/app/tests_app.rs +++ b/crates/astria-sequencer/src/app/tests_app.rs @@ -50,6 +50,7 @@ use crate::{ }, proposal::commitment::generate_rollup_datas_commitment, state_ext::StateReadExt as _, + test_utils::verification_key, }; fn default_tendermint_header() -> Header { @@ -120,22 +121,16 @@ async fn app_pre_execute_transactions() { #[tokio::test] async fn app_begin_block_remove_byzantine_validators() { - use tendermint::{ - abci::types, - validator, - }; - - let pubkey_a = tendermint::public_key::PublicKey::from_raw_ed25519(&[1; 32]).unwrap(); - let pubkey_b = tendermint::public_key::PublicKey::from_raw_ed25519(&[2; 32]).unwrap(); + use tendermint::abci::types; let initial_validator_set = vec![ - validator::Update { - pub_key: pubkey_a, - power: 100u32.into(), + ValidatorUpdate { + power: 100u32, + verification_key: verification_key(1), }, - validator::Update { - pub_key: pubkey_b, - power: 1u32.into(), + ValidatorUpdate { + power: 1u32, + verification_key: verification_key(2), }, ]; @@ -144,10 +139,7 @@ async fn app_begin_block_remove_byzantine_validators() { let misbehavior = types::Misbehavior { kind: types::MisbehaviorKind::Unknown, validator: types::Validator { - address: tendermint::account::Id::from(pubkey_a) - .as_bytes() - .try_into() - .unwrap(), + address: crate::test_utils::verification_key(1).address_bytes(), power: 0u32.into(), }, height: Height::default(), @@ -171,10 +163,7 @@ async fn app_begin_block_remove_byzantine_validators() { // assert that validator with pubkey_a is removed let validator_set = app.state.get_validator_set().await.unwrap(); assert_eq!(validator_set.len(), 1); - assert_eq!( - validator_set.get(&pubkey_b.into()).unwrap().power, - 1u32.into() - ); + assert_eq!(validator_set.get(verification_key(2)).unwrap().power, 1,); } #[tokio::test] @@ -673,20 +662,14 @@ async fn app_prepare_proposal_sequencer_max_bytes_overflow_ok() { #[tokio::test] async fn app_end_block_validator_updates() { - use tendermint::validator; - - let pubkey_a = tendermint::public_key::PublicKey::from_raw_ed25519(&[1; 32]).unwrap(); - let pubkey_b = tendermint::public_key::PublicKey::from_raw_ed25519(&[2; 32]).unwrap(); - let pubkey_c = tendermint::public_key::PublicKey::from_raw_ed25519(&[3; 32]).unwrap(); - let initial_validator_set = vec![ - validator::Update { - pub_key: pubkey_a, - power: 100u32.into(), + ValidatorUpdate { + power: 100, + verification_key: crate::test_utils::verification_key(1), }, - 
validator::Update { - pub_key: pubkey_b, - power: 1u32.into(), + ValidatorUpdate { + power: 1, + verification_key: crate::test_utils::verification_key(2), }, ]; @@ -694,17 +677,17 @@ async fn app_end_block_validator_updates() { let proposer_address = crate::address::base_prefixed([0u8; 20]); let validator_updates = vec![ - validator::Update { - pub_key: pubkey_a, - power: 0u32.into(), + ValidatorUpdate { + power: 0, + verification_key: verification_key(0), }, - validator::Update { - pub_key: pubkey_b, - power: 100u32.into(), + ValidatorUpdate { + power: 100, + verification_key: verification_key(1), }, - validator::Update { - pub_key: pubkey_c, - power: 100u32.into(), + ValidatorUpdate { + power: 100, + verification_key: verification_key(2), }, ]; @@ -724,11 +707,15 @@ async fn app_end_block_validator_updates() { // validator with pubkey_c should be added let validator_set = app.state.get_validator_set().await.unwrap(); assert_eq!(validator_set.len(), 2); - let validator_b = validator_set.get(&pubkey_b.into()).unwrap(); - assert_eq!(validator_b.pub_key, pubkey_b); - assert_eq!(validator_b.power, 100u32.into()); - let validator_c = validator_set.get(&pubkey_c.into()).unwrap(); - assert_eq!(validator_c.pub_key, pubkey_c); - assert_eq!(validator_c.power, 100u32.into()); + let validator_b = validator_set + .get(verification_key(1).address_bytes()) + .unwrap(); + assert_eq!(validator_b.verification_key, verification_key(1)); + assert_eq!(validator_b.power, 100); + let validator_c = validator_set + .get(verification_key(2).address_bytes()) + .unwrap(); + assert_eq!(validator_c.verification_key, verification_key(2)); + assert_eq!(validator_c.power, 100); assert_eq!(app.state.get_validator_updates().await.unwrap().len(), 0); } diff --git a/crates/astria-sequencer/src/app/tests_breaking_changes.rs b/crates/astria-sequencer/src/app/tests_breaking_changes.rs index 2ca7d70d52..17c00e785a 100644 --- a/crates/astria-sequencer/src/app/tests_breaking_changes.rs +++ b/crates/astria-sequencer/src/app/tests_breaking_changes.rs @@ -24,6 +24,7 @@ use astria_core::{ IbcRelayerChangeAction, SequenceAction, TransferAction, + ValidatorUpdate, }, Action, TransactionParams, @@ -198,10 +199,9 @@ async fn app_execute_transaction_with_every_action_snapshot() { let (mut app, storage) = initialize_app_with_storage(Some(genesis_state), vec![]).await; // setup for ValidatorUpdate action - let pub_key = tendermint::public_key::PublicKey::from_raw_ed25519(&[1u8; 32]).unwrap(); - let update = tendermint::validator::Update { - pub_key, - power: 100u32.into(), + let update = ValidatorUpdate { + power: 100, + verification_key: crate::test_utils::verification_key(1), }; let rollup_id = RollupId::from_unhashed_bytes(b"testchainid"); diff --git a/crates/astria-sequencer/src/app/tests_execute_transaction.rs b/crates/astria-sequencer/src/app/tests_execute_transaction.rs index 93e8d0d2d6..48907159b4 100644 --- a/crates/astria-sequencer/src/app/tests_execute_transaction.rs +++ b/crates/astria-sequencer/src/app/tests_execute_transaction.rs @@ -14,6 +14,7 @@ use astria_core::{ SequenceAction, SudoAddressChangeAction, TransferAction, + ValidatorUpdate, }, Action, TransactionParams, @@ -312,10 +313,9 @@ async fn app_execute_transaction_validator_update() { let mut app = initialize_app(Some(genesis_state()), vec![]).await; - let pub_key = tendermint::public_key::PublicKey::from_raw_ed25519(&[1u8; 32]).unwrap(); - let update = tendermint::validator::Update { - pub_key, - power: 100u32.into(), + let update = ValidatorUpdate { + power: 100, + 
verification_key: crate::test_utils::verification_key(1), }; let tx = UnsignedTransaction { @@ -332,7 +332,10 @@ async fn app_execute_transaction_validator_update() { let validator_updates = app.state.get_validator_updates().await.unwrap(); assert_eq!(validator_updates.len(), 1); - assert_eq!(validator_updates.get(&pub_key.into()).unwrap(), &update); + assert_eq!( + validator_updates.get(crate::test_utils::verification_key(1).address_bytes()), + Some(&update) + ); } #[tokio::test] diff --git a/crates/astria-sequencer/src/authority/action.rs b/crates/astria-sequencer/src/authority/action.rs index c9986d8b45..399b9e4801 100644 --- a/crates/astria-sequencer/src/authority/action.rs +++ b/crates/astria-sequencer/src/authority/action.rs @@ -10,9 +10,9 @@ use astria_core::{ FeeChange, FeeChangeAction, SudoAddressChangeAction, + ValidatorUpdate, }, }; -use tendermint::account; use tracing::instrument; use crate::{ @@ -24,7 +24,7 @@ use crate::{ }; #[async_trait::async_trait] -impl ActionHandler for tendermint::validator::Update { +impl ActionHandler for ValidatorUpdate { async fn check_stateful( &self, state: &S, @@ -39,14 +39,14 @@ impl ActionHandler for tendermint::validator::Update { // ensure that we're not removing the last validator or a validator // that doesn't exist, these both cause issues in cometBFT - if self.power.is_zero() { + if self.power == 0 { let validator_set = state .get_validator_set() .await .context("failed to get validator set from state")?; // check that validator exists if validator_set - .get(&account::Id::from(self.pub_key)) + .get(self.verification_key.address_bytes()) .is_none() { bail!("cannot remove a non-existing validator"); diff --git a/crates/astria-sequencer/src/authority/component.rs b/crates/astria-sequencer/src/authority/component.rs index c735143567..c3557fe28c 100644 --- a/crates/astria-sequencer/src/authority/component.rs +++ b/crates/astria-sequencer/src/authority/component.rs @@ -4,13 +4,13 @@ use anyhow::{ Context, Result, }; -use astria_core::primitive::v1::Address; -use tendermint::{ - abci::request::{ - BeginBlock, - EndBlock, - }, - validator, +use astria_core::{ + primitive::v1::Address, + protocol::transaction::v1alpha1::action::ValidatorUpdate, +}; +use tendermint::abci::request::{ + BeginBlock, + EndBlock, }; use tracing::instrument; @@ -27,7 +27,7 @@ pub(crate) struct AuthorityComponent; #[derive(Debug)] pub(crate) struct AuthorityComponentAppState { pub(crate) authority_sudo_address: Address, - pub(crate) genesis_validators: Vec, + pub(crate) genesis_validators: Vec, } #[async_trait::async_trait] @@ -40,10 +40,9 @@ impl Component for AuthorityComponent { state .put_sudo_address(app_state.authority_sudo_address) .context("failed to set sudo key")?; + let genesis_validators = app_state.genesis_validators.clone(); state - .put_validator_set(ValidatorSet::new_from_updates( - app_state.genesis_validators.clone(), - )) + .put_validator_set(ValidatorSet::new_from_updates(genesis_validators)) .context("failed to set validator set")?; Ok(()) } @@ -59,8 +58,7 @@ impl Component for AuthorityComponent { .context("failed getting validator set")?; for misbehaviour in &begin_block.byzantine_validators { - let address = tendermint::account::Id::new(misbehaviour.validator.address); - current_set.remove(&address); + current_set.remove(misbehaviour.validator.address); } let state = Arc::get_mut(state) diff --git a/crates/astria-sequencer/src/authority/state_ext.rs b/crates/astria-sequencer/src/authority/state_ext.rs index 3eaa5c51e6..ccfee70bc0 100644 --- 
a/crates/astria-sequencer/src/authority/state_ext.rs +++ b/crates/astria-sequencer/src/authority/state_ext.rs @@ -5,9 +5,13 @@ use anyhow::{ Context, Result, }; -use astria_core::primitive::v1::{ - Address, - ADDRESS_LEN, +use astria_core::{ + crypto::VerificationKey, + primitive::v1::{ + Address, + ADDRESS_LEN, + }, + protocol::transaction::v1alpha1::action::ValidatorUpdate, }; use async_trait::async_trait; use borsh::{ @@ -22,10 +26,6 @@ use serde::{ Deserialize, Serialize, }; -use tendermint::{ - account, - validator, -}; use tracing::instrument; /// Newtype wrapper to read and write an address from rocksdb. @@ -36,32 +36,47 @@ struct SudoAddress([u8; ADDRESS_LEN]); /// /// Contains a map of hex-encoded public keys to validator updates. #[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] -pub(crate) struct ValidatorSet(BTreeMap); +pub(crate) struct ValidatorSet(BTreeMap); + +#[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, Ord, PartialOrd)] +pub(crate) struct ValidatorSetKey(#[serde(with = "::hex::serde")] [u8; ADDRESS_LEN]); + +impl From<[u8; ADDRESS_LEN]> for ValidatorSetKey { + fn from(value: [u8; ADDRESS_LEN]) -> Self { + Self(value) + } +} + +impl From for ValidatorSetKey { + fn from(value: VerificationKey) -> Self { + Self(value.address_bytes()) + } +} impl ValidatorSet { - pub(crate) fn new_from_updates(updates: Vec) -> Self { - let validator_set = updates - .into_iter() - .map(|update| (account::Id::from(update.pub_key), update)) - .collect::>(); - Self(validator_set) + pub(crate) fn new_from_updates(updates: Vec) -> Self { + Self( + updates + .into_iter() + .map(|update| (update.verification_key.into(), update)) + .collect::>(), + ) } pub(crate) fn len(&self) -> usize { self.0.len() } - pub(crate) fn get(&self, address: &account::Id) -> Option<&validator::Update> { - self.0.get(address) + pub(crate) fn get>(&self, address: T) -> Option<&ValidatorUpdate> { + self.0.get(&address.into()) } - pub(crate) fn push_update(&mut self, update: validator::Update) { - let address = tendermint::account::Id::from(update.pub_key); - self.0.insert(address, update); + pub(crate) fn push_update(&mut self, update: ValidatorUpdate) { + self.0.insert(update.verification_key.into(), update); } - pub(crate) fn remove(&mut self, address: &account::Id) { - self.0.remove(address); + pub(crate) fn remove>(&mut self, address: T) { + self.0.remove(&address.into()); } /// Apply updates to the validator set. @@ -70,15 +85,19 @@ impl ValidatorSet { /// Otherwise, update the validator's power. 
pub(crate) fn apply_updates(&mut self, validator_updates: ValidatorSet) { for (address, update) in validator_updates.0 { - match update.power.value() { + match update.power { 0 => self.0.remove(&address), _ => self.0.insert(address, update), }; } } - pub(crate) fn into_tendermint_validator_updates(self) -> Vec { - self.0.into_values().collect::>() + pub(crate) fn try_into_cometbft(self) -> anyhow::Result> { + self.0 + .into_values() + .map(crate::utils::sequencer_to_cometbft_validator) + .collect::, _>>() + .context("failed to map one or more astria validators to cometbft validators") } } @@ -179,18 +198,19 @@ impl StateWriteExt for T {} #[cfg(test)] mod test { + use astria_core::protocol::transaction::v1alpha1::action::ValidatorUpdate; use cnidarium::StateDelta; - use tendermint::{ - validator, - vote, - PublicKey, - }; use super::{ StateReadExt as _, StateWriteExt as _, ValidatorSet, }; + use crate::test_utils::verification_key; + + fn empty_validator_set() -> ValidatorSet { + ValidatorSet::new_from_updates(vec![]) + } #[tokio::test] async fn sudo_address() { @@ -252,10 +272,9 @@ mod test { let snapshot = storage.latest_snapshot(); let mut state = StateDelta::new(snapshot); - let initial = vec![validator::Update { - pub_key: PublicKey::from_raw_ed25519(&[1u8; 32]) - .expect("creating ed25519 key should not fail"), - power: vote::Power::from(10u32), + let initial = vec![ValidatorUpdate { + power: 10, + verification_key: verification_key(1), }]; let initial_validator_set = ValidatorSet::new_from_updates(initial); @@ -273,10 +292,9 @@ mod test { ); // can update - let updates = vec![validator::Update { - pub_key: PublicKey::from_raw_ed25519(&[2u8; 32]) - .expect("creating ed25519 key should not fail"), - power: vote::Power::from(20u32), + let updates = vec![ValidatorUpdate { + power: 20, + verification_key: verification_key(2), }]; let updated_validator_set = ValidatorSet::new_from_updates(updates); state @@ -298,16 +316,13 @@ mod test { let snapshot = storage.latest_snapshot(); let state = StateDelta::new(snapshot); - // create update validator set - let empty_validator_set = ValidatorSet::new_from_updates(vec![]); - // querying for empty validator set is ok assert_eq!( state .get_validator_updates() .await .expect("if no updates have been written return empty set"), - empty_validator_set, + empty_validator_set(), "returned empty validator set different than expected" ); } @@ -320,15 +335,13 @@ mod test { // create update validator set let mut updates = vec![ - validator::Update { - pub_key: PublicKey::from_raw_ed25519(&[1u8; 32]) - .expect("creating ed25519 key should not fail"), - power: vote::Power::from(10u32), + ValidatorUpdate { + power: 10, + verification_key: verification_key(1), }, - validator::Update { - pub_key: PublicKey::from_raw_ed25519(&[2u8; 32]) - .expect("creating ed25519 key should not fail"), - power: vote::Power::from(0u32), + ValidatorUpdate { + power: 0, + verification_key: verification_key(2), }, ]; let mut validator_set_updates = ValidatorSet::new_from_updates(updates); @@ -348,15 +361,13 @@ mod test { // create different updates updates = vec![ - validator::Update { - pub_key: PublicKey::from_raw_ed25519(&[1u8; 32]) - .expect("creating ed25519 key should not fail"), - power: vote::Power::from(22u32), + ValidatorUpdate { + power: 22, + verification_key: verification_key(1), }, - validator::Update { - pub_key: PublicKey::from_raw_ed25519(&[3u8; 32]) - .expect("creating ed25519 key should not fail"), - power: vote::Power::from(10u32), + ValidatorUpdate { + power: 10, 
+ verification_key: verification_key(3), }, ]; @@ -383,10 +394,9 @@ mod test { let mut state = StateDelta::new(snapshot); // create update validator set - let updates = vec![validator::Update { - pub_key: PublicKey::from_raw_ed25519(&[1u8; 32]) - .expect("creating ed25519 key should not fail"), - power: vote::Power::from(10u32), + let updates = vec![ValidatorUpdate { + power: 10, + verification_key: verification_key(1), }]; let validator_set_updates = ValidatorSet::new_from_updates(updates); @@ -407,13 +417,12 @@ mod test { state.clear_validator_updates(); // check that clear worked - let empty_validator_set = ValidatorSet::new_from_updates(vec![]); assert_eq!( state .get_validator_updates() .await .expect("if no updates have been written return empty set"), - empty_validator_set, + empty_validator_set(), "returned validator set different than expected" ); } @@ -430,39 +439,32 @@ mod test { #[tokio::test] async fn execute_validator_updates() { - let key_0 = - PublicKey::from_raw_ed25519(&[1u8; 32]).expect("creating ed25519 key should not fail"); - let key_1 = - PublicKey::from_raw_ed25519(&[2u8; 32]).expect("creating ed25519 key should not fail"); - let key_2 = - PublicKey::from_raw_ed25519(&[3u8; 32]).expect("creating ed25519 key should not fail"); - // create initial validator set let initial = vec![ - validator::Update { - pub_key: key_0, - power: vote::Power::from(1u32), + ValidatorUpdate { + power: 1, + verification_key: verification_key(0), }, - validator::Update { - pub_key: key_1, - power: vote::Power::from(2u32), + ValidatorUpdate { + power: 2, + verification_key: verification_key(1), }, - validator::Update { - pub_key: key_2, - power: vote::Power::from(3u32), + ValidatorUpdate { + power: 3, + verification_key: verification_key(2), }, ]; let mut initial_validator_set = ValidatorSet::new_from_updates(initial); // create set of updates (update key_0, remove key_1) let updates = vec![ - validator::Update { - pub_key: key_0, - power: vote::Power::from(5u32), + ValidatorUpdate { + power: 5, + verification_key: verification_key(0), }, - validator::Update { - pub_key: key_1, - power: vote::Power::from(0u32), + ValidatorUpdate { + power: 0, + verification_key: verification_key(1), }, ]; @@ -473,13 +475,13 @@ mod test { // create end state let updates = vec![ - validator::Update { - pub_key: key_0, - power: vote::Power::from(5u32), + ValidatorUpdate { + power: 5, + verification_key: verification_key(0), }, - validator::Update { - pub_key: key_2, - power: vote::Power::from(3u32), + ValidatorUpdate { + power: 3, + verification_key: verification_key(2), }, ]; let validator_set_endstate = ValidatorSet::new_from_updates(updates); diff --git a/crates/astria-sequencer/src/lib.rs b/crates/astria-sequencer/src/lib.rs index e1edfc6409..9e5ccdae22 100644 --- a/crates/astria-sequencer/src/lib.rs +++ b/crates/astria-sequencer/src/lib.rs @@ -19,6 +19,8 @@ mod sequencer; pub(crate) mod service; pub(crate) mod state_ext; pub(crate) mod storage_keys; +#[cfg(test)] +pub(crate) mod test_utils; pub(crate) mod transaction; mod utils; diff --git a/crates/astria-sequencer/src/service/consensus.rs b/crates/astria-sequencer/src/service/consensus.rs index 9b9b540258..acb92a0a80 100644 --- a/crates/astria-sequencer/src/service/consensus.rs +++ b/crates/astria-sequencer/src/service/consensus.rs @@ -137,7 +137,15 @@ impl Consensus { .init_chain( self.storage.clone(), genesis_state, - init_chain.validators.clone(), + init_chain + .validators + .iter() + .cloned() + .map(crate::utils::cometbft_to_sequencer_validator) + 
.collect::>() + .context( + "failed converting cometbft genesis validators to astria validators", + )?, init_chain.chain_id, ) .await diff --git a/crates/astria-sequencer/src/test_utils.rs b/crates/astria-sequencer/src/test_utils.rs new file mode 100644 index 0000000000..15d34d4e56 --- /dev/null +++ b/crates/astria-sequencer/src/test_utils.rs @@ -0,0 +1,11 @@ +use astria_core::crypto::{ + SigningKey, + VerificationKey, +}; + +pub(crate) fn verification_key(seed: u64) -> VerificationKey { + use rand::SeedableRng as _; + let rng = rand_chacha::ChaChaRng::seed_from_u64(seed); + let signing_key = SigningKey::new(rng); + signing_key.verification_key() +} diff --git a/crates/astria-sequencer/src/utils.rs b/crates/astria-sequencer/src/utils.rs index 794efae20b..d2d2d3bf27 100644 --- a/crates/astria-sequencer/src/utils.rs +++ b/crates/astria-sequencer/src/utils.rs @@ -1,3 +1,9 @@ +use anyhow::Context as _; +use astria_core::{ + generated::astria_vendored::tendermint::abci as raw, + protocol::transaction::v1alpha1::action::ValidatorUpdate, +}; + pub(crate) struct Hex<'a>(pub(crate) &'a [u8]); impl<'a> std::fmt::Display for Hex<'a> { @@ -8,3 +14,71 @@ impl<'a> std::fmt::Display for Hex<'a> { Ok(()) } } + +pub(crate) fn cometbft_to_sequencer_validator( + value: tendermint::validator::Update, +) -> anyhow::Result { + let tendermint_proto::abci::ValidatorUpdate { + pub_key, + power, + } = value.into(); + ValidatorUpdate::try_from_raw(raw::ValidatorUpdate { + power, + pub_key: pub_key.map(pubkey::cometbft_to_astria), + }) + .context("failed converting cometbft validator update to astria validator update") +} + +pub(crate) fn sequencer_to_cometbft_validator( + value: ValidatorUpdate, +) -> anyhow::Result { + let astria_core::generated::astria_vendored::tendermint::abci::ValidatorUpdate { + pub_key, + power, + } = value.into_raw(); + tendermint_proto::abci::ValidatorUpdate { + pub_key: pub_key.map(pubkey::astria_to_cometbft), + power, + } + .try_into() + .context("failed converting astria validator update to cometbft validator update") +} + +mod pubkey { + use astria_core::generated::astria_vendored::tendermint::crypto::{ + public_key::Sum as AstriaSum, + PublicKey as AstriaKey, + }; + use tendermint_proto::crypto::{ + public_key::Sum as CometbftSum, + PublicKey as CometbftKey, + }; + + pub(super) fn astria_to_cometbft(key: AstriaKey) -> CometbftKey { + let AstriaKey { + sum, + } = key; + let sum = match sum { + Some(AstriaSum::Ed25519(bytes)) => Some(CometbftSum::Ed25519(bytes)), + Some(AstriaSum::Secp256k1(bytes)) => Some(CometbftSum::Secp256k1(bytes)), + None => None, + }; + CometbftKey { + sum, + } + } + + pub(super) fn cometbft_to_astria(key: CometbftKey) -> AstriaKey { + let CometbftKey { + sum, + } = key; + let sum = match sum { + Some(CometbftSum::Ed25519(bytes)) => Some(AstriaSum::Ed25519(bytes)), + Some(CometbftSum::Secp256k1(bytes)) => Some(AstriaSum::Secp256k1(bytes)), + None => None, + }; + AstriaKey { + sum, + } + } +} diff --git a/tools/protobuf-compiler/src/main.rs b/tools/protobuf-compiler/src/main.rs index a83cb25e1e..1342abb6fe 100644 --- a/tools/protobuf-compiler/src/main.rs +++ b/tools/protobuf-compiler/src/main.rs @@ -73,23 +73,11 @@ fn main() { ]) .client_mod_attribute(".", "#[cfg(feature=\"client\")]") .server_mod_attribute(".", "#[cfg(feature=\"server\")]") + .extern_path(".astria_vendored.penumbra", "::penumbra-proto") .extern_path( - ".astria_vendored.tendermint.abci", - "::tendermint-proto::abci", - ) - .extern_path( - ".astria_vendored.tendermint.crypto", - 
"::tendermint-proto::crypto", - ) - .extern_path( - ".astria_vendored.tendermint.version", - "::tendermint-proto::version", - ) - .extern_path( - ".astria_vendored.tendermint.types", - "::tendermint-proto::types", + ".astria_vendored.tendermint.abci.ValidatorUpdate", + "crate::generated::astria_vendored::tendermint::abci::ValidatorUpdate", ) - .extern_path(".astria_vendored.penumbra", "::penumbra-proto") .type_attribute(".astria.primitive.v1.Uint128", "#[derive(Copy)]") .use_arc_self(true) // override prost-types with pbjson-types @@ -109,9 +97,8 @@ fn main() { .unwrap() .out_dir(&out_dir) .build(&[ - ".astria.execution.v1alpha2", - ".astria.primitive.v1", - ".astria.sequencerblock.v1alpha1", + ".astria", + ".astria_vendored", ".celestia", ".cosmos", ".tendermint", @@ -150,6 +137,7 @@ fn clean_non_astria_code(generated: &mut ContentMap) { .keys() .filter(|name| { !name.starts_with("astria.") + && !name.starts_with("astria_vendored.") && !name.starts_with("celestia.") && !name.starts_with("cosmos.") && !name.starts_with("tendermint.") From 0684117e96a21a9eb9f0e14d3a2a40373ce6f966 Mon Sep 17 00:00:00 2001 From: noot <36753753+noot@users.noreply.github.com> Date: Thu, 11 Jul 2024 22:46:17 +0200 Subject: [PATCH 14/24] fix(withdrawer): support withdrawer address that differs from bridge address (#1262) ## Summary specify bridge address in withdrawer actions so that the actions will still be accepted by the sequencer even if the withdrawer address for the account is different than the bridge address (assuming the withdrawer signer is the correct key ofc) ## Background we want to support withdrawer addresses that differ from bridge addresses ## Changes - specify bridge address in withdrawer actions ## Testing unit tests ## Related Issues closes #1241 --- .../src/bridge_withdrawer/ethereum/convert.rs | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/convert.rs b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/convert.rs index da9c51938d..8dd50f85c8 100644 --- a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/convert.rs +++ b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/convert.rs @@ -65,6 +65,7 @@ pub(crate) fn event_to_action( event_with_metadata.transaction_hash, fee_asset, asset_withdrawal_divisor, + bridge_address, ) .wrap_err("failed to convert sequencer withdrawal event to action")?, WithdrawalEvent::Ics20(event) => event_to_ics20_withdrawal( @@ -88,6 +89,7 @@ fn event_to_bridge_unlock( transaction_hash: TxHash, fee_asset: asset::Denom, asset_withdrawal_divisor: u128, + bridge_address: Address, ) -> eyre::Result { let memo = bridge::UnlockMemo { // XXX: The documentation mentions that the ethers U64 type will panic if it cannot be @@ -110,7 +112,7 @@ fn event_to_bridge_unlock( ))?, memo: serde_json::to_string(&memo).wrap_err("failed to serialize memo to json")?, fee_asset, - bridge_address: None, + bridge_address: Some(bridge_address), }; Ok(Action::BridgeUnlock(action)) @@ -176,7 +178,7 @@ fn event_to_ics20_withdrawal( source_channel: channel .parse() .wrap_err("failed to parse channel from denom")?, - bridge_address: None, + bridge_address: Some(bridge_address), }; Ok(Action::Ics20Withdrawal(action)) } @@ -212,12 +214,13 @@ mod tests { block_number: 1.into(), transaction_hash: [2u8; 32].into(), }; + let bridge_address = crate::astria_address([99u8; 20]); let action = event_to_action( event_with_meta, denom.clone(), denom.clone(), 1, - 
crate::astria_address([99u8; 20]), + bridge_address, crate::ASTRIA_ADDRESS_PREFIX, ) .unwrap(); @@ -234,7 +237,7 @@ }) .unwrap(), fee_asset: denom, - bridge_address: None, + bridge_address: Some(bridge_address), }; assert_eq!(action, expected_action); @@ -253,12 +256,13 @@ transaction_hash: [2u8; 32].into(), }; let divisor = 10; + let bridge_address = crate::astria_address([99u8; 20]); let action = event_to_action( event_with_meta, denom.clone(), denom.clone(), divisor, - crate::astria_address([99u8; 20]), + bridge_address, crate::ASTRIA_ADDRESS_PREFIX, ) .unwrap(); @@ -275,7 +279,7 @@ }) .unwrap(), fee_asset: denom, - bridge_address: None, + bridge_address: Some(bridge_address), }; assert_eq!(action, expected_action); @@ -330,7 +334,7 @@ timeout_height: IbcHeight::new(u64::MAX, u64::MAX).unwrap(), timeout_time: 0, // zero this for testing source_channel: "channel-0".parse().unwrap(), - bridge_address: None, + bridge_address: Some(bridge_address), }; assert_eq!(action, expected_action); }

From bb41dc4e6bcf64712d1e7dfd8cf31b4f8361741a Mon Sep 17 00:00:00 2001
From: Richard Janis Goldschmidt
Date: Sat, 13 Jul 2024 09:56:19 +0200
Subject: [PATCH 15/24] fix(core, bridge, sequencer)!: disambiguate return addresses (#1266)

## Summary

Ensures that bridge-withdrawer does not use a rollup address as a sequencer refund address.

## Background

Bridge-withdrawer was using an address on the ethereum rollup as the sequencer refund address in the actions it was submitting to the sequencer. That was wrong.

## Changes

- Add a field `rollup_return_address` to `Ics20WithdrawalFromRollupMemo` to make its purpose clear.
- In bridge-withdrawer, always set `refund_address` to the bridge account (`refund_address` and `bridge_address` are now the same).
- Sequencer now assumes that for refunds of ics20 withdrawals initiated by a rollup, `FungibleTokenPacketData.sender` is the bridge address to which funds should be refunded. This is consistent with the change above.
- Remove the field `Ics20WithdrawalFromRollupMemo.bridge_address` as it is no longer necessary.

## Testing

Tests were updated and pass. This patch relies on the tests exhaustively testing the ics20 logic.

## Breaking Changelist

- This change is network-breaking because `Ics20WithdrawalFromRollupMemo` is used in Sequencer's protocol.
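For reference, a minimal sketch of how the reshaped memo is built and serialized, mirroring the snapshot test in `crates/astria-core/src/bridge.rs`. It assumes `astria-core` is compiled with its `serde` feature and that `serde_json` is available; the example function name and values are illustrative only:

```rust
use astria_core::bridge::Ics20WithdrawalFromRollupMemo;

fn example_memo_json() -> String {
    let memo = Ics20WithdrawalFromRollupMemo {
        memo: "hello".to_string(),
        block_number: 1,
        // The rollup-side sender of the withdrawal; this replaces the removed
        // `bridge_address` field.
        rollup_return_address: "rollup-defined".to_string(),
        transaction_hash: [88; 32],
    };
    // Serializes with camelCase keys, e.g.
    // {"memo":"hello","blockNumber":1,"rollupReturnAddress":"rollup-defined","transactionHash":"WFhY...WFg="}
    serde_json::to_string(&memo).expect("memo serializes to JSON")
}
```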
--- .../src/bridge_withdrawer/ethereum/convert.rs | 23 ++--------- .../src/bridge_withdrawer/ethereum/watcher.rs | 28 +------------ .../src/bridge_withdrawer/mod.rs | 5 +-- .../src/bridge_withdrawer/submitter/tests.rs | 2 +- crates/astria-bridge-withdrawer/src/lib.rs | 7 +--- crates/astria-core/src/bridge.rs | 16 +++----- ...ge__test__bridge_unlock_memo_snapshot.snap | 4 +- ..._withdrawal_from_rollup_memo_snapshot.snap | 8 ++-- .../src/ibc/ics20_transfer.rs | 40 ++++++++++--------- 9 files changed, 42 insertions(+), 91 deletions(-) diff --git a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/convert.rs b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/convert.rs index 8dd50f85c8..dab2828183 100644 --- a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/convert.rs +++ b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/convert.rs @@ -56,7 +56,6 @@ pub(crate) fn event_to_action( rollup_asset_denom: asset::Denom, asset_withdrawal_divisor: u128, bridge_address: Address, - sequencer_address_prefix: &str, ) -> eyre::Result { let action = match event_with_metadata.event { WithdrawalEvent::Sequencer(event) => event_to_bridge_unlock( @@ -76,7 +75,6 @@ pub(crate) fn event_to_action( rollup_asset_denom, asset_withdrawal_divisor, bridge_address, - sequencer_address_prefix, ) .wrap_err("failed to convert ics20 withdrawal event to action")?, }; @@ -128,12 +126,10 @@ fn event_to_ics20_withdrawal( rollup_asset_denom: asset::Denom, asset_withdrawal_divisor: u128, bridge_address: Address, - sequencer_address_prefix: &str, ) -> eyre::Result { // TODO: make this configurable const ICS20_WITHDRAWAL_TIMEOUT: Duration = Duration::from_secs(300); - let sender = event.sender.to_fixed_bytes(); let denom = rollup_asset_denom.clone(); let channel = denom @@ -143,23 +139,15 @@ fn event_to_ics20_withdrawal( let memo = Ics20WithdrawalFromRollupMemo { memo: event.memo, - bridge_address, block_number: block_number.as_u64(), + rollup_return_address: event.sender.to_string(), transaction_hash: transaction_hash.into(), }; let action = Ics20Withdrawal { denom: rollup_asset_denom, destination_chain_address: event.destination_chain_address, - // note: this is actually a rollup address; we expect failed ics20 withdrawals to be - // returned to the rollup. - // this is only ok for now because addresses on the sequencer and the rollup are both 20 - // bytes, but this won't work otherwise. 
- return_address: Address::builder() - .array(sender) - .prefix(sequencer_address_prefix) - .try_build() - .wrap_err("failed to construct return address")?, + return_address: bridge_address, amount: event .amount .as_u128() @@ -221,7 +209,6 @@ mod tests { denom.clone(), 1, bridge_address, - crate::ASTRIA_ADDRESS_PREFIX, ) .unwrap(); let Action::BridgeUnlock(action) = action else { @@ -263,7 +250,6 @@ mod tests { denom.clone(), divisor, bridge_address, - crate::ASTRIA_ADDRESS_PREFIX, ) .unwrap(); let Action::BridgeUnlock(action) = action else { @@ -307,7 +293,6 @@ mod tests { denom.clone(), 1, bridge_address, - crate::ASTRIA_ADDRESS_PREFIX, ) .unwrap(); let Action::Ics20Withdrawal(mut action) = action else { @@ -321,12 +306,12 @@ mod tests { let expected_action = Ics20Withdrawal { denom: denom.clone(), destination_chain_address, - return_address: crate::astria_address([0u8; 20]), + return_address: bridge_address, amount: 99, memo: serde_json::to_string(&Ics20WithdrawalFromRollupMemo { memo: "hello".to_string(), - bridge_address, block_number: 1u64, + rollup_return_address: ethers::types::Address::from([0u8; 20]).to_string(), transaction_hash: [2u8; 32], }) .unwrap(), diff --git a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/watcher.rs b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/watcher.rs index adb7de7457..4e24918000 100644 --- a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/watcher.rs +++ b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/watcher.rs @@ -75,7 +75,6 @@ pub(crate) struct Builder { pub(crate) rollup_asset_denom: Denom, pub(crate) bridge_address: Address, pub(crate) submitter_handle: submitter::Handle, - pub(crate) sequencer_address_prefix: String, } impl Builder { @@ -89,7 +88,6 @@ impl Builder { rollup_asset_denom, bridge_address, submitter_handle, - sequencer_address_prefix, } = self; let contract_address = address_from_string(ðereum_contract_address) @@ -114,7 +112,6 @@ impl Builder { shutdown_token: shutdown_token.clone(), startup_handle, submitter_handle, - sequencer_address_prefix, }) } } @@ -129,7 +126,6 @@ pub(crate) struct Watcher { rollup_asset_denom: Denom, bridge_address: Address, state: Arc, - sequencer_address_prefix: String, } impl Watcher { @@ -145,7 +141,6 @@ impl Watcher { state, shutdown_token, submitter_handle, - sequencer_address_prefix, .. 
} = self; @@ -154,7 +149,6 @@ impl Watcher { rollup_asset_denom, bridge_address, asset_withdrawal_divisor, - sequencer_address_prefix, }; state.set_watcher_ready(); @@ -497,7 +491,6 @@ struct EventToActionConvertConfig { rollup_asset_denom: Denom, bridge_address: Address, asset_withdrawal_divisor: u128, - sequencer_address_prefix: String, } impl EventToActionConvertConfig { @@ -508,7 +501,6 @@ impl EventToActionConvertConfig { self.rollup_asset_denom.clone(), self.asset_withdrawal_divisor, self.bridge_address, - &self.sequencer_address_prefix, ) } } @@ -670,7 +662,6 @@ mod tests { state: Arc::new(State::new()), rollup_asset_denom: denom.clone(), bridge_address, - sequencer_address_prefix: crate::ASTRIA_ADDRESS_PREFIX.into(), } .build() .unwrap(); @@ -686,15 +677,8 @@ mod tests { block_number: receipt.block_number.unwrap(), transaction_hash: receipt.transaction_hash, }; - let expected_action = event_to_action( - expected_event, - denom.clone(), - denom, - 1, - bridge_address, - crate::ASTRIA_ADDRESS_PREFIX, - ) - .unwrap(); + let expected_action = + event_to_action(expected_event, denom.clone(), denom, 1, bridge_address).unwrap(); let Action::BridgeUnlock(expected_action) = expected_action else { panic!("expected action to be BridgeUnlock, got {expected_action:?}"); }; @@ -743,7 +727,6 @@ mod tests { denom.clone(), 1, bridge_address, - crate::ASTRIA_ADDRESS_PREFIX, ) .unwrap(); let Action::BridgeUnlock(expected_action) = expected_action else { @@ -768,7 +751,6 @@ mod tests { rollup_asset_denom: denom.clone(), bridge_address, submitter_handle: submitter::Handle::new(batch_tx), - sequencer_address_prefix: crate::ASTRIA_ADDRESS_PREFIX.into(), } .build() .unwrap(); @@ -849,7 +831,6 @@ mod tests { rollup_asset_denom: denom.clone(), bridge_address, submitter_handle: submitter::Handle::new(batch_tx), - sequencer_address_prefix: crate::ASTRIA_ADDRESS_PREFIX.into(), } .build() .unwrap(); @@ -874,7 +855,6 @@ mod tests { denom.clone(), 1, bridge_address, - crate::ASTRIA_ADDRESS_PREFIX, ) .unwrap() else { panic!("expected action to be Ics20Withdrawal"); @@ -976,7 +956,6 @@ mod tests { rollup_asset_denom: denom.clone(), bridge_address, submitter_handle: submitter::Handle::new(batch_tx), - sequencer_address_prefix: crate::ASTRIA_ADDRESS_PREFIX.into(), } .build() .unwrap(); @@ -999,7 +978,6 @@ mod tests { denom.clone(), 1, bridge_address, - crate::ASTRIA_ADDRESS_PREFIX, ) .unwrap(); let Action::BridgeUnlock(expected_action) = expected_action else { @@ -1078,7 +1056,6 @@ mod tests { rollup_asset_denom: denom.clone(), bridge_address, submitter_handle: submitter::Handle::new(batch_tx), - sequencer_address_prefix: crate::ASTRIA_ADDRESS_PREFIX.into(), } .build() .unwrap(); @@ -1107,7 +1084,6 @@ mod tests { denom.clone(), 1, bridge_address, - crate::ASTRIA_ADDRESS_PREFIX, ) .unwrap() else { panic!("expected action to be Ics20Withdrawal"); diff --git a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/mod.rs b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/mod.rs index d655f9c645..216ec60770 100644 --- a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/mod.rs +++ b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/mod.rs @@ -124,7 +124,6 @@ impl BridgeWithdrawer { .wrap_err("failed to parse ROLLUP_ASSET_DENOMINATION as Denom")?, bridge_address: sequencer_bridge_address, submitter_handle, - sequencer_address_prefix: sequencer_address_prefix.clone(), } .build() .wrap_err("failed to build ethereum watcher")?; @@ -410,8 +409,6 @@ pub(crate) fn flatten_result(res: Result, JoinError>) -> eyre } 
#[cfg(test)] -pub(crate) const ASTRIA_ADDRESS_PREFIX: &str = "astria"; - /// Constructs an [`Address`] prefixed by `"astria"`. #[cfg(test)] pub(crate) fn astria_address( @@ -419,7 +416,7 @@ pub(crate) fn astria_address( ) -> astria_core::primitive::v1::Address { astria_core::primitive::v1::Address::builder() .array(array) - .prefix(ASTRIA_ADDRESS_PREFIX) + .prefix("astria") .try_build() .unwrap() } diff --git a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/submitter/tests.rs b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/submitter/tests.rs index b0e681897f..1c4e693ed5 100644 --- a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/submitter/tests.rs +++ b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/submitter/tests.rs @@ -189,8 +189,8 @@ fn make_ics20_withdrawal_action() -> Action { amount: 99, memo: serde_json::to_string(&Ics20WithdrawalFromRollupMemo { memo: "hello".to_string(), - bridge_address: crate::astria_address([0u8; 20]), block_number: DEFAULT_LAST_ROLLUP_HEIGHT, + rollup_return_address: ethers::types::Address::from([0u8; 20]).to_string(), transaction_hash: [2u8; 32], }) .unwrap(), diff --git a/crates/astria-bridge-withdrawer/src/lib.rs b/crates/astria-bridge-withdrawer/src/lib.rs index 623892b393..8ce4dae86e 100644 --- a/crates/astria-bridge-withdrawer/src/lib.rs +++ b/crates/astria-bridge-withdrawer/src/lib.rs @@ -4,11 +4,8 @@ mod build_info; pub(crate) mod config; pub(crate) mod metrics; -pub use bridge_withdrawer::BridgeWithdrawer; #[cfg(test)] -pub(crate) use bridge_withdrawer::{ - astria_address, - ASTRIA_ADDRESS_PREFIX, -}; +pub(crate) use bridge_withdrawer::astria_address; +pub use bridge_withdrawer::BridgeWithdrawer; pub use build_info::BUILD_INFO; pub use config::Config; diff --git a/crates/astria-core/src/bridge.rs b/crates/astria-core/src/bridge.rs index a8eedce914..c8cb1fb570 100644 --- a/crates/astria-core/src/bridge.rs +++ b/crates/astria-core/src/bridge.rs @@ -1,10 +1,9 @@ -use crate::primitive::v1::Address; - #[derive(Clone, Debug)] #[cfg_attr( feature = "serde", derive(serde::Serialize), - derive(serde::Deserialize) + derive(serde::Deserialize), + serde(rename_all = "camelCase", deny_unknown_fields) )] pub struct UnlockMemo { pub block_number: u64, @@ -24,12 +23,13 @@ pub struct UnlockMemo { #[cfg_attr( feature = "serde", derive(serde::Serialize), - derive(serde::Deserialize) + derive(serde::Deserialize), + serde(rename_all = "camelCase", deny_unknown_fields) )] pub struct Ics20WithdrawalFromRollupMemo { pub memo: String, - pub bridge_address: Address, pub block_number: u64, + pub rollup_return_address: String, #[cfg_attr( feature = "serde", serde( @@ -72,12 +72,8 @@ mod test { fn ics20_withdrawal_from_rollup_memo_snapshot() { let memo = Ics20WithdrawalFromRollupMemo { memo: "hello".to_string(), - bridge_address: Address::builder() - .array([99; 20]) - .prefix("astria") - .try_build() - .unwrap(), block_number: 1, + rollup_return_address: "rollup-defined".to_string(), transaction_hash: [88; 32], }; diff --git a/crates/astria-core/src/snapshots/astria_core__bridge__test__bridge_unlock_memo_snapshot.snap b/crates/astria-core/src/snapshots/astria_core__bridge__test__bridge_unlock_memo_snapshot.snap index f0f7700ccb..611bd40d7d 100644 --- a/crates/astria-core/src/snapshots/astria_core__bridge__test__bridge_unlock_memo_snapshot.snap +++ b/crates/astria-core/src/snapshots/astria_core__bridge__test__bridge_unlock_memo_snapshot.snap @@ -3,6 +3,6 @@ source: crates/astria-core/src/bridge.rs expression: memo --- { - "block_number": 42, - 
"transaction_hash": "WFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFg=" + "blockNumber": 42, + "transactionHash": "WFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFg=" } diff --git a/crates/astria-core/src/snapshots/astria_core__bridge__test__ics20_withdrawal_from_rollup_memo_snapshot.snap b/crates/astria-core/src/snapshots/astria_core__bridge__test__ics20_withdrawal_from_rollup_memo_snapshot.snap index 6de0a3120b..614cb85239 100644 --- a/crates/astria-core/src/snapshots/astria_core__bridge__test__ics20_withdrawal_from_rollup_memo_snapshot.snap +++ b/crates/astria-core/src/snapshots/astria_core__bridge__test__ics20_withdrawal_from_rollup_memo_snapshot.snap @@ -4,9 +4,7 @@ expression: memo --- { "memo": "hello", - "bridge_address": { - "bech32m": "astria1vd3kxcmrvd3kxcmrvd3kxcmrvd3kxcmrj6p6kl" - }, - "block_number": 1, - "transaction_hash": "WFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFg=" + "blockNumber": 1, + "rollupReturnAddress": "rollup-defined", + "transactionHash": "WFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFg=" } diff --git a/crates/astria-sequencer/src/ibc/ics20_transfer.rs b/crates/astria-sequencer/src/ibc/ics20_transfer.rs index 197fe214f3..0d60274bde 100644 --- a/crates/astria-sequencer/src/ibc/ics20_transfer.rs +++ b/crates/astria-sequencer/src/ibc/ics20_transfer.rs @@ -389,7 +389,7 @@ async fn execute_ics20_transfer( .parse() .context("failed to parse packet data amount to u128")?; let recipient = if is_refund { - packet_data.sender + packet_data.sender.clone() } else { packet_data.receiver }; @@ -412,13 +412,15 @@ async fn execute_ics20_transfer( // // in this case, we lock the tokens back in the bridge account and // emit a `Deposit` event to send the tokens back to the rollup. - let maybe_memo = serde_json::from_slice(packet_data.memo.as_bytes()); - if is_refund && maybe_memo.is_ok() { - let memo: Ics20WithdrawalFromRollupMemo = - maybe_memo.expect("memo is valid as it was checked by is_ok()"); + if is_refund && serde_json::from_str::(&packet_data.memo).is_ok() + { + let bridge_account = packet_data.sender.parse().context( + "sender not an Astria Address: for refunds of ics20 withdrawals that came from a \ + rollup, the sender must be a valid Astria Address (usually the bridge account)", + )?; execute_rollup_withdrawal_refund( state, - &memo.bridge_address, + bridge_account, &denom_trace, packet_amount, recipient, @@ -448,7 +450,7 @@ async fn execute_ics20_transfer( // execute relevant state changes if it is execute_ics20_transfer_bridge_lock( state, - &recipient, + recipient, &trace_with_dest, packet_amount, packet_data.memo.clone(), @@ -525,7 +527,7 @@ async fn execute_ics20_transfer( /// and locks the tokens back in the specified bridge account. async fn execute_rollup_withdrawal_refund( state: &mut S, - bridge_address: &Address, + bridge_address: Address, denom: &denom::TracePrefixed, amount: u128, destination_address: String, @@ -533,7 +535,7 @@ async fn execute_rollup_withdrawal_refund( execute_deposit(state, bridge_address, denom, amount, destination_address).await?; state - .increase_balance(*bridge_address, denom, amount) + .increase_balance(bridge_address, denom, amount) .await .context( "failed to update bridge account account balance in execute_rollup_withdrawal_refund", @@ -548,7 +550,7 @@ async fn execute_rollup_withdrawal_refund( /// this function is a no-op. 
async fn execute_ics20_transfer_bridge_lock( state: &mut S, - recipient: &Address, + recipient: Address, denom: &denom::TracePrefixed, amount: u128, memo: String, @@ -557,7 +559,7 @@ async fn execute_ics20_transfer_bridge_lock( // check if the recipient is a bridge account; if so, // ensure that the packet memo field (`destination_address`) is set. let is_bridge_lock = state - .get_bridge_account_rollup_id(recipient) + .get_bridge_account_rollup_id(&recipient) .await .context("failed to get bridge account rollup ID from state")? .is_some(); @@ -597,7 +599,7 @@ async fn execute_ics20_transfer_bridge_lock( async fn execute_deposit( state: &mut S, - bridge_address: &Address, + bridge_address: Address, denom: &denom::TracePrefixed, amount: u128, destination_address: String, @@ -606,7 +608,7 @@ async fn execute_deposit( // ensure that the asset ID being transferred // to it is allowed. let Some(rollup_id) = state - .get_bridge_account_rollup_id(bridge_address) + .get_bridge_account_rollup_id(&bridge_address) .await .context("failed to get bridge account rollup ID from state")? else { @@ -614,7 +616,7 @@ async fn execute_deposit( }; let allowed_asset = state - .get_bridge_account_ibc_asset(bridge_address) + .get_bridge_account_ibc_asset(&bridge_address) .await .context("failed to get bridge account asset ID")?; ensure!( @@ -623,7 +625,7 @@ async fn execute_deposit( ); let deposit = Deposit::new( - *bridge_address, + bridge_address, rollup_id, amount, denom.into(), @@ -1003,7 +1005,7 @@ mod test { let destination_address = "destinationaddress".to_string(); execute_rollup_withdrawal_refund( &mut state_tx, - &bridge_address, + bridge_address, &denom, amount, destination_address, @@ -1030,8 +1032,8 @@ mod test { let snapshot = storage.latest_snapshot(); let mut state_tx = StateDelta::new(snapshot.clone()); - let destination_chain_address = "destinationchainaddress".to_string(); let bridge_address = crate::address::base_prefixed([99u8; 20]); + let destination_chain_address = bridge_address.to_string(); let denom = "nootasset".parse::().unwrap(); let rollup_id = RollupId::from_unhashed_bytes(b"testchainid"); @@ -1042,13 +1044,13 @@ mod test { let packet = FungibleTokenPacketData { denom: denom.to_string(), - sender: destination_chain_address.clone(), + sender: bridge_address.to_string(), amount: "100".to_string(), receiver: "other-chain-address".to_string(), memo: serde_json::to_string(&Ics20WithdrawalFromRollupMemo { - bridge_address, memo: String::new(), block_number: 1, + rollup_return_address: "rollup-defined".to_string(), transaction_hash: [1u8; 32], }) .unwrap(), From 1555c0328b0f964c2476bc29741e7ea322d96f19 Mon Sep 17 00:00:00 2001 From: Richard Janis Goldschmidt Date: Sat, 13 Jul 2024 17:21:19 +0200 Subject: [PATCH 16/24] feat(cli): add cmd to collect withdrawal events and submit as actions (#1261) ## Summary Adds `bridge collect-withdrawals` and `bridge submit-withdrawals` subcommands to `astria-cli`. ## Background The worker service `astria-bridge-withdrawer` is a closed system that collects withdrawal event from the rollup and submits them to the sequencer in a closed loop. But it can be desirable to inspect the withdrawal events generated by the bridge contracts, and then submit them manually. This functionality is now provided by `astria-cli`. 
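As a sketch of the manual flow described above (endpoints, heights, key file, and output path are placeholders; the flags mirror the `run-smoke-cli` recipe added to `charts/deploy.just` in this patch):

```sh
# Collect withdrawal events emitted by the rollup's bridge contract and write
# them out as sequencer actions.
astria-cli bridge collect-withdrawals \
    --rollup-endpoint ws://ws-executor.astria.localdev.me/ \
    --contract-address 0xA58639fB5458e65E4fA917FF951C390292C24A15 \
    --from-rollup-height 1 \
    --to-rollup-height 100 \
    --rollup-asset-denom nria \
    --bridge-address astria13ahqz4pjqfmynk9ylrqv4fwe4957x2p0h5782u \
    --output ./withdrawals.json

# Inspect withdrawals.json, then sign and submit the collected actions.
astria-cli bridge submit-withdrawals \
    --signing-key ./bridge-signing-key \
    --sequencer-chain-id sequencer-test-chain-0 \
    --sequencer-url http://rpc.sequencer.localdev.me \
    --input ./withdrawals.json
```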
## Changes

- Add subcommands `bridge collect-withdrawals` and `bridge submit-withdrawals` to `astria-cli`. `collect-withdrawals` has two modes of operation, depending on whether `--to-rollup-height` is set:
  - If set, it fetches all blocks between `--from-rollup-height` and `--to-rollup-height` (inclusive), converts them to sequencer actions, and then writes them to a file.
  - If not set, it fetches blocks from `--from-rollup-height` until SIGINT (Ctrl-C) is received. After the signal, the converted actions are written to a file.

## Testing

- Added `run-smoke-cli` in `charts/deploy.just` (@joroshiba)
- Added a `smoke-cli` job in `.github/workflows/docker-build.yml` (@joroshiba)

--------- Co-authored-by: Jordan Oroshiba --- .github/workflows/docker-build.yml | 36 +- Cargo.lock | 9 + charts/deploy.just | 130 +++- crates/astria-cli/Cargo.toml | 13 +- crates/astria-cli/src/cli/bridge.rs | 22 + crates/astria-cli/src/cli/mod.rs | 9 +- crates/astria-cli/src/cli/sequencer.rs | 64 +- .../astria-cli/src/commands/bridge/collect.rs | 582 ++++++++++++++++++ crates/astria-cli/src/commands/bridge/mod.rs | 2 + .../astria-cli/src/commands/bridge/submit.rs | 155 +++++ crates/astria-cli/src/commands/mod.rs | 6 +- crates/astria-cli/src/main.rs | 26 +- 12 files changed, 993 insertions(+), 61 deletions(-) create mode 100644 crates/astria-cli/src/cli/bridge.rs create mode 100644 crates/astria-cli/src/commands/bridge/collect.rs create mode 100644 crates/astria-cli/src/commands/bridge/mod.rs create mode 100644 crates/astria-cli/src/commands/bridge/submit.rs diff --git a/.github/workflows/docker-build.yml b/.github/workflows/docker-build.yml index 9a34b861f7..147f455e64 100644 --- a/.github/workflows/docker-build.yml +++ b/.github/workflows/docker-build.yml @@ -146,9 +146,43 @@ jobs: timeout-minutes: 3 run: just run-smoke-test + smoke-cli: + needs: [run_checker, composer, conductor, sequencer, sequencer-relayer, evm-bridge-withdrawer] + if: (github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'astriaorg/astria') && (github.event_name == 'merge_group' || needs.run_checker.outputs.run_docker == 'true') + runs-on: buildjet-8vcpu-ubuntu-2204 + steps: + - uses: actions/checkout@v4 + - name: Install just + uses: taiki-e/install-action@just + - name: Install kind + uses: helm/kind-action@v1 + with: + install_only: true + - name: Install astria cli (rust) + run: | + just install-cli + - name: Log in to GHCR + uses: docker/login-action@v2 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + - name: Setup Smoke Test Environment + timeout-minutes: 5 + run: | + TAG=sha-$(git rev-parse --short HEAD) + just deploy cluster + kubectl create secret generic regcred --from-file=.dockerconfigjson=$HOME/.docker/config.json --type=kubernetes.io/dockerconfigjson + echo -e "\n\nDeploying with astria images tagged $TAG" + just deploy smoke-cli $TAG + - name: Run Smoke test + timeout-minutes: 3 + run: just run-smoke-cli + + docker: if: ${{ always() && !cancelled() }} - needs: [composer, conductor, sequencer, sequencer-relayer, evm-bridge-withdrawer, smoke-test] + needs: [composer, conductor, sequencer, sequencer-relayer, evm-bridge-withdrawer, smoke-test, smoke-cli] uses: ./.github/workflows/reusable-success.yml with: success: ${{ !contains(needs.*.result, 'failure') }} diff --git a/Cargo.lock b/Cargo.lock index 21684370bd..94e5939f2e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -539,17 +539,26 @@ dependencies = [ name = "astria-cli" version = "0.3.1" dependencies = [ + "astria-bridge-contracts",
"astria-core", "astria-sequencer-client", "clap", "color-eyre", + "ethers", + "futures", "hex", + "humantime", + "ibc-types", "rand 0.8.5", "serde", + "serde_json", "serde_yaml", "sha2 0.10.8", + "tendermint", "tokio", "tracing", + "tracing-subscriber 0.3.18", + "tryhard", "which", ] diff --git a/charts/deploy.just b/charts/deploy.just index 513fc92ae3..520270f195 100644 --- a/charts/deploy.just +++ b/charts/deploy.just @@ -11,6 +11,9 @@ delete tool *ARGS: init tool *ARGS: @just init-{{tool}} {{ARGS}} +run-smoke type *ARGS: + @just run-smoke-{{type}} {{ARGS}} + load-image image: kind load docker-image {{image}} --name astria-dev-cluster @@ -148,7 +151,6 @@ deploy-local-metrics: kubectl apply -f kubernetes/metrics-server-local.yml defaultTag := "" - deploy-smoke-test tag=defaultTag: @echo "Deploying ingress controller..." && just deploy ingress-controller > /dev/null @just wait-for-ingress-controller > /dev/null @@ -171,6 +173,25 @@ deploy-smoke-test tag=defaultTag: {{ if tag != '' { replace('--set images.evmBridgeWithdrawer.devTag=#', '#', tag) } else { '' } }} > /dev/null @just wait-for-rollup > /dev/null +deploy-smoke-cli tag=defaultTag: + @echo "Deploying ingress controller..." && just deploy ingress-controller > /dev/null + @just wait-for-ingress-controller > /dev/null + @echo "Deploying local celestia instance..." && just deploy celestia-local > /dev/null + @helm dependency update charts/sequencer > /dev/null + @helm dependency update charts/evm-rollup > /dev/null + @echo "Setting up single astria sequencer..." && helm install \ + -n astria-validator-single single-sequencer-chart ./charts/sequencer \ + -f dev/values/validators/all.yml \ + -f dev/values/validators/single.yml \ + {{ if tag != '' { replace('--set images.sequencer.devTag=# --set sequencer-relayer.images.sequencerRelayer.devTag=#', '#', tag) } else { '' } }} \ + --create-namespace > /dev/null + @just wait-for-sequencer > /dev/null + @echo "Starting EVM rollup..." 
&& helm install -n astria-dev-cluster astria-chain-chart ./charts/evm-rollup -f dev/values/rollup/dev.yaml \ + {{ if tag != '' { replace('--set images.conductor.devTag=# --set images.composer.devTag=#', '#', tag) } else { '' } }} \ + --set config.blockscout.enabled=false \ + --set config.faucet.enabled=false > /dev/null + @just wait-for-dev-rollup > /dev/null + evm_destination_address := "0xaC21B97d35Bf75A7dAb16f35b111a50e78A72F30" # 1 RIA is 10^9 nRIA @@ -180,32 +201,33 @@ rollup_multiplier := "1000000000" # 10 RIA sequencer_transfer_amount := "10" sequencer_rpc_url := "http://rpc.sequencer.localdev.me" +sequencer_bridge_address := "astria13ahqz4pjqfmynk9ylrqv4fwe4957x2p0h5782u" +sequencer_bridge_pkey := "dfa7108e38ab71f89f356c72afc38600d5758f11a8c337164713e4471411d2e0" +sequencer_chain_id := "sequencer-test-chain-0" init-rollup-bridge rollupName=defaultRollupName evmDestinationAddress=evm_destination_address transferAmount=sequencer_transfer_amount: #!/usr/bin/env bash - SEQUENCER_BRIDGE_PKEY="dfa7108e38ab71f89f356c72afc38600d5758f11a8c337164713e4471411d2e0" SEQUENCER_FUNDS_PKEY="934ab488f9e1900f6a08f50605ce1409ca9d95ebdc400dafc2e8a4306419fd52" - SEQUENCER_BRIDGE_ADDRESS="astria13ahqz4pjqfmynk9ylrqv4fwe4957x2p0h5782u" - SEQUENCER_CHAIN_ID="sequencer-test-chain-0" ASSET="nria" FEE_ASSET="nria" TRANSFER_AMOUNT=$(echo "{{transferAmount}} * {{sequencer_base_amount}}" | bc) astria-cli sequencer init-bridge-account \ --rollup-name {{rollupName}} \ - --private-key $SEQUENCER_BRIDGE_PKEY \ - --sequencer.chain-id $SEQUENCER_CHAIN_ID \ + --private-key {{sequencer_bridge_pkey}} \ + --sequencer.chain-id {{sequencer_chain_id}} \ --sequencer-url {{sequencer_rpc_url}} \ --fee-asset=$FEE_ASSET --asset=$ASSET || exit 1 - astria-cli sequencer bridge-lock $SEQUENCER_BRIDGE_ADDRESS \ + astria-cli sequencer bridge-lock {{sequencer_bridge_address}} \ --amount $TRANSFER_AMOUNT \ --destination-chain-address {{evmDestinationAddress}} \ --private-key $SEQUENCER_FUNDS_PKEY \ - --sequencer.chain-id $SEQUENCER_CHAIN_ID \ + --sequencer.chain-id {{sequencer_chain_id}} \ --sequencer-url {{sequencer_rpc_url}} \ --fee-asset=$FEE_ASSET --asset=$ASSET || exit 1 eth_rpc_url := "http://executor.astria.localdev.me/" +eth_ws_url := "ws://ws-executor.astria.localdev.me/" bridge_tx_bytes := "0xf8f280843c54e7f182898594a58639fb5458e65e4fa917ff951c390292c24a15880de0b6b3a7640000b884bab916d00000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000002d617374726961313777306164656736346b7930646178776432756779756e65656c6c6d6a676e786c333935303400000000000000000000000000000000000000820a96a086b85348c9816f6d34533669db3d3626cf55eecea6a380d4d072efb1839df443a04b8b60c8b91dd30add1ca4a96097238d73bab29b0a958322d9a51755d5a5f287" bridge_tx_hash := "0x67db5b0825e8f60b926234e209d54e0336cd94defe6720e7acadf871e0377150" run-smoke-test: @@ -304,6 +326,98 @@ delete-smoke-test: just delete sequencer just delete rollup +evm_contract_address := "0xA58639fB5458e65E4fA917FF951C390292C24A15" +run-smoke-cli: + #!/usr/bin/env bash + MAX_CHECKS=30 + + # Checking starting balance + BALANCE=$(just evm-get-balance {{evm_destination_address}}) + if [ $BALANCE -ne 0 ]; then + echo "Starting balance is not correct" + exit 1 + fi + + echo "Testing Bridge In..." 
+ just init rollup-bridge + CHECKS=0 + EXPECTED_BALANCE=$(echo "{{sequencer_transfer_amount}} * {{sequencer_base_amount}} * {{rollup_multiplier}}" | bc) + while [ $CHECKS -lt $MAX_CHECKS ]; do + CHECKS=$((CHECKS+1)) + BALANCE=$(just evm-get-balance {{evm_destination_address}}) + echo "Check $CHECKS, Balance: $BALANCE, Expected: $EXPECTED_BALANCE" + if [ "$BALANCE" == "$EXPECTED_BALANCE" ]; then + echo "Bridge In success" + break + else + sleep 1 + fi + done + if [ $CHECKS -eq $MAX_CHECKS ]; then + echo "Bridge In failure" + exit 1 + fi + + echo "Testing Bridge Out..." + just evm-send-raw-transaction {{bridge_tx_bytes}} + TRANSFERED_BALANCE=$(echo "1 * {{sequencer_base_amount}} * {{rollup_multiplier}}" | bc) + EXPECTED_BALANCE=$(echo "$EXPECTED_BALANCE - $TRANSFERED_BALANCE" | bc) + CHECKS=0 + while [ $CHECKS -lt $MAX_CHECKS ]; do + CHECKS=$((CHECKS+1)) + BALANCE=$(just evm-get-balance {{evm_destination_address}}) + echo "Check $CHECKS, Balance: $BALANCE, Expected: $EXPECTED_BALANCE" + if [ "$BALANCE" == "$EXPECTED_BALANCE" ]; then + echo "Bridge Out EVM success" + break + else + sleep 1 + fi + done + if [ $CHECKS -eq $MAX_CHECKS ]; then + echo "Bridge Out EVM failure" + exit 1 + fi + + CURRENT_BLOCK_HEX=$(just evm-get-block-by-number latest | jq -r '.number') + CURRENT_BLOCK=$(just hex-to-dec $CURRENT_BLOCK_HEX) + echo {{sequencer_bridge_pkey}} > test_se + + astria-cli bridge collect-withdrawals \ + --rollup-endpoint {{eth_ws_url}} \ + --contract-address {{evm_contract_address}} \ + --from-rollup-height 1 \ + --to-rollup-height $CURRENT_BLOCK \ + --rollup-asset-denom nria \ + --bridge-address {{sequencer_bridge_address}} \ + --output ./withdrawals.json + astria-cli bridge submit-withdrawals \ + --signing-key <(printf "%s" "{{sequencer_bridge_pkey}}") \ + --sequencer-chain-id {{sequencer_chain_id}} \ + --sequencer-url {{sequencer_rpc_url}} \ + --input ./withdrawals.json + + + CHECKS=0 + EXPECTED_BALANCE=$(echo "1 * {{sequencer_base_amount}}" | bc) + while [ $CHECKS -lt $MAX_CHECKS ]; do + CHECKS=$((CHECKS+1)) + BALANCE=$(astria-cli sequencer account balance astria17w0adeg64ky0daxwd2ugyuneellmjgnxl39504 --sequencer-url {{sequencer_rpc_url}} | awk '/nria/{print $(NF-1)}') + echo "Check $CHECKS, Balance: $BALANCE, Expected: $EXPECTED_BALANCE" + if [ "$BALANCE" == "$EXPECTED_BALANCE" ]; then + echo "Bridge Out Sequencer success" + break + else + sleep 1 + fi + done + if [ $CHECKS -gt $MAX_CHECKS ]; then + echo "Bridge Out Sequencer failure" + exit 1 + fi + + exit 0 + ############################################# ## EVM Curl Command Helper Functions ## ############################################# diff --git a/crates/astria-cli/Cargo.toml b/crates/astria-cli/Cargo.toml index f9717fde75..c5ed24f76c 100644 --- a/crates/astria-cli/Cargo.toml +++ b/crates/astria-cli/Cargo.toml @@ -14,17 +14,26 @@ name = "astria-cli" [dependencies] color-eyre = "0.6" -astria-core = { path = "../astria-core" } +astria-bridge-contracts = { path = "../astria-bridge-contracts" } +astria-core = { path = "../astria-core", features = ["serde"] } clap = { workspace = true, features = ["derive", "env"] } +ethers = { workspace = true, features = ["ws"] } hex = { workspace = true } +ibc-types = { workspace = true } rand = { workspace = true } serde = { workspace = true, features = ["derive"] } serde_yaml = "0.9.25" sha2 = { workspace = true } -tokio = { workspace = true, features = ["rt", "macros"] } +tendermint = { workspace = true } +tokio = { workspace = true, features = ["rt", "macros", "signal"] } tracing = { workspace = 
true } which = { workspace = true } +humantime.workspace = true +tryhard.workspace = true +serde_json.workspace = true +futures.workspace = true +tracing-subscriber = "0.3.18" [dependencies.astria-sequencer-client] package = "astria-sequencer-client" diff --git a/crates/astria-cli/src/cli/bridge.rs b/crates/astria-cli/src/cli/bridge.rs new file mode 100644 index 0000000000..efd0eccd25 --- /dev/null +++ b/crates/astria-cli/src/cli/bridge.rs @@ -0,0 +1,22 @@ +use clap::Subcommand; +use color_eyre::eyre; + +/// Interact with a Sequencer node +// allow: these are one-shot variants. the size doesn't matter as they are +// passed around only once. +#[allow(clippy::large_enum_variant)] +#[derive(Debug, Subcommand)] +pub(crate) enum Command { + /// Commands for interacting with Sequencer accounts + CollectWithdrawals(crate::commands::bridge::collect::WithdrawalEvents), + SubmitWithdrawals(crate::commands::bridge::submit::WithdrawalEvents), +} + +impl Command { + pub(crate) async fn run(self) -> eyre::Result<()> { + match self { + Command::CollectWithdrawals(args) => args.run().await, + Command::SubmitWithdrawals(args) => args.run().await, + } + } +} diff --git a/crates/astria-cli/src/cli/mod.rs b/crates/astria-cli/src/cli/mod.rs index f4d07e4878..83711795c9 100644 --- a/crates/astria-cli/src/cli/mod.rs +++ b/crates/astria-cli/src/cli/mod.rs @@ -1,3 +1,4 @@ +pub(crate) mod bridge; pub(crate) mod sequencer; use clap::{ @@ -16,7 +17,7 @@ const DEFAULT_SEQUENCER_CHAIN_ID: &str = "astria-dusk-7"; #[command(name = "astria-cli", version)] pub struct Cli { #[command(subcommand)] - pub command: Option, + pub(crate) command: Option, } impl Cli { @@ -33,7 +34,11 @@ impl Cli { /// Commands that can be run #[derive(Debug, Subcommand)] -pub enum Command { +pub(crate) enum Command { + Bridge { + #[command(subcommand)] + command: bridge::Command, + }, Sequencer { #[command(subcommand)] command: SequencerCommand, diff --git a/crates/astria-cli/src/cli/sequencer.rs b/crates/astria-cli/src/cli/sequencer.rs index 68faa04db3..783282aedd 100644 --- a/crates/astria-cli/src/cli/sequencer.rs +++ b/crates/astria-cli/src/cli/sequencer.rs @@ -7,7 +7,7 @@ use clap::{ /// Interact with a Sequencer node #[derive(Debug, Subcommand)] -pub enum Command { +pub(crate) enum Command { /// Commands for interacting with Sequencer accounts Account { #[command(subcommand)] @@ -43,7 +43,7 @@ pub enum Command { } #[derive(Debug, Subcommand)] -pub enum AccountCommand { +pub(crate) enum AccountCommand { /// Create a new Sequencer account Create, Balance(BasicAccountArgs), @@ -51,19 +51,19 @@ pub enum AccountCommand { } #[derive(Debug, Subcommand)] -pub enum AddressCommand { +pub(crate) enum AddressCommand { /// Construct a bech32m Sequencer address given a public key Bech32m(Bech32mAddressArgs), } #[derive(Debug, Subcommand)] -pub enum BalanceCommand { +pub(crate) enum BalanceCommand { /// Get the balance of a Sequencer account Get(BasicAccountArgs), } #[derive(Debug, Subcommand)] -pub enum SudoCommand { +pub(crate) enum SudoCommand { IbcRelayer { #[command(subcommand)] command: IbcRelayerChangeCommand, @@ -77,7 +77,7 @@ pub enum SudoCommand { } #[derive(Debug, Subcommand)] -pub enum IbcRelayerChangeCommand { +pub(crate) enum IbcRelayerChangeCommand { /// Add IBC Relayer Add(IbcRelayerChangeArgs), /// Remove IBC Relayer @@ -85,7 +85,7 @@ pub enum IbcRelayerChangeCommand { } #[derive(Debug, Subcommand)] -pub enum FeeAssetChangeCommand { +pub(crate) enum FeeAssetChangeCommand { /// Add Fee Asset Add(FeeAssetChangeArgs), /// Remove Fee Asset @@ 
-93,7 +93,7 @@ pub enum FeeAssetChangeCommand { } #[derive(Args, Debug)] -pub struct BasicAccountArgs { +pub(crate) struct BasicAccountArgs { /// The url of the Sequencer node #[arg( long, @@ -106,7 +106,7 @@ pub struct BasicAccountArgs { } #[derive(Args, Debug)] -pub struct Bech32mAddressArgs { +pub(crate) struct Bech32mAddressArgs { /// The hex formatted byte part of the bech32m address #[arg(long)] pub(crate) bytes: String, @@ -116,7 +116,7 @@ pub struct Bech32mAddressArgs { } #[derive(Args, Debug)] -pub struct TransferArgs { +pub(crate) struct TransferArgs { // The address of the Sequencer account to send amount to pub(crate) to_address: Address, // The amount being sent @@ -145,17 +145,17 @@ pub struct TransferArgs { env = "ROLLUP_SEQUENCER_CHAIN_ID", default_value = crate::cli::DEFAULT_SEQUENCER_CHAIN_ID )] - pub sequencer_chain_id: String, + pub(crate) sequencer_chain_id: String, /// The asset to transer. #[arg(long, default_value = "nria")] - pub asset: asset::Denom, + pub(crate) asset: asset::Denom, /// The asset to pay the transfer fees with. #[arg(long, default_value = "nria")] - pub fee_asset: asset::Denom, + pub(crate) fee_asset: asset::Denom, } #[derive(Args, Debug)] -pub struct FeeAssetChangeArgs { +pub(crate) struct FeeAssetChangeArgs { /// The bech32m prefix that will be used for constructing addresses using the private key #[arg(long, default_value = "astria")] pub(crate) prefix: String, @@ -178,14 +178,14 @@ pub struct FeeAssetChangeArgs { env = "ROLLUP_SEQUENCER_CHAIN_ID", default_value = crate::cli::DEFAULT_SEQUENCER_CHAIN_ID )] - pub sequencer_chain_id: String, + pub(crate) sequencer_chain_id: String, /// Asset's denomination string #[arg(long)] pub(crate) asset: asset::Denom, } #[derive(Args, Debug)] -pub struct IbcRelayerChangeArgs { +pub(crate) struct IbcRelayerChangeArgs { /// The prefix to construct a bech32m address given the private key. #[arg(long, default_value = "astria")] pub(crate) prefix: String, @@ -208,14 +208,14 @@ pub struct IbcRelayerChangeArgs { env = "ROLLUP_SEQUENCER_CHAIN_ID", default_value = crate::cli::DEFAULT_SEQUENCER_CHAIN_ID )] - pub sequencer_chain_id: String, + pub(crate) sequencer_chain_id: String, /// The address to add or remove as an IBC relayer #[arg(long)] pub(crate) address: Address, } #[derive(Args, Debug)] -pub struct InitBridgeAccountArgs { +pub(crate) struct InitBridgeAccountArgs { /// The bech32m prefix that will be used for constructing addresses using the private key #[arg(long, default_value = "astria")] pub(crate) prefix: String, @@ -238,21 +238,21 @@ pub struct InitBridgeAccountArgs { env = "ROLLUP_SEQUENCER_CHAIN_ID", default_value = crate::cli::DEFAULT_SEQUENCER_CHAIN_ID )] - pub sequencer_chain_id: String, + pub(crate) sequencer_chain_id: String, /// Plaintext rollup name (to be hashed into a rollup ID) /// to initialize the bridge account with. #[arg(long)] pub(crate) rollup_name: String, /// The asset to transer. #[arg(long, default_value = "nria")] - pub asset: asset::Denom, + pub(crate) asset: asset::Denom, /// The asset to pay the transfer fees with. 
#[arg(long, default_value = "nria")] - pub fee_asset: asset::Denom, + pub(crate) fee_asset: asset::Denom, } #[derive(Args, Debug)] -pub struct BridgeLockArgs { +pub(crate) struct BridgeLockArgs { /// The address of the Sequencer account to lock amount to pub(crate) to_address: Address, /// The amount being locked @@ -282,23 +282,23 @@ pub struct BridgeLockArgs { env = "ROLLUP_SEQUENCER_CHAIN_ID", default_value = crate::cli::DEFAULT_SEQUENCER_CHAIN_ID )] - pub sequencer_chain_id: String, + pub(crate) sequencer_chain_id: String, /// The asset to lock. #[arg(long, default_value = "nria")] - pub asset: asset::Denom, + pub(crate) asset: asset::Denom, /// The asset to pay the transfer fees with. #[arg(long, default_value = "nria")] - pub fee_asset: asset::Denom, + pub(crate) fee_asset: asset::Denom, } #[derive(Debug, Subcommand)] -pub enum BlockHeightCommand { +pub(crate) enum BlockHeightCommand { /// Get the current block height of the Sequencer node Get(BlockHeightGetArgs), } #[derive(Args, Debug)] -pub struct BlockHeightGetArgs { +pub(crate) struct BlockHeightGetArgs { /// The url of the Sequencer node #[arg( long, @@ -312,11 +312,11 @@ pub struct BlockHeightGetArgs { env = "ROLLUP_SEQUENCER_CHAIN_ID", default_value = crate::cli::DEFAULT_SEQUENCER_CHAIN_ID )] - pub sequencer_chain_id: String, + pub(crate) sequencer_chain_id: String, } #[derive(Args, Debug)] -pub struct SudoAddressChangeArgs { +pub(crate) struct SudoAddressChangeArgs { /// The bech32m prefix that will be used for constructing addresses using the private key #[arg(long, default_value = "astria")] pub(crate) prefix: String, @@ -339,14 +339,14 @@ pub struct SudoAddressChangeArgs { env = "ROLLUP_SEQUENCER_CHAIN_ID", default_value = crate::cli::DEFAULT_SEQUENCER_CHAIN_ID )] - pub sequencer_chain_id: String, + pub(crate) sequencer_chain_id: String, /// The new address to take over sudo privileges #[arg(long)] pub(crate) address: Address, } #[derive(Args, Debug)] -pub struct ValidatorUpdateArgs { +pub(crate) struct ValidatorUpdateArgs { /// The url of the Sequencer node #[arg( long, @@ -360,7 +360,7 @@ pub struct ValidatorUpdateArgs { env = "ROLLUP_SEQUENCER_CHAIN_ID", default_value = crate::cli::DEFAULT_SEQUENCER_CHAIN_ID )] - pub sequencer_chain_id: String, + pub(crate) sequencer_chain_id: String, /// The bech32m prefix that will be used for constructing addresses using the private key #[arg(long, default_value = "astria")] pub(crate) prefix: String, diff --git a/crates/astria-cli/src/commands/bridge/collect.rs b/crates/astria-cli/src/commands/bridge/collect.rs new file mode 100644 index 0000000000..46cf8dbcf2 --- /dev/null +++ b/crates/astria-cli/src/commands/bridge/collect.rs @@ -0,0 +1,582 @@ +use std::{ + collections::BTreeMap, + path::{ + Path, + PathBuf, + }, + sync::Arc, + time::Duration, +}; + +use astria_bridge_contracts::i_astria_withdrawer::{ + IAstriaWithdrawer, + Ics20WithdrawalFilter, + SequencerWithdrawalFilter, +}; +use astria_core::{ + bridge::{ + self, + Ics20WithdrawalFromRollupMemo, + }, + primitive::v1::{ + asset::{ + self, + TracePrefixed, + }, + Address, + }, + protocol::transaction::v1alpha1::{ + action::{ + BridgeUnlockAction, + Ics20Withdrawal, + }, + Action, + }, +}; +use clap::Args; +use color_eyre::eyre::{ + self, + bail, + ensure, + eyre, + OptionExt as _, + WrapErr as _, +}; +use ethers::{ + contract::EthEvent, + core::types::Block, + providers::{ + Middleware, + Provider, + ProviderError, + StreamExt as _, + Ws, + }, + types::{ + Filter, + Log, + H256, + }, +}; +use futures::stream::BoxStream; +use 
tracing::{ + error, + info, + instrument, + warn, +}; + +#[derive(Args, Debug)] +pub(crate) struct WithdrawalEvents { + /// The websocket endpoint of a geth compatible rollup. + #[arg(long)] + rollup_endpoint: String, + /// The eth address of the astria bridge contracts. + #[arg(long)] + contract_address: ethers::types::Address, + /// The start rollup height from which blocks will be checked for withdrawal events. + #[arg(long)] + from_rollup_height: u64, + /// The end rollup height from which blocks will be checked for withdrawal events. + /// If not set, then this tool will stream blocks until SIGINT is received. + #[arg(long)] + to_rollup_height: Option, + /// The asset that will be used to pay the Sequencer fees (should the generated + /// actions be submitted to the Sequencer). + #[arg(long, default_value = "nria")] + fee_asset: asset::Denom, + /// The asset denomination of the asset that's withdrawn from the bridge. + #[arg(long)] + rollup_asset_denom: asset::Denom, + /// The bech32-encoded bridge address corresponding to the bridged rollup + /// asset on the sequencer. Should match the bridge address in the geth + /// rollup's bridge configuration for that asset. + #[arg(long)] + bridge_address: Address, + /// The path to write the collected withdrawal events converted + /// to Sequencer actions. + #[arg(long, short)] + output: PathBuf, +} + +impl WithdrawalEvents { + pub(crate) async fn run(self) -> eyre::Result<()> { + let Self { + rollup_endpoint, + contract_address, + from_rollup_height, + to_rollup_height, + fee_asset, + rollup_asset_denom, + bridge_address, + output, + } = self; + + let output = open_output(&output).wrap_err("failed to open output for writing")?; + + let block_provider = connect_to_rollup(&rollup_endpoint) + .await + .wrap_err("failed to connect to rollup")?; + + let asset_withdrawal_divisor = + get_asset_withdrawal_divisor(contract_address, block_provider.clone()) + .await + .wrap_err("failed determining asset withdrawal divisor")?; + + let mut incoming_blocks = + create_stream_of_blocks(&block_provider, from_rollup_height, to_rollup_height) + .await + .wrap_err("failed initializing stream of rollup blocks")?; + + let mut actions_by_rollup_height = ActionsByRollupHeight::new(); + loop { + tokio::select! 
{ + biased; + + _ = tokio::signal::ctrl_c() => { + break; + } + + block = incoming_blocks.next() => { + match block { + Some(Ok(block)) => + if let Err(err) = actions_by_rollup_height.convert_and_insert(BlockToActions { + block_provider: block_provider.clone(), + contract_address, + block, + fee_asset: fee_asset.clone(), + rollup_asset_denom: rollup_asset_denom.clone(), + bridge_address, + asset_withdrawal_divisor, + }).await { + error!( + err = AsRef::::as_ref(&err), + "failed converting contract block to Sequencer actions and storing them; exiting stream"); + break; + } + Some(Err(error)) => { + error!( + error = AsRef::::as_ref(&error), + "encountered an error getting block; exiting stream", + ); + break; + }, + None => { + info!("block subscription ended"); + break; + } + } + } + } + } + + actions_by_rollup_height + .write_to_output(output) + .wrap_err("failed to write actions to file") + } +} + +#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)] +#[serde(transparent)] +pub(crate) struct ActionsByRollupHeight(BTreeMap>); + +impl ActionsByRollupHeight { + fn new() -> Self { + Self(BTreeMap::new()) + } + + pub(crate) fn into_inner(self) -> BTreeMap> { + self.0 + } + + #[instrument(skip_all, err)] + async fn convert_and_insert(&mut self, block_to_actions: BlockToActions) -> eyre::Result<()> { + let rollup_height = block_to_actions + .block + .number + .ok_or_eyre("block was missing a number")? + .as_u64(); + let actions = block_to_actions.run().await; + ensure!( + self.0.insert(rollup_height, actions).is_none(), + "already collected actions for block at rollup height `{rollup_height}`; no 2 blocks \ + with the same height should have been seen", + ); + Ok(()) + } + + #[instrument(skip_all, fields(target = %output.path.display()), err)] + fn write_to_output(self, output: Output) -> eyre::Result<()> { + let writer = std::io::BufWriter::new(output.handle); + serde_json::to_writer(writer, &self.0).wrap_err("failed writing actions to file") + } +} + +/// Constructs a block stream from `start` until `maybe_end`, if `Some`. +/// Constructs an open ended stream from `start` if `None`. +#[instrument(skip_all, fields(start, end = maybe_end), err)] +async fn create_stream_of_blocks( + block_provider: &Provider, + start: u64, + maybe_end: Option, +) -> eyre::Result>>> { + let subscription = if let Some(end) = maybe_end { + futures::stream::iter(start..=end) + .then(move |height| async move { + block_provider + .get_block(height) + .await + .wrap_err("failed to get block")? + .ok_or_else(|| eyre!("block with number {height} missing")) + }) + .boxed() + } else { + let mut block_subscription = block_provider + .subscribe_blocks() + .await + .wrap_err("failed to subscribe to blocks from rollup")? + .boxed(); + + let Some(current_rollup_block) = block_subscription.next().await else { + bail!("failed to get current rollup block from subscription") + }; + + let Some(current_rollup_block_height) = current_rollup_block.number else { + bail!( + "couldn't determine current rollup block height; value was not set on current on \ + most recent block", + ); + }; + + futures::stream::iter(start..current_rollup_block_height.as_u64()) + .then(move |height| async move { + block_provider + .get_block(height) + .await + .wrap_err("failed to get block")? 
+ .ok_or_else(|| eyre!("block with number {height} missing")) + }) + .chain(futures::stream::once( + async move { Ok(current_rollup_block) }, + )) + .chain(block_subscription.map(Ok)) + .boxed() + }; + Ok(subscription) +} + +#[derive(Debug)] +struct Output { + handle: std::fs::File, + path: PathBuf, +} + +#[instrument(skip_all, fields(target = %target.as_ref().display()), err)] +fn open_output>(target: P) -> eyre::Result { + let handle = std::fs::File::options() + .write(true) + .create_new(true) + .open(&target) + .wrap_err("failed to open specified fil}e for writing")?; + Ok(Output { + handle, + path: target.as_ref().to_path_buf(), + }) +} + +#[instrument(err)] +async fn connect_to_rollup(rollup_endpoint: &str) -> eyre::Result>> { + let retry_config = tryhard::RetryFutureConfig::new(10) + .fixed_backoff(Duration::from_secs(2)) + .on_retry( + |attempt, next_delay: Option, error: &ProviderError| { + let wait_duration = next_delay + .map(humantime::format_duration) + .map(tracing::field::display); + warn!( + attempt, + wait_duration, + error = error as &dyn std::error::Error, + "attempt to connect to rollup node failed; retrying after backoff", + ); + std::future::ready(()) + }, + ); + + let provider = tryhard::retry_fn(|| Provider::::connect(rollup_endpoint)) + .with_config(retry_config) + .await + .wrap_err("failed connecting to rollup after several retries; giving up")?; + Ok(Arc::new(provider)) +} + +#[instrument(skip_all, fields(%contract_address), err(Display))] +async fn get_asset_withdrawal_divisor( + contract_address: ethers::types::Address, + provider: Arc>, +) -> eyre::Result { + let contract = IAstriaWithdrawer::new(contract_address, provider); + + let base_chain_asset_precision = contract + .base_chain_asset_precision() + .call() + .await + .wrap_err("failed to get asset withdrawal decimals")?; + + let exponent = 18u32.checked_sub(base_chain_asset_precision).ok_or_eyre( + "failed calculating asset divisor. The base chain asset precision should be <= 18 as \ + that's enforced by the contract, so the construction should work. Did the precision \ + change?", + )?; + Ok(10u128.pow(exponent)) +} + +fn packet_timeout_time() -> eyre::Result { + tendermint::Time::now() + .checked_add(Duration::from_secs(300)) + .ok_or_eyre("adding 5 minutes to current time caused overflow")? 
+ .unix_timestamp_nanos() + .try_into() + .wrap_err("failed to i128 nanoseconds to u64") +} + +struct BlockToActions { + block_provider: Arc>, + contract_address: ethers::types::Address, + block: Block, + fee_asset: asset::Denom, + rollup_asset_denom: asset::Denom, + bridge_address: Address, + asset_withdrawal_divisor: u128, +} + +impl BlockToActions { + async fn run(self) -> Vec { + let mut actions = Vec::new(); + + let Some(block_hash) = self.block.hash else { + warn!("block hash missing; skipping"); + return actions; + }; + + match get_log::( + self.block_provider.clone(), + self.contract_address, + block_hash, + ) + .await + { + Err(error) => warn!( + error = AsRef::::as_ref(&error), + "encountered an error getting logs for sequencer withdrawal events", + ), + Ok(logs) => { + for log in logs { + match self.log_to_sequencer_withdrawal_action(log) { + Ok(action) => actions.push(action), + Err(error) => { + warn!( + error = AsRef::::as_ref(&error), + "failed converting ethers contract log to sequencer withdrawal \ + action; skipping" + ); + } + } + } + } + } + match get_log::( + self.block_provider.clone(), + self.contract_address, + block_hash, + ) + .await + { + Err(error) => warn!( + error = AsRef::::as_ref(&error), + "encountered an error getting logs for ics20 withdrawal events", + ), + Ok(logs) => { + for log in logs { + match self.log_to_ics20_withdrawal_action(log) { + Ok(action) => actions.push(action), + Err(error) => { + warn!( + error = AsRef::::as_ref(&error), + "failed converting ethers contract log to ics20 withdrawal \ + action; skipping" + ); + } + } + } + } + } + actions + } + + fn log_to_ics20_withdrawal_action(&self, log: Log) -> eyre::Result { + LogToIcs20WithdrawalAction { + log, + fee_asset: self.fee_asset.clone(), + rollup_asset_denom: self.rollup_asset_denom.clone(), + asset_withdrawal_divisor: self.asset_withdrawal_divisor, + bridge_address: self.bridge_address, + } + .try_convert() + .wrap_err("failed converting log to ics20 withdrawal action") + } + + fn log_to_sequencer_withdrawal_action(&self, log: Log) -> eyre::Result { + LogToSequencerWithdrawalAction { + log, + bridge_address: self.bridge_address, + fee_asset: self.fee_asset.clone(), + asset_withdrawal_divisor: self.asset_withdrawal_divisor, + } + .try_into_action() + .wrap_err("failed converting log to sequencer withdrawal action") + } +} + +fn action_inputs_from_log(log: Log) -> eyre::Result<(T, u64, [u8; 32])> { + let block_number = log + .block_number + .ok_or_eyre("log did not contain block number")? + .as_u64(); + let transaction_hash = log + .transaction_hash + .ok_or_eyre("log did not contain transaction hash")? + .into(); + + let event = T::decode_log(&log.into()) + .wrap_err_with(|| format!("failed decoding contract log as `{}`", T::name()))?; + Ok((event, block_number, transaction_hash)) +} + +#[derive(Debug)] +struct LogToIcs20WithdrawalAction { + log: Log, + fee_asset: asset::Denom, + rollup_asset_denom: asset::Denom, + asset_withdrawal_divisor: u128, + bridge_address: Address, +} + +impl LogToIcs20WithdrawalAction { + fn try_convert(self) -> eyre::Result { + let Self { + log, + fee_asset, + rollup_asset_denom, + asset_withdrawal_divisor, + bridge_address, + } = self; + + let (event, block_number, transaction_hash) = + action_inputs_from_log::(log) + .wrap_err("failed getting required data from log")?; + + let source_channel = rollup_asset_denom + .as_trace_prefixed() + .and_then(TracePrefixed::last_channel) + .ok_or_eyre("rollup asset denom must have a channel to be withdrawn via IBC")? 
+ .parse() + .wrap_err("failed to parse channel from rollup asset denom")?; + + let memo = Ics20WithdrawalFromRollupMemo { + memo: event.memo, + block_number, + rollup_return_address: event.sender.to_string(), + transaction_hash, + }; + + let action = Ics20Withdrawal { + denom: rollup_asset_denom, + destination_chain_address: event.destination_chain_address, + // note: this is actually a rollup address; we expect failed ics20 withdrawals to be + // returned to the rollup. + // this is only ok for now because addresses on the sequencer and the rollup are both 20 + // bytes, but this won't work otherwise. + return_address: bridge_address, + amount: event + .amount + .as_u128() + .checked_div(asset_withdrawal_divisor) + .ok_or(eyre::eyre!( + "failed to divide amount by asset withdrawal multiplier" + ))?, + memo: serde_json::to_string(&memo).wrap_err("failed to serialize memo to json")?, + fee_asset, + // note: this refers to the timeout on the destination chain, which we are unaware of. + // thus, we set it to the maximum possible value. + timeout_height: ibc_types::core::client::Height::new(u64::MAX, u64::MAX) + .wrap_err("failed to generate timeout height")?, + timeout_time: packet_timeout_time() + .wrap_err("failed to calculate packet timeout time")?, + source_channel, + bridge_address: Some(bridge_address), + }; + Ok(Action::Ics20Withdrawal(action)) + } +} + +#[derive(Debug)] +struct LogToSequencerWithdrawalAction { + log: Log, + fee_asset: asset::Denom, + asset_withdrawal_divisor: u128, + bridge_address: Address, +} + +impl LogToSequencerWithdrawalAction { + fn try_into_action(self) -> eyre::Result { + let Self { + log, + fee_asset, + asset_withdrawal_divisor, + bridge_address, + } = self; + let (event, block_number, transaction_hash) = + action_inputs_from_log::(log) + .wrap_err("failed getting required data from log")?; + + let memo = bridge::UnlockMemo { + block_number, + transaction_hash, + }; + + let action = BridgeUnlockAction { + to: event + .destination_chain_address + .parse() + .wrap_err("failed to parse destination chain address")?, + amount: event + .amount + .as_u128() + .checked_div(asset_withdrawal_divisor) + .ok_or_eyre("failed to divide amount by asset withdrawal multiplier")?, + memo: serde_json::to_string(&memo).wrap_err("failed to serialize memo to json")?, + fee_asset, + bridge_address: Some(bridge_address), + }; + + Ok(Action::BridgeUnlock(action)) + } +} + +async fn get_log( + provider: Arc>, + contract_address: ethers::types::Address, + block_hash: H256, +) -> eyre::Result> { + let event_sig = T::signature(); + let filter = Filter::new() + .at_block_hash(block_hash) + .address(contract_address) + .topic0(event_sig); + + provider + .get_logs(&filter) + .await + .wrap_err("failed to get sequencer withdrawal events") +} diff --git a/crates/astria-cli/src/commands/bridge/mod.rs b/crates/astria-cli/src/commands/bridge/mod.rs new file mode 100644 index 0000000000..9f12fb87d5 --- /dev/null +++ b/crates/astria-cli/src/commands/bridge/mod.rs @@ -0,0 +1,2 @@ +pub(crate) mod collect; +pub(crate) mod submit; diff --git a/crates/astria-cli/src/commands/bridge/submit.rs b/crates/astria-cli/src/commands/bridge/submit.rs new file mode 100644 index 0000000000..7287fee1a4 --- /dev/null +++ b/crates/astria-cli/src/commands/bridge/submit.rs @@ -0,0 +1,155 @@ +use std::path::{ + Path, + PathBuf, +}; + +use astria_core::{ + crypto::SigningKey, + protocol::transaction::v1alpha1::{ + Action, + TransactionParams, + UnsignedTransaction, + }, +}; +use astria_sequencer_client::{ + 
tendermint_rpc::endpoint, + Address, + HttpClient, + SequencerClientExt as _, +}; +use clap::Args; +use color_eyre::eyre::{ + self, + ensure, + WrapErr as _, +}; +use tracing::{ + error, + info, + instrument, + warn, +}; + +#[derive(Args, Debug)] +pub(crate) struct WithdrawalEvents { + #[arg(long, short)] + input: PathBuf, + #[arg(long)] + signing_key: PathBuf, + #[arg(long, default_value = "astria")] + sequencer_address_prefix: String, + #[arg(long)] + sequencer_chain_id: String, + #[arg(long)] + sequencer_url: String, +} + +impl WithdrawalEvents { + pub(crate) async fn run(self) -> eyre::Result<()> { + let signing_key = read_signing_key(&self.signing_key).wrap_err_with(|| { + format!( + "failed reading signing key from file: {}", + self.signing_key.display() + ) + })?; + + let actions_by_rollup_number = read_actions(&self.input).wrap_err_with(|| { + format!("failed reading actions from file: {}", self.input.display()) + })?; + + let sequencer_client = HttpClient::new(&*self.sequencer_url) + .wrap_err("failed constructing http sequencer client")?; + + for (rollup_height, actions) in actions_by_rollup_number.into_inner() { + if actions.is_empty() { + warn!( + rollup_height, + "entry for rollup height exists, but actions were empty; skipping" + ); + continue; + } + match submit_transaction( + sequencer_client.clone(), + &self.sequencer_chain_id, + &self.sequencer_address_prefix, + &signing_key, + actions, + ) + .await + .wrap_err_with(|| { + format!("submitting withdrawal actions for rollup height `{rollup_height}` failed") + }) { + Err(e) => { + error!( + rollup_height, + "failed submitting actions; bailing and not submitting the rest" + ); + return Err(e); + } + Ok(response) => info!( + sequencer_height = %response.height, + rollup_height, + "actions derived from rollup succesfully submitted to sequencer" + ), + } + } + Ok(()) + } +} + +fn read_actions>(path: P) -> eyre::Result { + let s = std::fs::read_to_string(path).wrap_err("failed buffering file contents as string")?; + serde_json::from_str(&s) + .wrap_err("failed deserializing file contents height-to-sequencer-actions serde object") +} + +fn read_signing_key>(path: P) -> eyre::Result { + let hex = + std::fs::read_to_string(&path).wrap_err("failed to read file contents into buffer")?; + let bytes = hex::decode(hex.trim()).wrap_err("failed to decode file contents as hex")?; + SigningKey::try_from(&*bytes).wrap_err("failed to construct signing key hex-decoded bytes") +} + +#[instrument(skip_all, fields(actions = actions.len()), err)] +async fn submit_transaction( + client: HttpClient, + chain_id: &str, + prefix: &str, + signing_key: &SigningKey, + actions: Vec, +) -> eyre::Result { + let from_address = Address::builder() + .array(signing_key.verification_key().address_bytes()) + .prefix(prefix) + .try_build() + .wrap_err("failed constructing a valid from address from the provided prefix")?; + + let nonce_res = client + .get_latest_nonce(from_address) + .await + .wrap_err("failed to get nonce")?; + + let tx = UnsignedTransaction { + params: TransactionParams::builder() + .nonce(nonce_res.nonce) + .chain_id(chain_id) + .build(), + actions, + } + .into_signed(signing_key); + let res = client + .submit_transaction_commit(tx) + .await + .wrap_err("failed to submit transaction")?; + ensure!( + res.check_tx.code.is_ok(), + "failed to check tx: {}", + res.check_tx.log + ); + ensure!( + res.tx_result.code.is_ok(), + "failed to execute tx: {}", + res.tx_result.log + ); + Ok(res) +} diff --git a/crates/astria-cli/src/commands/mod.rs 
b/crates/astria-cli/src/commands/mod.rs index f4f7814e78..436d5c74fc 100644 --- a/crates/astria-cli/src/commands/mod.rs +++ b/crates/astria-cli/src/commands/mod.rs @@ -1,10 +1,10 @@ +pub(crate) mod bridge; mod sequencer; use color_eyre::{ eyre, eyre::eyre, }; -use tracing::instrument; use crate::cli::{ sequencer::{ @@ -34,10 +34,12 @@ use crate::cli::{ /// # Panics /// /// * If the command is not recognized -#[instrument] pub async fn run(cli: Cli) -> eyre::Result<()> { if let Some(command) = cli.command { match command { + Command::Bridge { + command, + } => command.run().await?, Command::Sequencer { command, } => match command { diff --git a/crates/astria-cli/src/main.rs b/crates/astria-cli/src/main.rs index 81c257e2ad..c587580795 100644 --- a/crates/astria-cli/src/main.rs +++ b/crates/astria-cli/src/main.rs @@ -4,13 +4,16 @@ use astria_cli::{ cli::Cli, commands, }; -use color_eyre::{ - eyre, - eyre::Context, -}; +use color_eyre::eyre; + +#[tokio::main] +async fn main() -> ExitCode { + tracing_subscriber::fmt() + .pretty() + .with_writer(std::io::stderr) + .init(); -fn main() -> ExitCode { - if let Err(err) = run() { + if let Err(err) = run().await { eprintln!("{err:?}"); return ExitCode::FAILURE; } @@ -18,12 +21,7 @@ fn main() -> ExitCode { ExitCode::SUCCESS } -/// Run our asynchronous command code in a blocking manner -fn run() -> eyre::Result<()> { - let rt = tokio::runtime::Runtime::new().wrap_err("failed to create a new runtime")?; - - rt.block_on(async { - let args = Cli::get_args()?; - commands::run(args).await - }) +async fn run() -> eyre::Result<()> { + let args = Cli::get_args()?; + commands::run(args).await }

From 75ac37a5009f7fbe4105a05c7bda2878bb56ea4e Mon Sep 17 00:00:00 2001
From: Richard Janis Goldschmidt
Date: Mon, 15 Jul 2024 14:03:47 +0200
Subject: [PATCH 17/24] feat(cli, bridge-withdrawer)!: share code between cli and service (#1270)

## Summary

Moves common code from the bridge-withdrawer and the cli to `astria-bridge-contracts`.

## Background

The `bridge collect-withdrawals` subcommand of `astria-cli` and the `astria-bridge-withdrawer` have common logic that should use the same code. This patch ensures that both use the same codepaths to get contract withdrawal events from the astria bridge contracts running on the rollup before converting them to astria Sequencer actions.

While writing this patch it was noticed that the bridge-withdrawer would break if its rollup asset denom was set incorrectly; this is fixed here as well.

## Changes

- Define a `GetWithdrawalActions::get_for_block_hash` utility for fetching withdrawal events for a given block hash and converting them to astria sequencer actions in one go
- Refactor `astria-bridge-withdrawer` and `astria-cli` in terms of it
- Remove `bridge collect-withdrawals --rollup-asset-denom`
- Add `bridge collect-withdrawals --sequencer-asset-to-withdraw` (to disambiguate between withdrawal kinds)
- Add `bridge collect-withdrawals --ics20-asset-to-withdraw` (to disambiguate between withdrawal kinds)
- Require `ASTRIA_BRIDGE_WITHDRAWER_ROLLUP_ASSET_DENOM` to always contain a trace-prefixed ics20 asset denomination (a string of form `[//.../]base`), because withdrawing `ibc/` denoms does not make sense for the bridge.

## Testing

Smoke tests still run and pass. The overly heavy unit tests (which weren't really unit tests) in `astria-bridge-withdrawer` were removed in favor of future blackbox tests (pending in https://github.com/astriaorg/astria/issues/1227, which would have removed these eventually).
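## Usage sketch

For illustration only (not part of this diff): a minimal sketch of how the shared helper introduced below is intended to be wired up by a caller such as the withdrawer or the CLI. The function name `withdrawal_actions_for_block` and all of its arguments are placeholders, and error handling is simplified to a boxed error instead of the `eyre` types used elsewhere in the workspace.

```rust
use std::sync::Arc;

use astria_bridge_contracts::GetWithdrawalActionsBuilder;
use astria_core::{
    primitive::v1::{asset, Address},
    protocol::transaction::v1alpha1::Action,
};
use ethers::{
    providers::{Provider, Ws},
    types::H256,
};

// Hypothetical caller: collect all withdrawal events emitted in `block_hash` and
// convert them to Sequencer actions, configured here for sequencer-native
// withdrawals only (no ics20 asset configured).
async fn withdrawal_actions_for_block(
    provider: Arc<Provider<Ws>>,
    contract_address: ethers::types::Address,
    bridge_address: Address,
    fee_asset: asset::Denom,
    block_hash: H256,
) -> Result<Vec<Action>, Box<dyn std::error::Error>> {
    let actions = GetWithdrawalActionsBuilder::new()
        // attaching the provider moves the builder from `NoProvider` to `WithProvider<P>`
        .provider(provider)
        .contract_address(contract_address)
        .bridge_address(bridge_address)
        .fee_asset(fee_asset.clone())
        .sequencer_asset_to_withdraw(fee_asset)
        // queries `BASE_CHAIN_ASSET_PRECISION` on the contract to derive the divisor
        .try_build()
        .await?
        // fetches the configured withdrawal event kinds for the block and converts them
        .get_for_block_hash(block_hash)
        .await?;
    Ok(actions)
}
```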
## Breaking Changelist Neither of these are breaking in practice because the CLI `bridge` subcommand did not see a release yet, and `bridge-withdrawer` with `ibc/` rollup asset denoms was never used (and would likely not work) - removed an argument on the CLI - enforces a stricter config on the bridge-withdrawer --- Cargo.lock | 6 + charts/deploy.just | 2 +- crates/astria-bridge-contracts/Cargo.toml | 7 + crates/astria-bridge-contracts/src/lib.rs | 650 +++++++++++++ .../src/bridge_withdrawer/ethereum/convert.rs | 326 ------- .../src/bridge_withdrawer/ethereum/mod.rs | 4 - .../bridge_withdrawer/ethereum/test_utils.rs | 205 ---- .../src/bridge_withdrawer/ethereum/watcher.rs | 890 ++---------------- .../src/bridge_withdrawer/mod.rs | 5 +- crates/astria-bridge-withdrawer/src/config.rs | 2 +- .../astria-cli/src/commands/bridge/collect.rs | 393 ++------ 11 files changed, 809 insertions(+), 1681 deletions(-) delete mode 100644 crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/convert.rs delete mode 100644 crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/test_utils.rs diff --git a/Cargo.lock b/Cargo.lock index 94e5939f2e..71b83ee9c2 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -488,7 +488,13 @@ dependencies = [ name = "astria-bridge-contracts" version = "0.1.0" dependencies = [ + "astria-core", "ethers", + "futures", + "ibc-types", + "serde_json", + "tendermint", + "thiserror", ] [[package]] diff --git a/charts/deploy.just b/charts/deploy.just index 520270f195..846bba7e29 100644 --- a/charts/deploy.just +++ b/charts/deploy.just @@ -388,7 +388,7 @@ run-smoke-cli: --contract-address {{evm_contract_address}} \ --from-rollup-height 1 \ --to-rollup-height $CURRENT_BLOCK \ - --rollup-asset-denom nria \ + --sequencer-asset-to-withdraw nria \ --bridge-address {{sequencer_bridge_address}} \ --output ./withdrawals.json astria-cli bridge submit-withdrawals \ diff --git a/crates/astria-bridge-contracts/Cargo.toml b/crates/astria-bridge-contracts/Cargo.toml index 72de607667..a96bba7088 100644 --- a/crates/astria-bridge-contracts/Cargo.toml +++ b/crates/astria-bridge-contracts/Cargo.toml @@ -6,4 +6,11 @@ edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] +astria-core = { path = "../astria-core", features = ["serde"] } + ethers = { workspace = true } +futures = { workspace = true } +ibc-types = { workspace = true } +serde_json = { workspace = true } +tendermint = { workspace = true } +thiserror = { workspace = true } diff --git a/crates/astria-bridge-contracts/src/lib.rs b/crates/astria-bridge-contracts/src/lib.rs index 7ca33aa159..7266c93eb8 100644 --- a/crates/astria-bridge-contracts/src/lib.rs +++ b/crates/astria-bridge-contracts/src/lib.rs @@ -1,4 +1,654 @@ #[rustfmt::skip] #[allow(clippy::pedantic)] mod generated; +use std::{ + borrow::Cow, + sync::Arc, +}; + +use astria_core::{ + primitive::v1::{ + asset, + Address, + AddressError, + }, + protocol::transaction::v1alpha1::{ + action::Ics20Withdrawal, + Action, + }, +}; +use astria_withdrawer::{ + Ics20WithdrawalFilter, + SequencerWithdrawalFilter, +}; +use ethers::{ + contract::EthEvent, + providers::Middleware, + types::{ + Filter, + Log, + H256, + }, +}; pub use generated::*; + +#[derive(Debug, thiserror::Error)] +#[error(transparent)] +pub struct BuildError(BuildErrorKind); + +impl BuildError { + #[must_use] + fn bad_divisor(base_chain_asset_precision: u32) -> Self { + Self(BuildErrorKind::BadDivisor { + base_chain_asset_precision, + }) + } + + 
#[must_use] + fn call_base_chain_asset_precision< + T: Into>, + >( + source: T, + ) -> Self { + Self(BuildErrorKind::CallBaseChainAssetPrecision { + source: source.into(), + }) + } + + #[must_use] + pub fn no_withdraws_configured() -> Self { + Self(BuildErrorKind::NoWithdrawsConfigured) + } + + #[must_use] + fn not_set(field: &'static str) -> Self { + Self(BuildErrorKind::NotSet { + field, + }) + } + + #[must_use] + fn ics20_asset_without_channel() -> Self { + Self(BuildErrorKind::Ics20AssetWithoutChannel) + } + + #[must_use] + fn parse_ics20_asset_source_channel(source: ibc_types::IdentifierError) -> Self { + Self(BuildErrorKind::ParseIcs20AssetSourceChannel { + source, + }) + } +} + +#[derive(Debug, thiserror::Error)] +enum BuildErrorKind { + #[error( + "failed calculating asset divisor. The base chain asset precision should be <= 18 as \ + that's enforced by the contract, so the construction should work. Did the precision \ + change? Precision returned by contract: `{base_chain_asset_precision}`" + )] + BadDivisor { base_chain_asset_precision: u32 }, + #[error("required option `{field}` not set")] + NotSet { field: &'static str }, + #[error( + "getting withdraws actions must be configured for one of sequencer or ics20 (or both); \ + neither was set" + )] + NoWithdrawsConfigured, + #[error("failed to call the `BASE_CHAIN_ASSET_PRECISION` of the provided contract")] + CallBaseChainAssetPrecision { + source: Box, + }, + #[error("ics20 asset must have a channel to be withdrawn via IBC")] + Ics20AssetWithoutChannel, + #[error("could not parse ics20 asset channel as channel ID")] + ParseIcs20AssetSourceChannel { source: ibc_types::IdentifierError }, +} + +pub struct NoProvider; +pub struct WithProvider

<P>(Arc<P>); + +pub struct GetWithdrawalActionsBuilder<TProvider = NoProvider> { + provider: TProvider, + contract_address: Option<ethers::types::Address>, + bridge_address: Option<Address>, + fee_asset: Option<asset::Denom>, + sequencer_asset_to_withdraw: Option<asset::Denom>, + ics20_asset_to_withdraw: Option<asset::TracePrefixed>, +} + +impl Default for GetWithdrawalActionsBuilder { + fn default() -> Self { + Self::new() + } +} + +impl GetWithdrawalActionsBuilder { + #[must_use] + pub fn new() -> Self { + Self { + provider: NoProvider, + contract_address: None, + bridge_address: None, + fee_asset: None, + sequencer_asset_to_withdraw: None, + ics20_asset_to_withdraw: None, + } + } +} + +impl<TProvider> GetWithdrawalActionsBuilder<TProvider>

{ + #[must_use] + pub fn provider<P>(self, provider: Arc<P>) -> GetWithdrawalActionsBuilder<WithProvider<P>> { + let Self { + contract_address, + bridge_address, + fee_asset, + sequencer_asset_to_withdraw, + ics20_asset_to_withdraw, + .. + } = self; + GetWithdrawalActionsBuilder { + provider: WithProvider(provider), + contract_address, + bridge_address, + fee_asset, + sequencer_asset_to_withdraw, + ics20_asset_to_withdraw, + } + } + + #[must_use] + pub fn contract_address(self, contract_address: ethers::types::Address) -> Self { + Self { + contract_address: Some(contract_address), + ..self + } + } + + #[must_use] + pub fn bridge_address(self, bridge_address: Address) -> Self { + Self { + bridge_address: Some(bridge_address), + ..self + } + } + + #[must_use] + pub fn fee_asset(self, fee_asset: asset::Denom) -> Self { + Self { + fee_asset: Some(fee_asset), + ..self + } + } + + #[must_use] + pub fn sequencer_asset_to_withdraw(self, sequencer_asset_to_withdraw: asset::Denom) -> Self { + self.set_sequencer_asset_to_withdraw(Some(sequencer_asset_to_withdraw)) + } + + #[must_use] + pub fn set_sequencer_asset_to_withdraw( + self, + sequencer_asset_to_withdraw: Option<asset::Denom>, + ) -> Self { + Self { + sequencer_asset_to_withdraw, + ..self + } + } + + #[must_use] + pub fn ics20_asset_to_withdraw(self, ics20_asset_to_withdraw: asset::TracePrefixed) -> Self { + self.set_ics20_asset_to_withdraw(Some(ics20_asset_to_withdraw)) + } + + #[must_use] + pub fn set_ics20_asset_to_withdraw( + self, + ics20_asset_to_withdraw: Option<asset::TracePrefixed>, + ) -> Self { + Self { + ics20_asset_to_withdraw, + ..self + } + } +} + +impl<P>

GetWithdrawalActionsBuilder> +where + P: Middleware + 'static, + P::Error: std::error::Error + 'static, +{ + /// Constructs a [`GetWithdrawalActions`] fetcher. + /// + /// # Errors + /// Returns an error in one of these cases: + /// + `contract_address` is not set + /// + `bridge_address` is not set + /// + `fee_asset` is not set + /// + neither `source_asset_to_withdraw` nor `ics20_asset_to_withdraw` are set + /// + `ics20_asset_to_withdraw` is set, but does not contain a ics20 channel + /// + the `BASE_CHAIN_ASSET_PRECISION` call on the provided `contract_address` cannot be + /// executed + /// + the base chain asset precision retrieved from the contract at `contract_address` is + /// greater than 18 (this is currently hardcoded in the smart contract). + pub async fn try_build(self) -> Result, BuildError> { + let Self { + provider: WithProvider(provider), + contract_address, + bridge_address, + fee_asset, + sequencer_asset_to_withdraw, + ics20_asset_to_withdraw, + } = self; + + let Some(contract_address) = contract_address else { + return Err(BuildError::not_set("contract_address")); + }; + let Some(bridge_address) = bridge_address else { + return Err(BuildError::not_set("bridge_address")); + }; + let Some(fee_asset) = fee_asset else { + return Err(BuildError::not_set("fee_asset")); + }; + + if sequencer_asset_to_withdraw.is_none() && ics20_asset_to_withdraw.is_none() { + return Err(BuildError::no_withdraws_configured()); + } + + let mut ics20_source_channel = None; + if let Some(ics20_asset_to_withdraw) = &ics20_asset_to_withdraw { + ics20_source_channel.replace( + ics20_asset_to_withdraw + .last_channel() + .ok_or(BuildError::ics20_asset_without_channel())? + .parse() + .map_err(BuildError::parse_ics20_asset_source_channel)?, + ); + }; + + let contract = + i_astria_withdrawer::IAstriaWithdrawer::new(contract_address, provider.clone()); + + let base_chain_asset_precision = contract + .base_chain_asset_precision() + .call() + .await + .map_err(BuildError::call_base_chain_asset_precision)?; + + let exponent = 18u32 + .checked_sub(base_chain_asset_precision) + .ok_or_else(|| BuildError::bad_divisor(base_chain_asset_precision))?; + + let asset_withdrawal_divisor = 10u128.pow(exponent); + + Ok(GetWithdrawalActions { + provider, + contract_address, + asset_withdrawal_divisor, + bridge_address, + fee_asset, + sequencer_asset_to_withdraw, + ics20_asset_to_withdraw, + ics20_source_channel, + }) + } +} + +pub struct GetWithdrawalActions

<P> { + provider: Arc<P>, + contract_address: ethers::types::Address, + asset_withdrawal_divisor: u128, + bridge_address: Address, + fee_asset: asset::Denom, + sequencer_asset_to_withdraw: Option<asset::Denom>, + ics20_asset_to_withdraw: Option<asset::TracePrefixed>, + ics20_source_channel: Option<ibc_types::core::channel::ChannelId>, +} + +impl<P> GetWithdrawalActions<P>

+where + P: Middleware, + P::Error: std::error::Error + 'static, +{ + fn configured_for_sequencer_withdrawals(&self) -> bool { + self.sequencer_asset_to_withdraw.is_some() + } + + fn configured_for_ics20_withdrawals(&self) -> bool { + self.ics20_asset_to_withdraw.is_some() + } + + /// Gets all withdrawal events for `block_hash` and converts them to astria sequencer actions. + /// + /// # Errors + /// Returns an error in one of the following cases: + /// + fetching logs for either ics20 or sequencer withdrawal events fails + /// + converting either event to Sequencer actions fails due to the events being malformed. + pub async fn get_for_block_hash( + &self, + block_hash: H256, + ) -> Result, GetWithdrawalActionsError> { + use futures::FutureExt as _; + let get_ics20_logs = if self.configured_for_ics20_withdrawals() { + get_logs::(&self.provider, self.contract_address, block_hash) + .boxed() + } else { + futures::future::ready(Ok(vec![])).boxed() + }; + let get_sequencer_logs = if self.configured_for_sequencer_withdrawals() { + get_logs::( + &self.provider, + self.contract_address, + block_hash, + ) + .boxed() + } else { + futures::future::ready(Ok(vec![])).boxed() + }; + let (ics20_logs, sequencer_logs) = + futures::future::try_join(get_ics20_logs, get_sequencer_logs) + .await + .map_err(GetWithdrawalActionsError::get_logs)?; + + // XXX: The calls to `log_to_*_action` rely on only be called if `GetWithdrawalActions` + // is configured for either ics20 or sequencer withdrawals (or both). They would panic + // otherwise. + ics20_logs + .into_iter() + .map(|log| self.log_to_ics20_withdrawal_action(log)) + .chain( + sequencer_logs + .into_iter() + .map(|log| self.log_to_sequencer_withdrawal_action(log)), + ) + .collect() + } + + fn log_to_ics20_withdrawal_action( + &self, + log: Log, + ) -> Result { + let block_number = log + .block_number + .ok_or_else(|| GetWithdrawalActionsError::log_without_block_number(&log))? + .as_u64(); + + let transaction_hash = log + .transaction_hash + .ok_or_else(|| GetWithdrawalActionsError::log_without_transaction_hash(&log))? + .into(); + + let event = decode_log::(log) + .map_err(GetWithdrawalActionsError::decode_log)?; + + let (denom, source_channel) = ( + self.ics20_asset_to_withdraw + .clone() + .expect("must be set if this method is entered") + .into(), + self.ics20_source_channel + .clone() + .expect("must be set if this method is entered"), + ); + + let memo = serde_json::to_string(&astria_core::bridge::Ics20WithdrawalFromRollupMemo { + memo: event.memo.clone(), + block_number, + rollup_return_address: event.sender.to_string(), + transaction_hash, + }) + .map_err(|source| { + GetWithdrawalActionsError::encode_memo("Ics20WithdrawalFromRollupMemo", source) + })?; + + let amount = calculate_amount(&event, self.asset_withdrawal_divisor) + .map_err(GetWithdrawalActionsError::calculate_withdrawal_amount)?; + + let action = Ics20Withdrawal { + denom, + destination_chain_address: event.destination_chain_address, + return_address: self.bridge_address, + amount, + memo, + fee_asset: self.fee_asset.clone(), + // note: this refers to the timeout on the destination chain, which we are unaware of. + // thus, we set it to the maximum possible value. 
+ timeout_height: max_timeout_height(), + timeout_time: timeout_in_5_min(), + source_channel, + bridge_address: Some(self.bridge_address), + }; + Ok(Action::Ics20Withdrawal(action)) + } + + fn log_to_sequencer_withdrawal_action( + &self, + log: Log, + ) -> Result { + let block_number = log + .block_number + .ok_or_else(|| GetWithdrawalActionsError::log_without_block_number(&log))? + .as_u64(); + + let transaction_hash = log + .transaction_hash + .ok_or_else(|| GetWithdrawalActionsError::log_without_transaction_hash(&log))? + .into(); + + let event = decode_log::(log) + .map_err(GetWithdrawalActionsError::decode_log)?; + + let memo = serde_json::to_string(&astria_core::bridge::UnlockMemo { + block_number, + transaction_hash, + }) + .map_err(|err| GetWithdrawalActionsError::encode_memo("bridge::UnlockMemo", err))?; + + let amount = calculate_amount(&event, self.asset_withdrawal_divisor) + .map_err(GetWithdrawalActionsError::calculate_withdrawal_amount)?; + + let to = parse_destination_chain_as_address(&event) + .map_err(GetWithdrawalActionsError::destination_chain_as_address)?; + + let action = astria_core::protocol::transaction::v1alpha1::action::BridgeUnlockAction { + to, + amount, + memo, + fee_asset: self.fee_asset.clone(), + bridge_address: Some(self.bridge_address), + }; + + Ok(Action::BridgeUnlock(action)) + } +} + +#[derive(Debug, thiserror::Error)] +#[error(transparent)] +pub struct GetWithdrawalActionsError(GetWithdrawalActionsErrorKind); + +impl GetWithdrawalActionsError { + fn calculate_withdrawal_amount(source: CalculateWithdrawalAmountError) -> Self { + Self(GetWithdrawalActionsErrorKind::CalculateWithdrawalAmount( + source, + )) + } + + fn decode_log(source: DecodeLogError) -> Self { + Self(GetWithdrawalActionsErrorKind::DecodeLog(source)) + } + + fn destination_chain_as_address(source: DestinationChainAsAddressError) -> Self { + Self(GetWithdrawalActionsErrorKind::DestinationChainAsAddress( + source, + )) + } + + fn encode_memo(which: &'static str, source: serde_json::Error) -> Self { + Self(GetWithdrawalActionsErrorKind::EncodeMemo { + which, + source, + }) + } + + fn get_logs(source: GetLogsError) -> Self { + Self(GetWithdrawalActionsErrorKind::GetLogs(source)) + } + + // XXX: Somehow identify the log? + fn log_without_block_number(_log: &Log) -> Self { + Self(GetWithdrawalActionsErrorKind::LogWithoutBlockNumber) + } + + // XXX: Somehow identify the log? + fn log_without_transaction_hash(_log: &Log) -> Self { + Self(GetWithdrawalActionsErrorKind::LogWithoutTransactionHash) + } +} + +#[derive(Debug, thiserror::Error)] +enum GetWithdrawalActionsErrorKind { + #[error(transparent)] + DecodeLog(DecodeLogError), + #[error(transparent)] + DestinationChainAsAddress(DestinationChainAsAddressError), + #[error("failed encoding memo `{which}`")] + EncodeMemo { + which: &'static str, + source: serde_json::Error, + }, + #[error(transparent)] + GetLogs(GetLogsError), + #[error("log did not contain a block number")] + LogWithoutBlockNumber, + #[error("log did not contain a transaction hash")] + LogWithoutTransactionHash, + #[error(transparent)] + CalculateWithdrawalAmount(CalculateWithdrawalAmountError), +} + +#[derive(Debug, thiserror::Error)] +#[error("failed decoding a log into an Astria bridge contract event `{event_name}`")] +struct DecodeLogError { + event_name: Cow<'static, str>, + // use a trait object instead of the error to not force the middleware + // type parameter into the error. 
+ source: Box, +} + +fn decode_log(log: Log) -> Result { + T::decode_log(&log.into()).map_err(|err| DecodeLogError { + event_name: T::name(), + source: err.into(), + }) +} + +#[derive(Debug, thiserror::Error)] +#[error("failed getting the eth logs for event `{event_name}`")] +struct GetLogsError { + event_name: Cow<'static, str>, + // use a trait object instead of the error to not force the middleware + // type parameter into the error. + source: Box, +} + +async fn get_logs( + provider: &M, + contract_address: ethers::types::Address, + block_hash: H256, +) -> Result, GetLogsError> +where + M: Middleware, + M::Error: std::error::Error + 'static, +{ + let event_sig = T::signature(); + let filter = Filter::new() + .at_block_hash(block_hash) + .address(contract_address) + .topic0(event_sig); + + provider + .get_logs(&filter) + .await + .map_err(|err| GetLogsError { + event_name: T::name(), + source: err.into(), + }) +} + +trait GetAmount { + fn get_amount(&self) -> u128; +} + +impl GetAmount for Ics20WithdrawalFilter { + fn get_amount(&self) -> u128 { + self.amount.as_u128() + } +} + +impl GetAmount for SequencerWithdrawalFilter { + fn get_amount(&self) -> u128 { + self.amount.as_u128() + } +} + +#[derive(Debug, thiserror::Error)] +#[error( + "failed calculate amount to withdraw because mount in event could not be divided by the asset \ + withdrawal divisor; amount: `{amount}`, divisor: `{divisor}`" +)] +struct CalculateWithdrawalAmountError { + amount: u128, + divisor: u128, +} + +fn calculate_amount( + event: &T, + asset_withdrawal_divisor: u128, +) -> Result { + event + .get_amount() + .checked_div(asset_withdrawal_divisor) + .ok_or_else(|| CalculateWithdrawalAmountError { + amount: event.get_amount(), + divisor: asset_withdrawal_divisor, + }) +} + +fn max_timeout_height() -> ibc_types::core::client::Height { + ibc_types::core::client::Height::new(u64::MAX, u64::MAX) + .expect("non-zero arguments should never fail") +} + +#[derive(Debug, thiserror::Error)] +#[error("failed to parse destination chain address as Astria address for a bridge unlock")] +struct DestinationChainAsAddressError { + #[from] + source: AddressError, +} + +fn parse_destination_chain_as_address( + event: &SequencerWithdrawalFilter, +) -> Result { + event.destination_chain_address.parse().map_err(Into::into) +} + +fn timeout_in_5_min() -> u64 { + use std::time::Duration; + tendermint::Time::now() + .checked_add(Duration::from_secs(300)) + .expect("adding 5 minutes to the current time should never fail") + .unix_timestamp_nanos() + .try_into() + .expect("timestamp must be positive, so this conversion would only fail if negative") +} + +#[cfg(test)] +mod tests { + use super::max_timeout_height; + #[test] + fn max_timeout_height_does_not_panic() { + max_timeout_height(); + } +} diff --git a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/convert.rs b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/convert.rs deleted file mode 100644 index dab2828183..0000000000 --- a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/convert.rs +++ /dev/null @@ -1,326 +0,0 @@ -use std::time::Duration; - -use astria_bridge_contracts::i_astria_withdrawer::{ - Ics20WithdrawalFilter, - SequencerWithdrawalFilter, -}; -use astria_core::{ - bridge::{ - self, - Ics20WithdrawalFromRollupMemo, - }, - primitive::v1::{ - asset::{ - self, - denom::TracePrefixed, - }, - Address, - }, - protocol::transaction::v1alpha1::{ - action::{ - BridgeUnlockAction, - Ics20Withdrawal, - }, - Action, - }, -}; -use 
astria_eyre::eyre::{ - self, - OptionExt, - WrapErr as _, -}; -use ethers::types::{ - TxHash, - U64, -}; -use ibc_types::core::client::Height as IbcHeight; - -#[derive(Debug, PartialEq, Eq)] -pub(crate) enum WithdrawalEvent { - Sequencer(SequencerWithdrawalFilter), - Ics20(Ics20WithdrawalFilter), -} - -#[derive(Debug, PartialEq, Eq)] -pub(crate) struct EventWithMetadata { - pub(crate) event: WithdrawalEvent, - /// The block in which the log was emitted - pub(crate) block_number: U64, - /// The transaction hash in which the log was emitted - pub(crate) transaction_hash: TxHash, -} - -pub(crate) fn event_to_action( - event_with_metadata: EventWithMetadata, - fee_asset: asset::Denom, - rollup_asset_denom: asset::Denom, - asset_withdrawal_divisor: u128, - bridge_address: Address, -) -> eyre::Result { - let action = match event_with_metadata.event { - WithdrawalEvent::Sequencer(event) => event_to_bridge_unlock( - &event, - event_with_metadata.block_number, - event_with_metadata.transaction_hash, - fee_asset, - asset_withdrawal_divisor, - bridge_address, - ) - .wrap_err("failed to convert sequencer withdrawal event to action")?, - WithdrawalEvent::Ics20(event) => event_to_ics20_withdrawal( - event, - event_with_metadata.block_number, - event_with_metadata.transaction_hash, - fee_asset, - rollup_asset_denom, - asset_withdrawal_divisor, - bridge_address, - ) - .wrap_err("failed to convert ics20 withdrawal event to action")?, - }; - Ok(action) -} - -fn event_to_bridge_unlock( - event: &SequencerWithdrawalFilter, - block_number: U64, - transaction_hash: TxHash, - fee_asset: asset::Denom, - asset_withdrawal_divisor: u128, - bridge_address: Address, -) -> eyre::Result { - let memo = bridge::UnlockMemo { - // XXX: The documentation mentions that the ethers U64 type will panic if it cannot be - // converted to u64. However, this is part of a catch-all documentation that does not apply - // to U64. - block_number: block_number.as_u64(), - transaction_hash: transaction_hash.into(), - }; - let action = BridgeUnlockAction { - to: event - .destination_chain_address - .parse() - .wrap_err("failed to parse destination chain address")?, - amount: event - .amount - .as_u128() - .checked_div(asset_withdrawal_divisor) - .ok_or(eyre::eyre!( - "failed to divide amount by asset withdrawal multiplier" - ))?, - memo: serde_json::to_string(&memo).wrap_err("failed to serialize memo to json")?, - fee_asset, - bridge_address: Some(bridge_address), - }; - - Ok(Action::BridgeUnlock(action)) -} - -// FIXME: Get this to work for now, but replace this with a builder. 
-#[allow(clippy::too_many_arguments)] -fn event_to_ics20_withdrawal( - event: Ics20WithdrawalFilter, - block_number: U64, - transaction_hash: TxHash, - fee_asset: asset::Denom, - rollup_asset_denom: asset::Denom, - asset_withdrawal_divisor: u128, - bridge_address: Address, -) -> eyre::Result { - // TODO: make this configurable - const ICS20_WITHDRAWAL_TIMEOUT: Duration = Duration::from_secs(300); - - let denom = rollup_asset_denom.clone(); - - let channel = denom - .as_trace_prefixed() - .and_then(TracePrefixed::last_channel) - .ok_or_eyre("denom must have a channel to be withdrawn via IBC")?; - - let memo = Ics20WithdrawalFromRollupMemo { - memo: event.memo, - block_number: block_number.as_u64(), - rollup_return_address: event.sender.to_string(), - transaction_hash: transaction_hash.into(), - }; - - let action = Ics20Withdrawal { - denom: rollup_asset_denom, - destination_chain_address: event.destination_chain_address, - return_address: bridge_address, - amount: event - .amount - .as_u128() - .checked_div(asset_withdrawal_divisor) - .ok_or(eyre::eyre!( - "failed to divide amount by asset withdrawal multiplier" - ))?, - memo: serde_json::to_string(&memo).wrap_err("failed to serialize memo to json")?, - fee_asset, - // note: this refers to the timeout on the destination chain, which we are unaware of. - // thus, we set it to the maximum possible value. - timeout_height: IbcHeight::new(u64::MAX, u64::MAX) - .wrap_err("failed to generate timeout height")?, - timeout_time: calculate_packet_timeout_time(ICS20_WITHDRAWAL_TIMEOUT) - .wrap_err("failed to calculate packet timeout time")?, - source_channel: channel - .parse() - .wrap_err("failed to parse channel from denom")?, - bridge_address: Some(bridge_address), - }; - Ok(Action::Ics20Withdrawal(action)) -} - -fn calculate_packet_timeout_time(timeout_delta: Duration) -> eyre::Result { - tendermint::Time::now() - .checked_add(timeout_delta) - .ok_or_eyre("time must not overflow from now plus 10 minutes")? 
- .unix_timestamp_nanos() - .try_into() - .wrap_err("failed to convert packet timeout i128 to u64") -} - -#[cfg(test)] -mod tests { - use astria_bridge_contracts::i_astria_withdrawer::SequencerWithdrawalFilter; - - use super::*; - - fn default_native_asset() -> asset::Denom { - "nria".parse().unwrap() - } - - #[test] - fn event_to_bridge_unlock() { - let denom = default_native_asset(); - let event_with_meta = EventWithMetadata { - event: WithdrawalEvent::Sequencer(SequencerWithdrawalFilter { - sender: [0u8; 20].into(), - amount: 99.into(), - destination_chain_address: crate::astria_address([1u8; 20]).to_string(), - }), - block_number: 1.into(), - transaction_hash: [2u8; 32].into(), - }; - let bridge_address = crate::astria_address([99u8; 20]); - let action = event_to_action( - event_with_meta, - denom.clone(), - denom.clone(), - 1, - bridge_address, - ) - .unwrap(); - let Action::BridgeUnlock(action) = action else { - panic!("expected BridgeUnlock action, got {action:?}"); - }; - - let expected_action = BridgeUnlockAction { - to: crate::astria_address([1u8; 20]), - amount: 99, - memo: serde_json::to_string(&bridge::UnlockMemo { - block_number: 1, - transaction_hash: [2u8; 32], - }) - .unwrap(), - fee_asset: denom, - bridge_address: Some(bridge_address), - }; - - assert_eq!(action, expected_action); - } - - #[test] - fn event_to_bridge_unlock_divide_value() { - let denom = default_native_asset(); - let event_with_meta = EventWithMetadata { - event: WithdrawalEvent::Sequencer(SequencerWithdrawalFilter { - sender: [0u8; 20].into(), - amount: 990.into(), - destination_chain_address: crate::astria_address([1u8; 20]).to_string(), - }), - block_number: 1.into(), - transaction_hash: [2u8; 32].into(), - }; - let divisor = 10; - let bridge_address = crate::astria_address([99u8; 20]); - let action = event_to_action( - event_with_meta, - denom.clone(), - denom.clone(), - divisor, - bridge_address, - ) - .unwrap(); - let Action::BridgeUnlock(action) = action else { - panic!("expected BridgeUnlock action, got {action:?}"); - }; - - let expected_action = BridgeUnlockAction { - to: crate::astria_address([1u8; 20]), - amount: 99, - memo: serde_json::to_string(&bridge::UnlockMemo { - block_number: 1, - transaction_hash: [2u8; 32], - }) - .unwrap(), - fee_asset: denom, - bridge_address: Some(bridge_address), - }; - - assert_eq!(action, expected_action); - } - - #[test] - fn event_to_ics20_withdrawal() { - let denom = "transfer/channel-0/utia".parse::().unwrap(); - let destination_chain_address = crate::astria_address([1u8; 20]).to_string(); - let event_with_meta = EventWithMetadata { - event: WithdrawalEvent::Ics20(Ics20WithdrawalFilter { - sender: [0u8; 20].into(), - amount: 99.into(), - destination_chain_address: destination_chain_address.clone(), - memo: "hello".to_string(), - }), - block_number: 1.into(), - transaction_hash: [2u8; 32].into(), - }; - - let bridge_address = crate::astria_address([99u8; 20]); - let action = event_to_action( - event_with_meta, - denom.clone(), - denom.clone(), - 1, - bridge_address, - ) - .unwrap(); - let Action::Ics20Withdrawal(mut action) = action else { - panic!("expected Ics20Withdrawal action, got {action:?}"); - }; - - // TODO: instead of zeroing this, we should pass in the latest block time to the function - // and generate the timeout time from that. 
- action.timeout_time = 0; // zero this for testing - - let expected_action = Ics20Withdrawal { - denom: denom.clone(), - destination_chain_address, - return_address: bridge_address, - amount: 99, - memo: serde_json::to_string(&Ics20WithdrawalFromRollupMemo { - memo: "hello".to_string(), - block_number: 1u64, - rollup_return_address: ethers::types::Address::from([0u8; 20]).to_string(), - transaction_hash: [2u8; 32], - }) - .unwrap(), - fee_asset: denom, - timeout_height: IbcHeight::new(u64::MAX, u64::MAX).unwrap(), - timeout_time: 0, // zero this for testing - source_channel: "channel-0".parse().unwrap(), - bridge_address: Some(bridge_address), - }; - assert_eq!(action, expected_action); - } -} diff --git a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/mod.rs b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/mod.rs index 8216a66bc7..1d889b863e 100644 --- a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/mod.rs +++ b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/mod.rs @@ -1,5 +1 @@ -pub(crate) mod convert; pub(crate) mod watcher; - -#[cfg(test)] -mod test_utils; diff --git a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/test_utils.rs b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/test_utils.rs deleted file mode 100644 index e63fb5b4cd..0000000000 --- a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/test_utils.rs +++ /dev/null @@ -1,205 +0,0 @@ -use std::{ - sync::Arc, - time::Duration, -}; - -use astria_bridge_contracts::{ - astria_bridgeable_erc20::{ - ASTRIABRIDGEABLEERC20_ABI, - ASTRIABRIDGEABLEERC20_BYTECODE, - }, - astria_withdrawer::{ - ASTRIAWITHDRAWER_ABI, - ASTRIAWITHDRAWER_BYTECODE, - }, -}; -use ethers::{ - abi::Tokenizable, - core::utils::Anvil, - prelude::*, - utils::AnvilInstance, -}; - -#[allow(clippy::struct_field_names)] -pub(crate) struct ConfigureAstriaWithdrawerDeployer { - pub(crate) base_chain_asset_precision: u32, - pub(crate) base_chain_bridge_address: astria_core::primitive::v1::Address, - pub(crate) base_chain_asset_denomination: String, -} - -impl Default for ConfigureAstriaWithdrawerDeployer { - fn default() -> Self { - Self { - base_chain_asset_precision: 18, - base_chain_bridge_address: crate::astria_address([0u8; 20]), - base_chain_asset_denomination: "test-denom".to_string(), - } - } -} - -impl ConfigureAstriaWithdrawerDeployer { - pub(crate) async fn deploy(self) -> (Address, Arc>, LocalWallet, AnvilInstance) { - let Self { - base_chain_asset_precision, - base_chain_bridge_address, - base_chain_asset_denomination, - } = self; - - deploy_astria_withdrawer( - base_chain_asset_precision.into(), - base_chain_bridge_address, - base_chain_asset_denomination, - ) - .await - } -} - -/// Starts a local anvil instance and deploys the `AstriaWithdrawer` contract to it. -/// -/// Returns the contract address, provider, wallet, and anvil instance. 
-/// -/// # Panics -/// -/// - if the provider fails to connect to the anvil instance -/// - if the contract fails to deploy -pub(crate) async fn deploy_astria_withdrawer( - base_chain_asset_precision: U256, - base_chain_bridge_address: astria_core::primitive::v1::Address, - base_chain_asset_denomination: String, -) -> (Address, Arc>, LocalWallet, AnvilInstance) { - // setup anvil and signing wallet - let anvil = Anvil::new().spawn(); - let wallet: LocalWallet = anvil.keys()[0].clone().into(); - let provider = Arc::new( - Provider::::connect(anvil.ws_endpoint()) - .await - .unwrap() - .interval(Duration::from_millis(10u64)), - ); - let signer = SignerMiddleware::new( - provider.clone(), - wallet.clone().with_chain_id(anvil.chain_id()), - ); - - let abi = ASTRIAWITHDRAWER_ABI.clone(); - let bytecode = ASTRIAWITHDRAWER_BYTECODE.to_vec(); - - let args = vec![ - base_chain_asset_precision.into_token(), - base_chain_bridge_address.to_string().into_token(), - base_chain_asset_denomination.into_token(), - ]; - - let factory = ContractFactory::new(abi.clone(), bytecode.into(), signer.into()); - let contract = factory.deploy_tokens(args).unwrap().send().await.unwrap(); - let contract_address = contract.address(); - - ( - contract_address, - provider, - wallet.with_chain_id(anvil.chain_id()), - anvil, - ) -} - -pub(crate) struct ConfigureAstriaBridgeableERC20Deployer { - pub(crate) bridge_address: Address, - pub(crate) base_chain_asset_precision: u32, - pub(crate) base_chain_bridge_address: astria_core::primitive::v1::Address, - pub(crate) base_chain_asset_denomination: String, - pub(crate) name: String, - pub(crate) symbol: String, -} - -impl Default for ConfigureAstriaBridgeableERC20Deployer { - fn default() -> Self { - Self { - bridge_address: Address::zero(), - base_chain_asset_precision: 18, - base_chain_bridge_address: crate::astria_address([0u8; 20]), - base_chain_asset_denomination: "testdenom".to_string(), - name: "test-token".to_string(), - symbol: "TT".to_string(), - } - } -} - -impl ConfigureAstriaBridgeableERC20Deployer { - pub(crate) async fn deploy(self) -> (Address, Arc>, LocalWallet, AnvilInstance) { - let Self { - bridge_address, - base_chain_asset_precision, - base_chain_bridge_address, - base_chain_asset_denomination, - name, - symbol, - } = self; - - deploy_astria_bridgeable_erc20( - bridge_address, - base_chain_asset_precision.into(), - base_chain_bridge_address, - base_chain_asset_denomination, - name, - symbol, - ) - .await - } -} - -/// Starts a local anvil instance and deploys the `AstriaBridgeableERC20` contract to it. -/// -/// Returns the contract address, provider, wallet, and anvil instance. 
-/// -/// # Panics -/// -/// - if the provider fails to connect to the anvil instance -/// - if the contract fails to deploy -pub(crate) async fn deploy_astria_bridgeable_erc20( - mut bridge_address: Address, - base_chain_asset_precision: ethers::abi::Uint, - base_chain_bridge_address: astria_core::primitive::v1::Address, - base_chain_asset_denomination: String, - name: String, - symbol: String, -) -> (Address, Arc>, LocalWallet, AnvilInstance) { - // setup anvil and signing wallet - let anvil = Anvil::new().spawn(); - let wallet: LocalWallet = anvil.keys()[0].clone().into(); - let provider = Arc::new( - Provider::::connect(anvil.ws_endpoint()) - .await - .unwrap() - .interval(Duration::from_millis(10u64)), - ); - let signer = SignerMiddleware::new( - provider.clone(), - wallet.clone().with_chain_id(anvil.chain_id()), - ); - - let abi = ASTRIABRIDGEABLEERC20_ABI.clone(); - let bytecode = ASTRIABRIDGEABLEERC20_BYTECODE.to_vec(); - - let factory = ContractFactory::new(abi.clone(), bytecode.into(), signer.into()); - - if bridge_address == Address::zero() { - bridge_address = wallet.address(); - } - let args = vec![ - bridge_address.into_token(), - base_chain_asset_precision.into_token(), - base_chain_bridge_address.to_string().into_token(), - base_chain_asset_denomination.into_token(), - name.into_token(), - symbol.into_token(), - ]; - let contract = factory.deploy_tokens(args).unwrap().send().await.unwrap(); - let contract_address = contract.address(); - - ( - contract_address, - provider, - wallet.with_chain_id(anvil.chain_id()), - anvil, - ) -} diff --git a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/watcher.rs b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/watcher.rs index 4e24918000..87cbccad24 100644 --- a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/watcher.rs +++ b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/ethereum/watcher.rs @@ -3,33 +3,25 @@ use std::{ time::Duration, }; -use astria_bridge_contracts::i_astria_withdrawer::{ - IAstriaWithdrawer, - Ics20WithdrawalFilter, - SequencerWithdrawalFilter, +use astria_bridge_contracts::{ + GetWithdrawalActions, + GetWithdrawalActionsBuilder, }; -use astria_core::{ - primitive::v1::{ - asset::{ - self, - denom, - Denom, - }, - Address, - }, - protocol::transaction::v1alpha1::Action, +use astria_core::primitive::v1::{ + asset, + Address, }; use astria_eyre::{ eyre::{ self, bail, eyre, + OptionExt as _, WrapErr as _, }, Result, }; use ethers::{ - contract::EthEvent as _, core::types::Block, providers::{ Middleware, @@ -38,29 +30,18 @@ use ethers::{ StreamExt as _, Ws, }, - types::{ - Filter, - Log, - H256, - }, + types::H256, utils::hex, }; use tokio::select; use tokio_util::sync::CancellationToken; use tracing::{ - debug, info, - trace, warn, }; use crate::bridge_withdrawer::{ batch::Batch, - ethereum::convert::{ - event_to_action, - EventWithMetadata, - WithdrawalEvent, - }, startup, state::State, submitter, @@ -72,7 +53,7 @@ pub(crate) struct Builder { pub(crate) ethereum_contract_address: String, pub(crate) ethereum_rpc_endpoint: String, pub(crate) state: Arc, - pub(crate) rollup_asset_denom: Denom, + pub(crate) rollup_asset_denom: asset::TracePrefixed, pub(crate) bridge_address: Address, pub(crate) submitter_handle: submitter::Handle, } @@ -93,16 +74,6 @@ impl Builder { let contract_address = address_from_string(ðereum_contract_address) .wrap_err("failed to parse ethereum contract address")?; - if rollup_asset_denom - .as_trace_prefixed() - .map_or(false, 
denom::TracePrefixed::trace_is_empty) - { - warn!( - "rollup asset denomination is not prefixed; Ics20Withdrawal actions will not be \ - submitted" - ); - } - Ok(Watcher { contract_address, ethereum_rpc_endpoint: ethereum_rpc_endpoint.to_string(), @@ -123,42 +94,32 @@ pub(crate) struct Watcher { submitter_handle: submitter::Handle, contract_address: ethers::types::Address, ethereum_rpc_endpoint: String, - rollup_asset_denom: Denom, + rollup_asset_denom: asset::TracePrefixed, bridge_address: Address, state: Arc, } impl Watcher { pub(crate) async fn run(mut self) -> Result<()> { - let (provider, contract, fee_asset, asset_withdrawal_divisor, next_rollup_block_height) = - self.startup() - .await - .wrap_err("watcher failed to start up")?; + let (provider, action_fetcher, next_rollup_block_height) = self + .startup() + .await + .wrap_err("watcher failed to start up")?; let Self { - rollup_asset_denom, - bridge_address, state, shutdown_token, submitter_handle, .. } = self; - let converter = EventToActionConvertConfig { - fee_asset, - rollup_asset_denom, - bridge_address, - asset_withdrawal_divisor, - }; - state.set_watcher_ready(); tokio::select! { res = watch_for_blocks( provider, - contract.address(), + action_fetcher, next_rollup_block_height, - converter, submitter_handle, shutdown_token.clone(), ) => { @@ -183,13 +144,7 @@ impl Watcher { /// - If the asset withdrawal decimals cannot be fetched. async fn startup( &mut self, - ) -> eyre::Result<( - Arc>, - IAstriaWithdrawer>, - asset::Denom, - u128, - u64, - )> { + ) -> eyre::Result<(Arc>, GetWithdrawalActions>, u64)> { let startup::Info { fee_asset, starting_rollup_height, @@ -233,39 +188,41 @@ impl Watcher { .with_config(retry_config) .await .wrap_err("failed connecting to rollup after several retries; giving up")?; - let provider = Arc::new(provider); - - // get contract handle - let contract = IAstriaWithdrawer::new(self.contract_address, provider.clone()); - // get asset withdrawal decimals - let base_chain_asset_precision = contract - .base_chain_asset_precision() - .call() + let provider = Arc::new(provider); + let ics20_asset_to_withdraw = if self.rollup_asset_denom.last_channel().is_some() { + info!( + rollup_asset_denom = %self.rollup_asset_denom, + "configured rollup asset contains an ics20 channel; ics20 withdrawals will be emitted" + ); + Some(self.rollup_asset_denom.clone()) + } else { + info!( + rollup_asset_denom = %self.rollup_asset_denom, + "configured rollup asset does not contain an ics20 channel; ics20 withdrawals will not be emitted" + ); + None + }; + let action_fetcher = GetWithdrawalActionsBuilder::new() + .provider(provider.clone()) + .fee_asset(fee_asset) + .contract_address(self.contract_address) + .bridge_address(self.bridge_address) + .sequencer_asset_to_withdraw(self.rollup_asset_denom.clone().into()) + .set_ics20_asset_to_withdraw(ics20_asset_to_withdraw) + .try_build() .await - .wrap_err("failed to get asset withdrawal decimals")?; - let asset_withdrawal_divisor = - 10u128.pow(18u32.checked_sub(base_chain_asset_precision).expect( - "base_chain_asset_precision must be <= 18, as the contract constructor enforces \ - this", - )); + .wrap_err("failed to construct contract event to sequencer action fetcher")?; self.state.set_watcher_ready(); - Ok(( - provider.clone(), - contract, - fee_asset, - asset_withdrawal_divisor, - starting_rollup_height, - )) + Ok((provider.clone(), action_fetcher, starting_rollup_height)) } } async fn sync_from_next_rollup_block_height( provider: Arc>, - contract_address: 
ethers::types::Address, - converter: &EventToActionConvertConfig, + action_fetcher: &GetWithdrawalActions>, submitter_handle: &submitter::Handle, next_rollup_block_height_to_check: u64, current_rollup_block_height: u64, @@ -283,15 +240,9 @@ async fn sync_from_next_rollup_block_height( bail!("block with number {i} missing"); }; - get_and_send_events_at_block( - provider.clone(), - contract_address, - block, - converter, - submitter_handle, - ) - .await - .wrap_err("failed to get and send events at block")?; + get_and_send_events_at_block(action_fetcher, block, submitter_handle) + .await + .wrap_err("failed to get and send events at block")?; } info!("synced from {next_rollup_block_height_to_check} to {current_rollup_block_height}"); @@ -300,9 +251,8 @@ async fn sync_from_next_rollup_block_height( async fn watch_for_blocks( provider: Arc>, - contract_address: ethers::types::Address, + action_fetcher: GetWithdrawalActions>, next_rollup_block_height: u64, - converter: EventToActionConvertConfig, submitter_handle: submitter::Handle, shutdown_token: CancellationToken, ) -> Result<()> { @@ -325,8 +275,7 @@ async fn watch_for_blocks( // (inclusive). sync_from_next_rollup_block_height( provider.clone(), - contract_address, - &converter, + &action_fetcher, &submitter_handle, next_rollup_block_height, current_rollup_block_height.as_u64(), @@ -343,10 +292,8 @@ async fn watch_for_blocks( block = block_rx.next() => { if let Some(block) = block { get_and_send_events_at_block( - provider.clone(), - contract_address, + &action_fetcher, block, - &converter, &submitter_handle, ) .await @@ -360,151 +307,43 @@ async fn watch_for_blocks( } async fn get_and_send_events_at_block( - provider: Arc>, - contract_address: ethers::types::Address, + actions_fetcher: &GetWithdrawalActions>, block: Block, - converter: &EventToActionConvertConfig, submitter_handle: &submitter::Handle, ) -> Result<()> { - let Some(block_hash) = block.hash else { - bail!("block hash missing; skipping") - }; - - let Some(block_number) = block.number else { - bail!("block number missing; skipping") - }; - - let sequencer_withdrawal_events = - get_sequencer_withdrawal_events(provider.clone(), contract_address, block_hash) - .await - .wrap_err("failed to get sequencer withdrawal events")?; - let ics20_withdrawal_events = - get_ics20_withdrawal_events(provider.clone(), contract_address, block_hash) - .await - .wrap_err("failed to get ics20 withdrawal events")?; - let events = vec![sequencer_withdrawal_events, ics20_withdrawal_events] - .into_iter() - .flatten(); - let mut batch = Batch { - actions: Vec::new(), - rollup_height: block_number.as_u64(), - }; - for (event, log) in events { - let Some(transaction_hash) = log.transaction_hash else { - warn!("transaction hash missing; skipping"); - continue; - }; - - let event_with_metadata = EventWithMetadata { - event, - block_number, - transaction_hash, - }; - let action = converter - .convert(event_with_metadata) - .wrap_err("failed to convert event to action")?; - batch.actions.push(action); - } - - if batch.actions.is_empty() { - trace!("no actions to send at block {block_number}"); + let block_hash = block.hash.ok_or_eyre("block did not contain a hash")?; + let rollup_height = block + .number + .ok_or_eyre("block did not contain a rollup height")? 
+ .as_u64(); + let actions = actions_fetcher + .get_for_block_hash(block_hash) + .await + .wrap_err_with(|| { + format!( + "failed getting actions for block; block hash: `{block_hash}`, block height: \ + `{rollup_height}`" + ) + })?; + + if actions.is_empty() { + info!( + "no withdrawal actions found for block `{block_hash}` at rollup height \ + `{rollup_height}; skipping" + ); } else { - let actions_len = batch.actions.len(); submitter_handle - .send_batch(batch) + .send_batch(Batch { + actions, + rollup_height, + }) .await .wrap_err("failed to send batched events; receiver dropped?")?; - debug!( - "sent batch with {} actions at block {block_number}", - actions_len - ); } Ok(()) } -async fn get_sequencer_withdrawal_events( - provider: Arc>, - contract_address: ethers::types::Address, - block_hash: H256, -) -> Result> { - let sequencer_withdrawal_event_sig = SequencerWithdrawalFilter::signature(); - let sequencer_withdrawal_filter = Filter::new() - .at_block_hash(block_hash) - .address(contract_address) - .topic0(sequencer_withdrawal_event_sig); - - let logs = provider - .get_logs(&sequencer_withdrawal_filter) - .await - .wrap_err("failed to get sequencer withdrawal events")?; - - let events = logs - .into_iter() - .map(|log| { - let raw_log = ethers::abi::RawLog { - topics: log.topics.clone(), - data: log.data.to_vec(), - }; - let event = SequencerWithdrawalFilter::decode_log(&raw_log)?; - Ok((WithdrawalEvent::Sequencer(event), log)) - }) - .collect::>>()?; - - Ok(events) -} - -async fn get_ics20_withdrawal_events( - provider: Arc>, - contract_address: ethers::types::Address, - block_hash: H256, -) -> Result> { - let ics20_withdrawal_event_sig = Ics20WithdrawalFilter::signature(); - let ics20_withdrawal_filter = Filter::new() - .at_block_hash(block_hash) - .address(contract_address) - .topic0(ics20_withdrawal_event_sig); - - let logs = provider - .get_logs(&ics20_withdrawal_filter) - .await - .wrap_err("failed to get ics20 withdrawal events")?; - - let events = logs - .into_iter() - .map(|log| { - let raw_log = ethers::abi::RawLog { - topics: log.topics.clone(), - data: log.data.to_vec(), - }; - let event = Ics20WithdrawalFilter::decode_log(&raw_log)?; - Ok((WithdrawalEvent::Ics20(event), log)) - }) - .collect::>>()?; - - Ok(events) -} - -#[derive(Clone)] -struct EventToActionConvertConfig { - fee_asset: Denom, - rollup_asset_denom: Denom, - bridge_address: Address, - asset_withdrawal_divisor: u128, -} - -impl EventToActionConvertConfig { - fn convert(&self, event: EventWithMetadata) -> Result { - event_to_action( - event, - self.fee_asset.clone(), - self.rollup_asset_denom.clone(), - self.asset_withdrawal_divisor, - self.bridge_address, - ) - } -} - // converts an ethereum address string to an `ethers::types::Address`. // the input string may be prefixed with "0x" or not. 
fn address_from_string(s: &str) -> Result { @@ -518,588 +357,3 @@ fn address_from_string(s: &str) -> Result { })?; Ok(address.into()) } - -#[cfg(test)] -mod tests { - use astria_bridge_contracts::{ - astria_bridgeable_erc20::AstriaBridgeableERC20, - astria_withdrawer::AstriaWithdrawer, - i_astria_withdrawer::{ - Ics20WithdrawalFilter, - SequencerWithdrawalFilter, - }, - }; - use astria_core::{ - primitive::v1::{ - asset, - Address, - }, - protocol::transaction::v1alpha1::Action, - }; - use ethers::{ - prelude::SignerMiddleware, - providers::Middleware, - signers::Signer as _, - types::{ - TransactionReceipt, - U256, - }, - utils::hex, - }; - use tokio::sync::mpsc::{ - self, - error::TryRecvError, - }; - - use super::*; - use crate::bridge_withdrawer::ethereum::{ - convert::EventWithMetadata, - test_utils::{ - ConfigureAstriaBridgeableERC20Deployer, - ConfigureAstriaWithdrawerDeployer, - }, - }; - - fn default_native_asset() -> asset::Denom { - "nria".parse().unwrap() - } - - #[test] - fn address_from_string_prefix() { - let address = address_from_string("0x1234567890123456789012345678901234567890").unwrap(); - let bytes: [u8; 20] = hex::decode("1234567890123456789012345678901234567890") - .unwrap() - .try_into() - .unwrap(); - assert_eq!(address, ethers::types::Address::from(bytes)); - } - - #[test] - fn address_from_string_no_prefix() { - let address = address_from_string("1234567890123456789012345678901234567890").unwrap(); - let bytes: [u8; 20] = hex::decode("1234567890123456789012345678901234567890") - .unwrap() - .try_into() - .unwrap(); - assert_eq!(address, ethers::types::Address::from(bytes)); - } - - async fn send_sequencer_withdraw_transaction( - contract: &AstriaWithdrawer, - value: U256, - recipient: Address, - ) -> TransactionReceipt { - let tx = contract - .withdraw_to_sequencer(recipient.to_string()) - .value(value); - let receipt = tx - .send() - .await - .expect("failed to submit transaction") - .await - .expect("failed to await pending transaction") - .expect("no receipt found"); - - assert!( - receipt.status == Some(ethers::types::U64::from(1)), - "`withdraw` transaction failed: {receipt:?}", - ); - - receipt - } - - #[tokio::test] - #[ignore = "requires foundry to be installed"] - async fn astria_withdrawer_invalid_value_fails() { - let (contract_address, provider, wallet, _anvil) = ConfigureAstriaWithdrawerDeployer { - base_chain_asset_precision: 15, - ..Default::default() - } - .deploy() - .await; - let signer = Arc::new(SignerMiddleware::new(provider, wallet.clone())); - let contract = AstriaWithdrawer::new(contract_address, signer.clone()); - - let value: U256 = 999.into(); // 10^3 - 1 - let recipient = crate::astria_address([1u8; 20]); - let tx = contract - .withdraw_to_sequencer(recipient.to_string()) - .value(value); - tx.send() - .await - .expect_err("`withdraw` transaction should have failed due to value < 10^3"); - } - - #[tokio::test] - #[ignore = "requires foundry to be installed"] - async fn watcher_can_watch_sequencer_withdrawals_astria_withdrawer() { - let (contract_address, provider, wallet, anvil) = - ConfigureAstriaWithdrawerDeployer::default().deploy().await; - let signer = Arc::new(SignerMiddleware::new(provider, wallet.clone())); - let contract = AstriaWithdrawer::new(contract_address, signer.clone()); - - let value = 1_000_000_000.into(); - let recipient = crate::astria_address([1u8; 20]); - let bridge_address = crate::astria_address([1u8; 20]); - let denom = "nria".parse::().unwrap(); - - let state = Arc::new(State::new()); - let startup_handle = 
startup::InfoHandle::new(state.subscribe()); - state.set_startup_info(startup::Info { - starting_rollup_height: 1, - fee_asset: denom.clone(), - chain_id: "astria".to_string(), - }); - let (batch_tx, mut batch_rx) = mpsc::channel(100); - let submitter_handle = submitter::Handle::new(batch_tx); - - let watcher = Builder { - ethereum_contract_address: hex::encode(contract_address), - ethereum_rpc_endpoint: anvil.ws_endpoint(), - startup_handle, - submitter_handle, - shutdown_token: CancellationToken::new(), - state: Arc::new(State::new()), - rollup_asset_denom: denom.clone(), - bridge_address, - } - .build() - .unwrap(); - - tokio::task::spawn(watcher.run()); - let receipt = send_sequencer_withdraw_transaction(&contract, value, recipient).await; - let expected_event = EventWithMetadata { - event: WithdrawalEvent::Sequencer(SequencerWithdrawalFilter { - sender: wallet.address(), - destination_chain_address: recipient.to_string(), - amount: value, - }), - block_number: receipt.block_number.unwrap(), - transaction_hash: receipt.transaction_hash, - }; - let expected_action = - event_to_action(expected_event, denom.clone(), denom, 1, bridge_address).unwrap(); - let Action::BridgeUnlock(expected_action) = expected_action else { - panic!("expected action to be BridgeUnlock, got {expected_action:?}"); - }; - - let batch = batch_rx.recv().await.unwrap(); - assert_eq!(batch.actions.len(), 1); - let Action::BridgeUnlock(action) = &batch.actions[0] else { - panic!( - "expected action to be BridgeUnlock, got {:?}", - batch.actions[0] - ); - }; - assert_eq!(action, &expected_action); - assert_eq!(batch_rx.try_recv().unwrap_err(), TryRecvError::Empty); - } - - #[tokio::test] - #[ignore = "requires foundry to be installed"] - async fn watcher_can_watch_sequencer_withdrawals_astria_withdrawer_sync_from_next_rollup_height() - { - let (contract_address, provider, wallet, anvil) = - ConfigureAstriaWithdrawerDeployer::default().deploy().await; - let signer = Arc::new(SignerMiddleware::new(provider, wallet.clone())); - let contract = AstriaWithdrawer::new(contract_address, signer.clone()); - - let value = 1_000_000_000.into(); - let recipient = crate::astria_address([1u8; 20]); - let bridge_address = crate::astria_address([1u8; 20]); - let denom = default_native_asset(); - - // send tx before watcher starts - let receipt = send_sequencer_withdraw_transaction(&contract, value, recipient).await; - - let expected_event = EventWithMetadata { - event: WithdrawalEvent::Sequencer(SequencerWithdrawalFilter { - sender: wallet.address(), - destination_chain_address: recipient.to_string(), - amount: value, - }), - block_number: receipt.block_number.unwrap(), - transaction_hash: receipt.transaction_hash, - }; - let expected_action = event_to_action( - expected_event, - denom.clone(), - denom.clone(), - 1, - bridge_address, - ) - .unwrap(); - let Action::BridgeUnlock(expected_action) = expected_action else { - panic!("expected action to be BridgeUnlock, got {expected_action:?}"); - }; - - let state = Arc::new(State::new()); - let startup_handle = startup::InfoHandle::new(state.subscribe()); - state.set_startup_info(startup::Info { - starting_rollup_height: 1, - fee_asset: denom.clone(), - chain_id: "astria".to_string(), - }); - let (batch_tx, mut batch_rx) = mpsc::channel(100); - - let watcher = Builder { - ethereum_contract_address: hex::encode(contract_address), - ethereum_rpc_endpoint: anvil.ws_endpoint(), - startup_handle, - shutdown_token: CancellationToken::new(), - state: Arc::new(State::new()), - rollup_asset_denom: 
denom.clone(), - bridge_address, - submitter_handle: submitter::Handle::new(batch_tx), - } - .build() - .unwrap(); - - tokio::task::spawn(watcher.run()); - - // send another tx to trigger a new block - send_sequencer_withdraw_transaction(&contract, value, recipient).await; - - let batch = batch_rx.recv().await.unwrap(); - assert_eq!(batch.actions.len(), 1); - let Action::BridgeUnlock(action) = &batch.actions[0] else { - panic!( - "expected action to be BridgeUnlock, got {:?}", - batch.actions[0] - ); - }; - assert_eq!(action, &expected_action); - - // should receive a second batch containing the second tx - let batch = batch_rx.recv().await.unwrap(); - assert_eq!(batch.actions.len(), 1); - } - - async fn send_ics20_withdraw_transaction( - contract: &AstriaWithdrawer, - value: U256, - recipient: String, - ) -> TransactionReceipt { - let tx = contract - .withdraw_to_ibc_chain(recipient, "nootwashere".to_string()) - .value(value); - let receipt = tx - .send() - .await - .expect("failed to submit transaction") - .await - .expect("failed to await pending transaction") - .expect("no receipt found"); - - assert!( - receipt.status == Some(ethers::types::U64::from(1)), - "`withdraw` transaction failed: {receipt:?}", - ); - - receipt - } - - #[tokio::test] - #[ignore = "requires foundry to be installed"] - async fn watcher_can_watch_ics20_withdrawals_astria_withdrawer() { - let (contract_address, provider, wallet, anvil) = - ConfigureAstriaWithdrawerDeployer::default().deploy().await; - let signer = Arc::new(SignerMiddleware::new(provider, wallet.clone())); - let contract = AstriaWithdrawer::new(contract_address, signer.clone()); - - let value = 1_000_000_000.into(); - let recipient = "somebech32address".to_string(); - - let bridge_address = crate::astria_address([1u8; 20]); - let denom = "transfer/channel-0/utia".parse::().unwrap(); - - let state = Arc::new(State::new()); - let startup_handle = startup::InfoHandle::new(state.subscribe()); - state.set_startup_info(startup::Info { - starting_rollup_height: 1, - fee_asset: denom.clone(), - chain_id: "astria".to_string(), - }); - let (batch_tx, mut batch_rx) = mpsc::channel(100); - - let watcher = Builder { - ethereum_contract_address: hex::encode(contract_address), - ethereum_rpc_endpoint: anvil.ws_endpoint(), - startup_handle, - shutdown_token: CancellationToken::new(), - state: Arc::new(State::new()), - rollup_asset_denom: denom.clone(), - bridge_address, - submitter_handle: submitter::Handle::new(batch_tx), - } - .build() - .unwrap(); - - tokio::task::spawn(watcher.run()); - - let receipt = send_ics20_withdraw_transaction(&contract, value, recipient.clone()).await; - let expected_event = EventWithMetadata { - event: WithdrawalEvent::Ics20(Ics20WithdrawalFilter { - sender: wallet.address(), - destination_chain_address: recipient.clone(), - amount: value, - memo: "nootwashere".to_string(), - }), - block_number: receipt.block_number.unwrap(), - transaction_hash: receipt.transaction_hash, - }; - - let Action::Ics20Withdrawal(mut expected_action) = event_to_action( - expected_event, - denom.clone(), - denom.clone(), - 1, - bridge_address, - ) - .unwrap() else { - panic!("expected action to be Ics20Withdrawal"); - }; - expected_action.timeout_time = 0; // zero this for testing - - let mut batch = batch_rx.recv().await.unwrap(); - assert_eq!(batch.actions.len(), 1); - let Action::Ics20Withdrawal(ref mut action) = batch.actions[0] else { - panic!( - "expected action to be Ics20Withdrawal, got {:?}", - batch.actions[0] - ); - }; - action.timeout_time = 0; 
// zero this for testing - assert_eq!(action, &expected_action); - assert_eq!(batch_rx.try_recv().unwrap_err(), TryRecvError::Empty); - } - - async fn mint_tokens( - contract: &AstriaBridgeableERC20, - amount: U256, - recipient: ethers::types::Address, - ) -> TransactionReceipt { - let mint_tx = contract.mint(recipient, amount); - let receipt = mint_tx - .send() - .await - .expect("failed to submit mint transaction") - .await - .expect("failed to await pending mint transaction") - .expect("no mint receipt found"); - - assert!( - receipt.status == Some(ethers::types::U64::from(1)), - "`mint` transaction failed: {receipt:?}", - ); - - receipt - } - - async fn send_sequencer_withdraw_transaction_erc20( - contract: &AstriaBridgeableERC20, - value: U256, - recipient: Address, - ) -> TransactionReceipt { - let tx = contract.withdraw_to_sequencer(value, recipient.to_string()); - let receipt = tx - .send() - .await - .expect("failed to submit transaction") - .await - .expect("failed to await pending transaction") - .expect("no receipt found"); - - assert!( - receipt.status == Some(ethers::types::U64::from(1)), - "`withdraw` transaction failed: {receipt:?}", - ); - - receipt - } - - #[tokio::test] - #[ignore = "requires foundry to be installed"] - async fn watcher_can_watch_sequencer_withdrawals_astria_bridgeable_erc20() { - let (contract_address, provider, wallet, anvil) = ConfigureAstriaBridgeableERC20Deployer { - base_chain_asset_precision: 18, - ..Default::default() - } - .deploy() - .await; - let signer = Arc::new(SignerMiddleware::new(provider, wallet.clone())); - let contract = AstriaBridgeableERC20::new(contract_address, signer.clone()); - - // mint some tokens to the wallet - mint_tokens(&contract, 2_000_000_000.into(), wallet.address()).await; - - let value = 1_000_000_000.into(); - let recipient = crate::astria_address([1u8; 20]); - let bridge_address = crate::astria_address([1u8; 20]); - let denom = default_native_asset(); - - let state = Arc::new(State::new()); - let startup_handle = startup::InfoHandle::new(state.subscribe()); - state.set_startup_info(startup::Info { - starting_rollup_height: 1, - fee_asset: denom.clone(), - chain_id: "astria".to_string(), - }); - let (batch_tx, mut batch_rx) = mpsc::channel(100); - - let watcher = Builder { - ethereum_contract_address: hex::encode(contract_address), - ethereum_rpc_endpoint: anvil.ws_endpoint(), - startup_handle, - shutdown_token: CancellationToken::new(), - state: Arc::new(State::new()), - rollup_asset_denom: denom.clone(), - bridge_address, - submitter_handle: submitter::Handle::new(batch_tx), - } - .build() - .unwrap(); - - tokio::task::spawn(watcher.run()); - - let receipt = send_sequencer_withdraw_transaction_erc20(&contract, value, recipient).await; - let expected_event = EventWithMetadata { - event: WithdrawalEvent::Sequencer(SequencerWithdrawalFilter { - sender: wallet.address(), - destination_chain_address: recipient.to_string(), - amount: value, - }), - block_number: receipt.block_number.unwrap(), - transaction_hash: receipt.transaction_hash, - }; - let expected_action = event_to_action( - expected_event, - denom.clone(), - denom.clone(), - 1, - bridge_address, - ) - .unwrap(); - let Action::BridgeUnlock(expected_action) = expected_action else { - panic!("expected action to be BridgeUnlock, got {expected_action:?}"); - }; - - let batch = batch_rx.recv().await.unwrap(); - assert_eq!(batch.actions.len(), 1); - let Action::BridgeUnlock(action) = &batch.actions[0] else { - panic!( - "expected action to be BridgeUnlock, got 
{:?}", - batch.actions[0] - ); - }; - assert_eq!(action, &expected_action); - assert_eq!(batch_rx.try_recv().unwrap_err(), TryRecvError::Empty); - } - - async fn send_ics20_withdraw_transaction_astria_bridgeable_erc20( - contract: &AstriaBridgeableERC20, - value: U256, - recipient: String, - ) -> TransactionReceipt { - let tx = contract.withdraw_to_ibc_chain(value, recipient, "nootwashere".to_string()); - let receipt = tx - .send() - .await - .expect("failed to submit transaction") - .await - .expect("failed to await pending transaction") - .expect("no receipt found"); - - assert!( - receipt.status == Some(ethers::types::U64::from(1)), - "`withdraw` transaction failed: {receipt:?}", - ); - - receipt - } - - #[tokio::test] - #[ignore = "requires foundry to be installed"] - async fn watcher_can_watch_ics20_withdrawals_astria_bridgeable_erc20() { - let (contract_address, provider, wallet, anvil) = ConfigureAstriaBridgeableERC20Deployer { - base_chain_asset_precision: 18, - ..Default::default() - } - .deploy() - .await; - let signer = Arc::new(SignerMiddleware::new(provider, wallet.clone())); - let contract = AstriaBridgeableERC20::new(contract_address, signer.clone()); - - // mint some tokens to the wallet - mint_tokens(&contract, 2_000_000_000.into(), wallet.address()).await; - - let value = 1_000_000_000.into(); - let recipient = "somebech32address".to_string(); - let bridge_address = crate::astria_address([1u8; 20]); - let denom = "transfer/channel-0/utia".parse::().unwrap(); - - let state = Arc::new(State::new()); - let startup_handle = startup::InfoHandle::new(state.subscribe()); - state.set_startup_info(startup::Info { - starting_rollup_height: 1, - fee_asset: denom.clone(), - chain_id: "astria".to_string(), - }); - let (batch_tx, mut batch_rx) = mpsc::channel(100); - - let watcher = Builder { - ethereum_contract_address: hex::encode(contract_address), - ethereum_rpc_endpoint: anvil.ws_endpoint(), - startup_handle, - shutdown_token: CancellationToken::new(), - state: Arc::new(State::new()), - rollup_asset_denom: denom.clone(), - bridge_address, - submitter_handle: submitter::Handle::new(batch_tx), - } - .build() - .unwrap(); - - tokio::task::spawn(watcher.run()); - - let receipt = send_ics20_withdraw_transaction_astria_bridgeable_erc20( - &contract, - value, - recipient.clone(), - ) - .await; - let expected_event = EventWithMetadata { - event: WithdrawalEvent::Ics20(Ics20WithdrawalFilter { - sender: wallet.address(), - destination_chain_address: recipient.clone(), - amount: value, - memo: "nootwashere".to_string(), - }), - block_number: receipt.block_number.unwrap(), - transaction_hash: receipt.transaction_hash, - }; - let Action::Ics20Withdrawal(mut expected_action) = event_to_action( - expected_event, - denom.clone(), - denom.clone(), - 1, - bridge_address, - ) - .unwrap() else { - panic!("expected action to be Ics20Withdrawal"); - }; - expected_action.timeout_time = 0; // zero this for testing - - let mut batch = batch_rx.recv().await.unwrap(); - assert_eq!(batch.actions.len(), 1); - let Action::Ics20Withdrawal(ref mut action) = batch.actions[0] else { - panic!( - "expected action to be Ics20Withdrawal, got {:?}", - batch.actions[0] - ); - }; - action.timeout_time = 0; // zero this for testing - assert_eq!(action, &expected_action); - assert_eq!(batch_rx.try_recv().unwrap_err(), TryRecvError::Empty); - } -} diff --git a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/mod.rs b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/mod.rs index 216ec60770..6f58679c08 100644 --- 
a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/mod.rs +++ b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/mod.rs @@ -7,7 +7,6 @@ use std::{ time::Duration, }; -use astria_core::primitive::v1::asset::Denom; use astria_eyre::eyre::{ self, WrapErr as _, @@ -119,9 +118,7 @@ impl BridgeWithdrawer { startup_handle, shutdown_token: shutdown_handle.token(), state: state.clone(), - rollup_asset_denom: rollup_asset_denomination - .parse::() - .wrap_err("failed to parse ROLLUP_ASSET_DENOMINATION as Denom")?, + rollup_asset_denom: rollup_asset_denomination, bridge_address: sequencer_bridge_address, submitter_handle, } diff --git a/crates/astria-bridge-withdrawer/src/config.rs b/crates/astria-bridge-withdrawer/src/config.rs index 7a2f33f754..85ac347d6c 100644 --- a/crates/astria-bridge-withdrawer/src/config.rs +++ b/crates/astria-bridge-withdrawer/src/config.rs @@ -19,7 +19,7 @@ pub struct Config { // The fee asset denomination to use for the bridge account's transactions. pub fee_asset_denomination: asset::Denom, // The asset denomination being withdrawn from the rollup. - pub rollup_asset_denomination: String, + pub rollup_asset_denomination: asset::denom::TracePrefixed, // The bridge address corresponding to the bridged rollup asset on the sequencer. pub sequencer_bridge_address: String, // The address of the AstriaWithdrawer contract on the evm rollup. diff --git a/crates/astria-cli/src/commands/bridge/collect.rs b/crates/astria-cli/src/commands/bridge/collect.rs index 46cf8dbcf2..317b9d574f 100644 --- a/crates/astria-cli/src/commands/bridge/collect.rs +++ b/crates/astria-cli/src/commands/bridge/collect.rs @@ -8,30 +8,18 @@ use std::{ time::Duration, }; -use astria_bridge_contracts::i_astria_withdrawer::{ - IAstriaWithdrawer, - Ics20WithdrawalFilter, - SequencerWithdrawalFilter, +use astria_bridge_contracts::{ + GetWithdrawalActions, + GetWithdrawalActionsBuilder, }; use astria_core::{ - bridge::{ - self, - Ics20WithdrawalFromRollupMemo, - }, primitive::v1::{ asset::{ self, - TracePrefixed, }, Address, }, - protocol::transaction::v1alpha1::{ - action::{ - BridgeUnlockAction, - Ics20Withdrawal, - }, - Action, - }, + protocol::transaction::v1alpha1::Action, }; use clap::Args; use color_eyre::eyre::{ @@ -43,7 +31,6 @@ use color_eyre::eyre::{ WrapErr as _, }; use ethers::{ - contract::EthEvent, core::types::Block, providers::{ Middleware, @@ -52,11 +39,7 @@ use ethers::{ StreamExt as _, Ws, }, - types::{ - Filter, - Log, - H256, - }, + types::H256, }; use futures::stream::BoxStream; use tracing::{ @@ -85,9 +68,12 @@ pub(crate) struct WithdrawalEvents { /// actions be submitted to the Sequencer). #[arg(long, default_value = "nria")] fee_asset: asset::Denom, - /// The asset denomination of the asset that's withdrawn from the bridge. + /// The sequencer asset withdrawn through the bridge. #[arg(long)] - rollup_asset_denom: asset::Denom, + sequencer_asset_to_withdraw: Option, + /// The ics20 asset withdrawn through the bridge. + #[arg(long)] + ics20_asset_to_withdraw: Option, /// The bech32-encoded bridge address corresponding to the bridged rollup /// asset on the sequencer. Should match the bridge address in the geth /// rollup's bridge configuration for that asset.
@@ -106,8 +92,9 @@ impl WithdrawalEvents { contract_address, from_rollup_height, to_rollup_height, + sequencer_asset_to_withdraw, + ics20_asset_to_withdraw, fee_asset, - rollup_asset_denom, bridge_address, output, } = self; @@ -118,10 +105,16 @@ impl WithdrawalEvents { .await .wrap_err("failed to connect to rollup")?; - let asset_withdrawal_divisor = - get_asset_withdrawal_divisor(contract_address, block_provider.clone()) - .await - .wrap_err("failed determining asset withdrawal divisor")?; + let actions_fetcher = GetWithdrawalActionsBuilder::new() + .provider(block_provider.clone()) + .contract_address(contract_address) + .fee_asset(fee_asset) + .set_ics20_asset_to_withdraw(ics20_asset_to_withdraw) + .set_sequencer_asset_to_withdraw(sequencer_asset_to_withdraw) + .bridge_address(bridge_address) + .try_build() + .await + .wrap_err("failed to initialize contract events to sequencer actions converter")?; let mut incoming_blocks = create_stream_of_blocks(&block_provider, from_rollup_height, to_rollup_height) @@ -139,21 +132,20 @@ impl WithdrawalEvents { block = incoming_blocks.next() => { match block { - Some(Ok(block)) => - if let Err(err) = actions_by_rollup_height.convert_and_insert(BlockToActions { - block_provider: block_provider.clone(), - contract_address, + Some(Ok(block)) => { + if let Err(e) = block_to_actions( block, - fee_asset: fee_asset.clone(), - rollup_asset_denom: rollup_asset_denom.clone(), - bridge_address, - asset_withdrawal_divisor, - }).await { - error!( - err = AsRef::::as_ref(&err), - "failed converting contract block to Sequencer actions and storing them; exiting stream"); - break; - } + &mut actions_by_rollup_height, + &actions_fetcher, + ).await { + error!( + error = AsRef::::as_ref(&e), + "failed converting contract block to sequencer actions; + exiting stream", + ); + break; + } + } Some(Err(error)) => { error!( error = AsRef::::as_ref(&error), @@ -170,12 +162,46 @@ impl WithdrawalEvents { } } + info!( + "collected a total of {} actions across {} rollup heights; writing to file", + actions_by_rollup_height + .0 + .values() + .map(Vec::len) + .sum::(), + actions_by_rollup_height.0.len(), + ); + actions_by_rollup_height .write_to_output(output) .wrap_err("failed to write actions to file") } } +async fn block_to_actions( + block: Block, + actions_by_rollup_height: &mut ActionsByRollupHeight, + actions_fetcher: &GetWithdrawalActions>, +) -> eyre::Result<()> { + let block_hash = block + .hash + .ok_or_eyre("block did not contain a hash; skipping")?; + let rollup_height = block + .number + .ok_or_eyre("block did not contain a rollup height; skipping")? + .as_u64(); + let actions = actions_fetcher + .get_for_block_hash(block_hash) + .await + .wrap_err_with(|| { + format!( + "failed getting actions for block; block hash: `{block_hash}`, block height: \ + `{rollup_height}`" + ) + })?; + actions_by_rollup_height.insert(rollup_height, actions) +} + #[derive(Clone, Debug, serde::Deserialize, serde::Serialize)] #[serde(transparent)] pub(crate) struct ActionsByRollupHeight(BTreeMap>); @@ -190,13 +216,7 @@ impl ActionsByRollupHeight { } #[instrument(skip_all, err)] - async fn convert_and_insert(&mut self, block_to_actions: BlockToActions) -> eyre::Result<()> { - let rollup_height = block_to_actions - .block - .number - .ok_or_eyre("block was missing a number")? 
- .as_u64(); - let actions = block_to_actions.run().await; + fn insert(&mut self, rollup_height: u64, actions: Vec) -> eyre::Result<()> { ensure!( self.0.insert(rollup_height, actions).is_none(), "already collected actions for block at rollup height `{rollup_height}`; no 2 blocks \ @@ -277,7 +297,7 @@ fn open_output>(target: P) -> eyre::Result { .write(true) .create_new(true) .open(&target) - .wrap_err("failed to open specified fil}e for writing")?; + .wrap_err("failed to open specified file for writing")?; Ok(Output { handle, path: target.as_ref().to_path_buf(), @@ -309,274 +329,3 @@ async fn connect_to_rollup(rollup_endpoint: &str) -> eyre::Result>, -) -> eyre::Result { - let contract = IAstriaWithdrawer::new(contract_address, provider); - - let base_chain_asset_precision = contract - .base_chain_asset_precision() - .call() - .await - .wrap_err("failed to get asset withdrawal decimals")?; - - let exponent = 18u32.checked_sub(base_chain_asset_precision).ok_or_eyre( - "failed calculating asset divisor. The base chain asset precision should be <= 18 as \ - that's enforced by the contract, so the construction should work. Did the precision \ - change?", - )?; - Ok(10u128.pow(exponent)) -} - -fn packet_timeout_time() -> eyre::Result { - tendermint::Time::now() - .checked_add(Duration::from_secs(300)) - .ok_or_eyre("adding 5 minutes to current time caused overflow")? - .unix_timestamp_nanos() - .try_into() - .wrap_err("failed to i128 nanoseconds to u64") -} - -struct BlockToActions { - block_provider: Arc>, - contract_address: ethers::types::Address, - block: Block, - fee_asset: asset::Denom, - rollup_asset_denom: asset::Denom, - bridge_address: Address, - asset_withdrawal_divisor: u128, -} - -impl BlockToActions { - async fn run(self) -> Vec { - let mut actions = Vec::new(); - - let Some(block_hash) = self.block.hash else { - warn!("block hash missing; skipping"); - return actions; - }; - - match get_log::( - self.block_provider.clone(), - self.contract_address, - block_hash, - ) - .await - { - Err(error) => warn!( - error = AsRef::::as_ref(&error), - "encountered an error getting logs for sequencer withdrawal events", - ), - Ok(logs) => { - for log in logs { - match self.log_to_sequencer_withdrawal_action(log) { - Ok(action) => actions.push(action), - Err(error) => { - warn!( - error = AsRef::::as_ref(&error), - "failed converting ethers contract log to sequencer withdrawal \ - action; skipping" - ); - } - } - } - } - } - match get_log::( - self.block_provider.clone(), - self.contract_address, - block_hash, - ) - .await - { - Err(error) => warn!( - error = AsRef::::as_ref(&error), - "encountered an error getting logs for ics20 withdrawal events", - ), - Ok(logs) => { - for log in logs { - match self.log_to_ics20_withdrawal_action(log) { - Ok(action) => actions.push(action), - Err(error) => { - warn!( - error = AsRef::::as_ref(&error), - "failed converting ethers contract log to ics20 withdrawal \ - action; skipping" - ); - } - } - } - } - } - actions - } - - fn log_to_ics20_withdrawal_action(&self, log: Log) -> eyre::Result { - LogToIcs20WithdrawalAction { - log, - fee_asset: self.fee_asset.clone(), - rollup_asset_denom: self.rollup_asset_denom.clone(), - asset_withdrawal_divisor: self.asset_withdrawal_divisor, - bridge_address: self.bridge_address, - } - .try_convert() - .wrap_err("failed converting log to ics20 withdrawal action") - } - - fn log_to_sequencer_withdrawal_action(&self, log: Log) -> eyre::Result { - LogToSequencerWithdrawalAction { - log, - bridge_address: self.bridge_address, 
- fee_asset: self.fee_asset.clone(), - asset_withdrawal_divisor: self.asset_withdrawal_divisor, - } - .try_into_action() - .wrap_err("failed converting log to sequencer withdrawal action") - } -} - -fn action_inputs_from_log(log: Log) -> eyre::Result<(T, u64, [u8; 32])> { - let block_number = log - .block_number - .ok_or_eyre("log did not contain block number")? - .as_u64(); - let transaction_hash = log - .transaction_hash - .ok_or_eyre("log did not contain transaction hash")? - .into(); - - let event = T::decode_log(&log.into()) - .wrap_err_with(|| format!("failed decoding contract log as `{}`", T::name()))?; - Ok((event, block_number, transaction_hash)) -} - -#[derive(Debug)] -struct LogToIcs20WithdrawalAction { - log: Log, - fee_asset: asset::Denom, - rollup_asset_denom: asset::Denom, - asset_withdrawal_divisor: u128, - bridge_address: Address, -} - -impl LogToIcs20WithdrawalAction { - fn try_convert(self) -> eyre::Result { - let Self { - log, - fee_asset, - rollup_asset_denom, - asset_withdrawal_divisor, - bridge_address, - } = self; - - let (event, block_number, transaction_hash) = - action_inputs_from_log::(log) - .wrap_err("failed getting required data from log")?; - - let source_channel = rollup_asset_denom - .as_trace_prefixed() - .and_then(TracePrefixed::last_channel) - .ok_or_eyre("rollup asset denom must have a channel to be withdrawn via IBC")? - .parse() - .wrap_err("failed to parse channel from rollup asset denom")?; - - let memo = Ics20WithdrawalFromRollupMemo { - memo: event.memo, - block_number, - rollup_return_address: event.sender.to_string(), - transaction_hash, - }; - - let action = Ics20Withdrawal { - denom: rollup_asset_denom, - destination_chain_address: event.destination_chain_address, - // note: this is actually a rollup address; we expect failed ics20 withdrawals to be - // returned to the rollup. - // this is only ok for now because addresses on the sequencer and the rollup are both 20 - // bytes, but this won't work otherwise. - return_address: bridge_address, - amount: event - .amount - .as_u128() - .checked_div(asset_withdrawal_divisor) - .ok_or(eyre::eyre!( - "failed to divide amount by asset withdrawal multiplier" - ))?, - memo: serde_json::to_string(&memo).wrap_err("failed to serialize memo to json")?, - fee_asset, - // note: this refers to the timeout on the destination chain, which we are unaware of. - // thus, we set it to the maximum possible value. 
- timeout_height: ibc_types::core::client::Height::new(u64::MAX, u64::MAX) - .wrap_err("failed to generate timeout height")?, - timeout_time: packet_timeout_time() - .wrap_err("failed to calculate packet timeout time")?, - source_channel, - bridge_address: Some(bridge_address), - }; - Ok(Action::Ics20Withdrawal(action)) - } -} - -#[derive(Debug)] -struct LogToSequencerWithdrawalAction { - log: Log, - fee_asset: asset::Denom, - asset_withdrawal_divisor: u128, - bridge_address: Address, -} - -impl LogToSequencerWithdrawalAction { - fn try_into_action(self) -> eyre::Result { - let Self { - log, - fee_asset, - asset_withdrawal_divisor, - bridge_address, - } = self; - let (event, block_number, transaction_hash) = - action_inputs_from_log::(log) - .wrap_err("failed getting required data from log")?; - - let memo = bridge::UnlockMemo { - block_number, - transaction_hash, - }; - - let action = BridgeUnlockAction { - to: event - .destination_chain_address - .parse() - .wrap_err("failed to parse destination chain address")?, - amount: event - .amount - .as_u128() - .checked_div(asset_withdrawal_divisor) - .ok_or_eyre("failed to divide amount by asset withdrawal multiplier")?, - memo: serde_json::to_string(&memo).wrap_err("failed to serialize memo to json")?, - fee_asset, - bridge_address: Some(bridge_address), - }; - - Ok(Action::BridgeUnlock(action)) - } -} - -async fn get_log( - provider: Arc>, - contract_address: ethers::types::Address, - block_hash: H256, -) -> eyre::Result> { - let event_sig = T::signature(); - let filter = Filter::new() - .at_block_hash(block_hash) - .address(contract_address) - .topic0(event_sig); - - provider - .get_logs(&filter) - .await - .wrap_err("failed to get sequencer withdrawal events") -} From bb2f96c01607a30806cb2195b6a7feb9ca325826 Mon Sep 17 00:00:00 2001 From: Itamar Reif <9663129+itamarreif@users.noreply.github.com> Date: Mon, 15 Jul 2024 08:50:18 -0400 Subject: [PATCH 18/24] fix(bridge-withdrawer)!: fix nonce handling (#1215) ## Summary Adds a check that waits for an empty mempool on startup and then uses the pending nonce instead of latest finalized nonce for transaction submission. ## Background Before these changes we used the latest nonce, which could lead to data races if there are batches awaiting inclusion in the mempool. See https://github.com/astriaorg/astria/issues/1228 for more context. 
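At its core, the new startup step polls until the sequencer's app-side pending nonce and cometBFT's latest nonce agree for the withdrawer address, which means no withdrawer transactions remain in the mempool. The sketch below illustrates that idea only and is not the code added in this patch; `fetch_pending_nonce` and `fetch_latest_nonce` are hypothetical stand-ins for the sequencer gRPC and cometBFT queries.

```rust
use std::time::Duration;

use astria_eyre::eyre;

// Hypothetical stand-ins for the sequencer gRPC pending-nonce query and the
// cometBFT latest-nonce query; they are not part of this patch.
async fn fetch_pending_nonce(_withdrawer: [u8; 20]) -> eyre::Result<u32> {
    unimplemented!()
}
async fn fetch_latest_nonce(_withdrawer: [u8; 20]) -> eyre::Result<u32> {
    unimplemented!()
}

/// Polls until the pending nonce equals the latest nonce for `withdrawer`, i.e.
/// until none of its transactions are left in the mempool, backing off
/// exponentially (capped at 60 seconds) between attempts.
async fn wait_until_mempool_is_empty(withdrawer: [u8; 20]) -> eyre::Result<u32> {
    let mut delay = Duration::from_secs(1);
    loop {
        let pending = fetch_pending_nonce(withdrawer).await?;
        let latest = fetch_latest_nonce(withdrawer).await?;
        if pending == latest {
            // No unexecuted withdrawer transactions remain, so batches can be
            // submitted starting from `pending` without racing in-flight ones.
            return Ok(pending);
        }
        tokio::time::sleep(delay).await;
        delay = (delay * 2).min(Duration::from_secs(60));
    }
}
```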
## Changes - Add `ASTRIA_BRIDGE_WITHDRAWER_SEQUENCER_GRPC_ENDPOINT` config - Wait for mempool to be empty as part of startup - Submitter connects to the grpc service as part of its startup - Submitter uses pending nonces for batch submission instead of latest nonce ## Testing Removed most testing; reworked in https://github.com/astriaorg/astria/issues/1232 ## Metrics - Punted: https://github.com/astriaorg/astria/issues/1272 ## Breaking Changelist - Adds a required env var `ASTRIA_BRIDGE_WITHDRAWER_SEQUENCER_GRPC_ENDPOINT` ## Related Issues closes https://github.com/astriaorg/astria/issues/1228 --------- Co-authored-by: Richard Janis Goldschmidt --- .github/workflows/docker-build.yml | 2 +- Cargo.lock | 1 + charts/evm-bridge-withdrawer/Chart.yaml | 2 +- .../templates/configmaps.yaml | 1 + charts/evm-bridge-withdrawer/values.yaml | 1 + crates/astria-bridge-withdrawer/Cargo.toml | 7 +- .../local.env.example | 4 + .../src/bridge_withdrawer/mod.rs | 16 +- .../src/bridge_withdrawer/startup.rs | 219 +++++++- .../bridge_withdrawer/submitter/builder.rs | 11 +- .../src/bridge_withdrawer/submitter/mod.rs | 131 +++-- .../src/bridge_withdrawer/submitter/tests.rs | 502 ------------------ crates/astria-bridge-withdrawer/src/config.rs | 2 + crates/astria-bridge-withdrawer/src/lib.rs | 2 - .../astria-bridge-withdrawer/src/metrics.rs | 39 -- dev/values/withdrawer/values.yaml | 1 + 16 files changed, 319 insertions(+), 622 deletions(-) delete mode 100644 crates/astria-bridge-withdrawer/src/bridge_withdrawer/submitter/tests.rs diff --git a/.github/workflows/docker-build.yml b/.github/workflows/docker-build.yml index 147f455e64..886c629b1d 100644 --- a/.github/workflows/docker-build.yml +++ b/.github/workflows/docker-build.yml @@ -135,7 +135,7 @@ jobs: username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} - name: Setup Smoke Test Environment - timeout-minutes: 5 + timeout-minutes: 10 run: | TAG=sha-$(git rev-parse --short HEAD) just deploy cluster diff --git a/Cargo.lock b/Cargo.lock index 71b83ee9c2..d1e3fa6a17 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -528,6 +528,7 @@ dependencies = [ "tendermint-rpc", "tokio", "tokio-util 0.7.10", + "tonic 0.10.2", "tracing", "tryhard", "wiremock", diff --git a/charts/evm-bridge-withdrawer/Chart.yaml b/charts/evm-bridge-withdrawer/Chart.yaml index 10054ccec2..2cb93045bf 100644 --- a/charts/evm-bridge-withdrawer/Chart.yaml +++ b/charts/evm-bridge-withdrawer/Chart.yaml @@ -15,7 +15,7 @@ type: application # This is the chart version. This version number should be incremented each time you make changes # to the chart and its templates, including the app version. # Versions are expected to follow Semantic Versioning (https://semver.org/) -version: 0.0.2 +version: 0.0.3 # This is the version number of the application being deployed. This version number should be # incremented each time you make changes to the application. 
Versions are not expected to diff --git a/charts/evm-bridge-withdrawer/templates/configmaps.yaml b/charts/evm-bridge-withdrawer/templates/configmaps.yaml index d0e7c8c0e2..ff5af74c5c 100644 --- a/charts/evm-bridge-withdrawer/templates/configmaps.yaml +++ b/charts/evm-bridge-withdrawer/templates/configmaps.yaml @@ -31,6 +31,7 @@ data: {{- if not .Values.global.dev }} ASTRIA_BRIDGE_WITHDRAWER_MIN_EXPECTED_FEE_ASSET_BALANCE: "{{ .Values.config.minExpectedFeeAssetBalance }}" {{- else }} + ASTRIA_BRIDGE_WITHDRAWER_SEQUENCER_GRPC_ENDPOINT: "{{ .Values.config.sequencerGrpcEndpoint }}" {{- end }} --- {{- if not .Values.secretProvider.enabled }} diff --git a/charts/evm-bridge-withdrawer/values.yaml b/charts/evm-bridge-withdrawer/values.yaml index 92113eaee8..d774cd3e85 100644 --- a/charts/evm-bridge-withdrawer/values.yaml +++ b/charts/evm-bridge-withdrawer/values.yaml @@ -15,6 +15,7 @@ images: devTag: latest config: + sequencerGrpcEndpoint: "" sequencerCometbftEndpoint: "" sequencerChainId: "" sequencerAddressPrefix: "astria" diff --git a/crates/astria-bridge-withdrawer/Cargo.toml b/crates/astria-bridge-withdrawer/Cargo.toml index e09b8d7555..2dc2fbab61 100644 --- a/crates/astria-bridge-withdrawer/Cargo.toml +++ b/crates/astria-bridge-withdrawer/Cargo.toml @@ -33,10 +33,15 @@ tracing = { workspace = true } tryhard = { workspace = true } tokio = { workspace = true, features = ["macros", "rt-multi-thread", "signal"] } tokio-util = { workspace = true } +tonic = { workspace = true } astria-bridge-contracts = { path = "../astria-bridge-contracts" } astria-build-info = { path = "../astria-build-info", features = ["runtime"] } -astria-core = { path = "../astria-core", features = ["serde", "server"] } +astria-core = { path = "../astria-core", features = [ + "serde", + "server", + "client", +] } astria-eyre = { path = "../astria-eyre" } config = { package = "astria-config", path = "../astria-config" } sequencer-client = { package = "astria-sequencer-client", path = "../astria-sequencer-client", features = [ diff --git a/crates/astria-bridge-withdrawer/local.env.example b/crates/astria-bridge-withdrawer/local.env.example index e9f0e77145..4eef267954 100644 --- a/crates/astria-bridge-withdrawer/local.env.example +++ b/crates/astria-bridge-withdrawer/local.env.example @@ -19,6 +19,10 @@ ASTRIA_BRIDGE_WITHDRAWER_PRETTY_PRINT=false # `ASTRIA_BRIDGE_WITHDRAWER_PRETTY_PRINT` is set to `true`. NO_COLOR= +# The sequencer application gRPC service used for fetching the pending nonce. +# 127.0.0.1:8080 is the default socket address for its gRPC server. +ASTRIA_BRIDGE_WITHDRAWER_SEQUENCER_GRPC_ENDPOINT="http://127.0.0.1:8080" + # Address of cometbft/tendermint to request new block heights. # 127.0.0.1:26657 is the default socket address at which cometbft # serves RPCs. diff --git a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/mod.rs b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/mod.rs index 6f58679c08..77657953b7 100644 --- a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/mod.rs +++ b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/mod.rs @@ -76,6 +76,7 @@ impl BridgeWithdrawer { ethereum_rpc_endpoint, rollup_asset_denomination, sequencer_bridge_address, + sequencer_grpc_endpoint, .. 
} = cfg; @@ -92,6 +93,7 @@ impl BridgeWithdrawer { sequencer_chain_id, sequencer_cometbft_endpoint: sequencer_cometbft_endpoint.clone(), sequencer_bridge_address, + sequencer_grpc_endpoint: sequencer_grpc_endpoint.clone(), expected_fee_asset: fee_asset_denomination, } .build() @@ -104,6 +106,7 @@ impl BridgeWithdrawer { shutdown_token: shutdown_handle.token(), startup_handle: startup_handle.clone(), sequencer_cometbft_endpoint, + sequencer_grpc_endpoint, sequencer_key_path, sequencer_address_prefix: sequencer_address_prefix.clone(), state: state.clone(), @@ -404,16 +407,3 @@ pub(crate) fn flatten_result(res: Result, JoinError>) -> eyre Err(err) => Err(err).wrap_err("task panicked"), } } - -#[cfg(test)] -/// Constructs an [`Address`] prefixed by `"astria"`. -#[cfg(test)] -pub(crate) fn astria_address( - array: [u8; astria_core::primitive::v1::ADDRESS_LEN], -) -> astria_core::primitive::v1::Address { - astria_core::primitive::v1::Address::builder() - .array(array) - .prefix("astria") - .try_build() - .unwrap() -} diff --git a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/startup.rs b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/startup.rs index 2959ccd324..e3a2e8c5a0 100644 --- a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/startup.rs +++ b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/startup.rs @@ -8,6 +8,13 @@ use astria_core::{ self, Ics20WithdrawalFromRollupMemo, }, + generated::sequencerblock::v1alpha1::{ + sequencer_service_client::{ + self, + SequencerServiceClient, + }, + GetPendingNonceRequest, + }, primitive::v1::asset, protocol::{ asset::v1alpha1::AllowedFeeAssetsResponse, @@ -38,10 +45,15 @@ use tendermint_rpc::{ }; use tokio::sync::watch; use tokio_util::sync::CancellationToken; +use tonic::transport::Channel; use tracing::{ + debug, info, + info_span, instrument, warn, + Instrument as _, + Span, }; use tryhard::backoff_strategies::ExponentialBackoff; @@ -55,6 +67,7 @@ pub(super) struct Builder { pub(super) state: Arc, pub(super) sequencer_chain_id: String, pub(super) sequencer_cometbft_endpoint: String, + pub(super) sequencer_grpc_endpoint: String, pub(super) sequencer_bridge_address: Address, pub(super) expected_fee_asset: asset::Denom, } @@ -67,6 +80,7 @@ impl Builder { sequencer_chain_id, sequencer_cometbft_endpoint, sequencer_bridge_address, + sequencer_grpc_endpoint, expected_fee_asset, } = self; @@ -79,6 +93,7 @@ impl Builder { state, sequencer_chain_id, sequencer_cometbft_client, + sequencer_grpc_endpoint, sequencer_bridge_address, expected_fee_asset, }) @@ -123,6 +138,7 @@ pub(super) struct Startup { state: Arc, sequencer_chain_id: String, sequencer_cometbft_client: sequencer_client::HttpClient, + sequencer_grpc_endpoint: String, sequencer_bridge_address: Address, expected_fee_asset: asset::Denom, } @@ -136,6 +152,16 @@ impl Startup { self.confirm_sequencer_config() .await .wrap_err("failed to confirm sequencer config")?; + + wait_for_empty_mempool( + self.sequencer_cometbft_client.clone(), + self.sequencer_grpc_endpoint.clone(), + self.sequencer_bridge_address, + self.state.clone(), + ) + .await + .wrap_err("failed to wait for mempool to be empty")?; + let starting_rollup_height = self .get_starting_rollup_height() .await @@ -167,13 +193,13 @@ impl Startup { /// /// - `self.sequencer_chain_id` matches the value returned from the sequencer node's genesis /// - `self.fee_asset_id` is a valid fee asset on the sequencer node - /// - `self.sequencer_key.address` has a sufficient balance of `self.fee_asset_id` + /// - 
`self.sequencer_bridge_address` has a sufficient balance of `self.fee_asset_id` /// /// # Errors /// /// - `self.chain_id` does not match the value returned from the sequencer node /// - `self.fee_asset_id` is not a valid fee asset on the sequencer node - /// - `self.sequencer_key.address` does not have a sufficient balance of `self.fee_asset_id`. + /// - `self.sequencer_bridge_address` does not have a sufficient balance of `self.fee_asset`. async fn confirm_sequencer_config(&self) -> eyre::Result<()> { // confirm the sequencer chain id let actual_chain_id = @@ -309,6 +335,87 @@ impl Startup { } } +async fn ensure_mempool_empty( + cometbft_client: sequencer_client::HttpClient, + sequencer_client: sequencer_service_client::SequencerServiceClient, + address: Address, + state: Arc, +) -> eyre::Result<()> { + let pending = get_pending_nonce(sequencer_client, state.clone(), address) + .await + .wrap_err("failed to get pending nonce")?; + let latest = get_latest_nonce(cometbft_client, state, address) + .await + .wrap_err("failed to get latest nonce")?; + ensure!(pending == latest, "mempool is not yet emoty"); + Ok(()) +} + +/// Waits for the mempool to be empty of transactions by the given address (i.e. the bridge +/// withdrawer's). This is used to make sure that batches are submitted under the correct nonce. +/// +/// This function checks that the mempool is empty by querying: +/// 1. the pending nonce from the Sequencer's app-side mempool +/// 2. the latest nonce from cometBFT's mempool. +/// If the pending nonce is equal to the latest nonce, then the mempool has no unexecuted +/// transactions by the address. +/// +/// This ensures that future submitted batches will continue to maintain the one-to-one +/// relationship between rollup block and withdrawer nonce that is needed to simplify the sync +/// process. +/// +/// This function runs the above check with an exponential backoff until the nonces match and the +/// mempool can be considered empty. The backoff starts at 1 second and is capped at 60 seconds. +/// +/// # Errors +/// +/// 1. Failing to get the pending nonce from the Sequencer's app-side mempool. +/// 2. Failing to get the latest nonce from cometBFT's mempool. +/// 3. The pending nonce from the Sequencer's app-side mempool does not match the latest nonce from +/// cometBFT's mempool after the exponential backoff times out. +async fn wait_for_empty_mempool( + cometbft_client: sequencer_client::HttpClient, + sequencer_grpc_endpoint: String, + address: Address, + state: Arc, +) -> eyre::Result<()> { + let retry_config = tryhard::RetryFutureConfig::new(u32::MAX) + .exponential_backoff(Duration::from_secs(1)) + .max_delay(Duration::from_secs(60)) + .on_retry( + |attempt: u32, next_delay: Option, error: &eyre::Report| { + let wait_duration = next_delay + .map(humantime::format_duration) + .map(tracing::field::display); + warn!( + error = error.as_ref() as &dyn std::error::Error, + attempt, + wait_duration, + "failed getting pending nonce from sequencing; retrying after backoff", + ); + + // TODO(https://github.com/astriaorg/astria/issues/1272): update metrics here? 
+ futures::future::ready(()) + }, + ); + let sequencer_client = SequencerServiceClient::connect(sequencer_grpc_endpoint.clone()) + .await + .wrap_err_with(|| { + format!("failed to connect to sequencer at `{sequencer_grpc_endpoint}`") + })?; + tryhard::retry_fn(|| { + let sequencer_client = sequencer_client.clone(); + let cometbft_client = cometbft_client.clone(); + let state = state.clone(); + ensure_mempool_empty(cometbft_client, sequencer_client, address, state) + }) + .with_config(retry_config) + .await + .wrap_err("failed to wait for empty mempool")?; + + Ok(()) +} + /// Extracts the rollup height from the last transaction by the bridge account on the sequencer. /// Since all the withdrawals from a rollup block are batched into a single sequencer transaction, /// he rollup height can be extracted from the memo of any withdrawal action in the batch. @@ -364,7 +471,7 @@ async fn get_bridge_account_last_transaction_hash( address: Address, ) -> eyre::Result { let res = tryhard::retry_fn(|| client.get_bridge_account_last_transaction_hash(address)) - .with_config(make_sequencer_retry_config( + .with_config(make_cometbft_ext_retry_config( "attempt to fetch last bridge account's transaction hash from Sequencer; retrying \ after backoff", )) @@ -420,7 +527,7 @@ async fn get_allowed_fee_asset_ids( state: Arc, ) -> eyre::Result { let res = tryhard::retry_fn(|| client.get_allowed_fee_assets()) - .with_config(make_sequencer_retry_config( + .with_config(make_cometbft_ext_retry_config( "attempt to get allowed fee assets from Sequencer; retrying after backoff", )) .await @@ -431,6 +538,83 @@ async fn get_allowed_fee_asset_ids( res } +#[instrument(skip_all)] +async fn get_latest_nonce( + client: sequencer_client::HttpClient, + state: Arc, + address: Address, +) -> eyre::Result { + debug!("fetching latest nonce from sequencer"); + let span = Span::current(); + let retry_config = tryhard::RetryFutureConfig::new(1024) + .exponential_backoff(Duration::from_millis(200)) + .max_delay(Duration::from_secs(60)) + .on_retry( + |attempt, + next_delay: Option, + err: &sequencer_client::extension_trait::Error| { + let state = Arc::clone(&state); + state.set_sequencer_connected(false); + + let wait_duration = next_delay + .map(humantime::format_duration) + .map(tracing::field::display); + warn!( + parent: span.clone(), + error = err as &dyn std::error::Error, + attempt, + wait_duration, + "failed getting latest nonce from sequencer; retrying after backoff", + ); + async move {} + }, + ); + let res = tryhard::retry_fn(|| { + let client = client.clone(); + let span = info_span!(parent: span.clone(), "attempt get nonce"); + async move { client.get_latest_nonce(address).await.map(|rsp| rsp.nonce) }.instrument(span) + }) + .with_config(retry_config) + .await + .wrap_err("failed getting latest nonce from sequencer after 1024 attempts"); + + state.set_sequencer_connected(res.is_ok()); + + res +} + +// TODO(https://github.com/astriaorg/astria/issues/1274): deduplicate here and in crate::bridge_withdrawer::submitter +#[instrument(skip_all)] +async fn get_pending_nonce( + client: sequencer_service_client::SequencerServiceClient, + state: Arc, + address: Address, +) -> eyre::Result { + let span = Span::current(); + let res = tryhard::retry_fn(|| { + let mut client = client.clone(); + let span = info_span!(parent: span.clone(), "attempt get pending nonce"); + async move { + client + .get_pending_nonce(GetPendingNonceRequest { + address: Some(address.into_raw()), + }) + .await + .map(|rsp| rsp.into_inner().inner) + } + 
.instrument(span) + }) + .with_config(make_sequencer_grpc_retry_config( + "attempt to get pending nonce from sequencer; retrying after backoff", + )) + .await + .wrap_err("failed getting pending nonce from sequencing after 1024 attempts"); + + state.set_sequencer_connected(res.is_ok()); + + res +} + fn make_cometbft_retry_config( retry_message: &'static str, ) -> tryhard::RetryFutureConfig< @@ -456,7 +640,7 @@ fn make_cometbft_retry_config( ) } -fn make_sequencer_retry_config( +fn make_cometbft_ext_retry_config( retry_message: &'static str, ) -> tryhard::RetryFutureConfig< ExponentialBackoff, @@ -486,3 +670,28 @@ fn make_sequencer_retry_config( }, ) } + +fn make_sequencer_grpc_retry_config( + retry_message: &'static str, +) -> tryhard::RetryFutureConfig< + ExponentialBackoff, + impl Fn(u32, Option, &tonic::Status) -> futures::future::Ready<()>, +> { + tryhard::RetryFutureConfig::new(u32::MAX) + .exponential_backoff(Duration::from_millis(100)) + .max_delay(Duration::from_secs(20)) + .on_retry( + move |attempt: u32, next_delay: Option, error: &tonic::Status| { + let wait_duration = next_delay + .map(humantime::format_duration) + .map(tracing::field::display); + warn!( + attempt, + wait_duration, + error = error as &dyn std::error::Error, + retry_message, + ); + futures::future::ready(()) + }, + ) +} diff --git a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/submitter/builder.rs b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/submitter/builder.rs index 8e61704e53..3104728058 100644 --- a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/submitter/builder.rs +++ b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/submitter/builder.rs @@ -1,11 +1,13 @@ use std::sync::Arc; +use astria_core::generated::sequencerblock::v1alpha1::sequencer_service_client::SequencerServiceClient; use astria_eyre::eyre::{ self, Context as _, }; use tokio::sync::mpsc; use tokio_util::sync::CancellationToken; +use tonic::transport::Endpoint; use tracing::info; use super::state::State; @@ -34,7 +36,7 @@ impl Handle { self.batches_tx .send(batch) .await - .wrap_err("failed to send batch") + .wrap_err("failed send batch") } } @@ -44,6 +46,7 @@ pub(crate) struct Builder { pub(crate) sequencer_key_path: String, pub(crate) sequencer_address_prefix: String, pub(crate) sequencer_cometbft_endpoint: String, + pub(crate) sequencer_grpc_endpoint: String, pub(crate) state: Arc, pub(crate) metrics: &'static Metrics, } @@ -57,6 +60,7 @@ impl Builder { sequencer_key_path, sequencer_address_prefix, sequencer_cometbft_endpoint, + sequencer_grpc_endpoint, state, metrics, } = self; @@ -72,6 +76,10 @@ impl Builder { sequencer_client::HttpClient::new(&*sequencer_cometbft_endpoint) .wrap_err("failed constructing cometbft http client")?; + let endpoint = Endpoint::new(sequencer_grpc_endpoint.clone()) + .wrap_err_with(|| format!("invalid grpc endpoint: {sequencer_grpc_endpoint}"))?; + let sequencer_grpc_client = SequencerServiceClient::new(endpoint.connect_lazy()); + let (batches_tx, batches_rx) = tokio::sync::mpsc::channel(BATCH_QUEUE_SIZE); let handle = Handle::new(batches_tx); @@ -82,6 +90,7 @@ impl Builder { state, batches_rx, sequencer_cometbft_client, + sequencer_grpc_client, signer, metrics, }, diff --git a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/submitter/mod.rs b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/submitter/mod.rs index b869d7c7f3..bb4d62fc27 100644 --- a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/submitter/mod.rs +++ 
b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/submitter/mod.rs @@ -3,10 +3,19 @@ use std::{ time::Duration, }; -use astria_core::protocol::transaction::v1alpha1::{ - Action, - TransactionParams, - UnsignedTransaction, +use astria_core::{ + generated::sequencerblock::v1alpha1::{ + sequencer_service_client::{ + self, + SequencerServiceClient, + }, + GetPendingNonceRequest, + }, + protocol::transaction::v1alpha1::{ + Action, + TransactionParams, + UnsignedTransaction, + }, }; use astria_eyre::eyre::{ self, @@ -26,9 +35,9 @@ use state::State; use tokio::{ select, sync::mpsc, - time::Instant, }; use tokio_util::sync::CancellationToken; +use tonic::transport::Channel; use tracing::{ debug, error, @@ -49,8 +58,6 @@ use crate::metrics::Metrics; mod builder; pub(crate) mod signer; -#[cfg(test)] -mod tests; pub(super) struct Submitter { shutdown_token: CancellationToken, @@ -58,6 +65,7 @@ pub(super) struct Submitter { state: Arc, batches_rx: mpsc::Receiver, sequencer_cometbft_client: sequencer_client::HttpClient, + sequencer_grpc_client: SequencerServiceClient, signer: SequencerKey, metrics: &'static Metrics, } @@ -72,10 +80,10 @@ impl Submitter { startup_info = self.startup_handle.get_info() => { let startup::Info { chain_id, .. } = startup_info.wrap_err("submitter failed to get startup info")?; - self.state.set_submitter_ready(); chain_id } }; + self.state.set_submitter_ready(); let reason = loop { select!( @@ -94,6 +102,7 @@ impl Submitter { // if batch submission fails, halt the submitter if let Err(e) = process_batch( self.sequencer_cometbft_client.clone(), + self.sequencer_grpc_client.clone(), &self.signer, self.state.clone(), &sequencer_chain_id, @@ -124,8 +133,12 @@ impl Submitter { } } +// TODO(https://github.com/astriaorg/astria/issues/1273): +// refactor this allow +#[allow(clippy::too_many_arguments)] async fn process_batch( sequencer_cometbft_client: sequencer_client::HttpClient, + sequencer_grpc_client: sequencer_service_client::SequencerServiceClient, sequencer_key: &SequencerKey, state: Arc, sequencer_chain_id: &str, @@ -134,11 +147,10 @@ async fn process_batch( metrics: &'static Metrics, ) -> eyre::Result<()> { // get nonce and make unsigned transaction - let nonce = get_latest_nonce( - sequencer_cometbft_client.clone(), + let nonce = get_pending_nonce( + sequencer_grpc_client.clone(), *sequencer_key.address(), state.clone(), - metrics, ) .await .wrap_err("failed to get nonce from sequencer")?; @@ -200,16 +212,26 @@ async fn process_batch( } } -async fn get_latest_nonce( +/// Submits a `SignedTransaction` to the sequencer with an exponential backoff +#[instrument( + name = "submit_tx", + skip_all, + fields( + nonce = tx.nonce(), + transaction.hash = %telemetry::display::hex(&tx.sha256_of_proto_encoding()), + ) +)] +async fn submit_tx( client: sequencer_client::HttpClient, - address: Address, + tx: SignedTransaction, state: Arc, metrics: &'static Metrics, -) -> eyre::Result { - debug!("fetching latest nonce from sequencer"); - metrics.increment_nonce_fetch_count(); +) -> eyre::Result { + let nonce = tx.nonce(); + metrics.set_current_nonce(nonce); + let start = std::time::Instant::now(); + debug!("submitting signed transaction to sequencer"); let span = Span::current(); - let start = Instant::now(); let retry_config = tryhard::RetryFutureConfig::new(1024) .exponential_backoff(Duration::from_millis(200)) .max_delay(Duration::from_secs(60)) @@ -217,7 +239,7 @@ async fn get_latest_nonce( |attempt, next_delay: Option, err: &sequencer_client::extension_trait::Error| { - 
metrics.increment_nonce_fetch_failure_count(); + metrics.increment_sequencer_submission_failure_count(); let state = Arc::clone(&state); state.set_sequencer_connected(false); @@ -227,59 +249,47 @@ async fn get_latest_nonce( .map(tracing::field::display); warn!( parent: span.clone(), - error = err as &dyn std::error::Error, attempt, wait_duration, - "failed getting latest nonce from sequencer; retrying after backoff", + error = err as &dyn std::error::Error, + "failed sending transaction to sequencer; retrying after backoff", ); async move {} }, ); let res = tryhard::retry_fn(|| { let client = client.clone(); - let span = info_span!(parent: span.clone(), "attempt get nonce"); - async move { client.get_latest_nonce(address).await.map(|rsp| rsp.nonce) }.instrument(span) + let tx = tx.clone(); + let span = info_span!(parent: span.clone(), "attempt send"); + async move { client.submit_transaction_commit(tx).await }.instrument(span) }) .with_config(retry_config) .await - .wrap_err("failed getting latest nonce from sequencer after 1024 attempts"); + .wrap_err("failed sending transaction after 1024 attempts"); state.set_sequencer_connected(res.is_ok()); - metrics.record_nonce_fetch_latency(start.elapsed()); + metrics.record_sequencer_submission_latency(start.elapsed()); res } -/// Submits a `SignedTransaction` to the sequencer with an exponential backoff -#[instrument( - name = "submit_tx", - skip_all, - fields( - nonce = tx.nonce(), - transaction.hash = %telemetry::display::hex(&tx.sha256_of_proto_encoding()), - ) -)] -async fn submit_tx( - client: sequencer_client::HttpClient, - tx: SignedTransaction, +// TODO(https://github.com/astriaorg/astria/issues/1274): deduplicate here and in crate::bridge_withdrawer::startup +async fn get_pending_nonce( + client: sequencer_service_client::SequencerServiceClient, + address: Address, state: Arc, - metrics: &'static Metrics, -) -> eyre::Result { - let nonce = tx.nonce(); - metrics.set_current_nonce(nonce); - let start = std::time::Instant::now(); - debug!("submitting signed transaction to sequencer"); + // metrics: &'static Metrics, +) -> eyre::Result { + debug!("fetching pending nonce from sequencing"); + // TODO(https://github.com/astriaorg/astria/issues/1272): add metric and start time let span = Span::current(); let retry_config = tryhard::RetryFutureConfig::new(1024) .exponential_backoff(Duration::from_millis(200)) .max_delay(Duration::from_secs(60)) .on_retry( - |attempt, - next_delay: Option, - err: &sequencer_client::extension_trait::Error| { - metrics.increment_sequencer_submission_failure_count(); - + |attempt, next_delay: Option, err: &tonic::Status| { + // TODO(https://github.com/astriaorg/astria/issues/1272): update metrics here let state = Arc::clone(&state); state.set_sequencer_connected(false); @@ -287,28 +297,35 @@ async fn submit_tx( .map(humantime::format_duration) .map(tracing::field::display); warn!( - parent: span.clone(), + error = err as &dyn std::error::Error, attempt, wait_duration, - error = err as &dyn std::error::Error, - "failed sending transaction to sequencer; retrying after backoff", + "failed getting pending nonce from sequencing; retrying after backoff", ); - async move {} + futures::future::ready(()) }, ); + let res = tryhard::retry_fn(|| { - let client = client.clone(); - let tx = tx.clone(); - let span = info_span!(parent: span.clone(), "attempt send"); - async move { client.submit_transaction_commit(tx).await }.instrument(span) + let mut client = client.clone(); + let span = info_span!(parent: span.clone(), "attempt 
get pending nonce"); + async move { + client + .get_pending_nonce(GetPendingNonceRequest { + address: Some(address.into_raw()), + }) + .await + .map(|rsp| rsp.into_inner().inner) + } + .instrument(span) }) .with_config(retry_config) .await - .wrap_err("failed sending transaction after 1024 attempts"); + .wrap_err("failed getting pending nonce from sequencing after 1024 attempts"); state.set_sequencer_connected(res.is_ok()); - metrics.record_sequencer_submission_latency(start.elapsed()); + // TODO(https://github.com/astriaorg/astria/issues/1272): record latency metric res } diff --git a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/submitter/tests.rs b/crates/astria-bridge-withdrawer/src/bridge_withdrawer/submitter/tests.rs deleted file mode 100644 index 1c4e693ed5..0000000000 --- a/crates/astria-bridge-withdrawer/src/bridge_withdrawer/submitter/tests.rs +++ /dev/null @@ -1,502 +0,0 @@ -use std::{ - io::Write as _, - sync::Arc, - time::Duration, - vec, -}; - -use astria_core::{ - bridge::{ - self, - Ics20WithdrawalFromRollupMemo, - }, - generated::protocol::account::v1alpha1::NonceResponse, - primitive::v1::asset, - protocol::transaction::v1alpha1::{ - action::{ - BridgeUnlockAction, - Ics20Withdrawal, - }, - Action, - }, -}; -use astria_eyre::eyre::{ - self, -}; -use ibc_types::core::client::Height as IbcHeight; -use once_cell::sync::Lazy; -use prost::Message as _; -use sequencer_client::{ - tendermint_rpc::{ - endpoint::broadcast::tx_commit, - response, - }, - SignedTransaction, -}; -use serde_json::json; -use tempfile::NamedTempFile; -use tendermint::{ - abci::{ - response::CheckTx, - types::ExecTxResult, - }, - block::Height, -}; -use tendermint_rpc::{ - endpoint::broadcast::tx_sync, - request, -}; -use tokio::task::JoinHandle; -use tokio_util::sync::CancellationToken; -use tracing::debug; -use wiremock::{ - matchers::{ - body_partial_json, - body_string_contains, - }, - Mock, - MockGuard, - MockServer, - Request, - ResponseTemplate, -}; - -use super::Submitter; -use crate::{ - bridge_withdrawer::{ - batch::Batch, - startup, - state, - submitter, - }, - metrics::Metrics, -}; - -const SEQUENCER_CHAIN_ID: &str = "test_sequencer-1000"; -const DEFAULT_LAST_ROLLUP_HEIGHT: u64 = 1; -const DEFAULT_IBC_DENOM: &str = "transfer/channel-0/utia"; - -fn default_native_asset() -> asset::Denom { - "nria".parse().unwrap() -} - -static TELEMETRY: Lazy<()> = Lazy::new(|| { - if std::env::var_os("TEST_LOG").is_some() { - let filter_directives = std::env::var("RUST_LOG").unwrap_or_else(|_| "info".into()); - telemetry::configure() - .no_otel() - .stdout_writer(std::io::stdout) - .set_pretty_print(true) - .filter_directives(&filter_directives) - .try_init() - .unwrap(); - } else { - telemetry::configure() - .no_otel() - .stdout_writer(std::io::sink) - .try_init() - .unwrap(); - } -}); - -struct TestSubmitter { - submitter: Option, - submitter_handle: submitter::Handle, - cometbft_mock: MockServer, - submitter_task_handle: Option>>, -} - -impl TestSubmitter { - async fn setup() -> Self { - Lazy::force(&TELEMETRY); - - // set up external resources - let shutdown_token = CancellationToken::new(); - - // sequencer signer key - let keyfile = NamedTempFile::new().unwrap(); - (&keyfile) - .write_all( - "2bd806c97f0e00af1a1fc3328fa763a9269723c8db8fac4f93af71db186d6e90".as_bytes(), - ) - .unwrap(); - let sequencer_key_path = keyfile.path().to_str().unwrap().to_string(); - - // cometbft - let cometbft_mock = MockServer::start().await; - let sequencer_cometbft_endpoint = format!("http://{}", 
cometbft_mock.address()); - - let state = Arc::new(state::State::new()); - let startup_handle = startup::InfoHandle::new(state.subscribe()); - state.set_watcher_ready(); - - let metrics = Box::leak(Box::new(Metrics::new())); - - let (submitter, submitter_handle) = submitter::Builder { - shutdown_token: shutdown_token.clone(), - startup_handle, - sequencer_key_path, - sequencer_address_prefix: "astria".into(), - sequencer_cometbft_endpoint, - state, - metrics, - } - .build() - .unwrap(); - - Self { - submitter: Some(submitter), - submitter_task_handle: None, - submitter_handle, - cometbft_mock, - } - } - - async fn startup(&mut self) { - let submitter = self.submitter.take().unwrap(); - - let mut state = submitter.state.subscribe(); - - submitter.state.set_startup_info(startup::Info { - fee_asset: "fee-asset".parse::().unwrap(), - starting_rollup_height: 1, - chain_id: SEQUENCER_CHAIN_ID.to_string(), - }); - - self.submitter_task_handle = Some(tokio::spawn(submitter.run())); - - // wait for the submitter to be ready - state - .wait_for(state::StateSnapshot::is_ready) - .await - .unwrap(); - } - - async fn spawn() -> Self { - let mut submitter = Self::setup().await; - submitter.startup().await; - submitter - } -} - -fn make_ics20_withdrawal_action() -> Action { - let denom = DEFAULT_IBC_DENOM.parse::().unwrap(); - let destination_chain_address = "address".to_string(); - let inner = Ics20Withdrawal { - denom: denom.clone(), - destination_chain_address, - return_address: crate::astria_address([0u8; 20]), - amount: 99, - memo: serde_json::to_string(&Ics20WithdrawalFromRollupMemo { - memo: "hello".to_string(), - block_number: DEFAULT_LAST_ROLLUP_HEIGHT, - rollup_return_address: ethers::types::Address::from([0u8; 20]).to_string(), - transaction_hash: [2u8; 32], - }) - .unwrap(), - fee_asset: denom, - timeout_height: IbcHeight::new(u64::MAX, u64::MAX).unwrap(), - timeout_time: 0, // zero this for testing - source_channel: "channel-0".parse().unwrap(), - bridge_address: None, - }; - - Action::Ics20Withdrawal(inner) -} - -fn make_bridge_unlock_action() -> Action { - let denom = default_native_asset(); - let inner = BridgeUnlockAction { - to: crate::astria_address([0u8; 20]), - amount: 99, - memo: serde_json::to_string(&bridge::UnlockMemo { - block_number: DEFAULT_LAST_ROLLUP_HEIGHT, - transaction_hash: [1u8; 32], - }) - .unwrap(), - fee_asset: denom, - bridge_address: None, - }; - Action::BridgeUnlock(inner) -} - -fn make_batch_with_bridge_unlock_and_ics20_withdrawal() -> Batch { - Batch { - actions: vec![make_ics20_withdrawal_action(), make_bridge_unlock_action()], - rollup_height: 10, - } -} - -fn make_tx_commit_success_response() -> tx_commit::Response { - tx_commit::Response { - check_tx: CheckTx::default(), - tx_result: ExecTxResult::default(), - hash: vec![0u8; 32].try_into().unwrap(), - height: Height::default(), - } -} - -fn make_tx_commit_check_tx_failure_response() -> tx_commit::Response { - tx_commit::Response { - check_tx: CheckTx { - code: 1.into(), - ..CheckTx::default() - }, - tx_result: ExecTxResult::default(), - hash: vec![0u8; 32].try_into().unwrap(), - height: Height::default(), - } -} - -fn make_tx_commit_deliver_tx_failure_response() -> tx_commit::Response { - tx_commit::Response { - check_tx: CheckTx::default(), - tx_result: ExecTxResult { - code: 1.into(), - ..ExecTxResult::default() - }, - hash: vec![0u8; 32].try_into().unwrap(), - height: Height::default(), - } -} - -/// Convert a `Request` object to a `SignedTransaction` -fn signed_tx_from_request(request: &Request) -> 
SignedTransaction { - use astria_core::generated::protocol::transaction::v1alpha1::SignedTransaction as RawSignedTransaction; - use prost::Message as _; - - let wrapped_tx_sync_req: request::Wrapper = - serde_json::from_slice(&request.body) - .expect("deserialize to JSONRPC wrapped tx_sync::Request"); - let raw_signed_tx = RawSignedTransaction::decode(&*wrapped_tx_sync_req.params().tx) - .expect("can't deserialize signed sequencer tx from broadcast jsonrpc request"); - let signed_tx = SignedTransaction::try_from_raw(raw_signed_tx) - .expect("can't convert raw signed tx to checked signed tx"); - debug!(?signed_tx, "sequencer mock received signed transaction"); - - signed_tx -} - -async fn register_get_nonce_response(server: &MockServer, response: NonceResponse) -> MockGuard { - let response = tendermint_rpc::endpoint::abci_query::Response { - response: tendermint_rpc::endpoint::abci_query::AbciQuery { - value: response.encode_to_vec(), - ..Default::default() - }, - }; - let wrapper = response::Wrapper::new_with_id(tendermint_rpc::Id::Num(1), Some(response), None); - Mock::given(body_partial_json(json!({"method": "abci_query"}))) - .and(body_string_contains("accounts/nonce")) - .respond_with( - ResponseTemplate::new(200) - .set_body_json(&wrapper) - .append_header("Content-Type", "application/json"), - ) - .expect(1) - .mount_as_scoped(server) - .await -} - -async fn register_broadcast_tx_commit_response( - server: &MockServer, - response: tx_commit::Response, -) -> MockGuard { - let wrapper = response::Wrapper::new_with_id(tendermint_rpc::Id::Num(1), Some(response), None); - Mock::given(body_partial_json(json!({ - "method": "broadcast_tx_commit" - }))) - .respond_with( - ResponseTemplate::new(200) - .set_body_json(&wrapper) - .append_header("Content-Type", "application/json"), - ) - .expect(1) - .mount_as_scoped(server) - .await -} - -fn compare_actions(expected: &Action, actual: &Action) { - match (expected, actual) { - (Action::BridgeUnlock(expected), Action::BridgeUnlock(actual)) => { - assert_eq!(expected, actual, "BridgeUnlock actions do not match"); - } - (Action::Ics20Withdrawal(expected), Action::Ics20Withdrawal(actual)) => { - assert_eq!(expected, actual, "Ics20Withdrawal actions do not match"); - } - _ => panic!("Actions do not match"), - } -} - -/// Test that the submitter starts up successfully -#[tokio::test] -async fn submitter_startup_success() { - let _submitter = TestSubmitter::spawn().await; -} - -/// Sanity check to check that batch submission works -#[tokio::test] -async fn submitter_submit_success() { - let submitter = TestSubmitter::spawn().await; - let TestSubmitter { - submitter_handle, - cometbft_mock, - .. 
- } = submitter; - - // set up guards on mock cometbft - let nonce_guard = register_get_nonce_response( - &cometbft_mock, - NonceResponse { - height: 1, - nonce: 0, - }, - ) - .await; - - let broadcast_guard = - register_broadcast_tx_commit_response(&cometbft_mock, make_tx_commit_success_response()) - .await; - - // send batch to submitter - let batch = make_batch_with_bridge_unlock_and_ics20_withdrawal(); - submitter_handle.send_batch(batch).await.unwrap(); - - // wait for nonce and broadcast guards to be satisfied - tokio::time::timeout( - Duration::from_millis(100), - nonce_guard.wait_until_satisfied(), - ) - .await - .unwrap(); - tokio::time::timeout( - Duration::from_millis(100), - broadcast_guard.wait_until_satisfied(), - ) - .await - .unwrap(); - - // check the submitted transaction against the batch - let requests = broadcast_guard.received_requests().await; - assert_eq!(requests.len(), 1); - let signed_transaction = signed_tx_from_request(&requests[0]); - let actions = signed_transaction.actions(); - let expected_batch = make_batch_with_bridge_unlock_and_ics20_withdrawal(); - - expected_batch - .actions - .iter() - .zip(actions.iter()) - .for_each(|(expected, actual)| compare_actions(expected, actual)); -} - -/// Test that the submitter halts when transaction submissions fails to be included in the -/// mempool (CheckTx) -#[tokio::test] -async fn submitter_submit_check_tx_failure() { - let submitter = TestSubmitter::spawn().await; - let TestSubmitter { - submitter_handle, - cometbft_mock, - mut submitter_task_handle, - .. - } = submitter; - - // set up guards on mock cometbft - let nonce_guard = register_get_nonce_response( - &cometbft_mock, - NonceResponse { - height: 1, - nonce: 0, - }, - ) - .await; - - let broadcast_guard = register_broadcast_tx_commit_response( - &cometbft_mock, - make_tx_commit_check_tx_failure_response(), - ) - .await; - - // send batch to submitter - let batch = make_batch_with_bridge_unlock_and_ics20_withdrawal(); - submitter_handle.send_batch(batch).await.unwrap(); - - // wait for the nonce and broadcast guards to be satisfied - tokio::time::timeout( - Duration::from_millis(100), - nonce_guard.wait_until_satisfied(), - ) - .await - .unwrap(); - tokio::time::timeout( - Duration::from_millis(100), - broadcast_guard.wait_until_satisfied(), - ) - .await - .unwrap(); - - // make sure the submitter halts and the task returns - let _submitter_result = tokio::time::timeout( - Duration::from_millis(100), - submitter_task_handle.take().unwrap(), - ) - .await - .unwrap() - .unwrap(); -} - -/// Test that the submitter halts when transaction submissions fails to be executed in a block -/// (DeliverTx) -#[tokio::test] -async fn submitter_submit_deliver_tx_failure() { - let submitter = TestSubmitter::spawn().await; - let TestSubmitter { - submitter_handle, - cometbft_mock, - mut submitter_task_handle, - .. 
- } = submitter; - - // set up guards on mock cometbft - let nonce_guard = register_get_nonce_response( - &cometbft_mock, - NonceResponse { - height: 1, - nonce: 0, - }, - ) - .await; - - let broadcast_guard = register_broadcast_tx_commit_response( - &cometbft_mock, - make_tx_commit_deliver_tx_failure_response(), - ) - .await; - - // send batch to submitter - let batch = make_batch_with_bridge_unlock_and_ics20_withdrawal(); - submitter_handle.send_batch(batch).await.unwrap(); - - // wait for the nonce and broadcast guards to be satisfied - tokio::time::timeout( - Duration::from_millis(100), - nonce_guard.wait_until_satisfied(), - ) - .await - .unwrap(); - tokio::time::timeout( - Duration::from_millis(100), - broadcast_guard.wait_until_satisfied(), - ) - .await - .unwrap(); - - // make sure the submitter halts and the task returns - let _submitter_result = tokio::time::timeout( - Duration::from_millis(100), - submitter_task_handle.take().unwrap(), - ) - .await - .unwrap() - .unwrap(); -} diff --git a/crates/astria-bridge-withdrawer/src/config.rs b/crates/astria-bridge-withdrawer/src/config.rs index 85ac347d6c..0cbf462daf 100644 --- a/crates/astria-bridge-withdrawer/src/config.rs +++ b/crates/astria-bridge-withdrawer/src/config.rs @@ -10,6 +10,8 @@ use serde::{ #[derive(Clone, Debug, Deserialize, Serialize, PartialEq)] /// The single config for creating an astria-bridge service. pub struct Config { + // The sequencer service grpc endpoint used to fetch pending nonce. + pub sequencer_grpc_endpoint: String, // The cometbft rpc endpoint for submitting transactions to the sequencer. pub sequencer_cometbft_endpoint: String, // The chain id of the sequencer chain. diff --git a/crates/astria-bridge-withdrawer/src/lib.rs b/crates/astria-bridge-withdrawer/src/lib.rs index 8ce4dae86e..867015602b 100644 --- a/crates/astria-bridge-withdrawer/src/lib.rs +++ b/crates/astria-bridge-withdrawer/src/lib.rs @@ -4,8 +4,6 @@ mod build_info; pub(crate) mod config; pub(crate) mod metrics; -#[cfg(test)] -pub(crate) use bridge_withdrawer::astria_address; pub use bridge_withdrawer::BridgeWithdrawer; pub use build_info::BUILD_INFO; pub use config::Config; diff --git a/crates/astria-bridge-withdrawer/src/metrics.rs b/crates/astria-bridge-withdrawer/src/metrics.rs index a409768d8e..c0aab7c5c0 100644 --- a/crates/astria-bridge-withdrawer/src/metrics.rs +++ b/crates/astria-bridge-withdrawer/src/metrics.rs @@ -15,9 +15,6 @@ use metrics::{ use telemetry::metric_names; pub(crate) struct Metrics { - nonce_fetch_count: Counter, - nonce_fetch_failure_count: Counter, - nonce_fetch_latency: Histogram, current_nonce: Gauge, sequencer_submission_failure_count: Counter, sequencer_submission_latency: Histogram, @@ -26,27 +23,6 @@ pub(crate) struct Metrics { impl Metrics { #[must_use] pub(crate) fn new() -> Self { - describe_counter!( - NONCE_FETCH_COUNT, - Unit::Count, - "The number of times we have attempted to fetch the nonce" - ); - let nonce_fetch_count = counter!(NONCE_FETCH_COUNT); - - describe_counter!( - NONCE_FETCH_FAILURE_COUNT, - Unit::Count, - "The number of times we have failed to fetch the nonce" - ); - let nonce_fetch_failure_count = counter!(NONCE_FETCH_FAILURE_COUNT); - - describe_histogram!( - NONCE_FETCH_LATENCY, - Unit::Seconds, - "The latency of nonce fetch" - ); - let nonce_fetch_latency = histogram!(NONCE_FETCH_LATENCY); - describe_gauge!(CURRENT_NONCE, Unit::Count, "The current nonce"); let current_nonce = gauge!(CURRENT_NONCE); @@ -65,27 +41,12 @@ impl Metrics { let sequencer_submission_latency = 
histogram!(SEQUENCER_SUBMISSION_LATENCY); Self { - nonce_fetch_count, - nonce_fetch_failure_count, - nonce_fetch_latency, current_nonce, sequencer_submission_failure_count, sequencer_submission_latency, } } - pub(crate) fn increment_nonce_fetch_count(&self) { - self.nonce_fetch_count.increment(1); - } - - pub(crate) fn increment_nonce_fetch_failure_count(&self) { - self.nonce_fetch_failure_count.increment(1); - } - - pub(crate) fn record_nonce_fetch_latency(&self, latency: Duration) { - self.nonce_fetch_latency.record(latency); - } - pub(crate) fn set_current_nonce(&self, nonce: u32) { self.current_nonce.set(nonce); } diff --git a/dev/values/withdrawer/values.yaml b/dev/values/withdrawer/values.yaml index 40324268d0..45211e589f 100644 --- a/dev/values/withdrawer/values.yaml +++ b/dev/values/withdrawer/values.yaml @@ -17,6 +17,7 @@ config: evmRpcEndpoint: "ws://astria-evm-service.astria-dev-cluster.svc.cluster.local:8546/" sequencerPrivateKey: devContent: "dfa7108e38ab71f89f356c72afc38600d5758f11a8c337164713e4471411d2e0" + sequencerGrpcEndpoint: "http://node0-sequencer-grpc-service.astria-dev-cluster.svc.cluster.local:8080" metrics: enabled: false From 269edce7eb73a53a1ee2b09ac723c4a20d43253f Mon Sep 17 00:00:00 2001 From: Fraser Hutchison <190532+Fraser999@users.noreply.github.com> Date: Mon, 15 Jul 2024 15:31:29 +0100 Subject: [PATCH 19/24] chore(sequencer): add mempool benchmarks (#1238) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Summary Added benchmarks to `mempool` using [Divan](https://github.com/nvzqz/divan). ## Background There are further upcoming changes to mempool planned, and benchmarks will allow us to avoid committing poor performing code or blindly regressing on current performance. ## Changes - Added benchmarks. ## Testing These are tests.
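For readers new to Divan, the wiring added by this patch is small: a `[[bench]]` target with `harness = false` whose `main` calls `divan::main()`, which then runs every function annotated with `#[divan::bench]` (see `benches/benchmark.rs` and `src/mempool/benchmarks.rs` in the diff below). A minimal, self-contained sketch of that pattern, using a hypothetical `fibonacci` function rather than the real mempool benchmarks:

```rust
// Sketch of a Divan bench target. The function being measured here is a made-up
// `fibonacci`, not code from this patch; only the registration pattern mirrors it.

fn fibonacci(n: u64) -> u64 {
    if n < 2 {
        n
    } else {
        fibonacci(n - 1) + fibonacci(n - 2)
    }
}

#[divan::bench]
fn fib_20() -> u64 {
    // `divan::black_box` keeps the input opaque so the call is not constant-folded away;
    // returned values are likewise kept opaque by Divan.
    fibonacci(divan::black_box(20))
}

fn main() {
    // Discovers and runs every `#[divan::bench]`-annotated function in this binary.
    divan::main();
}
```

A target like this is then invoked with `cargo bench -q -p <crate>`, which is how the sample output below was produced.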

Example of running cargo bench -q -p astria-sequencer on my Ryzen 7900X ``` Timer precision: 10 ns astria_sequencer fastest │ slowest │ median │ mean │ samples │ iters ╰─ mempool │ │ │ │ │ ╰─ benchmarks │ │ │ │ │ ├─ check_removed_comet_bft 44.82 µs │ 50 µs │ 45.23 µs │ 45.52 µs │ 100 │ 100 ├─ track_removal_comet_bft 44.98 µs │ 49.35 µs │ 45.33 µs │ 45.55 µs │ 100 │ 100 ├─ insert │ │ │ │ │ │ ├─ mempool_with_100000_txs 25.41 ms │ 40.04 ms │ 30.1 ms │ 30.87 ms │ 100 │ 100 │ ├─ mempool_with_10000_txs 2.228 ms │ 3.706 ms │ 2.58 ms │ 2.618 ms │ 100 │ 100 │ ├─ mempool_with_1000_txs 74.06 µs │ 101.5 µs │ 74.85 µs │ 75.82 µs │ 100 │ 100 │ ╰─ mempool_with_100_txs 12.5 µs │ 17.56 µs │ 12.76 µs │ 12.85 µs │ 100 │ 100 ├─ pending_nonce │ │ │ │ │ │ ├─ mempool_with_100000_txs 29.88 ms │ 38.8 ms │ 31.07 ms │ 31.78 ms │ 100 │ 100 │ ├─ mempool_with_10000_txs 2.099 ms │ 3.572 ms │ 2.73 ms │ 2.76 ms │ 100 │ 100 │ ├─ mempool_with_1000_txs 74.23 µs │ 105.4 µs │ 75.42 µs │ 76.26 µs │ 100 │ 100 │ ╰─ mempool_with_100_txs 11.66 µs │ 13.35 µs │ 11.93 µs │ 11.95 µs │ 100 │ 100 ├─ pop │ │ │ │ │ │ ├─ mempool_with_100000_txs 27.57 ms │ 37 ms │ 30.05 ms │ 31.3 ms │ 100 │ 100 │ ├─ mempool_with_10000_txs 1.971 ms │ 3.022 ms │ 2.614 ms │ 2.658 ms │ 100 │ 100 │ ├─ mempool_with_1000_txs 70.68 µs │ 96.14 µs │ 71.96 µs │ 72.71 µs │ 100 │ 100 │ ╰─ mempool_with_100_txs 11.3 µs │ 17.47 µs │ 11.76 µs │ 11.81 µs │ 100 │ 100 ├─ remove │ │ │ │ │ │ ├─ mempool_with_100000_txs 28.3 ms │ 34.65 ms │ 29.81 ms │ 30.56 ms │ 100 │ 100 │ ├─ mempool_with_10000_txs 2.093 ms │ 3.29 ms │ 2.643 ms │ 2.668 ms │ 100 │ 100 │ ├─ mempool_with_1000_txs 72.59 µs │ 96.33 µs │ 73.56 µs │ 74.09 µs │ 100 │ 100 │ ╰─ mempool_with_100_txs 11.69 µs │ 16.5 µs │ 11.94 µs │ 12.09 µs │ 100 │ 100 ╰─ run_maintenance │ │ │ │ │ ├─ mempool_with_100000_txs 47.28 ms │ 56.7 ms │ 50.59 ms │ 51.12 ms │ 100 │ 100 ├─ mempool_with_10000_txs 4.488 ms │ 6.047 ms │ 5.172 ms │ 5.176 ms │ 100 │ 100 ├─ mempool_with_1000_txs 310.5 µs │ 344 µs │ 315.4 µs │ 316.7 µs │ 100 │ 100 ╰─ mempool_with_100_txs 34.69 µs │ 40.81 µs │ 35.56 µs │ 35.74 µs │ 100 │ 100 ```
--- Cargo.lock | 1 + Cargo.toml | 15 +- crates/astria-merkle/Cargo.toml | 2 +- crates/astria-merkle/benches/benchmark.rs | 2 - crates/astria-sequencer/Cargo.toml | 18 +- crates/astria-sequencer/benches/benchmark.rs | 17 + .../src/mempool/benchmarks.rs | 368 ++++++++++++++++++ .../src/{mempool.rs => mempool/mod.rs} | 9 +- 8 files changed, 410 insertions(+), 22 deletions(-) create mode 100644 crates/astria-sequencer/benches/benchmark.rs create mode 100644 crates/astria-sequencer/src/mempool/benchmarks.rs rename crates/astria-sequencer/src/{mempool.rs => mempool/mod.rs} (99%) diff --git a/Cargo.lock b/Cargo.lock index d1e3fa6a17..7e69d84f82 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -787,6 +787,7 @@ dependencies = [ "bytes", "cnidarium", "cnidarium-component", + "divan", "futures", "hex", "ibc-proto", diff --git a/Cargo.toml b/Cargo.toml index 2d6795d817..e55b7f13e5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -60,6 +60,7 @@ celestia-tendermint = "0.32.1" celestia-types = "0.1.1" clap = "4.5.4" const_format = "0.2.32" +divan = "0.1.14" ethers = { version = "2.0.11", default-features = false } futures = "0.3" hex = "0.4" @@ -68,20 +69,18 @@ humantime = "2.1.0" hyper = "0.14" ibc-types = "0.12" indexmap = "2.1.0" +insta = "1.36.1" itertools = "0.12.1" itoa = "1.0.10" jsonrpsee = { version = "0.20" } -once_cell = "1.17.1" -pin-project-lite = "0.2.13" -sha2 = "0.10" -serde = "1" -serde_json = "1" metrics = "0.22.1" +once_cell = "1.17.1" pbjson-types = "0.6" # Note that when updating the penumbra versions, vendored types in `proto/sequencerapis/astria_vendored` may need to be updated as well. penumbra-ibc = { git = "https://github.com/penumbra-zone/penumbra.git", tag = "v0.78.0", default-features = false } penumbra-proto = { git = "https://github.com/penumbra-zone/penumbra.git", tag = "v0.78.0" } penumbra-tower-trace = { git = "https://github.com/penumbra-zone/penumbra.git", tag = "v0.78.0" } +pin-project-lite = "0.2.13" prost = "0.12" rand = "0.8.5" regex = "1.9" @@ -90,6 +89,9 @@ regex = "1.9" reqwest = { version = "0.11", default-features = false, features = [ "rustls-tls", ] } +serde = "1" +serde_json = "1" +sha2 = "0.10" tempfile = "3.6.0" tendermint = "0.34.0" tendermint-config = "0.34.0" @@ -97,6 +99,7 @@ tendermint-proto = "0.34.0" tendermint-rpc = "0.34.0" thiserror = "1" tokio = "1.28" +tokio-stream = { version = "0.1.14" } tokio-test = "0.4.2" tokio-util = "0.7.9" tonic = "0.10" @@ -104,5 +107,3 @@ tracing = "0.1" tryhard = "0.5.1" which = "4.4.0" wiremock = "0.5" -insta = "1.36.1" -tokio-stream = { version = "0.1.14" } diff --git a/crates/astria-merkle/Cargo.toml b/crates/astria-merkle/Cargo.toml index c80203cfa7..922689da44 100644 --- a/crates/astria-merkle/Cargo.toml +++ b/crates/astria-merkle/Cargo.toml @@ -13,7 +13,7 @@ sha2 = { workspace = true } [dev-dependencies] ct-merkle = "0.1.0" -divan = "0.1.14" +divan = { workspace = true } hex-literal = { workspace = true } [features] diff --git a/crates/astria-merkle/benches/benchmark.rs b/crates/astria-merkle/benches/benchmark.rs index cb70e988a7..3d10f45d34 100644 --- a/crates/astria-merkle/benches/benchmark.rs +++ b/crates/astria-merkle/benches/benchmark.rs @@ -1,5 +1,3 @@ -#![allow(clippy::wildcard_imports)] - use astria_merkle::Tree; use ct_merkle::CtMerkleTree; use divan::{ diff --git a/crates/astria-sequencer/Cargo.toml b/crates/astria-sequencer/Cargo.toml index a85fb864d5..03bab5a051 100644 --- a/crates/astria-sequencer/Cargo.toml +++ b/crates/astria-sequencer/Cargo.toml @@ -25,18 +25,21 @@ telemetry = { package = 
"astria-telemetry", path = "../astria-telemetry", featur anyhow = "1" borsh = { version = "1", features = ["derive"] } +cnidarium = { git = "https://github.com/penumbra-zone/penumbra.git", tag = "v0.78.0", features = [ + "metrics", +] } +cnidarium-component = { git = "https://github.com/penumbra-zone/penumbra.git", tag = "v0.78.0" } +ibc-proto = { version = "0.41.0", features = ["server"] } matchit = "0.7.2" priority-queue = "2.0.2" tower = "0.4" tower-abci = "0.12.0" tower-actor = "0.1.0" -cnidarium = { git = "https://github.com/penumbra-zone/penumbra.git", tag = "v0.78.0", features = [ - "metrics", -] } -cnidarium-component = { git = "https://github.com/penumbra-zone/penumbra.git", tag = "v0.78.0" } +tower-http = { version = "0.4", features = ["cors"] } async-trait = { workspace = true } bytes = { workspace = true } +divan = { workspace = true } futures = { workspace = true } hex = { workspace = true, features = ["serde"] } ibc-types = { workspace = true, features = ["with_serde"] } @@ -57,9 +60,6 @@ tokio = { workspace = true, features = ["rt", "tracing"] } tonic = { workspace = true } tracing = { workspace = true } -ibc-proto = { version = "0.41.0", features = ["server"] } -tower-http = { version = "0.4", features = ["cors"] } - [dev-dependencies] astria-core = { path = "../astria-core", features = [ "server", @@ -75,3 +75,7 @@ tokio = { workspace = true, features = ["test-util"] } [build-dependencies] astria-build-info = { path = "../astria-build-info", features = ["build"] } + +[[bench]] +name = "benchmark" +harness = false diff --git a/crates/astria-sequencer/benches/benchmark.rs b/crates/astria-sequencer/benches/benchmark.rs new file mode 100644 index 0000000000..57c6df628a --- /dev/null +++ b/crates/astria-sequencer/benches/benchmark.rs @@ -0,0 +1,17 @@ +// Required to force the benchmark target to actually register the divan benchmark cases. +use astria_sequencer as _; + +fn main() { + // Handle `nextest` querying the benchmark binary for tests. Currently `divan` is incompatible + // with `nextest`, so just report no tests available. + // See https://github.com/nvzqz/divan/issues/43 for further details. + let args: Vec<_> = std::env::args().collect(); + if args.contains(&"--list".to_string()) + && args.contains(&"--format".to_string()) + && args.contains(&"terse".to_string()) + { + return; + } + // Run registered benchmarks. + divan::main(); +} diff --git a/crates/astria-sequencer/src/mempool/benchmarks.rs b/crates/astria-sequencer/src/mempool/benchmarks.rs new file mode 100644 index 0000000000..dae8b13143 --- /dev/null +++ b/crates/astria-sequencer/src/mempool/benchmarks.rs @@ -0,0 +1,368 @@ +#![allow(non_camel_case_types)] + +use std::{ + collections::HashMap, + sync::OnceLock, + time::Duration, +}; + +use astria_core::{ + crypto::SigningKey, + primitive::v1::{ + asset::{ + Denom, + IbcPrefixed, + }, + Address, + RollupId, + }, + protocol::transaction::v1alpha1::{ + action::{ + Action, + SequenceAction, + }, + SignedTransaction, + TransactionParams, + UnsignedTransaction, + }, +}; +use sha2::{ + Digest as _, + Sha256, +}; + +use super::{ + Mempool, + RemovalReason, +}; + +/// The maximum number of transactions with which to initialize the mempool. +const MAX_INITIAL_TXS: usize = 100_000; +/// The max time for any benchmark. +const MAX_TIME: Duration = Duration::from_secs(30); +/// The number of different signers of transactions, and also the number of different chain IDs. 
+const SIGNER_COUNT: u8 = 10; + +/// Returns an endlessly-repeating iterator over `SIGNER_COUNT` separate signing keys. +fn signing_keys() -> impl Iterator { + static SIGNING_KEYS: OnceLock> = OnceLock::new(); + SIGNING_KEYS + .get_or_init(|| { + (0..SIGNER_COUNT) + .map(|i| SigningKey::from([i; 32])) + .collect() + }) + .iter() + .cycle() +} + +/// Returns a static ref to a collection of `MAX_INITIAL_TXS + 1` transactions. +fn transactions() -> &'static Vec { + static TXS: OnceLock> = OnceLock::new(); + TXS.get_or_init(|| { + crate::address::initialize_base_prefix("benchmarks").unwrap(); + let mut nonces_and_chain_ids = HashMap::new(); + signing_keys() + .map(move |signing_key| { + let verification_key = signing_key.verification_key(); + let (nonce, chain_id) = nonces_and_chain_ids + .entry(verification_key) + .or_insert_with(|| { + (0_u32, format!("chain-{}", signing_key.verification_key())) + }); + *nonce = (*nonce).wrapping_add(1); + let params = TransactionParams::builder() + .nonce(*nonce) + .chain_id(chain_id.as_str()) + .build(); + let sequence_action = SequenceAction { + rollup_id: RollupId::new([1; 32]), + data: vec![2; 1000], + fee_asset: Denom::IbcPrefixed(IbcPrefixed::new([3; 32])), + }; + UnsignedTransaction { + actions: vec![Action::Sequence(sequence_action)], + params, + } + .into_signed(signing_key) + }) + .take(MAX_INITIAL_TXS + 1) + .collect() + }) +} + +/// This trait exists so we can get better output from `divan` by configuring the various mempool +/// sizes as types rather than consts. With types we get output like: +/// ```text +/// ╰─ insert_new_tx +/// ├─ mempool_with_100_txs +/// ... +/// ╰─ mempool_with_100000_txs +/// ``` +/// rather than: +/// ```text +/// ╰─ insert_new_tx +/// ├─ 100 +/// ... +/// ╰─ 100000 +/// ``` +trait MempoolSize { + fn size() -> usize; + + fn checked_size() -> usize { + assert!(Self::size() <= MAX_INITIAL_TXS); + Self::size() + } +} + +struct mempool_with_100_txs; + +struct mempool_with_1000_txs; + +struct mempool_with_10000_txs; + +struct mempool_with_100000_txs; + +impl MempoolSize for mempool_with_100_txs { + fn size() -> usize { + 100 + } +} + +impl MempoolSize for mempool_with_1000_txs { + fn size() -> usize { + 1_000 + } +} + +impl MempoolSize for mempool_with_10000_txs { + fn size() -> usize { + 10_000 + } +} + +impl MempoolSize for mempool_with_100000_txs { + fn size() -> usize { + 100_000 + } +} + +/// Returns a new `Mempool` initialized with the number of transactions specified by `T::size()` +/// taken from the static `transactions()`, and with a full `comet_bft_removal_cache`. +fn init_mempool() -> Mempool { + let runtime = tokio::runtime::Builder::new_current_thread() + .enable_all() + .build() + .unwrap(); + let mempool = Mempool::new(); + runtime.block_on(async { + for tx in transactions().iter().take(T::checked_size()) { + mempool.insert(tx.clone(), 0).await.unwrap(); + } + for i in 0..super::REMOVAL_CACHE_SIZE { + let hash = Sha256::digest(i.to_le_bytes()).into(); + mempool + .track_removal_comet_bft(hash, RemovalReason::Expired) + .await; + } + }); + mempool +} + +/// Returns the first transaction from the static `transactions()` not included in the initialized +/// mempool, i.e. the one at index `T::size()`. +fn get_unused_tx() -> SignedTransaction { + transactions().get(T::checked_size()).unwrap().clone() +} + +/// Benchmarks `Mempool::insert` for a single new transaction on a mempool with the given number of +/// existing entries. 
+#[divan::bench( + max_time = MAX_TIME, + types = [ + mempool_with_100_txs, + mempool_with_1000_txs, + mempool_with_10000_txs, + mempool_with_100000_txs + ] +)] +fn insert(bencher: divan::Bencher) { + let runtime = tokio::runtime::Builder::new_current_thread() + .enable_all() + .build() + .unwrap(); + bencher + .with_inputs(|| (init_mempool::(), get_unused_tx::())) + .bench_values(move |(mempool, tx)| { + runtime.block_on(async { + mempool.insert(tx, 0).await.unwrap(); + }); + }); +} + +/// Benchmarks `Mempool::pop` on a mempool with the given number of existing entries. +#[divan::bench( + max_time = MAX_TIME, + types = [ + mempool_with_100_txs, + mempool_with_1000_txs, + mempool_with_10000_txs, + mempool_with_100000_txs + ] +)] +fn pop(bencher: divan::Bencher) { + let runtime = tokio::runtime::Builder::new_current_thread() + .enable_all() + .build() + .unwrap(); + bencher + .with_inputs(|| init_mempool::()) + .bench_values(move |mempool| { + runtime.block_on(async { + mempool.pop().await.unwrap(); + }); + }); +} + +/// Benchmarks `Mempool::remove` for a single transaction on a mempool with the given number of +/// existing entries. +#[divan::bench( + max_time = MAX_TIME, + types = [ + mempool_with_100_txs, + mempool_with_1000_txs, + mempool_with_10000_txs, + mempool_with_100000_txs + ] +)] +fn remove(bencher: divan::Bencher) { + let runtime = tokio::runtime::Builder::new_current_thread() + .enable_all() + .build() + .unwrap(); + bencher + .with_inputs(|| { + let tx_hash = transactions().first().unwrap().sha256_of_proto_encoding(); + (init_mempool::(), tx_hash) + }) + .bench_values(move |(mempool, tx_hash)| { + runtime.block_on(async { + mempool.remove(tx_hash).await; + }); + }); +} + +/// Benchmarks `Mempool::track_removal_comet_bft` for a single new transaction on a mempool with +/// the `comet_bft_removal_cache` filled. +/// +/// Note that the number of entries in the main cache is irrelevant here. +#[divan::bench(max_time = MAX_TIME)] +fn track_removal_comet_bft(bencher: divan::Bencher) { + let runtime = tokio::runtime::Builder::new_current_thread() + .enable_all() + .build() + .unwrap(); + bencher + .with_inputs(|| { + let tx_hash = transactions().first().unwrap().sha256_of_proto_encoding(); + (init_mempool::(), tx_hash) + }) + .bench_values(move |(mempool, tx_hash)| { + runtime.block_on(async { + mempool + .track_removal_comet_bft(tx_hash, RemovalReason::Expired) + .await; + }); + }); +} + +/// Benchmarks `Mempool::check_removed_comet_bft` for a single transaction on a mempool with the +/// `comet_bft_removal_cache` filled. +/// +/// Note that the number of entries in the main cache is irrelevant here. +#[divan::bench(max_time = MAX_TIME)] +fn check_removed_comet_bft(bencher: divan::Bencher) { + let runtime = tokio::runtime::Builder::new_current_thread() + .enable_all() + .build() + .unwrap(); + bencher + .with_inputs(|| { + let tx_hash = Sha256::digest(0_usize.to_le_bytes()).into(); + (init_mempool::(), tx_hash) + }) + .bench_values(move |(mempool, tx_hash)| { + runtime.block_on(async { + mempool.check_removed_comet_bft(tx_hash).await.unwrap(); + }); + }); +} + +/// Benchmarks `Mempool::run_maintenance` on a mempool with the given number of existing entries. 
+#[divan::bench( + max_time = MAX_TIME, + types = [ + mempool_with_100_txs, + mempool_with_1000_txs, + mempool_with_10000_txs, + mempool_with_100000_txs + ] +)] +fn run_maintenance(bencher: divan::Bencher) { + let runtime = tokio::runtime::Builder::new_current_thread() + .enable_all() + .build() + .unwrap(); + // Set the new nonce so that the entire `REMOVAL_CACHE_SIZE` entries in the + // `comet_bft_removal_cache` are replaced (assuming this test case has enough txs). + // allow: this is test-only code, using small values, and where the result is not critical. + #[allow(clippy::arithmetic_side_effects, clippy::cast_possible_truncation)] + let new_nonce = (super::REMOVAL_CACHE_SIZE as u32 / u32::from(SIGNER_COUNT)) + 1; + // Although in production this getter will be hitting the state store and will be slower than + // this test one, it's probably insignificant as the getter is only called once per address, + // and we don't expect a high number of discrete addresses in the mempool entries. + let current_account_nonce_getter = |_: Address| async { Ok(new_nonce) }; + bencher + .with_inputs(|| init_mempool::()) + .bench_values(move |mempool| { + runtime.block_on(async { + mempool + .run_maintenance(current_account_nonce_getter) + .await + .unwrap(); + }); + }); +} + +/// Benchmarks `Mempool::pending_nonce` on a mempool with the given number of existing entries. +#[divan::bench( + max_time = MAX_TIME, + types = [ + mempool_with_100_txs, + mempool_with_1000_txs, + mempool_with_10000_txs, + mempool_with_100000_txs + ] +)] +fn pending_nonce(bencher: divan::Bencher) { + let runtime = tokio::runtime::Builder::new_current_thread() + .enable_all() + .build() + .unwrap(); + bencher + .with_inputs(|| { + let address = crate::address::base_prefixed( + transactions() + .first() + .unwrap() + .verification_key() + .address_bytes(), + ); + (init_mempool::(), address) + }) + .bench_values(move |(mempool, address)| { + runtime.block_on(async { + mempool.pending_nonce(&address).await.unwrap(); + }); + }); +} diff --git a/crates/astria-sequencer/src/mempool.rs b/crates/astria-sequencer/src/mempool/mod.rs similarity index 99% rename from crates/astria-sequencer/src/mempool.rs rename to crates/astria-sequencer/src/mempool/mod.rs index a33035d65e..dcce7c3fc8 100644 --- a/crates/astria-sequencer/src/mempool.rs +++ b/crates/astria-sequencer/src/mempool/mod.rs @@ -1,3 +1,5 @@ +mod benchmarks; + use std::{ cmp::{ self, @@ -139,7 +141,7 @@ pub(crate) enum RemovalReason { FailedPrepareProposal(String), } -const TX_TTL: Duration = Duration::from_secs(600); // 10 minutes +const TX_TTL: Duration = Duration::from_secs(600); // 10 minutes const REMOVAL_CACHE_SIZE: usize = 4096; /// `RemovalCache` is used to signal to `CometBFT` that a @@ -310,10 +312,7 @@ impl Mempool { /// checks if a transaction was flagged to be removed from the `CometBFT` mempool /// and removes entry - pub(crate) async fn check_removed_comet_bft( - &mut self, - tx_hash: [u8; 32], - ) -> Option { + pub(crate) async fn check_removed_comet_bft(&self, tx_hash: [u8; 32]) -> Option { self.comet_bft_removal_cache.write().await.remove(tx_hash) } From 7cd1f5db66721005b446dc2075025aa3a3c9450b Mon Sep 17 00:00:00 2001 From: Richard Janis Goldschmidt Date: Tue, 16 Jul 2024 11:31:51 +0200 Subject: [PATCH 20/24] chore(test): use a temporary file to not pollute the workspace (#1269) ## Summary Uses a temporary file instead of writing to the workspace during smoke tests. ## Background It's bad practice to pollute the workspace when running tests. 
---
 charts/deploy.just | 12 ++++++++----
 1 file changed, 8 insertions(+), 4 deletions(-)

diff --git a/charts/deploy.just b/charts/deploy.just
index 846bba7e29..463e3ded7a 100644
--- a/charts/deploy.just
+++ b/charts/deploy.just
@@ -381,7 +381,12 @@ run-smoke-cli:
     CURRENT_BLOCK_HEX=$(just evm-get-block-by-number latest | jq -r '.number')
     CURRENT_BLOCK=$(just hex-to-dec $CURRENT_BLOCK_HEX)
 
-    echo {{sequencer_bridge_pkey}} > test_se
+
+    withdrawals_dst="$(mktemp)"
+    # comment out the `trap` below if you want to inspect `withdrawals_dst` after the run
+    trap "rm -f ${withdrawals_dst@Q}" EXIT
+
+    echo "passing ${withdrawals_dst} to astria-cli"
 
     astria-cli bridge collect-withdrawals \
         --rollup-endpoint {{eth_ws_url}} \
@@ -390,13 +395,12 @@ run-smoke-cli:
         --to-rollup-height $CURRENT_BLOCK \
         --sequencer-asset-to-withdraw nria \
        --bridge-address {{sequencer_bridge_address}} \
-        --output ./withdrawals.json
+        --output "${withdrawals_dst}"
 
     astria-cli bridge submit-withdrawals \
         --signing-key <(printf "%s" "{{sequencer_bridge_pkey}}") \
         --sequencer-chain-id {{sequencer_chain_id}} \
         --sequencer-url {{sequencer_rpc_url}} \
-        --input ./withdrawals.json
-
+        --input "${withdrawals_dst}"
     CHECKS=0
     EXPECTED_BALANCE=$(echo "1 * {{sequencer_base_amount}}" | bc)

From 6999a52d64db2e8ee5403d8c3d38891bbae59b79 Mon Sep 17 00:00:00 2001
From: jesse snyder
Date: Wed, 17 Jul 2024 09:42:57 +0200
Subject: [PATCH 21/24] chore(cli): remove unused rollup cli code (#1275)

## Summary

This PR removes leftover, unneeded code in the CLI for managing rollup deployments.

## Background

Just cleaning up the repo!

## Changes

- Removes `crates/astria-cli/src/cli/rollup.rs`

## Testing

- Locally built and ran the CLI
- Ran tests

## Related Issues

Some previous work was done in https://github.com/astriaorg/astria/pull/1235; this PR removes the last of the unneeded bits.
--- crates/astria-cli/src/cli/rollup.rs | 323 ---------------------------- 1 file changed, 323 deletions(-) delete mode 100644 crates/astria-cli/src/cli/rollup.rs diff --git a/crates/astria-cli/src/cli/rollup.rs b/crates/astria-cli/src/cli/rollup.rs deleted file mode 100644 index 3199397e13..0000000000 --- a/crates/astria-cli/src/cli/rollup.rs +++ /dev/null @@ -1,323 +0,0 @@ -use std::str::FromStr; - -use clap::{ - Args, - Subcommand, -}; -use color_eyre::eyre; -use serde::Serialize; - -const DEFAULT_ROLLUP_CHART_PATH: &str = - "https://github.com/astriaorg/charts/releases/download/evm-rollup-0.18.4/evm-rollup-0.18.4.tgz"; -const DEFAULT_SEQUENCER_GRPC: &str = "https://grpc.sequencer.dusk-7.devnet.astria.org/"; -const DEFAULT_LOG_LEVEL: &str = "debug"; -const DEFAULT_NETWORK_ID: u64 = 1337; -const DEFAULT_EXECUTION_COMMIT_LEVEL: &str = "SoftOnly"; -const DEFAULT_HOSTNAME: &str = "localdev.me"; -const DEFAULT_NAMESPACE: &str = "astria-dev-cluster"; -const DEFAULT_ROLLUP_GENESIS_BRIDGE_ALLOWED_ASSET_DENOM: &str = "nria"; - -/// Remove the 0x prefix from a hex string if present -fn strip_0x_prefix(s: &str) -> &str { - if let Some(stripped) = s.strip_prefix("0x") { - stripped - } else { - s - } -} - -/// Manage your rollups -#[derive(Debug, Subcommand)] -pub enum Command { - /// Manage your rollup configs - Config { - #[command(subcommand)] - command: ConfigCommand, - }, - /// Manage your rollup deployments - Deployment { - #[command(subcommand)] - command: DeploymentCommand, - }, -} - -/// Commands for managing rollup configs. -// Allowing large enum size variation here because this is a CLI tool -// not huge performance critical code. -#[allow(clippy::large_enum_variant)] -#[derive(Debug, Subcommand)] -pub enum ConfigCommand { - /// Create a new rollup config - Create(ConfigCreateArgs), - /// Edit a rollup config - Edit(ConfigEditArgs), - /// Delete a rollup config - Delete(ConfigDeleteArgs), -} - -#[derive(Args, Debug, Serialize, Clone)] -pub struct ConfigCreateArgs { - #[arg(long, env = "ROLLUP_USE_TTY", default_value = "true")] - pub use_tty: bool, - #[arg(long, env = "ROLLUP_LOG_LEVEL", default_value = DEFAULT_LOG_LEVEL)] - pub log_level: String, - - // rollup config - /// The name of the rollup - #[arg(long = "rollup.name", env = "ROLLUP_NAME")] - pub name: String, - /// The Network ID for the EVM chain - #[arg(long = "rollup.network-id", env = "ROLLUP_NETWORK_ID", default_value_t = DEFAULT_NETWORK_ID)] - pub network_id: u64, - /// The Execution Commit level - #[arg( - long = "rollup.execution-commit-level", - env = "ROLLUP_EXECUTION_COMMIT_LEVEL", - default_value = DEFAULT_EXECUTION_COMMIT_LEVEL - )] - pub execution_commit_level: String, - /// Choose to allow genesis extra data override - #[arg( - long = "rollup.override-genesis-extra-data", - env = "ROLLUP_OVERRIDE_GENESIS_EXTRA_DATA" - )] - pub override_genesis_extra_data: bool, - /// Optional. If set, will be used as the bridge address. If not set, nothing happens. 
- #[arg(long = "rollup.bridge-address", env = "ROLLUP_BRIDGE_ADDRESS")] - pub bridge_address: Option, - /// The allowed asset denom for the bridge - #[arg( - long = "rollup.bridge-allowed-asset-denom", - env = "ROLLUP_BRIDGE_ALLOWED_ASSET_DENOM", - default_value = DEFAULT_ROLLUP_GENESIS_BRIDGE_ALLOWED_ASSET_DENOM - )] - pub bridge_allowed_asset_denom: String, - /// List of genesis accounts to fund, in the form of `address:balance` - #[arg( - long = "rollup.genesis-accounts", - env = "ROLLUP_GENESIS_ACCOUNTS", - num_args = 1.., - value_delimiter = ',' - )] - pub genesis_accounts: Vec, - // sequencer config - /// Optional. If not set, will be determined from the current block height of the sequencer - #[arg( - long = "sequencer.initial-block-height", - env = "ROLLUP_SEQUENCER_INITIAL_BLOCK_HEIGHT" - )] - #[arg(value_parser = validate_initial_block_height)] - pub sequencer_initial_block_height: Option, - /// Optional. If not set, will be default to the devnet sequencer websocket address - #[arg( - long = "sequencer.grpc", - env = "ROLLUP_SEQUENCER_GRPC", - default_value = DEFAULT_SEQUENCER_GRPC - )] - pub sequencer_grpc: String, - /// Optional. If not set, will be default to the devnet sequencer rpc address - #[arg( - long = "sequencer.rpc", - env = "ROLLUP_SEQUENCER_RPC", - default_value = crate::cli::DEFAULT_SEQUENCER_RPC - )] - pub sequencer_rpc: String, - /// The chain id of the sequencing chain being used - #[arg( - long = "sequencer.chain-id", - env = "ROLLUP_SEQUENCER_CHAIN_ID", - default_value = crate::cli::DEFAULT_SEQUENCER_CHAIN_ID - )] - pub sequencer_chain_id: String, - /// Optional. Will default to 'localdev.me' for local deployments. Will need to separately - /// configure other hosts - #[arg( - long = "hostname", - env = "ROLLUP_HOSTNAME", - default_value = DEFAULT_HOSTNAME - )] - pub hostname: String, - /// Configures the k8s namespace rollup will be deployed to - #[arg(long, env = "ROLLUP_NAMESPACE", default_value = DEFAULT_NAMESPACE)] - pub namespace: String, - /// Choose to enable the Celestia feature - #[arg(long = "celestia-node.enabled", env = "ROLLUP_ENABLE_CELESTIA_NODE")] - pub enable_celestia_node: bool, -} - -fn validate_initial_block_height(val: &str) -> Result { - match val.parse::() { - Ok(height) if height >= 2 => Ok(height), - Ok(_) => Err(String::from("the block height must be at least 2.")), - Err(e) => Err(format!("parsing to u64: {e}")), - } -} - -/// `GenesisAccountArg` is a struct that represents a genesis account to be funded. -/// It has the form of `address:balance`. -#[derive(Clone, Debug, PartialEq, Serialize)] -pub struct GenesisAccountArg { - pub address: String, - pub balance: u128, -} - -impl FromStr for GenesisAccountArg { - type Err = eyre::Report; - - /// Parse a string of the form `address:balance` into a `GenesisAccountArg`. - /// If the balance is not provided, it will default to 0. - /// - /// # Errors - /// - /// * If the address is missing - /// * If the address is empty - /// * If the address is not a valid hex string that decodes to 20 bytes - fn from_str(s: &str) -> eyre::Result { - let mut parts = s.splitn(2, ':'); - - let address = parts.next().ok_or_else(|| eyre::eyre!("Missing address"))?; - let address = strip_0x_prefix(address).to_string(); - if address.is_empty() { - return Err(eyre::eyre!("Empty address")); - } - let decoded = - hex::decode(&address).map_err(|e| eyre::eyre!("Invalid hex address: {}", e))?; - if decoded.len() != 20 { - return Err(eyre::eyre!( - "Address must be a 20-byte hex string, or 40 characters." 
- )); - } - - let balance_str = parts.next().unwrap_or("1000000000000000000"); - let balance = balance_str - .parse::() - .map_err(|e| eyre::eyre!("Invalid balance. Could not parse to u128: {}", e))?; - - Ok(GenesisAccountArg { - address, - balance, - }) - } -} - -#[derive(Args, Debug)] -pub struct ConfigEditArgs { - /// The filepath of the config to edit - #[arg(long = "config", env = "ROLLUP_CONFIG_PATH")] - pub(crate) config_path: String, - /// The key of the field to edit. Accepts dot notated yaml path. - pub(crate) key: String, - /// The value to set the field to - pub(crate) value: String, -} - -#[derive(Args, Debug)] -pub struct ConfigDeleteArgs { - /// The filepath of the config to delete - #[arg(long = "config", env = "ROLLUP_CONFIG_PATH")] - pub(crate) config_path: String, -} - -#[derive(Debug, Subcommand)] -pub enum DeploymentCommand { - /// Deploy a rollup - Create(DeploymentCreateArgs), - /// Delete a rollup - Delete(DeploymentDeleteArgs), - /// List all deployed rollups - List, -} - -#[derive(Args, Debug, Serialize)] -pub struct DeploymentCreateArgs { - /// Filepath of the config to deploy - #[arg(long = "config", env = "ROLLUP_CONFIG_PATH")] - pub(crate) config_path: String, - /// Optional path to a rollup chart that can override the default remote helm chart - #[arg( - long, - env = "ROLLUP_CHART_PATH", - default_value = DEFAULT_ROLLUP_CHART_PATH - )] - pub(crate) chart_path: String, - /// Set if you want to do a dry run of the deployment - #[arg(long, env = "ROLLUP_DRY_RUN", default_value = "false")] - pub(crate) dry_run: bool, - // TODO: https://github.com/astriaorg/astria/issues/594 - // Don't use a plain text private, prefer wrapper like from - // the secrecy crate with specialized `Debug` and `Drop` implementations - // that overwrite the key on drop and don't reveal it when printing. - /// Faucet private key - #[arg(long, env = "ROLLUP_FAUCET_PRIVATE_KEY")] - pub(crate) faucet_private_key: String, - /// Sequencer private key - // TODO: https://github.com/astriaorg/astria/issues/594 - // Don't use a plain text private, prefer wrapper like from - // the secrecy crate with specialized `Debug` and `Drop` implementations - // that overwrite the key on drop and don't reveal it when printing. 
- #[arg(long, env = "ROLLUP_SEQUENCER_PRIVATE_KEY")] - pub(crate) sequencer_private_key: String, - /// Set if you want to see all k8s resources created by the deployment - /// Set if you want to do a dry run of the deployment - #[arg(long, env = "ROLLUP_DEBUG_DEPLOY", default_value = "false")] - pub(crate) debug: bool, -} - -#[derive(Args, Debug)] -pub struct DeploymentDeleteArgs { - /// The filepath of the target deployment's config - #[arg(long = "config")] - pub(crate) config_path: String, -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_genesis_account_arg_from_str_with_balance() { - let input = "0xaC21B97d35Bf75A7dAb16f35b111a50e78A72F30:1000"; - let expected = GenesisAccountArg { - address: "aC21B97d35Bf75A7dAb16f35b111a50e78A72F30".to_string(), - balance: 1000, - }; - let result: GenesisAccountArg = input.parse().unwrap(); - assert_eq!(result, expected); - } - - #[test] - fn test_genesis_account_arg_from_str_without_balance() { - let input = "0xaC21B97d35Bf75A7dAb16f35b111a50e78A72F30"; - let expected = GenesisAccountArg { - address: "aC21B97d35Bf75A7dAb16f35b111a50e78A72F30".to_string(), - balance: 1_000_000_000_000_000_000, - }; - let result: GenesisAccountArg = input.parse().unwrap(); - assert_eq!(result, expected); - } - - #[test] - fn test_genesis_account_arg_from_str_invalid_balance() { - let input = "0xaC21B97d35Bf75A7dAb16f35b111a50e78A72F30:invalid_balance"; - let result: Result = input.parse(); - assert!(result.is_err()); - } - - #[test] - fn test_genesis_account_arg_from_str_invalid_address() { - let input = "0x1234abcd:1000"; - let result: Result = input.parse(); - assert!(result.is_err()); - } - - #[test] - fn test_genesis_account_arg_from_str_no_0x_prefix() { - let input = "aC21B97d35Bf75A7dAb16f35b111a50e78A72F30:1000"; - let expected = GenesisAccountArg { - address: "aC21B97d35Bf75A7dAb16f35b111a50e78A72F30".to_string(), - balance: 1000, - }; - let result: GenesisAccountArg = input.parse().unwrap(); - assert_eq!(result, expected); - } -} From bd34b3839c370bfc116c4eff639f094518759c28 Mon Sep 17 00:00:00 2001 From: jesse snyder Date: Wed, 17 Jul 2024 10:10:19 +0200 Subject: [PATCH 22/24] feature(charts): hermes chart fixes, bech32 updates, ibc bridge test (#1130) ## Summary Fixes hermes helm chart ## Background We want to run hermes via helm chart so we can automate e2e testing. ## Changes * add 2nd funded celestia account * use this account's key in hermes * fix hermes chart values * add new chart `bridge-test` * add new justfile `dev/bridgetester.just` * add `AstriaSequencerHrpPrefix` to evm rollup geth-genesis ## Testing - `just deploy-cluster` - `just deploy-ibc-test-infra` - deploys ingress controller - deploys celestia-local - deploys single astria sequencer - `./charts/sequencer/values.yaml` - `./dev/values/validators/single.yaml` - deploys evm rollup - `./charts/evm-rollup/values.yaml` - `./dev/values/rollup/dev.yaml` - `./dev/values/rollup/ibc-bridge-test.yaml` - deploy hermes - `./charts/hermes/values.yaml` - `./dev/values/hermes/local.yml` - `just deploy-bridge-tester` - initContainer handles initializing bridge account on the sequencer - container runs scripts - `celestia-appd keys add` - `celestia-appd tx ibc-transfer transfer` - checks balance before and after and fails if ibc-transfer did not go through ## Metrics - List out metrics added by PR, delete section if none. ## Breaking Changelist - Bulleted list of breaking changes, any notes on migration. Delete section if none. 
## Related Issues Link any issues that are related, prefer full github links. closes --- .github/workflows/docker-build.yml | 48 ++++++++++++- charts/README.md | 6 +- charts/bridge-test/.helmignore | 23 ++++++ charts/bridge-test/Chart.yaml | 28 ++++++++ .../files/scripts/test-ibc-transfer.sh | 58 +++++++++++++++ charts/bridge-test/templates/_helpers.tpl | 23 ++++++ charts/bridge-test/templates/configmap.yaml | 26 +++++++ charts/bridge-test/templates/job.yaml | 41 +++++++++++ charts/bridge-test/values.yaml | 38 ++++++++++ charts/celestia-local/Chart.yaml | 2 +- .../files/scripts/init-celestia-appd.sh | 24 +++++++ .../files/scripts/start-celestia-appd.sh | 1 + .../celestia-local/templates/configmap.yaml | 4 ++ .../celestia-local/templates/deployment.yaml | 2 + charts/celestia-local/templates/ingress.yaml | 13 ++++ charts/celestia-local/templates/service.yaml | 3 + charts/celestia-local/values.yaml | 11 ++- charts/deploy.just | 2 +- charts/evm-rollup/Chart.yaml | 4 +- charts/hermes/Chart.yaml | 4 +- charts/hermes/files/config.toml | 1 - charts/hermes/templates/deployment.yaml | 5 ++ charts/hermes/values.yaml | 4 +- dev/bridgetester.just | 70 +++++++++++++++++++ .../bridgetesterutility.Dockerfile | 44 ++++++++++++ dev/values/hermes/{local.yml => local.yaml} | 45 +++++++----- dev/values/rollup/ibc-bridge-test.yaml | 10 +++ dev/values/validators/all.yml | 3 + justfile | 1 + 29 files changed, 515 insertions(+), 29 deletions(-) create mode 100644 charts/bridge-test/.helmignore create mode 100644 charts/bridge-test/Chart.yaml create mode 100644 charts/bridge-test/files/scripts/test-ibc-transfer.sh create mode 100644 charts/bridge-test/templates/_helpers.tpl create mode 100644 charts/bridge-test/templates/configmap.yaml create mode 100644 charts/bridge-test/templates/job.yaml create mode 100644 charts/bridge-test/values.yaml create mode 100644 dev/bridgetester.just create mode 100644 dev/containerfiles/bridgetesterutility.Dockerfile rename dev/values/hermes/{local.yml => local.yaml} (63%) create mode 100644 dev/values/rollup/ibc-bridge-test.yaml diff --git a/.github/workflows/docker-build.yml b/.github/workflows/docker-build.yml index 886c629b1d..2f6a00a918 100644 --- a/.github/workflows/docker-build.yml +++ b/.github/workflows/docker-build.yml @@ -26,7 +26,7 @@ on: - "**-v[0-9]+.[0-9]+.[0-9]+-alpha.[0-9]+" - "**-v[0-9]+.[0-9]+.[0-9]+-beta.[0-9]+" - "**-v[0-9]+.[0-9]+.[0-9]+-rc[0-9]+" - + pull_request: types: - opened @@ -179,6 +179,52 @@ jobs: timeout-minutes: 3 run: just run-smoke-cli + ibc-bridge-test: + needs: [ run_checker, composer, conductor, sequencer, sequencer-relayer ] + if: (github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'astriaorg/astria') && (github.event_name == 'merge_group' || needs.run_checker.outputs.run_docker == 'true') + runs-on: buildjet-8vcpu-ubuntu-2204 + steps: + - uses: actions/checkout@v4 + - name: Install just + uses: taiki-e/install-action@just + - name: Install kind + uses: helm/kind-action@v1 + with: + install_only: true + - name: Log in to GHCR + uses: docker/login-action@v2 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + - name: Setup IBC Bridge Test Environment + timeout-minutes: 5 + run: | + TAG=sha-$(git rev-parse --short HEAD) + just deploy cluster + kubectl create secret generic regcred --from-file=.dockerconfigjson=$HOME/.docker/config.json --type=kubernetes.io/dockerconfigjson + echo -e "\n\nDeploying with astria images tagged $TAG" + just deploy ibc-test-infra $TAG 
+ just build-and-load-bridge-tester-image $TAG + - name: Run IBC Bridge Tester + timeout-minutes: 2 + run: | + TAG=sha-$(git rev-parse --short HEAD) + printlogs() { + echo "IBC Transfer Test failed, printing logs..." + kubectl describe job bridge-tester-chart -n astria-dev-cluster + kubectl logs job/bridge-tester-chart --all-containers -n astria-dev-cluster + exit 1 + } + just deploy bridge-tester $TAG + # timeout before the gh job times out so we can print logs + kubectl wait --for=condition=complete --timeout=90s job/bridge-tester-chart -n astria-dev-cluster || printlogs + JOB_STATUS=$(kubectl get job bridge-tester-chart -n astria-dev-cluster -o jsonpath='{.status.succeeded}') + if [ "$JOB_STATUS" != "1" ]; then + printlogs + else + echo "IBC Transfer Test passed" + fi docker: if: ${{ always() && !cancelled() }} diff --git a/charts/README.md b/charts/README.md index ced4dedb97..e2bafbf0e5 100644 --- a/charts/README.md +++ b/charts/README.md @@ -148,7 +148,7 @@ to `local`. You can now deploy the chart with your local image. ### Dev vs Prod -All of our charts should run against both the lastest code in monorepo AND +All of our charts should run against both the latest code in monorepo AND against the latest release. Sometimes, there are configuration changes between releases though. To manage this in various templates you will see the following pattern (especially in config maps and genesis files): @@ -183,8 +183,8 @@ To deploy and run this: ## Examining Deployments [k9s](https://k9scli.io/) is a useful utility for inspecting deployed -containers, logs and services. Additionally you may interact directly with the -kubernetes API some helpful commands below. +containers, logs and services. Additionally, you may interact directly with the +kubernetes API using some helpful commands below. ### Helpful commands diff --git a/charts/bridge-test/.helmignore b/charts/bridge-test/.helmignore new file mode 100644 index 0000000000..0e8a0eb36f --- /dev/null +++ b/charts/bridge-test/.helmignore @@ -0,0 +1,23 @@ +# Patterns to ignore when building packages. +# This supports shell glob matching, relative path matching, and +# negation (prefixed with !). Only one pattern per line. +.DS_Store +# Common VCS dirs +.git/ +.gitignore +.bzr/ +.bzrignore +.hg/ +.hgignore +.svn/ +# Common backup files +*.swp +*.bak +*.tmp +*.orig +*~ +# Various IDEs +.project +.idea/ +*.tmproj +.vscode/ diff --git a/charts/bridge-test/Chart.yaml b/charts/bridge-test/Chart.yaml new file mode 100644 index 0000000000..7beb394e73 --- /dev/null +++ b/charts/bridge-test/Chart.yaml @@ -0,0 +1,28 @@ +apiVersion: v2 +name: bridge-test +description: A Helm chart for Kubernetes + +# A chart can be either an 'application' or a 'library' chart. +# +# Application charts are a collection of templates that can be packaged into versioned archives +# to be deployed. +# +# Library charts provide useful utilities or functions for the chart developer. They're included as +# a dependency of application charts to inject those utilities and functions into the rendering +# pipeline. Library charts do not define any templates and therefore cannot be deployed. +type: application + +# This is the chart version. This version number should be incremented each time you make changes +# to the chart and its templates, including the app version. +# Versions are expected to follow Semantic Versioning (https://semver.org/) +version: 0.1.0 + +# This is the version number of the application being deployed. 
This version number should be +# incremented each time you make changes to the application. Versions are not expected to +# follow Semantic Versioning. They should reflect the version the application is using. +# It is recommended to use it with quotes. +appVersion: "0.1.0" + +maintainers: + - name: steezeburger + url: astria.org diff --git a/charts/bridge-test/files/scripts/test-ibc-transfer.sh b/charts/bridge-test/files/scripts/test-ibc-transfer.sh new file mode 100644 index 0000000000..d3e836ba95 --- /dev/null +++ b/charts/bridge-test/files/scripts/test-ibc-transfer.sh @@ -0,0 +1,58 @@ +#!/bin/sh + +get_evm_balance() { + HEX_NUM=$(curl -X POST "$evm_url" -s -d "{\"jsonrpc\":\"2.0\",\"method\":\"eth_getBalance\",\"params\":[\"$evm_to_address\", \"latest\"],\"id\":1}" -H 'Content-Type: application/json' | jq -r '.result') + # strip 0x + HEX_NUM=$(echo "$HEX_NUM" | sed 's/^0x//') + # capitalize all lowercase letters + HEX_NUM=$(echo "$HEX_NUM" | tr '[:lower:]' '[:upper:]') + # print as integer + echo "ibase=16; $HEX_NUM" | bc +} + +addKeyForCelestiaAccount() { + # add key for the celestia dev account using the mnemonic + echo "Adding key for the celestia dev account..." + echo "$celestia_dev_account_mnemonic" | celestia-appd keys add \ + "$celestia_dev_account_key_name" \ + --home "$home_dir" \ + --keyring-backend="$keyring_backend" \ + --recover +} + +performIBCTransfer() { + # perform ibc transfer + echo "Performing IBC transfer..." + celestia-appd tx ibc-transfer transfer \ + transfer \ + channel-0 \ + "$bridge_account_address_bech32" \ + 53000utia \ + --memo="{\"rollupAddress\":\"$evm_to_address\"}" \ + --chain-id="$celestia_chain_id" \ + --node="$celestia_node_url" \ + --from="$celestia_dev_account_address" \ + --fees=26000utia \ + --yes \ + --log_level=debug \ + --home "$home_dir" \ + --keyring-backend="$keyring_backend" +} + +initial_balance=$(get_evm_balance) + +addKeyForCelestiaAccount +performIBCTransfer + +# FIXME - should probably poll w/ timeout instead of sleeping? +sleep 30 + +final_balance=$(get_evm_balance) +expected_balance=$(echo "$initial_balance + 53000000000000000" | bc) +if [ "$(echo "$final_balance == $expected_balance" | bc)" -eq 0 ]; then + echo "IBC Transfer failed!" + echo "Expected balance $expected_balance, got $final_balance" + exit 1 +else + echo "IBC Transfer successful!" +fi diff --git a/charts/bridge-test/templates/_helpers.tpl b/charts/bridge-test/templates/_helpers.tpl new file mode 100644 index 0000000000..ad997565bb --- /dev/null +++ b/charts/bridge-test/templates/_helpers.tpl @@ -0,0 +1,23 @@ +{{/* +Namespace to deploy elements into. +*/}} +{{- define "bridge-test.namespace" -}} +{{- default .Release.Namespace .Values.namespaceOverride | trunc 63 | trimSuffix "-" -}} +{{- end }} + +{{/* +Create a default fully qualified app name. +We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). 
+*/}} +{{- define "bridge-test.fullname" -}} +{{- if .Values.fullnameOverride -}} +{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" -}} +{{- else -}} +{{- $name := default .Chart.Name .Values.nameOverride -}} +{{- if contains $name .Release.Name -}} +{{- .Release.Name | trunc 63 | trimSuffix "-" -}} +{{- else -}} +{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" -}} +{{- end -}} +{{- end -}} +{{- end -}} diff --git a/charts/bridge-test/templates/configmap.yaml b/charts/bridge-test/templates/configmap.yaml new file mode 100644 index 0000000000..c31803c370 --- /dev/null +++ b/charts/bridge-test/templates/configmap.yaml @@ -0,0 +1,26 @@ +apiVersion: v1 +kind: ConfigMap +metadata: + name: bridge-test-env + namespace: {{ include "bridge-test.namespace" . }} +data: + home_dir: "/home/celestia" + keyring_backend: "test" + bridge_account_address: "{{ .Values.bridgeAccount.address }}" + bridge_account_address_bech32: "{{ .Values.bridgeAccount.bech32 }}" + evm_to_address: "{{ .Values.evmToAddress }}" + evm_url: "{{ .Values.evmURL }}" + celestia_chain_id: "{{ .Values.celestiaChainID }}" + celestia_node_url: "{{ .Values.celestiaNodeURL }}" + celestia_dev_account_address: "{{ .Values.celestiaDevAccount.address }}" + celestia_dev_account_mnemonic: "{{ .Values.celestiaDevAccount.mnemonic }}" + celestia_dev_account_key_name: "{{ .Values.celestiaDevAccount.name }}" +--- +apiVersion: v1 +kind: ConfigMap +metadata: + name: bridge-test-scripts + namespace: {{ include "bridge-test.namespace" . }} +data: + test-ibc-transfer.sh: | + {{- .Files.Get "files/scripts/test-ibc-transfer.sh" | nindent 4 }} diff --git a/charts/bridge-test/templates/job.yaml b/charts/bridge-test/templates/job.yaml new file mode 100644 index 0000000000..391e47288d --- /dev/null +++ b/charts/bridge-test/templates/job.yaml @@ -0,0 +1,41 @@ +apiVersion: batch/v1 +kind: Job +metadata: + name: {{ include "bridge-test.fullname" . }} + labels: + app: astria-dev-cluster + namespace: {{ include "bridge-test.namespace" . }} +spec: + template: + metadata: + name: {{ include "bridge-test.fullname" . 
}} + labels: + app: astria-dev-cluster + spec: + initContainers: + - name: init-bridge-account + image: {{ .Values.bridgeTesterUtilityImage }} + command: [ "astria-go", "sequencer", "bridge", "init", {{ .Values.evmRollupName }} ] + args: + - --privkey={{ .Values.bridgeAccount.privkey }} + - --sequencer-chain-id={{ .Values.sequencerChainId }} + - --sequencer-url={{ .Values.sequencerURL }} + - --asset={{ .Values.asset }} + - --fee-asset={{ .Values.feeAsset }} + - --log-level=debug + containers: + - name: test-ibc-transfer + image: {{ .Values.bridgeTesterUtilityImage }} + command: [ "/scripts/test-ibc-transfer.sh" ] + volumeMounts: + - mountPath: /scripts/ + name: bridge-test-scripts-volume + envFrom: + - configMapRef: + name: bridge-test-env + volumes: + - name: bridge-test-scripts-volume + configMap: + name: bridge-test-scripts + defaultMode: 0777 + restartPolicy: Never diff --git a/charts/bridge-test/values.yaml b/charts/bridge-test/values.yaml new file mode 100644 index 0000000000..66b0dc9545 --- /dev/null +++ b/charts/bridge-test/values.yaml @@ -0,0 +1,38 @@ +replicaCount: 1 + +# this image is overridden in ci/cd with the image built in ci/cd +bridgeTesterUtilityImage: ghcr.io/astriaorg/bridge-tester-utility:local + +imagePullSecrets: [] +nameOverride: "" +namespaceOverride: "" +fullnameOverride: "" + +# must match rollup name used in evm rollup chart values +evmRollupName: "astria" +# this is a shared dev address +evmToAddress: "0xaC21B97d35Bf75A7dAb16f35b111a50e78A72F30" +# evm execution api url +evmURL: "http://astria-evm-service.astria-dev-cluster.svc.cluster.local:8545" + +sequencerURL: "http://node0-sequencer-rpc-service.astria-dev-cluster.svc.cluster.local:26657" +# must match chain id used in sequencer chart values +sequencerChainId: "sequencer-test-chain-0" +asset: "transfer/channel-0/utia" +feeAsset: "nria" + +# sequencer bridge account. is funded during sequencer genesis. +bridgeAccount: + address: "6f85297e587b61b37695a1ac17189b3e907e318e" + bech32: "astria1d7zjjljc0dsmxa545xkpwxym86g8uvvwhtezcr" + privkey: "6015fbe1c365d3c5ef92dc891db8c5bb26ad454bec2db4762b96e9f8b2430285" + pubkey: "b78aa61c65f21e5fe0f31d221819053fa2286dd6eff83fc490e3ee746f144626" + +celestiaChainID: "celestia-local-0" +celestiaNodeURL: "http://celestia-app-service.astria-dev-cluster.svc.cluster.local:26657" + +# this account should be funded during celestia genesis +celestiaDevAccount: + name: "dev-account" + address: "celestia1m0ksdjl2p5nzhqy3p47fksv52at3ln885xvl96" + mnemonic: "enrich avocado local net will avoid dizzy truth column excuse ready lesson" diff --git a/charts/celestia-local/Chart.yaml b/charts/celestia-local/Chart.yaml index 244995a093..25b27f7bea 100644 --- a/charts/celestia-local/Chart.yaml +++ b/charts/celestia-local/Chart.yaml @@ -15,7 +15,7 @@ type: application # This is the chart version. This version number should be incremented each time you make changes # to the chart and its templates, including the app version. # Versions are expected to follow Semantic Versioning (https://semver.org/) -version: 0.4.0 +version: 0.5.0 # This is the version number of the application being deployed. This version number should be # incremented each time you make changes to the application. 
Versions are not expected to diff --git a/charts/celestia-local/files/scripts/init-celestia-appd.sh b/charts/celestia-local/files/scripts/init-celestia-appd.sh index 7b9402651f..4f0a427a15 100755 --- a/charts/celestia-local/files/scripts/init-celestia-appd.sh +++ b/charts/celestia-local/files/scripts/init-celestia-appd.sh @@ -29,6 +29,30 @@ celestia-appd gentx \ --chain-id "$chainid" \ --home "$home_dir" +# add ibc account +echo "$ibc_account_mnemonic" | celestia-appd keys add \ + "$ibc_account_key_name" \ + --home "$home_dir" \ + --keyring-backend="$keyring_backend" \ + --recover +ibc_account_key=$(celestia-appd keys show "$ibc_account_key_name" -a --keyring-backend="$keyring_backend" --home "$home_dir") +celestia-appd add-genesis-account \ + "$ibc_account_key" \ + --home "$home_dir" \ + "$coins" + +# add account used for development and testing +echo "$dev_account_mnemonic" | celestia-appd keys add \ + "$dev_account_key_name" \ + --home "$home_dir" \ + --keyring-backend="$keyring_backend" \ + --recover +dev_account_key=$(celestia-appd keys show "$dev_account_key_name" -a --keyring-backend="$keyring_backend" --home "$home_dir") +celestia-appd add-genesis-account \ + "$dev_account_key" \ + --home "$home_dir" \ + "$coins" + celestia-appd collect-gentxs --home "$home_dir" # Enable transaction indexing diff --git a/charts/celestia-local/files/scripts/start-celestia-appd.sh b/charts/celestia-local/files/scripts/start-celestia-appd.sh index e3d8a82c3b..334e39156c 100755 --- a/charts/celestia-local/files/scripts/start-celestia-appd.sh +++ b/charts/celestia-local/files/scripts/start-celestia-appd.sh @@ -26,5 +26,6 @@ exec celestia-appd start --home "${home_dir}" \ --grpc.address "0.0.0.0:$celestia_app_grpc_port" \ --rpc.laddr "tcp://0.0.0.0:$celestia_app_host_port" \ --api.enable \ + --api.enabled-unsafe-cors \ --grpc.enable \ --grpc-web.enable diff --git a/charts/celestia-local/templates/configmap.yaml b/charts/celestia-local/templates/configmap.yaml index 174f92ee3e..c1630791d6 100644 --- a/charts/celestia-local/templates/configmap.yaml +++ b/charts/celestia-local/templates/configmap.yaml @@ -16,6 +16,10 @@ data: bridge_rpc_port: "{{ .Values.ports.bridgeRPC }}" celestia_app_host_port: "{{ .Values.ports.celestiaAppHostPort }}" celestia_app_grpc_port: "{{ .Values.ports.celestiaAppGrpcPort }}" + ibc_account_mnemonic: "{{ .Values.ibcAccountMnemonic }}" + ibc_account_key_name: "{{ .Values.ibcAccountKeyName }}" + dev_account_mnemonic: "{{ .Values.devAccountMnemonic }}" + dev_account_key_name: "{{ .Values.devAccountName }}" --- apiVersion: v1 kind: ConfigMap diff --git a/charts/celestia-local/templates/deployment.yaml b/charts/celestia-local/templates/deployment.yaml index 91b740cc4d..42fb310e23 100644 --- a/charts/celestia-local/templates/deployment.yaml +++ b/charts/celestia-local/templates/deployment.yaml @@ -63,6 +63,8 @@ spec: name: app-rpc - containerPort: {{ .Values.ports.celestiaAppGrpcPort }} name: app-grpc + - containerPort: {{ .Values.ports.celestiaAppRestPort }} + name: app-rest - name: celestia-bridge command: [ "/scripts/start-bridge.sh" ] image: {{ .Values.celestiaNodeImage }} diff --git a/charts/celestia-local/templates/ingress.yaml b/charts/celestia-local/templates/ingress.yaml index 72ed4a93f1..978a3fd8d8 100644 --- a/charts/celestia-local/templates/ingress.yaml +++ b/charts/celestia-local/templates/ingress.yaml @@ -6,6 +6,9 @@ metadata: labels: app: celestia-local-network annotations: + nginx.ingress.kubernetes.io/enable-cors: "true" + # allow requests from bridge web app + 
nginx.ingress.kubernetes.io/cors-allow-origin: "http://localhost:3000" nginx.ingress.kubernetes.io/proxy-read-timeout: "3600" nginx.ingress.kubernetes.io/proxy-send-timeout: "3600" spec: @@ -51,3 +54,13 @@ spec: name: celestia-app-service port: name: app-rpc + - host: rest.app.celestia.localdev.me + http: + paths: + - path: / + pathType: Prefix + backend: + service: + name: celestia-app-service + port: + name: app-rest diff --git a/charts/celestia-local/templates/service.yaml b/charts/celestia-local/templates/service.yaml index dff906974a..50d52107f5 100644 --- a/charts/celestia-local/templates/service.yaml +++ b/charts/celestia-local/templates/service.yaml @@ -29,3 +29,6 @@ spec: - name: app-grpc port: {{ .Values.ports.celestiaAppGrpcPort }} targetPort: app-grpc + - name: app-rest + port: {{ .Values.ports.celestiaAppRestPort }} + targetPort: app-rest diff --git a/charts/celestia-local/values.yaml b/charts/celestia-local/values.yaml index 396e81778f..c98d2840e8 100644 --- a/charts/celestia-local/values.yaml +++ b/charts/celestia-local/values.yaml @@ -15,7 +15,7 @@ storage: persistentVolumeName: "celestia-shared-storage" path: "/data/celestia-data" -celestiaAppImage: "ghcr.io/celestiaorg/celestia-app:v1.8.0" +celestiaAppImage: "ghcr.io/celestiaorg/celestia-app:v1.9.0" celestiaNodeImage: "ghcr.io/celestiaorg/celestia-node:v0.13.4" podSecurityContext: @@ -37,10 +37,19 @@ validatorMnemonic: connect soon random stable toddler tired glove drastic comfor coins: "10000000000000000000000000utia" # Staking amount validatorStake: "5000000000utia" +# ibc account +ibcAccountKeyName: "ibc-account" +# mnemonic for ibc account, so we can add its key and get them funded during genesis +ibcAccountMnemonic: globe elegant people render embark whisper pumpkin local thing shallow front surround domain wave drill peasant duck midnight call slight ball segment token will +# a dev account to aid in development and testing. can use mnemonic to import into Keplr +# address celestia1m0ksdjl2p5nzhqy3p47fksv52at3ln885xvl96 +devAccountName: "dev-account" +devAccountMnemonic: enrich avocado local net will avoid dizzy truth column excuse ready lesson # Default service ports ports: celestiaAppHostPort: 26657 celestiaAppGrpcPort: 9090 + celestiaAppRestPort: 1317 bridgeRPC: 26658 bridgeHTTP: 26659 diff --git a/charts/deploy.just b/charts/deploy.just index 463e3ded7a..e00aaff400 100644 --- a/charts/deploy.just +++ b/charts/deploy.just @@ -82,7 +82,7 @@ deploy-sequencers: (deploy-sequencer "node0") (deploy-sequencer "node1") (deploy deploy-hermes-local: helm install hermes-local-chart ./charts/hermes \ -n astria-dev-cluster \ - -f dev/values/hermes/local.yml + -f dev/values/hermes/local.yaml delete-hermes-local: @just delete chart hermes-local diff --git a/charts/evm-rollup/Chart.yaml b/charts/evm-rollup/Chart.yaml index c0d1b8ccbb..3655046926 100644 --- a/charts/evm-rollup/Chart.yaml +++ b/charts/evm-rollup/Chart.yaml @@ -15,7 +15,7 @@ type: application # This is the chart version. This version number should be incremented each time you make changes # to the chart and its templates, including the app version. # Versions are expected to follow Semantic Versioning (https://semver.org/) -version: 0.23.1 +version: 0.23.2 # This is the version number of the application being deployed. This version number should be # incremented each time you make changes to the application. 
Versions are not expected to @@ -34,5 +34,7 @@ maintainers: url: astria.org - name: quasystaty1 url: astria.org + - name: steezeburger + url: astria.org - name: joroshiba url: astria.org diff --git a/charts/hermes/Chart.yaml b/charts/hermes/Chart.yaml index c25069bfc6..5f27586f67 100644 --- a/charts/hermes/Chart.yaml +++ b/charts/hermes/Chart.yaml @@ -15,7 +15,7 @@ type: application # This is the chart version. This version number should be incremented each time you make changes # to the chart and its templates, including the app version. # Versions are expected to follow Semantic Versioning (https://semver.org/) -version: 0.3.0 +version: 0.4.0 # This is the version number of the application being deployed. This version number should be # incremented each time you make changes to the application. Versions are not expected to @@ -26,5 +26,7 @@ appVersion: "1.8.2" maintainers: - name: wafflesvonmaple url: astria.org + - name: steezeburger + url: astria.org - name: joroshiba url: astria.org diff --git a/charts/hermes/files/config.toml b/charts/hermes/files/config.toml index 84bac59ace..cdc9a90ce2 100644 --- a/charts/hermes/files/config.toml +++ b/charts/hermes/files/config.toml @@ -57,7 +57,6 @@ account_prefix = '{{ $chain.accountPrefix }}' key_name = '{{ $chain.key.name }}' store_prefix = '{{ $chain.storePrefix }}' ccv_consumer_chain = {{ default false $chain.ccvConsumerChain }} -ccv_consumer_chain = {{ default false $chain.trustedNode }} {{- if eq $chain.eventSource.mode "push" }} event_source = { mode = 'push', url = '{{ $chain.eventSource.url }}', batch_delay='{{ $chain.eventSource.batchDelay }}' } {{- else if eq $chain.eventSource.mode "pull" }} diff --git a/charts/hermes/templates/deployment.yaml b/charts/hermes/templates/deployment.yaml index 90a082c54b..b3452a1b8f 100644 --- a/charts/hermes/templates/deployment.yaml +++ b/charts/hermes/templates/deployment.yaml @@ -54,6 +54,11 @@ spec: name: telemetry - containerPort: {{ .Values.ports.tracingServer }} name: tracing + # TODO - implement a readiness probe so we can wait on hermes startup correctly +{{/* readinessProbe:*/}} +{{/* httpGet:*/}} +{{/* path: /channels*/}} +{{/* port: {{ .Values.ports.rest }}*/}} volumes: - name: {{ include "hermes.fullname" . }}-config-volume configMap: diff --git a/charts/hermes/values.yaml b/charts/hermes/values.yaml index 73e5bc4bec..6da9886614 100644 --- a/charts/hermes/values.yaml +++ b/charts/hermes/values.yaml @@ -13,7 +13,7 @@ nameOverride: "" # the destination chain assets createChannel: enabled: false - chainA: '' # For local test is equencer-test-chain-0 + chainA: '' # For local test is sequencer-test-chain-0 portA: '' # likely "transfer" chainB: '' # For default local test celestia-local-0 portB: '' # likely "transfer" @@ -96,7 +96,7 @@ chains: # secret: # resourceName: projects/$PROJECT_ID/secrets/celestia-ibc-relay-key/versions/latest # gasPrice: 0.12 - # denom: 'utia' + # gasDenom: 'utia' # gasMultiplier: 1.1 # clockDrift: 20s # maxBlockTime: 10s diff --git a/dev/bridgetester.just b/dev/bridgetester.just new file mode 100644 index 0000000000..0396c84dba --- /dev/null +++ b/dev/bridgetester.just @@ -0,0 +1,70 @@ +# deploy infra for an ibc bridge smoke test +@deploy-ibc-test-infra tag=defaultTag: + echo "Deploying ingress controller..." && just deploy-ingress-controller > /dev/null + just wait-for-ingress-controller > /dev/null + echo "Deploying local celestia instance..." 
&& just deploy celestia-local > /dev/null + helm dependency update ./charts/sequencer > /dev/null + helm dependency update ./charts/evm-rollup > /dev/null + echo "Setting up single astria sequencer..." && helm install \ + -n astria-validator-single single-sequencer-chart charts/sequencer \ + -f ./dev/values/validators/all.yml \ + -f ./dev/values/validators/single.yml \ + {{ if tag != '' { replace('--set images.sequencer.devTag=# --set sequencer-relayer.images.sequencerRelayer.devTag=#', '#', tag) } else { '' } }} \ + --create-namespace > /dev/null + just wait-for-sequencer > /dev/null + echo "Starting EVM rollup..." && helm install -n astria-dev-cluster astria-chain-chart ./charts/evm-rollup \ + -f ./dev/values/rollup/dev.yaml \ + -f ./dev/values/rollup/ibc-bridge-test.yaml \ + {{ if tag != '' { replace('--set images.conductor.devTag=# --set images.composer.devTag=#', '#', tag) } else { '' } }} \ + --set config.blockscout.enabled=false \ + --set config.faucet.enabled=false > /dev/null + just wait-for-dev-rollup > /dev/null + echo "Deploying Hermes and creating IBC channel..." + just deploy hermes-local > /dev/null + kubectl wait -n astria-dev-cluster deployment hermes-local-chart --for=condition=Available=True --timeout=300s + +# delete infra used for the ibc bridge test +delete-ibc-test-infra: + -just delete celestia-local + -just delete sequencer + -just delete rollup + -just delete hermes-local + -just delete bridge-tester + +# deploy a bridge tester chart that runs a job to test the ibc bridge +deploy-bridge-tester tag='local' namespace=defaultNamespace: + helm install --debug bridge-tester-chart ./charts/bridge-test \ + --namespace {{namespace}} \ + --set bridgeTesterUtilityImage=ghcr.io/astriaorg/bridge-tester-utility:{{tag}} + +# delete the bridge tester release +delete-bridge-tester namespace=defaultNamespace: + helm uninstall bridge-tester-chart --namespace {{namespace}} + +########### +# helpers # +########### + +# NOTE - can't build for darwin because base images don't support it +default_target_platform := 'linux/amd64' + +# build bridge tester utility image, load into cluster +build-and-load-bridge-tester-image tag='local' target_platform=default_target_platform: + docker buildx build \ + --load \ + --platform {{target_platform}} \ + -f dev/containerfiles/bridgetesterutility.Dockerfile \ + -t ghcr.io/astriaorg/bridge-tester-utility:{{tag}} . + just load-image ghcr.io/astriaorg/bridge-tester-utility:{{tag}} + +# build the astria-geth image and load into cluster. NOTE - assumes astria-geth is sibling directory to monorepo +build-and-load-astria-geth: + @echo "building astria-geth local docker image..." + cd ../astria-geth && docker buildx build -f Dockerfile -t ghcr.io/astriaorg/astria-geth:local . + just load-image ghcr.io/astriaorg/astria-geth:local + +# load astria-go and astria-geth images into cluster +load-images: + #just load-image ghcr.io/astriaorg/astria-go:local + just load-image ghcr.io/astriaorg/bridge-tester-utility:local + just load-image ghcr.io/astriaorg/astria-geth:local diff --git a/dev/containerfiles/bridgetesterutility.Dockerfile b/dev/containerfiles/bridgetesterutility.Dockerfile new file mode 100644 index 0000000000..83d755ebd3 --- /dev/null +++ b/dev/containerfiles/bridgetesterutility.Dockerfile @@ -0,0 +1,44 @@ +FROM ubuntu:22.04 + +# This is a utility image for testing the bridge. +# It contains the Celestia app and the Astria CLI, plus some bash utilities for testing. 
+ +# dependencies needed for testing +RUN apt-get update && apt-get install -y --no-install-recommends \ + curl \ + bc \ + jq \ + sed \ + ca-certificates \ + coreutils \ + && rm -rf /var/lib/apt/lists/* + +WORKDIR /home + +ARG CELESTIA_VERSION=v1.9.0 +ARG ASTRIA_VERSION=nightly-2024-07-10 + +# download architecture-specific binaries +ARG TARGETPLATFORM +RUN echo "TARGETPLATFORM: $TARGETPLATFORM" +RUN if [ "$TARGETPLATFORM" = "darwin/arm64" ]; then \ + curl -L "https://github.com/celestiaorg/celestia-app/releases/download/$CELESTIA_VERSION/celestia-app_Darwin_arm64.tar.gz" -o celestia-appd.tar.gz; \ + curl -L "https://github.com/astriaorg/astria-cli-go/releases/download/$ASTRIA_VERSION/astria-go-$ASTRIA_VERSION-darwin-arm64.tar.gz" -o astria-go.tar.gz; \ + elif [ "$TARGETPLATFORM" = "darwin/amd64" ]; then \ + curl -L "https://github.com/celestiaorg/celestia-app/releases/download/$CELESTIA_VERSION/celestia-app_Darwin_x86_64.tar.gz" -o celestia-appd.tar.gz; \ + curl -L "https://github.com/astriaorg/astria-cli-go/releases/download/$ASTRIA_VERSION/astria-go-$ASTRIA_VERSION-darwin-amd64.tar.gz" -o astria-go.tar.gz; \ + elif [ "$TARGETPLATFORM" = "linux/amd64" ]; then \ + curl -L "https://github.com/celestiaorg/celestia-app/releases/download/$CELESTIA_VERSION/celestia-app_Linux_x86_64.tar.gz" -o celestia-appd.tar.gz; \ + curl -L "https://github.com/astriaorg/astria-cli-go/releases/download/$ASTRIA_VERSION/astria-go-$ASTRIA_VERSION-linux-amd64.tar.gz" -o astria-go.tar.gz; \ + else \ + echo "Unsupported architecture"; \ + echo "TARGETPLATFORM: $TARGETPLATFORM"; \ + exit 1; \ + fi + +# untar and move to bin +RUN tar -xzvf celestia-appd.tar.gz && mv celestia-appd /usr/local/bin/celestia-appd && \ + tar -xzvf astria-go.tar.gz && mv astria-go /usr/local/bin/astria-go && \ + chmod +x /usr/local/bin/celestia-appd /usr/local/bin/astria-go + +CMD ["echo", "This is the bridge tester utility image!"] diff --git a/dev/values/hermes/local.yml b/dev/values/hermes/local.yaml similarity index 63% rename from dev/values/hermes/local.yml rename to dev/values/hermes/local.yaml index c3c4d930e8..f9d7d0f789 100644 --- a/dev/values/hermes/local.yml +++ b/dev/values/hermes/local.yaml @@ -1,3 +1,5 @@ +image: "ghcr.io/astriaorg/hermes:0.2.0" + createChannel: enabled: true chainA: sequencer-test-chain-0 @@ -5,14 +7,18 @@ createChannel: chainB: celestia-local-0 portB: transfer +secretProvider: + enabled: false + chains: sequencer-test-chain-0: type: Astria rpcAddr: http://node0-sequencer-rpc-service.astria-dev-cluster.svc.cluster.local:26657 grpcAddr: http://node0-sequencer-grpc-service.astria-dev-cluster.svc.cluster.local:8080 - accountPrefix: not_used rpcTimeout: 15s - storePrefix: ibc_data + ccvConsumerChain: false + accountPrefix: not_used + storePrefix: ibc-data key: name: astria-wallet devContent: @@ -28,37 +34,42 @@ chains: gasPrice: 1 gasDenom: nria trustThreshold: 2/3 + trustingPeriod: 14days celestia-local-0: type: CosmosSdk compatMode: "0.34" rpcAddr: http://celestia-app-service.astria-dev-cluster.svc.cluster.local:26657 grpcAddr: http://celestia-app-service.astria-dev-cluster.svc.cluster.local:9090 - eventSource: - mode: push - url: http://celestia-app-service.astria-dev-cluster.svc.cluster.local:26657/websocket - batchDelay: 500ms rpcTimeout: 15s + ccvConsumerChain: false accountPrefix: celestia + storePrefix: ibc key: name: celestia + # ibc account key devContent: - private_key: 8241386890823ca14743e5d4d583f879a5236af29f454ed4da6fe62b8439e2ab - public_key: 
03b0effa59e7eee365a888b4d2fa4c9be82a4925df255d4443151d61b11ac63714 - address: [39, 166, 219, 243, 73, 131, 245, 143, 2, 206, 64, 203, 217, 165, 252, 194, 189, 5, 171, 220] + private_key: cd8fe707b8e4743e681e0bc91d11583a9fc816ae34ad9cb844174b1e4afbae56 + public_key: 02b4348667a0b25ccb662c71dc0920d46f1d67a14424665f0162266b5747eb1b07 + address: [111, 5, 223, 20, 76, 255, 226, 98, 86, 19, 67, 73, 207, 27, 16, 245, 78, 102, 228, 160] address_type: Cosmos - account: celestia1y7ndhu6fs06c7qkwgr9anf0uc27st27uwdj6vq + account: celestia1duza79zvll3xy4sngdyu7xcs748xde9q2fcae0 secret: resourceName: projects/$PROJECT_ID/secrets/celestia-ibc-relay-key/versions/latest filename: celestia.json key: token - denom: "utia" - gasPrice: 0.1 - gasMultiplier: 1.5 - defaultGas: "100000" - maxGas: "400000" + eventSource: + mode: push + url: ws://celestia-app-service.astria-dev-cluster.svc.cluster.local:26657/websocket + batchDelay: 200ms + gasDenom: "utia" + gasPrice: 0.0026 + gasMultiplier: 1.2 + defaultGas: "1000000" + maxGas: "10000000" maxMsgNum: 30 - maxTxSize: "180000" + maxTxSize: "2097152" clockDrift: 5s maxBlockTime: 30s - trustThreshold: 1/3 + trustThreshold: 2/3 + trustingPeriod: 14days diff --git a/dev/values/rollup/ibc-bridge-test.yaml b/dev/values/rollup/ibc-bridge-test.yaml new file mode 100644 index 0000000000..825e2e4451 --- /dev/null +++ b/dev/values/rollup/ibc-bridge-test.yaml @@ -0,0 +1,10 @@ +# this file contains overrides that are used for the ibc bridge tests + +config: + rollup: + genesis: + bridgeAddresses: + - bridgeAddress: "astria1d7zjjljc0dsmxa545xkpwxym86g8uvvwhtezcr" + startHeight: 1 + assetDenom: "transfer/channel-0/utia" + assetPrecision: 6 diff --git a/dev/values/validators/all.yml b/dev/values/validators/all.yml index 621ce1d11e..0b482a4e62 100644 --- a/dev/values/validators/all.yml +++ b/dev/values/validators/all.yml @@ -33,6 +33,9 @@ genesis: # This account is used for bridge in smoke test. - address: astria13ahqz4pjqfmynk9ylrqv4fwe4957x2p0h5782u balance: "48" + # address of the bridge account. needs funds to sign bridge init tx + - address: astria1d7zjjljc0dsmxa545xkpwxym86g8uvvwhtezcr + balance: "69000000" resources: cometbft: diff --git a/justfile b/justfile index b9a98f653a..e6fc5e91f5 100644 --- a/justfile +++ b/justfile @@ -1,4 +1,5 @@ import 'charts/deploy.just' +import 'dev/bridgetester.just' default: @just --list From 45a6415bceeb9a52137d6ec909bbe32bfa9d344e Mon Sep 17 00:00:00 2001 From: Fraser Hutchison <190532+Fraser999@users.noreply.github.com> Date: Wed, 17 Jul 2024 16:51:04 +0100 Subject: [PATCH 23/24] fix(sequencer): improve and fix instrumentation (#1255) ## Summary Generally improved instrumentation, including a fix for `App::execute_transaction`. ## Background There were a couple of async blocks spawned in tokio tasks which were not instrumented, resulting in misleading tracing data for `App::execute_transaction`. While investigating this, I discovered several instances of tracing fields using `Debug` output, and also a few functions which seemed to me like they would benefit from being instrumented. ## Changes - Applied the parent tracing span to the two spawned tasks. - ~Replaced many `Debug` fields with `Display` ones. They were all down to not being skipped via `skip_all`, so I replaced all instances of `skip(...)` with `skip_all` meaning any fields to be included have to be explicitly listed.~ - Removed all fields from instrumentation to cut down on tracing/log noise. - Added instrumentation to the transaction-checking functions and mempool. 
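As a rough illustration of the span-propagation change in the first bullet above (names and the spawned work are illustrative, not the actual `App::execute_transaction` code), spawning a future with `Instrument::in_current_span` keeps it attached to the caller's span, and `skip_all` stops instrumented functions from recording their arguments:

```rust
use tracing::{info, instrument, Instrument as _};

// `skip_all` records no arguments; anything worth tracing must be added explicitly.
#[instrument(name = "App::execute_transaction", skip_all)]
async fn execute_transaction(tx: Vec<u8>) {
    // Without `.in_current_span()`, the spawned task would start a fresh span tree and
    // its events would not appear under `App::execute_transaction`.
    let check = tokio::spawn(
        async move {
            info!(len = tx.len(), "running stateless checks");
        }
        .in_current_span(),
    );
    check.await.expect("spawned task panicked");
}

#[tokio::main]
async fn main() {
    tracing_subscriber::fmt().init();
    execute_transaction(vec![1, 2, 3]).await;
}
```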
**Note:** ~I tried to largely preserve the fields which were previously being instrumented, just changing them from `Debug` to `Display`. A very few had no `Display` impl, so I excluded them. I think that almost all of the fields could be omitted in a follow-up PR, since I don't think we get much benefit from including things like addresses or balances in instrumentation, and that only serves to clutter the output. Potentially some of these fields are duplicated in the call chain, so even restricting their inclusion to the relevant top-level function would help.~ ## Testing Manually ran some tests with instrumentation enabled and eyeballed the output. --- .../astria-sequencer/src/accounts/action.rs | 8 +--- .../src/accounts/component.rs | 6 +-- .../src/accounts/state_ext.rs | 16 +++---- .../astria-sequencer/src/address/state_ext.rs | 4 +- crates/astria-sequencer/src/app/mod.rs | 17 +++---- .../astria-sequencer/src/asset/state_ext.rs | 6 +-- .../src/authority/component.rs | 6 +-- .../src/authority/state_ext.rs | 14 +++--- .../astria-sequencer/src/bridge/component.rs | 6 +-- .../astria-sequencer/src/bridge/state_ext.rs | 48 +++++++++---------- crates/astria-sequencer/src/grpc/sequencer.rs | 4 +- crates/astria-sequencer/src/ibc/component.rs | 6 +-- .../src/ibc/ics20_withdrawal.rs | 6 +-- crates/astria-sequencer/src/ibc/state_ext.rs | 18 +++---- crates/astria-sequencer/src/mempool/mod.rs | 14 +++++- .../astria-sequencer/src/sequence/action.rs | 7 +-- .../src/sequence/component.rs | 6 +-- .../src/sequence/state_ext.rs | 8 ++-- .../astria-sequencer/src/service/consensus.rs | 28 ++--------- .../astria-sequencer/src/service/info/mod.rs | 2 +- .../astria-sequencer/src/service/mempool.rs | 6 ++- crates/astria-sequencer/src/state_ext.rs | 38 +++++++-------- .../src/transaction/checks.rs | 6 +++ .../astria-sequencer/src/transaction/mod.rs | 10 ++-- 24 files changed, 139 insertions(+), 151 deletions(-) diff --git a/crates/astria-sequencer/src/accounts/action.rs b/crates/astria-sequencer/src/accounts/action.rs index c4b7c036ca..a36e9f2c59 100644 --- a/crates/astria-sequencer/src/accounts/action.rs +++ b/crates/astria-sequencer/src/accounts/action.rs @@ -105,13 +105,7 @@ impl ActionHandler for TransferAction { .context("stateful transfer check failed") } - #[instrument( - skip_all, - fields( - to = self.to.to_string(), - amount = self.amount, - ) - )] + #[instrument(skip_all)] async fn execute(&self, state: &mut S, from: Address) -> Result<()> { let fee = state .get_transfer_base_fee() diff --git a/crates/astria-sequencer/src/accounts/component.rs b/crates/astria-sequencer/src/accounts/component.rs index e52e81aabe..870a5a381c 100644 --- a/crates/astria-sequencer/src/accounts/component.rs +++ b/crates/astria-sequencer/src/accounts/component.rs @@ -23,7 +23,7 @@ pub(crate) struct AccountsComponent; impl Component for AccountsComponent { type AppState = astria_core::sequencer::GenesisState; - #[instrument(name = "AccountsComponent::init_chain", skip(state))] + #[instrument(name = "AccountsComponent::init_chain", skip_all)] async fn init_chain(mut state: S, app_state: &Self::AppState) -> Result<()> { let native_asset = get_native_asset(); for account in app_state.accounts() { @@ -38,7 +38,7 @@ impl Component for AccountsComponent { Ok(()) } - #[instrument(name = "AccountsComponent::begin_block", skip(_state))] + #[instrument(name = "AccountsComponent::begin_block", skip_all)] async fn begin_block( _state: &mut Arc, _begin_block: &BeginBlock, @@ -46,7 +46,7 @@ impl Component for AccountsComponent { Ok(()) } 
- #[instrument(name = "AccountsComponent::end_block", skip(_state))] + #[instrument(name = "AccountsComponent::end_block", skip_all)] async fn end_block( _state: &mut Arc, _end_block: &EndBlock, diff --git a/crates/astria-sequencer/src/accounts/state_ext.rs b/crates/astria-sequencer/src/accounts/state_ext.rs index bee8646eef..34fc7edad6 100644 --- a/crates/astria-sequencer/src/accounts/state_ext.rs +++ b/crates/astria-sequencer/src/accounts/state_ext.rs @@ -65,7 +65,7 @@ fn nonce_storage_key(address: Address) -> String { #[async_trait] pub(crate) trait StateReadExt: StateRead { - #[instrument(skip_all, fields(address=%address))] + #[instrument(skip_all)] async fn get_account_balances(&self, address: Address) -> Result> { use crate::asset::state_ext::StateReadExt as _; @@ -117,7 +117,7 @@ pub(crate) trait StateReadExt: StateRead { Ok(balances) } - #[instrument(skip_all, fields(address=%address, %asset))] + #[instrument(skip_all)] async fn get_account_balance<'a, TAsset>(&self, address: Address, asset: TAsset) -> Result where TAsset: Into + std::fmt::Display + Send, @@ -133,7 +133,7 @@ pub(crate) trait StateReadExt: StateRead { Ok(balance) } - #[instrument(skip_all, fields(address=%address))] + #[instrument(skip_all)] async fn get_account_nonce(&self, address: Address) -> Result { let bytes = self .get_raw(&nonce_storage_key(address)) @@ -167,7 +167,7 @@ impl StateReadExt for T {} #[async_trait] pub(crate) trait StateWriteExt: StateWrite { - #[instrument(skip(self, asset), fields(%asset))] + #[instrument(skip_all)] fn put_account_balance( &mut self, address: Address, @@ -182,14 +182,14 @@ pub(crate) trait StateWriteExt: StateWrite { Ok(()) } - #[instrument(skip(self))] + #[instrument(skip_all)] fn put_account_nonce(&mut self, address: Address, nonce: u32) -> Result<()> { let bytes = borsh::to_vec(&Nonce(nonce)).context("failed to serialize nonce")?; self.put_raw(nonce_storage_key(address), bytes); Ok(()) } - #[instrument(skip(self, asset), fields(%asset))] + #[instrument(skip_all)] async fn increase_balance( &mut self, address: Address, @@ -215,7 +215,7 @@ pub(crate) trait StateWriteExt: StateWrite { Ok(()) } - #[instrument(skip(self, asset), fields(%asset))] + #[instrument(skip_all)] async fn decrease_balance( &mut self, address: Address, @@ -241,7 +241,7 @@ pub(crate) trait StateWriteExt: StateWrite { Ok(()) } - #[instrument(skip(self))] + #[instrument(skip_all)] fn put_transfer_base_fee(&mut self, fee: u128) -> Result<()> { let bytes = borsh::to_vec(&Fee(fee)).context("failed to serialize fee")?; self.put_raw(TRANSFER_BASE_FEE_STORAGE_KEY.to_string(), bytes); diff --git a/crates/astria-sequencer/src/address/state_ext.rs b/crates/astria-sequencer/src/address/state_ext.rs index 69c8efc8c1..917e6ce422 100644 --- a/crates/astria-sequencer/src/address/state_ext.rs +++ b/crates/astria-sequencer/src/address/state_ext.rs @@ -16,7 +16,7 @@ fn base_prefix_key() -> &'static str { #[async_trait] pub(crate) trait StateReadExt: StateRead { - #[instrument(skip(self))] + #[instrument(skip_all)] async fn get_base_prefix(&self) -> Result { let Some(bytes) = self .get_raw(base_prefix_key()) @@ -33,7 +33,7 @@ impl StateReadExt for T {} #[async_trait] pub(crate) trait StateWriteExt: StateWrite { - #[instrument(skip(self))] + #[instrument(skip_all)] fn put_base_prefix(&mut self, prefix: &str) { self.put_raw(base_prefix_key().into(), prefix.into()); } diff --git a/crates/astria-sequencer/src/app/mod.rs b/crates/astria-sequencer/src/app/mod.rs index 5ce65cd86c..3946032dfa 100644 --- 
a/crates/astria-sequencer/src/app/mod.rs +++ b/crates/astria-sequencer/src/app/mod.rs @@ -59,6 +59,7 @@ use tracing::{ debug, info, instrument, + Instrument as _, }; use crate::{ @@ -984,21 +985,21 @@ impl App { } /// Executes a signed transaction. - #[instrument(name = "App::execute_transaction", skip_all, fields( - signed_transaction_hash = %telemetry::display::base64(&signed_tx.sha256_of_proto_encoding()), - sender_address_bytes = %telemetry::display::base64(&signed_tx.address_bytes()), - ))] + #[instrument(name = "App::execute_transaction", skip_all)] pub(crate) async fn execute_transaction( &mut self, signed_tx: Arc, ) -> anyhow::Result> { let signed_tx_2 = signed_tx.clone(); - let stateless = - tokio::spawn(async move { transaction::check_stateless(&signed_tx_2).await }); + let stateless = tokio::spawn( + async move { transaction::check_stateless(&signed_tx_2).await }.in_current_span(), + ); let signed_tx_2 = signed_tx.clone(); let state2 = self.state.clone(); - let stateful = - tokio::spawn(async move { transaction::check_stateful(&signed_tx_2, &state2).await }); + let stateful = tokio::spawn( + async move { transaction::check_stateful(&signed_tx_2, &state2).await } + .in_current_span(), + ); stateless .await diff --git a/crates/astria-sequencer/src/asset/state_ext.rs b/crates/astria-sequencer/src/asset/state_ext.rs index 4a40d1c290..84a0e37601 100644 --- a/crates/astria-sequencer/src/asset/state_ext.rs +++ b/crates/astria-sequencer/src/asset/state_ext.rs @@ -27,7 +27,7 @@ fn asset_storage_key>(asset: TAsset) -> String #[async_trait] pub(crate) trait StateReadExt: StateRead { - #[instrument(skip(self, asset), fields(%asset))] + #[instrument(skip_all)] async fn has_ibc_asset(&self, asset: TAsset) -> Result where TAsset: Into + std::fmt::Display + Send, @@ -39,7 +39,7 @@ pub(crate) trait StateReadExt: StateRead { .is_some()) } - #[instrument(skip(self))] + #[instrument(skip_all)] async fn map_ibc_to_trace_prefixed_asset( &self, asset: asset::IbcPrefixed, @@ -65,7 +65,7 @@ impl StateReadExt for T {} #[async_trait] pub(crate) trait StateWriteExt: StateWrite { - #[instrument(skip(self))] + #[instrument(skip_all)] fn put_ibc_asset(&mut self, asset: &denom::TracePrefixed) -> Result<()> { let bytes = borsh::to_vec(&DenominationTrace(asset.to_string())) .context("failed to serialize asset")?; diff --git a/crates/astria-sequencer/src/authority/component.rs b/crates/astria-sequencer/src/authority/component.rs index c3557fe28c..0d1aa6eb09 100644 --- a/crates/astria-sequencer/src/authority/component.rs +++ b/crates/astria-sequencer/src/authority/component.rs @@ -34,7 +34,7 @@ pub(crate) struct AuthorityComponentAppState { impl Component for AuthorityComponent { type AppState = AuthorityComponentAppState; - #[instrument(name = "AuthorityComponent::init_chain", skip(state))] + #[instrument(name = "AuthorityComponent::init_chain", skip_all)] async fn init_chain(mut state: S, app_state: &Self::AppState) -> Result<()> { // set sudo key and initial validator set state @@ -47,7 +47,7 @@ impl Component for AuthorityComponent { Ok(()) } - #[instrument(name = "AuthorityComponent::begin_block", skip(state))] + #[instrument(name = "AuthorityComponent::begin_block", skip_all)] async fn begin_block( state: &mut Arc, begin_block: &BeginBlock, @@ -69,7 +69,7 @@ impl Component for AuthorityComponent { Ok(()) } - #[instrument(name = "AuthorityComponent::end_block", skip(state))] + #[instrument(name = "AuthorityComponent::end_block", skip_all)] async fn end_block( state: &mut Arc, _end_block: &EndBlock, diff --git 
a/crates/astria-sequencer/src/authority/state_ext.rs b/crates/astria-sequencer/src/authority/state_ext.rs index ccfee70bc0..34adde2737 100644 --- a/crates/astria-sequencer/src/authority/state_ext.rs +++ b/crates/astria-sequencer/src/authority/state_ext.rs @@ -107,7 +107,7 @@ const VALIDATOR_UPDATES_KEY: &[u8] = b"valupdates"; #[async_trait] pub(crate) trait StateReadExt: StateRead { - #[instrument(skip(self))] + #[instrument(skip_all)] async fn get_sudo_address(&self) -> Result
{ let Some(bytes) = self .get_raw(SUDO_STORAGE_KEY) @@ -122,7 +122,7 @@ pub(crate) trait StateReadExt: StateRead { Ok(crate::address::base_prefixed(address_bytes)) } - #[instrument(skip(self))] + #[instrument(skip_all)] async fn get_validator_set(&self) -> Result { let Some(bytes) = self .get_raw(VALIDATOR_SET_STORAGE_KEY) @@ -138,7 +138,7 @@ pub(crate) trait StateReadExt: StateRead { Ok(ValidatorSet(validator_set)) } - #[instrument(skip(self))] + #[instrument(skip_all)] async fn get_validator_updates(&self) -> Result { let Some(bytes) = self .nonverifiable_get_raw(VALIDATOR_UPDATES_KEY) @@ -159,7 +159,7 @@ impl StateReadExt for T {} #[async_trait] pub(crate) trait StateWriteExt: StateWrite { - #[instrument(skip(self))] + #[instrument(skip_all)] fn put_sudo_address(&mut self, address: Address) -> Result<()> { self.put_raw( SUDO_STORAGE_KEY.to_string(), @@ -169,7 +169,7 @@ pub(crate) trait StateWriteExt: StateWrite { Ok(()) } - #[instrument(skip(self))] + #[instrument(skip_all)] fn put_validator_set(&mut self, validator_set: ValidatorSet) -> Result<()> { self.put_raw( VALIDATOR_SET_STORAGE_KEY.to_string(), @@ -178,7 +178,7 @@ pub(crate) trait StateWriteExt: StateWrite { Ok(()) } - #[instrument(skip(self))] + #[instrument(skip_all)] fn put_validator_updates(&mut self, validator_updates: ValidatorSet) -> Result<()> { self.nonverifiable_put_raw( VALIDATOR_UPDATES_KEY.to_vec(), @@ -188,7 +188,7 @@ pub(crate) trait StateWriteExt: StateWrite { Ok(()) } - #[instrument(skip(self))] + #[instrument(skip_all)] fn clear_validator_updates(&mut self) { self.nonverifiable_delete(VALIDATOR_UPDATES_KEY.to_vec()); } diff --git a/crates/astria-sequencer/src/bridge/component.rs b/crates/astria-sequencer/src/bridge/component.rs index e719d666a4..cdc8f9b98b 100644 --- a/crates/astria-sequencer/src/bridge/component.rs +++ b/crates/astria-sequencer/src/bridge/component.rs @@ -17,7 +17,7 @@ pub(crate) struct BridgeComponent; impl Component for BridgeComponent { type AppState = astria_core::sequencer::GenesisState; - #[instrument(name = "BridgeComponent::init_chain", skip(state))] + #[instrument(name = "BridgeComponent::init_chain", skip_all)] async fn init_chain(mut state: S, app_state: &Self::AppState) -> Result<()> { state.put_init_bridge_account_base_fee(app_state.fees().init_bridge_account_base_fee); state.put_bridge_lock_byte_cost_multiplier( @@ -27,7 +27,7 @@ impl Component for BridgeComponent { Ok(()) } - #[instrument(name = "BridgeComponent::begin_block", skip(_state))] + #[instrument(name = "BridgeComponent::begin_block", skip_all)] async fn begin_block( _state: &mut Arc, _begin_block: &BeginBlock, @@ -35,7 +35,7 @@ impl Component for BridgeComponent { Ok(()) } - #[instrument(name = "BridgeComponent::end_block", skip(_state))] + #[instrument(name = "BridgeComponent::end_block", skip_all)] async fn end_block( _state: &mut Arc, _end_block: &EndBlock, diff --git a/crates/astria-sequencer/src/bridge/state_ext.rs b/crates/astria-sequencer/src/bridge/state_ext.rs index ebd9f3132d..a4ad85d2c3 100644 --- a/crates/astria-sequencer/src/bridge/state_ext.rs +++ b/crates/astria-sequencer/src/bridge/state_ext.rs @@ -140,7 +140,7 @@ fn last_transaction_hash_for_bridge_account_storage_key(address: &Address) -> Ve #[async_trait] pub(crate) trait StateReadExt: StateRead { - #[instrument(skip(self))] + #[instrument(skip_all)] async fn get_bridge_account_rollup_id(&self, address: &Address) -> Result> { let Some(rollup_id_bytes) = self .get_raw(&rollup_id_storage_key(address)) @@ -156,7 +156,7 @@ pub(crate) trait StateReadExt: 
StateRead { Ok(Some(rollup_id)) } - #[instrument(skip(self))] + #[instrument(skip_all)] async fn get_bridge_account_ibc_asset(&self, address: &Address) -> Result { let bytes = self .get_raw(&asset_id_storage_key(address)) @@ -168,7 +168,7 @@ pub(crate) trait StateReadExt: StateRead { Ok(asset::IbcPrefixed::new(id.0)) } - #[instrument(skip(self))] + #[instrument(skip_all)] async fn get_bridge_account_sudo_address( &self, bridge_address: &Address, @@ -187,7 +187,7 @@ pub(crate) trait StateReadExt: StateRead { Ok(Some(sudo_address)) } - #[instrument(skip(self))] + #[instrument(skip_all)] async fn get_bridge_account_withdrawer_address( &self, bridge_address: &Address, @@ -208,7 +208,7 @@ pub(crate) trait StateReadExt: StateRead { Ok(Some(withdrawer_address)) } - #[instrument(skip(self))] + #[instrument(skip_all)] async fn get_deposit_nonce(&self, rollup_id: &RollupId) -> Result { let bytes = self .nonverifiable_get_raw(&deposit_nonce_storage_key(rollup_id)) @@ -226,7 +226,7 @@ pub(crate) trait StateReadExt: StateRead { Ok(nonce) } - #[instrument(skip(self))] + #[instrument(skip_all)] async fn get_deposit_rollup_ids(&self) -> Result> { let mut stream = std::pin::pin!(self.nonverifiable_prefix_raw(DEPOSIT_PREFIX.as_bytes())); let mut rollup_ids = HashSet::new(); @@ -247,7 +247,7 @@ pub(crate) trait StateReadExt: StateRead { Ok(rollup_ids) } - #[instrument(skip(self))] + #[instrument(skip_all)] async fn get_deposit_events(&self, rollup_id: &RollupId) -> Result> { let mut stream = std::pin::pin!( self.nonverifiable_prefix_raw(deposit_storage_key_prefix(rollup_id).as_bytes()) @@ -261,7 +261,7 @@ pub(crate) trait StateReadExt: StateRead { Ok(deposits) } - #[instrument(skip(self))] + #[instrument(skip_all)] async fn get_block_deposits(&self) -> Result>> { let deposit_rollup_ids = self .get_deposit_rollup_ids() @@ -278,7 +278,7 @@ pub(crate) trait StateReadExt: StateRead { Ok(deposit_events) } - #[instrument(skip(self))] + #[instrument(skip_all)] async fn get_init_bridge_account_base_fee(&self) -> Result { let bytes = self .get_raw(INIT_BRIDGE_ACCOUNT_BASE_FEE_STORAGE_KEY) @@ -289,7 +289,7 @@ pub(crate) trait StateReadExt: StateRead { Ok(fee) } - #[instrument(skip(self))] + #[instrument(skip_all)] async fn get_bridge_lock_byte_cost_multiplier(&self) -> Result { let bytes = self .get_raw(BRIDGE_LOCK_BYTE_COST_MULTIPLIER_STORAGE_KEY) @@ -300,7 +300,7 @@ pub(crate) trait StateReadExt: StateRead { Ok(fee) } - #[instrument(skip(self))] + #[instrument(skip_all)] async fn get_bridge_sudo_change_base_fee(&self) -> Result { let bytes = self .get_raw(BRIDGE_SUDO_CHANGE_FEE_STORAGE_KEY) @@ -311,7 +311,7 @@ pub(crate) trait StateReadExt: StateRead { Ok(fee) } - #[instrument(skip(self))] + #[instrument(skip_all)] async fn get_last_transaction_hash_for_bridge_account( &self, address: &Address, @@ -337,12 +337,12 @@ impl StateReadExt for T {} #[async_trait] pub(crate) trait StateWriteExt: StateWrite { - #[instrument(skip(self))] + #[instrument(skip_all)] fn put_bridge_account_rollup_id(&mut self, address: &Address, rollup_id: &RollupId) { self.put_raw(rollup_id_storage_key(address), rollup_id.to_vec()); } - #[instrument(skip(self, asset), fields(%asset))] + #[instrument(skip_all)] fn put_bridge_account_ibc_asset( &mut self, address: &Address, @@ -359,7 +359,7 @@ pub(crate) trait StateWriteExt: StateWrite { Ok(()) } - #[instrument(skip(self))] + #[instrument(skip_all)] fn put_bridge_account_sudo_address( &mut self, bridge_address: &Address, @@ -371,7 +371,7 @@ pub(crate) trait StateWriteExt: StateWrite { ); } - 
#[instrument(skip(self))] + #[instrument(skip_all)] fn put_bridge_account_withdrawer_address( &mut self, bridge_address: &Address, @@ -386,7 +386,7 @@ pub(crate) trait StateWriteExt: StateWrite { // the deposit "nonce" for a given rollup ID during a given block. // this is only used to generate storage keys for each of the deposits within a block, // and is reset to 0 at the beginning of each block. - #[instrument(skip(self))] + #[instrument(skip_all)] fn put_deposit_nonce(&mut self, rollup_id: &RollupId, nonce: u32) { self.nonverifiable_put_raw( deposit_nonce_storage_key(rollup_id), @@ -394,7 +394,7 @@ pub(crate) trait StateWriteExt: StateWrite { ); } - #[instrument(skip(self))] + #[instrument(skip_all)] async fn put_deposit_event(&mut self, deposit: Deposit) -> Result<()> { let nonce = self.get_deposit_nonce(deposit.rollup_id()).await?; self.put_deposit_nonce( @@ -408,7 +408,7 @@ pub(crate) trait StateWriteExt: StateWrite { } // clears the deposit nonce and all deposits for for a given rollup ID. - #[instrument(skip(self))] + #[instrument(skip_all)] async fn clear_deposit_info(&mut self, rollup_id: &RollupId) { self.nonverifiable_delete(deposit_nonce_storage_key(rollup_id)); let mut stream = std::pin::pin!( @@ -419,7 +419,7 @@ pub(crate) trait StateWriteExt: StateWrite { } } - #[instrument(skip(self))] + #[instrument(skip_all)] async fn clear_block_deposits(&mut self) -> Result<()> { let deposit_rollup_ids = self .get_deposit_rollup_ids() @@ -431,7 +431,7 @@ pub(crate) trait StateWriteExt: StateWrite { Ok(()) } - #[instrument(skip(self))] + #[instrument(skip_all)] fn put_init_bridge_account_base_fee(&mut self, fee: u128) { self.put_raw( INIT_BRIDGE_ACCOUNT_BASE_FEE_STORAGE_KEY.to_string(), @@ -439,7 +439,7 @@ pub(crate) trait StateWriteExt: StateWrite { ); } - #[instrument(skip(self))] + #[instrument(skip_all)] fn put_bridge_lock_byte_cost_multiplier(&mut self, fee: u128) { self.put_raw( BRIDGE_LOCK_BYTE_COST_MULTIPLIER_STORAGE_KEY.to_string(), @@ -447,7 +447,7 @@ pub(crate) trait StateWriteExt: StateWrite { ); } - #[instrument(skip(self))] + #[instrument(skip_all)] fn put_bridge_sudo_change_base_fee(&mut self, fee: u128) { self.put_raw( BRIDGE_SUDO_CHANGE_FEE_STORAGE_KEY.to_string(), @@ -455,7 +455,7 @@ pub(crate) trait StateWriteExt: StateWrite { ); } - #[instrument(skip(self))] + #[instrument(skip_all)] fn put_last_transaction_hash_for_bridge_account( &mut self, address: &Address, diff --git a/crates/astria-sequencer/src/grpc/sequencer.rs b/crates/astria-sequencer/src/grpc/sequencer.rs index f8fb17e0c4..24fc096800 100644 --- a/crates/astria-sequencer/src/grpc/sequencer.rs +++ b/crates/astria-sequencer/src/grpc/sequencer.rs @@ -47,7 +47,7 @@ impl SequencerServer { #[async_trait::async_trait] impl SequencerService for SequencerServer { /// Given a block height, returns the sequencer block at that height. - #[instrument(skip_all, fields(height = request.get_ref().height))] + #[instrument(skip_all)] async fn get_sequencer_block( self: Arc, request: Request, @@ -77,7 +77,7 @@ impl SequencerService for SequencerServer { /// Given a block height and set of rollup ids, returns a SequencerBlock which /// is filtered to contain only the transactions that are relevant to the given rollup. 
- #[instrument(skip_all, fields(height = request.get_ref().height))] + #[instrument(skip_all)] async fn get_filtered_sequencer_block( self: Arc, request: Request, diff --git a/crates/astria-sequencer/src/ibc/component.rs b/crates/astria-sequencer/src/ibc/component.rs index 19aa1fcab9..3a44b6865d 100644 --- a/crates/astria-sequencer/src/ibc/component.rs +++ b/crates/astria-sequencer/src/ibc/component.rs @@ -29,7 +29,7 @@ pub(crate) struct IbcComponent; impl Component for IbcComponent { type AppState = astria_core::sequencer::GenesisState; - #[instrument(name = "IbcComponent::init_chain", skip(state))] + #[instrument(name = "IbcComponent::init_chain", skip_all)] async fn init_chain(mut state: S, app_state: &Self::AppState) -> Result<()> { Ibc::init_chain( &mut state, @@ -53,7 +53,7 @@ impl Component for IbcComponent { Ok(()) } - #[instrument(name = "IbcComponent::begin_block", skip(state))] + #[instrument(name = "IbcComponent::begin_block", skip_all)] async fn begin_block( state: &mut Arc, begin_block: &BeginBlock, @@ -62,7 +62,7 @@ impl Component for IbcComponent { Ok(()) } - #[instrument(name = "IbcComponent::end_block", skip(state))] + #[instrument(name = "IbcComponent::end_block", skip_all)] async fn end_block( state: &mut Arc, end_block: &EndBlock, diff --git a/crates/astria-sequencer/src/ibc/ics20_withdrawal.rs b/crates/astria-sequencer/src/ibc/ics20_withdrawal.rs index 93c76ba6e1..e2677cdff0 100644 --- a/crates/astria-sequencer/src/ibc/ics20_withdrawal.rs +++ b/crates/astria-sequencer/src/ibc/ics20_withdrawal.rs @@ -98,7 +98,7 @@ async fn ics20_withdrawal_check_stateful_bridge_account Result<()> { ensure!(self.timeout_time() != 0, "timeout time must be non-zero",); @@ -116,7 +116,7 @@ impl ActionHandler for action::Ics20Withdrawal { Ok(()) } - #[instrument(skip(self, state))] + #[instrument(skip_all)] async fn check_stateful( &self, state: &S, @@ -175,7 +175,7 @@ impl ActionHandler for action::Ics20Withdrawal { Ok(()) } - #[instrument(skip(self, state))] + #[instrument(skip_all)] async fn execute(&self, state: &mut S, from: Address) -> Result<()> { let fee = state .get_ics20_withdrawal_base_fee() diff --git a/crates/astria-sequencer/src/ibc/state_ext.rs b/crates/astria-sequencer/src/ibc/state_ext.rs index adda68485e..3e979972c7 100644 --- a/crates/astria-sequencer/src/ibc/state_ext.rs +++ b/crates/astria-sequencer/src/ibc/state_ext.rs @@ -67,7 +67,7 @@ fn ibc_relayer_key(address: &Address) -> String { #[async_trait] pub(crate) trait StateReadExt: StateRead { - #[instrument(skip(self, asset), fields(%asset))] + #[instrument(skip_all)] async fn get_ibc_channel_balance( &self, channel: &ChannelId, @@ -88,7 +88,7 @@ pub(crate) trait StateReadExt: StateRead { Ok(balance) } - #[instrument(skip(self))] + #[instrument(skip_all)] async fn get_ibc_sudo_address(&self) -> Result
{ let Some(bytes) = self .get_raw(IBC_SUDO_STORAGE_KEY) @@ -103,7 +103,7 @@ pub(crate) trait StateReadExt: StateRead { Ok(crate::address::base_prefixed(address_bytes)) } - #[instrument(skip(self))] + #[instrument(skip_all)] async fn is_ibc_relayer(&self, address: &Address) -> Result { Ok(self .get_raw(&ibc_relayer_key(address)) @@ -112,7 +112,7 @@ pub(crate) trait StateReadExt: StateRead { .is_some()) } - #[instrument(skip(self))] + #[instrument(skip_all)] async fn get_ics20_withdrawal_base_fee(&self) -> Result { let Some(bytes) = self .get_raw(ICS20_WITHDRAWAL_BASE_FEE_STORAGE_KEY) @@ -130,7 +130,7 @@ impl StateReadExt for T {} #[async_trait] pub(crate) trait StateWriteExt: StateWrite { - #[instrument(skip(self, asset), fields(%asset))] + #[instrument(skip_all)] fn put_ibc_channel_balance( &mut self, channel: &ChannelId, @@ -145,7 +145,7 @@ pub(crate) trait StateWriteExt: StateWrite { Ok(()) } - #[instrument(skip(self))] + #[instrument(skip_all)] fn put_ibc_sudo_address(&mut self, address: Address) -> Result<()> { self.put_raw( IBC_SUDO_STORAGE_KEY.to_string(), @@ -155,17 +155,17 @@ pub(crate) trait StateWriteExt: StateWrite { Ok(()) } - #[instrument(skip(self))] + #[instrument(skip_all)] fn put_ibc_relayer_address(&mut self, address: &Address) { self.put_raw(ibc_relayer_key(address), vec![]); } - #[instrument(skip(self))] + #[instrument(skip_all)] fn delete_ibc_relayer_address(&mut self, address: &Address) { self.delete(ibc_relayer_key(address)); } - #[instrument(skip(self))] + #[instrument(skip_all)] fn put_ics20_withdrawal_base_fee(&mut self, fee: u128) -> Result<()> { self.put_raw( ICS20_WITHDRAWAL_BASE_FEE_STORAGE_KEY.to_string(), diff --git a/crates/astria-sequencer/src/mempool/mod.rs b/crates/astria-sequencer/src/mempool/mod.rs index dcce7c3fc8..ad76dd25d1 100644 --- a/crates/astria-sequencer/src/mempool/mod.rs +++ b/crates/astria-sequencer/src/mempool/mod.rs @@ -35,7 +35,10 @@ use tokio::{ Instant, }, }; -use tracing::debug; +use tracing::{ + debug, + instrument, +}; type MempoolQueue = PriorityQueue; @@ -223,6 +226,7 @@ impl Mempool { /// returns the number of transactions in the mempool #[must_use] + #[instrument(skip_all)] pub(crate) async fn len(&self) -> usize { self.queue.read().await.len() } @@ -230,6 +234,7 @@ impl Mempool { /// inserts a transaction into the mempool /// /// note: the oldest timestamp from found priorities is maintained. + #[instrument(skip_all)] pub(crate) async fn insert( &self, tx: SignedTransaction, @@ -245,6 +250,7 @@ impl Mempool { /// inserts all the given transactions into the mempool /// /// note: the oldest timestamp from found priorities for an `EnqueuedTransaction` is maintained. 
+ #[instrument(skip_all)] pub(crate) async fn insert_all(&self, txs: Vec<(EnqueuedTransaction, TransactionPriority)>) { let mut queue = self.queue.write().await; @@ -287,11 +293,13 @@ impl Mempool { /// pops the transaction with the highest priority from the mempool #[must_use] + #[instrument(skip_all)] pub(crate) async fn pop(&self) -> Option<(EnqueuedTransaction, TransactionPriority)> { self.queue.write().await.pop() } /// removes a transaction from the mempool + #[instrument(skip_all)] pub(crate) async fn remove(&self, tx_hash: [u8; 32]) { let (signed_tx, address) = dummy_signed_tx(); let enqueued_tx = EnqueuedTransaction { @@ -303,6 +311,7 @@ impl Mempool { } /// signal that the transaction should be removed from the `CometBFT` mempool + #[instrument(skip_all)] pub(crate) async fn track_removal_comet_bft(&self, tx_hash: [u8; 32], reason: RemovalReason) { self.comet_bft_removal_cache .write() @@ -312,6 +321,7 @@ impl Mempool { /// checks if a transaction was flagged to be removed from the `CometBFT` mempool /// and removes entry + #[instrument(skip_all)] pub(crate) async fn check_removed_comet_bft(&self, tx_hash: [u8; 32]) -> Option { self.comet_bft_removal_cache.write().await.remove(tx_hash) } @@ -321,6 +331,7 @@ impl Mempool { /// /// *NOTE*: this function locks the mempool until every tx has been checked. This could /// potentially stall consensus from moving to the next round if the mempool is large. + #[instrument(skip_all)] pub(crate) async fn run_maintenance( &self, current_account_nonce_getter: F, @@ -378,6 +389,7 @@ impl Mempool { /// returns the pending nonce for the given address, /// if it exists in the mempool. + #[instrument(skip_all)] pub(crate) async fn pending_nonce(&self, address: &Address) -> Option { let inner = self.queue.read().await; let mut nonce = None; diff --git a/crates/astria-sequencer/src/sequence/action.rs b/crates/astria-sequencer/src/sequence/action.rs index 31347d1e00..3b4353445d 100644 --- a/crates/astria-sequencer/src/sequence/action.rs +++ b/crates/astria-sequencer/src/sequence/action.rs @@ -55,12 +55,7 @@ impl ActionHandler for SequenceAction { Ok(()) } - #[instrument( - skip_all, - fields( - from = from.to_string(), - ) - )] + #[instrument(skip_all)] async fn execute(&self, state: &mut S, from: Address) -> Result<()> { let fee = calculate_fee_from_state(&self.data, state) .await diff --git a/crates/astria-sequencer/src/sequence/component.rs b/crates/astria-sequencer/src/sequence/component.rs index 2a7c710dcb..84265321a5 100644 --- a/crates/astria-sequencer/src/sequence/component.rs +++ b/crates/astria-sequencer/src/sequence/component.rs @@ -17,7 +17,7 @@ pub(crate) struct SequenceComponent; impl Component for SequenceComponent { type AppState = astria_core::sequencer::GenesisState; - #[instrument(name = "SequenceComponent::init_chain", skip(state))] + #[instrument(name = "SequenceComponent::init_chain", skip_all)] async fn init_chain(mut state: S, app_state: &Self::AppState) -> Result<()> { state.put_sequence_action_base_fee(app_state.fees().sequence_base_fee); state.put_sequence_action_byte_cost_multiplier( @@ -26,7 +26,7 @@ impl Component for SequenceComponent { Ok(()) } - #[instrument(name = "SequenceComponent::begin_block", skip(_state))] + #[instrument(name = "SequenceComponent::begin_block", skip_all)] async fn begin_block( _state: &mut Arc, _begin_block: &BeginBlock, @@ -34,7 +34,7 @@ impl Component for SequenceComponent { Ok(()) } - #[instrument(name = "SequenceComponent::end_block", skip(_state))] + #[instrument(name = 
"SequenceComponent::end_block", skip_all)] async fn end_block( _state: &mut Arc, _end_block: &EndBlock, diff --git a/crates/astria-sequencer/src/sequence/state_ext.rs b/crates/astria-sequencer/src/sequence/state_ext.rs index 50345e42e5..ad17e4ef72 100644 --- a/crates/astria-sequencer/src/sequence/state_ext.rs +++ b/crates/astria-sequencer/src/sequence/state_ext.rs @@ -23,7 +23,7 @@ struct Fee(u128); #[async_trait] pub(crate) trait StateReadExt: StateRead { - #[instrument(skip(self))] + #[instrument(skip_all)] async fn get_sequence_action_base_fee(&self) -> Result { let bytes = self .get_raw(SEQUENCE_ACTION_BASE_FEE_STORAGE_KEY) @@ -34,7 +34,7 @@ pub(crate) trait StateReadExt: StateRead { Ok(fee) } - #[instrument(skip(self))] + #[instrument(skip_all)] async fn get_sequence_action_byte_cost_multiplier(&self) -> Result { let bytes = self .get_raw(SEQUENCE_ACTION_BYTE_COST_MULTIPLIER_STORAGE_KEY) @@ -50,7 +50,7 @@ impl StateReadExt for T {} #[async_trait] pub(crate) trait StateWriteExt: StateWrite { - #[instrument(skip(self))] + #[instrument(skip_all)] fn put_sequence_action_base_fee(&mut self, fee: u128) { self.put_raw( SEQUENCE_ACTION_BASE_FEE_STORAGE_KEY.to_string(), @@ -58,7 +58,7 @@ pub(crate) trait StateWriteExt: StateWrite { ); } - #[instrument(skip(self))] + #[instrument(skip_all)] fn put_sequence_action_byte_cost_multiplier(&mut self, fee: u128) { self.put_raw( SEQUENCE_ACTION_BYTE_COST_MULTIPLIER_STORAGE_KEY.to_string(), diff --git a/crates/astria-sequencer/src/service/consensus.rs b/crates/astria-sequencer/src/service/consensus.rs index acb92a0a80..7e8a6e8244 100644 --- a/crates/astria-sequencer/src/service/consensus.rs +++ b/crates/astria-sequencer/src/service/consensus.rs @@ -115,11 +115,7 @@ impl Consensus { }) } - #[instrument(skip_all, fields( - chain_id = init_chain.chain_id, - time = %init_chain.time, - init_height = %init_chain.initial_height - ))] + #[instrument(skip_all)] async fn init_chain( &mut self, init_chain: request::InitChain, @@ -159,11 +155,7 @@ impl Consensus { }) } - #[instrument(skip_all, fields( - height = %prepare_proposal.height, - tx_count = prepare_proposal.txs.len(), - time = %prepare_proposal.time - ))] + #[instrument(skip_all)] async fn handle_prepare_proposal( &mut self, prepare_proposal: request::PrepareProposal, @@ -173,14 +165,7 @@ impl Consensus { .await } - #[instrument(skip_all, fields( - height = %process_proposal.height, - time = %process_proposal.time, - tx_count = process_proposal.txs.len(), - proposer = %process_proposal.proposer_address, - hash = %telemetry::display::base64(&process_proposal.hash), - next_validators_hash = %telemetry::display::base64(&process_proposal.next_validators_hash), - ))] + #[instrument(skip_all)] async fn handle_process_proposal( &mut self, process_proposal: request::ProcessProposal, @@ -192,12 +177,7 @@ impl Consensus { Ok(()) } - #[instrument(skip_all, fields( - hash = %finalize_block.hash, - height = %finalize_block.height, - time = %finalize_block.time, - proposer = %finalize_block.proposer_address - ))] + #[instrument(skip_all)] async fn finalize_block( &mut self, finalize_block: request::FinalizeBlock, diff --git a/crates/astria-sequencer/src/service/info/mod.rs b/crates/astria-sequencer/src/service/info/mod.rs index 873383e715..121e85f4bc 100644 --- a/crates/astria-sequencer/src/service/info/mod.rs +++ b/crates/astria-sequencer/src/service/info/mod.rs @@ -88,7 +88,7 @@ impl Info { }) } - #[instrument(skip(self))] + #[instrument(skip_all)] async fn handle_info_request(self, request: InfoRequest) -> Result { 
match request { InfoRequest::Info(_) => { diff --git a/crates/astria-sequencer/src/service/mempool.rs b/crates/astria-sequencer/src/service/mempool.rs index dc93c3e991..7f48d20587 100644 --- a/crates/astria-sequencer/src/service/mempool.rs +++ b/crates/astria-sequencer/src/service/mempool.rs @@ -28,7 +28,10 @@ use tendermint::v0_38::abci::{ }; use tower::Service; use tower_abci::BoxError; -use tracing::Instrument as _; +use tracing::{ + instrument, + Instrument as _, +}; use crate::{ accounts::state_ext::StateReadExt, @@ -98,6 +101,7 @@ impl Service for Mempool { /// /// If the tx passes all checks, status code 0 is returned. #[allow(clippy::too_many_lines)] +#[instrument(skip_all)] async fn handle_check_tx( req: request::CheckTx, state: S, diff --git a/crates/astria-sequencer/src/state_ext.rs b/crates/astria-sequencer/src/state_ext.rs index f8d4d77950..ec6daf5801 100644 --- a/crates/astria-sequencer/src/state_ext.rs +++ b/crates/astria-sequencer/src/state_ext.rs @@ -38,7 +38,7 @@ fn fee_asset_key>(asset: TAsset) -> String { #[async_trait] pub(crate) trait StateReadExt: StateRead { - #[instrument(skip(self))] + #[instrument(skip_all)] async fn get_chain_id(&self) -> Result { let Some(bytes) = self .get_raw("chain_id") @@ -54,7 +54,7 @@ pub(crate) trait StateReadExt: StateRead { .expect("only valid chain ids should be stored in the state")) } - #[instrument(skip(self))] + #[instrument(skip_all)] async fn get_revision_number(&self) -> Result { let Some(bytes) = self .get_raw(REVISION_NUMBER_KEY) @@ -74,7 +74,7 @@ pub(crate) trait StateReadExt: StateRead { Ok(u64::from_be_bytes(bytes)) } - #[instrument(skip(self))] + #[instrument(skip_all)] async fn get_block_height(&self) -> Result { let Some(bytes) = self .get_raw("block_height") @@ -89,7 +89,7 @@ pub(crate) trait StateReadExt: StateRead { Ok(u64::from_be_bytes(bytes)) } - #[instrument(skip(self))] + #[instrument(skip_all)] async fn get_block_timestamp(&self) -> Result