diff --git a/.config/nextest.toml b/.config/nextest.toml index ad567c11943e6..dfee93312a4dc 100644 --- a/.config/nextest.toml +++ b/.config/nextest.toml @@ -3,8 +3,8 @@ chisel-serial = { max-threads = 1 } polkadot-localnode-serial = { max-threads = 1 } [profile.default] -retries = { backoff = "exponential", count = 2, delay = "5s", jitter = true } -slow-timeout = { period = "1m", terminate-after = 3 } +retries = { backoff = "exponential", count = 3, delay = "10s", jitter = true } +slow-timeout = { period = "5m", terminate-after = 4 } [[profile.default.overrides]] filter = "test(/ext_integration|can_test_forge_std/)" diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 29362d25d5746..cb976c8618b2b 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -41,7 +41,7 @@ jobs: sudo apt-get update sudo apt-get install -y clang libclang-dev - name: Build documentation - run: cargo doc --workspace --all-features --no-deps --document-private-items + run: cargo +nightly doc --workspace --all-features --no-deps --document-private-items env: RUSTDOCFLAGS: --cfg docsrs -D warnings --show-type-layout --generate-link-to-definition --enable-index-page -Zunstable-options - name: Deploy documentation @@ -53,19 +53,19 @@ jobs: force_orphan: true doctest: - runs-on: ubuntu-latest + runs-on: parity-large-new timeout-minutes: 60 steps: - uses: actions/checkout@v4 - uses: dtolnay/rust-toolchain@1.88.0 + - name: Install build tools + run: | + sudo apt-get update + sudo apt-get install -y clang libclang-dev unzip build-essential - name: Install protobuf-compiler uses: arduino/setup-protoc@v3 with: repo-token: ${{ secrets.GITHUB_TOKEN }} - - name: Install clang on ubuntu - run: | - sudo apt-get update - sudo apt-get install -y clang libclang-dev - uses: dtolnay/rust-toolchain@1.88.0 with: target: wasm32-unknown-unknown diff --git a/Cargo.lock b/Cargo.lock index 699efddd176c2..9b469ae89dc99 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1331,16 
+1331,28 @@ dependencies = [ "ark-std 0.4.0", ] +[[package]] +name = "ark-bls12-377" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfedac3173d12820a5e0d6cd4de31b49719a74f4a41dc09b6652d0276a3b2cd4" +dependencies = [ + "ark-ec 0.5.0", + "ark-ff 0.5.0", + "ark-std 0.5.0", +] + [[package]] name = "ark-bls12-377-ext" -version = "0.4.1" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "20c7021f180a0cbea0380eba97c2af3c57074cdaffe0eef7e840e1c9f2841e55" +checksum = "e47f3bb6e4ef3c0edb795769fc11469767ce807ed1ccdc979ab101aea2dbf4b5" dependencies = [ - "ark-bls12-377", - "ark-ec 0.4.2", + "ark-bls12-377 0.5.0", + "ark-ec 0.5.0", + "ark-ff 0.5.0", "ark-models-ext", - "ark-std 0.4.0", + "ark-std 0.5.0", ] [[package]] @@ -1369,16 +1381,16 @@ dependencies = [ [[package]] name = "ark-bls12-381-ext" -version = "0.4.1" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1dc4b3d08f19e8ec06e949712f95b8361e43f1391d94f65e4234df03480631c" +checksum = "0f1dbb23366825700828d373d5fc9c07b7f92253ffed47ab455003b7590d786d" dependencies = [ - "ark-bls12-381 0.4.0", - "ark-ec 0.4.2", - "ark-ff 0.4.2", + "ark-bls12-381 0.5.0", + "ark-ec 0.5.0", + "ark-ff 0.5.0", "ark-models-ext", - "ark-serialize 0.4.2", - "ark-std 0.4.0", + "ark-serialize 0.5.0", + "ark-std 0.5.0", ] [[package]] @@ -1395,27 +1407,27 @@ dependencies = [ [[package]] name = "ark-bw6-761" -version = "0.4.0" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e0605daf0cc5aa2034b78d008aaf159f56901d92a52ee4f6ecdfdac4f426700" +checksum = "1cc9cae367e0c3c0b52e3ef13371122752654f45d0212ec7306fb0c1c012cd98" dependencies = [ - "ark-bls12-377", - "ark-ec 0.4.2", - "ark-ff 0.4.2", - "ark-std 0.4.0", + "ark-bls12-377 0.5.0", + "ark-ec 0.5.0", + "ark-ff 0.5.0", + "ark-std 0.5.0", ] [[package]] name = "ark-bw6-761-ext" -version = "0.4.1" +version = "0.5.0" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccee5fba47266f460067588ee1bf070a9c760bf2050c1c509982c5719aadb4f2" +checksum = "c6e1216f968e21c72fdaba53dbc9e547a8a60cc87b1dc74ac589727e906f9297" dependencies = [ "ark-bw6-761", - "ark-ec 0.4.2", - "ark-ff 0.4.2", + "ark-ec 0.5.0", + "ark-ff 0.5.0", "ark-models-ext", - "ark-std 0.4.0", + "ark-std 0.5.0", ] [[package]] @@ -1432,7 +1444,6 @@ dependencies = [ "hashbrown 0.13.2", "itertools 0.10.5", "num-traits", - "rayon", "zeroize", ] @@ -1460,39 +1471,27 @@ dependencies = [ [[package]] name = "ark-ed-on-bls12-377" -version = "0.4.0" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b10d901b9ac4b38f9c32beacedfadcdd64e46f8d7f8e88c1ae1060022cf6f6c6" +checksum = "ebbf817b2db27d2787009b2ff76304a5b90b4b01bb16aa8351701fd40f5f37b2" dependencies = [ - "ark-bls12-377", - "ark-ec 0.4.2", - "ark-ff 0.4.2", - "ark-std 0.4.0", + "ark-bls12-377 0.5.0", + "ark-ec 0.5.0", + "ark-ff 0.5.0", + "ark-std 0.5.0", ] [[package]] name = "ark-ed-on-bls12-377-ext" -version = "0.4.1" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "524a4fb7540df2e1a8c2e67a83ba1d1e6c3947f4f9342cc2359fc2e789ad731d" +checksum = "05093aa26f017411708e1271047852cc5f58686336f1f1a56fb2df747c3e173a" dependencies = [ - "ark-ec 0.4.2", + "ark-ec 0.5.0", "ark-ed-on-bls12-377", - "ark-ff 0.4.2", + "ark-ff 0.5.0", "ark-models-ext", - "ark-std 0.4.0", -] - -[[package]] -name = "ark-ed-on-bls12-381-bandersnatch" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9cde0f2aa063a2a5c28d39b47761aa102bda7c13c84fc118a61b87c7b2f785c" -dependencies = [ - "ark-bls12-381 0.4.0", - "ark-ec 0.4.2", - "ark-ff 0.4.2", - "ark-std 0.4.0", + "ark-std 0.5.0", ] [[package]] @@ -1509,15 +1508,15 @@ dependencies = [ [[package]] name = "ark-ed-on-bls12-381-bandersnatch-ext" -version = "0.4.1" +version = "0.5.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "d15185f1acb49a07ff8cbe5f11a1adc5a93b19e211e325d826ae98e98e124346" +checksum = "5e6dce0c47def6f25cf01022acded4f32732f577187dfcd1268510093ef16ea6" dependencies = [ - "ark-ec 0.4.2", - "ark-ed-on-bls12-381-bandersnatch 0.4.0", - "ark-ff 0.4.2", + "ark-ec 0.5.0", + "ark-ed-on-bls12-381-bandersnatch", + "ark-ff 0.5.0", "ark-models-ext", - "ark-std 0.4.0", + "ark-std 0.5.0", ] [[package]] @@ -1649,14 +1648,14 @@ dependencies = [ [[package]] name = "ark-models-ext" -version = "0.4.1" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e9eab5d4b5ff2f228b763d38442adc9b084b0a465409b059fac5c2308835ec2" +checksum = "ff772c552d00e9c092eab0608632342c553abbf6bca984008b55100a9a78a3a6" dependencies = [ - "ark-ec 0.4.2", - "ark-ff 0.4.2", - "ark-serialize 0.4.2", - "ark-std 0.4.0", + "ark-ec 0.5.0", + "ark-ff 0.5.0", + "ark-serialize 0.5.0", + "ark-std 0.5.0", "derivative", ] @@ -1718,26 +1717,14 @@ dependencies = [ "tracing-subscriber 0.2.25", ] -[[package]] -name = "ark-scale" -version = "0.0.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f69c00b3b529be29528a6f2fd5fa7b1790f8bed81b9cdca17e326538545a179" -dependencies = [ - "ark-ec 0.4.2", - "ark-ff 0.4.2", - "ark-serialize 0.4.2", - "ark-std 0.4.0", - "parity-scale-codec", - "scale-info", -] - [[package]] name = "ark-scale" version = "0.0.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "985c81a9c7b23a72f62b7b20686d5326d2a9956806f37de9ee35cb1238faf0c0" dependencies = [ + "ark-ec 0.5.0", + "ark-ff 0.5.0", "ark-serialize 0.5.0", "ark-std 0.5.0", "parity-scale-codec", @@ -1820,7 +1807,6 @@ checksum = "94893f1e0c6eeab764ade8dc4c0db24caf4fe7cbbaafc0eba0a9030f447b5185" dependencies = [ "num-traits", "rand 0.8.5", - "rayon", ] [[package]] @@ -1856,7 +1842,7 @@ checksum = "9501da18569b2afe0eb934fb7afd5a247d238b94116155af4dd068f319adfe6d" dependencies = [ 
"ark-bls12-381 0.5.0", "ark-ec 0.5.0", - "ark-ed-on-bls12-381-bandersnatch 0.5.0", + "ark-ed-on-bls12-381-bandersnatch", "ark-ff 0.5.0", "ark-serialize 0.5.0", "ark-std 0.5.0", @@ -1983,7 +1969,7 @@ checksum = "9b34d609dfbaf33d6889b2b7106d3ca345eacad44200913df5ba02bfd31d2ba9" [[package]] name = "asset-test-utils" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "cumulus-pallet-parachain-system", "cumulus-pallet-xcmp-queue", @@ -2013,7 +1999,7 @@ dependencies = [ [[package]] name = "assets-common" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "cumulus-primitives-core", "ethereum-standards", @@ -2835,7 +2821,7 @@ checksum = "230c5f1ca6a325a32553f8640d31ac9b49f2411e901e427570154868b46da4f7" [[package]] name = "binary-merkle-tree" version = "13.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "hash-db", "log", @@ -3156,7 +3142,7 @@ checksum = "26c4925bc979b677330a8c7fe7a8c94af2dbb4a2d37b4a20a80d884400f46baa" [[package]] name = "bp-header-chain" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "bp-runtime", "finality-grandpa", @@ -3173,7 +3159,7 @@ dependencies = [ [[package]] name = "bp-messages" version = 
"0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "bp-header-chain", "bp-runtime", @@ -3189,7 +3175,7 @@ dependencies = [ [[package]] name = "bp-parachains" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "bp-header-chain", "bp-polkadot-core", @@ -3206,7 +3192,7 @@ dependencies = [ [[package]] name = "bp-polkadot-core" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "bp-messages", "bp-runtime", @@ -3223,7 +3209,7 @@ dependencies = [ [[package]] name = "bp-relayers" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "bp-header-chain", "bp-messages", @@ -3241,7 +3227,7 @@ dependencies = [ [[package]] name = "bp-runtime" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-support", "frame-system", @@ -3264,7 +3250,7 @@ dependencies = [ [[package]] name = "bp-test-utils" version = "0.7.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "bp-header-chain", "bp-parachains", @@ -3284,7 +3270,7 @@ dependencies = [ [[package]] name = "bp-xcm-bridge-hub" version = "0.2.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "bp-messages", "bp-runtime", @@ -3301,7 +3287,7 @@ dependencies = [ [[package]] name = "bp-xcm-bridge-hub-router" version = "0.6.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "scale-info", @@ -3313,7 +3299,7 @@ dependencies = [ [[package]] name = "bridge-hub-common" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "cumulus-primitives-core", "frame-support", @@ -3332,7 +3318,7 @@ dependencies = [ [[package]] name = "bridge-hub-test-utils" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "asset-test-utils", "bp-header-chain", @@ -3374,7 +3360,7 @@ dependencies = [ [[package]] name = "bridge-runtime-common" version = "0.7.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "bp-header-chain", "bp-messages", @@ -4685,7 +4671,7 @@ dependencies = [ [[package]] name = "cumulus-pallet-aura-ext" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "cumulus-pallet-parachain-system", "frame-support", @@ -4702,7 +4688,7 @@ dependencies = [ [[package]] name = "cumulus-pallet-dmp-queue" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "cumulus-primitives-core", "frame-benchmarking", @@ -4719,8 +4705,9 @@ dependencies = [ [[package]] name = "cumulus-pallet-parachain-system" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ + "array-bytes", "bytes", "cumulus-pallet-parachain-system-proc-macro", "cumulus-primitives-core", @@ -4756,7 +4743,7 @@ dependencies = [ [[package]] name = "cumulus-pallet-parachain-system-proc-macro" version = "0.6.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "proc-macro-crate 3.3.0", "proc-macro2", @@ -4767,7 +4754,7 @@ 
dependencies = [ [[package]] name = "cumulus-pallet-session-benchmarking" version = "9.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -4780,7 +4767,7 @@ dependencies = [ [[package]] name = "cumulus-pallet-solo-to-para" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "cumulus-pallet-parachain-system", "frame-support", @@ -4795,7 +4782,7 @@ dependencies = [ [[package]] name = "cumulus-pallet-weight-reclaim" version = "1.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "cumulus-primitives-storage-weight-reclaim", "derive-where", @@ -4814,7 +4801,7 @@ dependencies = [ [[package]] name = "cumulus-pallet-xcm" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "cumulus-primitives-core", "frame-support", @@ -4829,7 +4816,7 @@ dependencies = [ [[package]] name = "cumulus-pallet-xcmp-queue" version = "0.7.1" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "approx", "bounded-collections 
0.3.2", @@ -4855,7 +4842,7 @@ dependencies = [ [[package]] name = "cumulus-ping" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "cumulus-pallet-xcm", "cumulus-primitives-core", @@ -4870,7 +4857,7 @@ dependencies = [ [[package]] name = "cumulus-primitives-aura" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "sp-api", "sp-consensus-aura", @@ -4879,7 +4866,7 @@ dependencies = [ [[package]] name = "cumulus-primitives-core" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "polkadot-core-primitives", @@ -4896,7 +4883,7 @@ dependencies = [ [[package]] name = "cumulus-primitives-parachain-inherent" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "async-trait", "cumulus-primitives-core", @@ -4910,7 +4897,7 @@ dependencies = [ [[package]] name = "cumulus-primitives-proof-size-hostfunction" version = "0.2.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "sp-externalities", 
"sp-runtime-interface", @@ -4920,7 +4907,7 @@ dependencies = [ [[package]] name = "cumulus-primitives-storage-weight-reclaim" version = "1.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "cumulus-primitives-core", "cumulus-primitives-proof-size-hostfunction", @@ -4937,7 +4924,7 @@ dependencies = [ [[package]] name = "cumulus-primitives-timestamp" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "cumulus-primitives-core", "sp-inherents", @@ -4947,7 +4934,7 @@ dependencies = [ [[package]] name = "cumulus-primitives-utility" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "cumulus-primitives-core", "frame-support", @@ -4964,7 +4951,7 @@ dependencies = [ [[package]] name = "cumulus-test-relay-sproof-builder" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "cumulus-primitives-core", "parity-scale-codec", @@ -5951,7 +5938,7 @@ dependencies = [ [[package]] name = "ethereum-standards" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "alloy-core", ] @@ -6517,7 +6504,7 @@ dependencies = [ [[package]] name = "fork-tree" version = "12.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", ] @@ -7226,7 +7213,7 @@ checksum = "28dd6caf6059519a65843af8fe2a3ae298b14b80179855aeb4adc2c1934ee619" [[package]] name = "frame-benchmarking" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-support", "frame-support-procedural", @@ -7250,7 +7237,7 @@ dependencies = [ [[package]] name = "frame-benchmarking-pallet-pov" version = "18.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -7278,7 +7265,7 @@ dependencies = [ [[package]] name = "frame-election-provider-solution-type" version = "13.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "proc-macro-crate 3.3.0", "proc-macro2", @@ -7289,7 +7276,7 @@ dependencies = [ [[package]] name = "frame-election-provider-support" version = "28.0.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-election-provider-solution-type", "frame-support", @@ -7306,7 +7293,7 @@ dependencies = [ [[package]] name = "frame-executive" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "aquamarine", "frame-support", @@ -7336,7 +7323,7 @@ dependencies = [ [[package]] name = "frame-metadata-hash-extension" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "array-bytes", "const-hex", @@ -7352,7 +7339,7 @@ dependencies = [ [[package]] name = "frame-support" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "aquamarine", "array-bytes", @@ -7393,7 +7380,7 @@ dependencies = [ [[package]] name = "frame-support-procedural" version = "23.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "Inflector", "cfg-expr", @@ -7413,7 +7400,7 @@ dependencies = [ [[package]] name = "frame-support-procedural-tools" version = "10.0.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-support-procedural-tools-derive", "proc-macro-crate 3.3.0", @@ -7425,7 +7412,7 @@ dependencies = [ [[package]] name = "frame-support-procedural-tools-derive" version = "11.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "proc-macro2", "quote", @@ -7435,7 +7422,7 @@ dependencies = [ [[package]] name = "frame-system" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "cfg-if", "docify", @@ -7454,7 +7441,7 @@ dependencies = [ [[package]] name = "frame-system-benchmarking" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -7468,7 +7455,7 @@ dependencies = [ [[package]] name = "frame-system-rpc-runtime-api" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "docify", "parity-scale-codec", @@ -7478,7 +7465,7 @@ dependencies = [ [[package]] name = "frame-try-runtime" version = "0.34.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-support", "parity-scale-codec", @@ -9129,9 +9116,9 @@ dependencies = [ [[package]] name = "jsonrpsee" -version = "0.24.9" +version = "0.24.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37b26c20e2178756451cfeb0661fb74c47dd5988cb7e3939de7e9241fd604d42" +checksum = "e281ae70cc3b98dac15fced3366a880949e65fc66e345ce857a5682d152f3e62" dependencies = [ "jsonrpsee-client-transport", "jsonrpsee-core", @@ -9147,9 +9134,9 @@ dependencies = [ [[package]] name = "jsonrpsee-client-transport" -version = "0.24.9" +version = "0.24.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bacb85abf4117092455e1573625e21b8f8ef4dec8aff13361140b2dc266cdff2" +checksum = "cc4280b709ac3bb5e16cf3bad5056a0ec8df55fa89edfe996361219aadc2c7ea" dependencies = [ "base64 0.22.1", "futures-channel", @@ -9172,9 +9159,9 @@ dependencies = [ [[package]] name = "jsonrpsee-core" -version = "0.24.9" +version = "0.24.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "456196007ca3a14db478346f58c7238028d55ee15c1df15115596e411ff27925" +checksum = "348ee569eaed52926b5e740aae20863762b16596476e943c9e415a6479021622" dependencies = [ "async-trait", "bytes", @@ -9199,9 +9186,9 @@ dependencies = [ [[package]] name = "jsonrpsee-http-client" -version = "0.24.9" +version = "0.24.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c872b6c9961a4ccc543e321bb5b89f6b2d2c7fe8b61906918273a3333c95400c" +checksum = "f50c389d6e6a52eb7c3548a6600c90cf74d9b71cb5912209833f00a5479e9a01" dependencies = [ "async-trait", "base64 0.22.1", @@ -9224,9 +9211,9 @@ dependencies = [ [[package]] name = "jsonrpsee-proc-macros" -version = "0.24.9" +version = "0.24.10" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e65763c942dfc9358146571911b0cd1c361c2d63e2d2305622d40d36376ca80" +checksum = "7398cddf5013cca4702862a2692b66c48a3bd6cf6ec681a47453c93d63cf8de5" dependencies = [ "heck 0.5.0", "proc-macro-crate 3.3.0", @@ -9237,9 +9224,9 @@ dependencies = [ [[package]] name = "jsonrpsee-server" -version = "0.24.9" +version = "0.24.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55e363146da18e50ad2b51a0a7925fc423137a0b1371af8235b1c231a0647328" +checksum = "21429bcdda37dcf2d43b68621b994adede0e28061f816b038b0f18c70c143d51" dependencies = [ "futures-util", "http 1.3.1", @@ -9264,9 +9251,9 @@ dependencies = [ [[package]] name = "jsonrpsee-types" -version = "0.24.9" +version = "0.24.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08a8e70baf945b6b5752fc8eb38c918a48f1234daf11355e07106d963f860089" +checksum = "b0f05e0028e55b15dbd2107163b3c744cd3bb4474f193f95d9708acbf5677e44" dependencies = [ "http 1.3.1", "serde", @@ -9276,9 +9263,9 @@ dependencies = [ [[package]] name = "jsonrpsee-wasm-client" -version = "0.24.9" +version = "0.24.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6558a9586cad43019dafd0b6311d0938f46efc116b34b28c74778bc11a2edf6" +checksum = "e9d745e4f543fc10fc0e2b11aa1f3be506b1e475d412167e7191a65ecd239f1c" dependencies = [ "jsonrpsee-client-transport", "jsonrpsee-core", @@ -9287,9 +9274,9 @@ dependencies = [ [[package]] name = "jsonrpsee-ws-client" -version = "0.24.9" +version = "0.24.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01b3323d890aa384f12148e8d2a1fd18eb66e9e7e825f9de4fa53bcc19b93eef" +checksum = "78fc744f17e7926d57f478cf9ca6e1ee5d8332bf0514860b1a3cdf1742e614cc" dependencies = [ "http 1.3.1", "jsonrpsee-client-transport", @@ -9410,9 +9397,9 @@ dependencies = [ [[package]] name = "kvdb-rocksdb" -version = "0.20.0" +version = "0.20.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8beb5ce840610e5a945f0306f6e7a2d5b3e68ea3e64e9a4f081fa4ee5aa6525" +checksum = "3b089b6062662d720a836f055931434439fcd3a90f0059db0b831a99da6db460" dependencies = [ "kvdb", "num_cpus", @@ -11462,7 +11449,7 @@ dependencies = [ [[package]] name = "pallet-alliance" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -11481,7 +11468,7 @@ dependencies = [ [[package]] name = "pallet-asset-conversion" version = "10.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -11499,7 +11486,7 @@ dependencies = [ [[package]] name = "pallet-asset-conversion-ops" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -11517,7 +11504,7 @@ dependencies = [ [[package]] name = "pallet-asset-conversion-tx-payment" version = "10.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -11532,7 +11519,7 @@ dependencies = [ [[package]] name = "pallet-asset-rate" version = "7.0.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -11546,7 +11533,7 @@ dependencies = [ [[package]] name = "pallet-asset-rewards" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -11564,7 +11551,7 @@ dependencies = [ [[package]] name = "pallet-asset-tx-payment" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -11580,7 +11567,7 @@ dependencies = [ [[package]] name = "pallet-assets" version = "29.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -11596,7 +11583,7 @@ dependencies = [ [[package]] name = "pallet-assets-freezer" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "log", "pallet-assets", @@ -11608,7 +11595,7 @@ dependencies = [ [[package]] name = "pallet-assets-holder" version = "0.1.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -11623,7 +11610,7 @@ dependencies = [ [[package]] name = "pallet-assets-precompiles" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "ethereum-standards", "frame-support", @@ -11634,7 +11621,7 @@ dependencies = [ [[package]] name = "pallet-atomic-swap" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "polkadot-sdk-frame", @@ -11644,7 +11631,7 @@ dependencies = [ [[package]] name = "pallet-aura" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-support", "frame-system", @@ -11660,7 +11647,7 @@ dependencies = [ [[package]] name = "pallet-authority-discovery" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-support", "frame-system", @@ -11675,7 +11662,7 @@ dependencies = [ [[package]] name = "pallet-authorship" version = "28.0.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-support", "frame-system", @@ -11688,7 +11675,7 @@ dependencies = [ [[package]] name = "pallet-babe" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -11711,7 +11698,7 @@ dependencies = [ [[package]] name = "pallet-bags-list" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "aquamarine", "docify", @@ -11732,7 +11719,7 @@ dependencies = [ [[package]] name = "pallet-balances" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "docify", "frame-benchmarking", @@ -11748,7 +11735,7 @@ dependencies = [ [[package]] name = "pallet-beefy" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-support", "frame-system", @@ -11767,7 +11754,7 @@ dependencies = [ [[package]] name = "pallet-beefy-mmr" version = "28.0.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "array-bytes", "binary-merkle-tree", @@ -11792,7 +11779,7 @@ dependencies = [ [[package]] name = "pallet-bounties" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -11809,7 +11796,7 @@ dependencies = [ [[package]] name = "pallet-bridge-grandpa" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "bp-header-chain", "bp-runtime", @@ -11828,7 +11815,7 @@ dependencies = [ [[package]] name = "pallet-bridge-messages" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "bp-header-chain", "bp-messages", @@ -11847,7 +11834,7 @@ dependencies = [ [[package]] name = "pallet-bridge-parachains" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "bp-header-chain", "bp-parachains", @@ -11867,7 +11854,7 @@ dependencies = [ [[package]] name = "pallet-bridge-relayers" version = "0.7.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "bp-header-chain", "bp-messages", @@ -11890,7 +11877,7 @@ dependencies = [ [[package]] name = "pallet-broker" version = "0.6.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "bitvec", "frame-benchmarking", @@ -11908,7 +11895,7 @@ dependencies = [ [[package]] name = "pallet-child-bounties" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -11926,7 +11913,7 @@ dependencies = [ [[package]] name = "pallet-collator-selection" version = "9.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -11945,7 +11932,7 @@ dependencies = [ [[package]] name = "pallet-collective" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "docify", "frame-benchmarking", @@ -11962,7 +11949,7 @@ dependencies = [ [[package]] name = "pallet-collective-content" version = "0.6.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -11976,7 +11963,7 @@ dependencies = [ [[package]] name = "pallet-contracts" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "environmental", "frame-benchmarking", @@ -12006,7 +11993,7 @@ dependencies = [ [[package]] name = "pallet-contracts-mock-network" version = "3.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-support", "frame-system", @@ -12037,7 +12024,7 @@ dependencies = [ [[package]] name = "pallet-contracts-proc-macro" version = "18.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "proc-macro2", "quote", @@ -12047,7 +12034,7 @@ dependencies = [ [[package]] name = "pallet-contracts-uapi" version = "5.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "bitflags 1.3.2", "parity-scale-codec", @@ -12058,7 +12045,7 @@ dependencies = [ [[package]] name = "pallet-conviction-voting" version = "28.0.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "assert_matches", "frame-benchmarking", @@ -12074,7 +12061,7 @@ dependencies = [ [[package]] name = "pallet-core-fellowship" version = "12.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -12092,7 +12079,7 @@ dependencies = [ [[package]] name = "pallet-delegated-staking" version = "1.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-support", "frame-system", @@ -12107,7 +12094,7 @@ dependencies = [ [[package]] name = "pallet-democracy" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -12124,7 +12111,7 @@ dependencies = [ [[package]] name = "pallet-derivatives" version = "1.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -12144,7 +12131,7 @@ dependencies = [ [[package]] name = "pallet-dev-mode" version = "10.0.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-support", "frame-system", @@ -12159,7 +12146,7 @@ dependencies = [ [[package]] name = "pallet-dummy-dim" version = "1.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -12177,7 +12164,7 @@ dependencies = [ [[package]] name = "pallet-election-provider-multi-block" version = "0.9.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-election-provider-support", @@ -12198,7 +12185,7 @@ dependencies = [ [[package]] name = "pallet-election-provider-multi-phase" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-election-provider-support", @@ -12219,7 +12206,7 @@ dependencies = [ [[package]] name = "pallet-election-provider-support-benchmarking" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-election-provider-support", @@ -12232,7 +12219,7 @@ dependencies = [ 
[[package]] name = "pallet-elections-phragmen" version = "29.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -12250,7 +12237,7 @@ dependencies = [ [[package]] name = "pallet-fast-unstake" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "docify", "frame-benchmarking", @@ -12268,7 +12255,7 @@ dependencies = [ [[package]] name = "pallet-glutton" version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "blake2 0.10.6", "frame-benchmarking", @@ -12286,7 +12273,7 @@ dependencies = [ [[package]] name = "pallet-grandpa" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -12308,7 +12295,7 @@ dependencies = [ [[package]] name = "pallet-identity" version = "29.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "enumflags2", "frame-benchmarking", @@ -12324,7 +12311,7 @@ dependencies = [ [[package]] name = "pallet-im-online" version = 
"27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -12343,7 +12330,7 @@ dependencies = [ [[package]] name = "pallet-indices" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -12358,7 +12345,7 @@ dependencies = [ [[package]] name = "pallet-insecure-randomness-collective-flip" version = "16.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "polkadot-sdk-frame", @@ -12369,7 +12356,7 @@ dependencies = [ [[package]] name = "pallet-lottery" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -12382,7 +12369,7 @@ dependencies = [ [[package]] name = "pallet-membership" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -12398,7 +12385,7 @@ dependencies = [ [[package]] name = "pallet-message-queue" version = "31.0.0" -source 
= "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "environmental", "frame-benchmarking", @@ -12417,7 +12404,7 @@ dependencies = [ [[package]] name = "pallet-meta-tx" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "docify", "frame-benchmarking", @@ -12435,7 +12422,7 @@ dependencies = [ [[package]] name = "pallet-migrations" version = "1.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "docify", "frame-benchmarking", @@ -12454,7 +12441,7 @@ dependencies = [ [[package]] name = "pallet-mixnet" version = "0.4.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "log", "parity-scale-codec", @@ -12468,7 +12455,7 @@ dependencies = [ [[package]] name = "pallet-mmr" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "log", "parity-scale-codec", @@ -12480,7 +12467,7 @@ dependencies = [ [[package]] name = "pallet-multi-asset-bounties" version = "1.0.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "docify", "frame-benchmarking", @@ -12497,7 +12484,7 @@ dependencies = [ [[package]] name = "pallet-multisig" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "log", "parity-scale-codec", @@ -12508,7 +12495,7 @@ dependencies = [ [[package]] name = "pallet-nft-fractionalization" version = "10.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "log", "pallet-assets", @@ -12521,7 +12508,7 @@ dependencies = [ [[package]] name = "pallet-nfts" version = "22.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "enumflags2", "frame-benchmarking", @@ -12538,7 +12525,7 @@ dependencies = [ [[package]] name = "pallet-nfts-runtime-api" version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "sp-api", @@ -12547,7 +12534,7 @@ dependencies = [ [[package]] name = "pallet-nis" version = "28.0.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "polkadot-sdk-frame", @@ -12557,7 +12544,7 @@ dependencies = [ [[package]] name = "pallet-node-authorization" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "log", "parity-scale-codec", @@ -12568,7 +12555,7 @@ dependencies = [ [[package]] name = "pallet-nomination-pools" version = "25.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-support", "frame-system", @@ -12586,7 +12573,7 @@ dependencies = [ [[package]] name = "pallet-nomination-pools-benchmarking" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-election-provider-support", @@ -12606,7 +12593,7 @@ dependencies = [ [[package]] name = "pallet-nomination-pools-runtime-api" version = "23.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "pallet-nomination-pools", "parity-scale-codec", @@ -12616,7 +12603,7 @@ dependencies = [ [[package]] name = "pallet-offences" version 
= "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-support", "frame-system", @@ -12631,7 +12618,7 @@ dependencies = [ [[package]] name = "pallet-offences-benchmarking" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-election-provider-support", @@ -12654,7 +12641,7 @@ dependencies = [ [[package]] name = "pallet-oracle" version = "1.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -12672,7 +12659,7 @@ dependencies = [ [[package]] name = "pallet-oracle-runtime-api" version = "1.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "scale-info", @@ -12683,7 +12670,7 @@ dependencies = [ [[package]] name = "pallet-origin-restriction" version = "1.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -12701,7 +12688,7 @@ dependencies = [ [[package]] name = "pallet-paged-list" version = "0.6.0" 
-source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "docify", "parity-scale-codec", @@ -12713,7 +12700,7 @@ dependencies = [ [[package]] name = "pallet-parameters" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "docify", "frame-benchmarking", @@ -12730,7 +12717,7 @@ dependencies = [ [[package]] name = "pallet-people" version = "1.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -12748,7 +12735,7 @@ dependencies = [ [[package]] name = "pallet-preimage" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -12764,7 +12751,7 @@ dependencies = [ [[package]] name = "pallet-proxy" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "polkadot-sdk-frame", @@ -12774,7 +12761,7 @@ dependencies = [ [[package]] name = "pallet-ranked-collective" version = "28.0.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -12792,7 +12779,7 @@ dependencies = [ [[package]] name = "pallet-recovery" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "polkadot-sdk-frame", @@ -12802,7 +12789,7 @@ dependencies = [ [[package]] name = "pallet-referenda" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -12819,7 +12806,7 @@ dependencies = [ [[package]] name = "pallet-remark" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -12835,7 +12822,7 @@ dependencies = [ [[package]] name = "pallet-revive" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "alloy-consensus", "alloy-core", @@ -12869,6 +12856,7 @@ dependencies = [ "rlp 0.6.1", "scale-info", "serde", + "serde_json", "sp-api", "sp-arithmetic", "sp-consensus-aura", @@ -12885,7 +12873,7 @@ 
dependencies = [ [[package]] name = "pallet-revive-eth-rpc" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "anyhow", "clap", @@ -12910,6 +12898,7 @@ dependencies = [ "sp-io", "sp-rpc", "sp-runtime", + "sp-timestamp", "sp-weights", "sqlx", "substrate-prometheus-endpoint", @@ -12922,7 +12911,7 @@ dependencies = [ [[package]] name = "pallet-revive-fixtures" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "alloy-core", "anyhow", @@ -12939,7 +12928,7 @@ dependencies = [ [[package]] name = "pallet-revive-proc-macro" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "proc-macro2", "quote", @@ -12949,7 +12938,7 @@ dependencies = [ [[package]] name = "pallet-revive-uapi" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "alloy-core", "bitflags 1.3.2", @@ -12964,7 +12953,7 @@ dependencies = [ [[package]] name = "pallet-root-offences" version = "25.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ 
"frame-support", "frame-system", @@ -12980,7 +12969,7 @@ dependencies = [ [[package]] name = "pallet-root-testing" version = "4.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-support", "frame-system", @@ -12993,7 +12982,7 @@ dependencies = [ [[package]] name = "pallet-safe-mode" version = "9.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "docify", "pallet-balances", @@ -13007,7 +12996,7 @@ dependencies = [ [[package]] name = "pallet-salary" version = "13.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "log", "pallet-ranked-collective", @@ -13019,7 +13008,7 @@ dependencies = [ [[package]] name = "pallet-scheduler" version = "29.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "docify", "frame-benchmarking", @@ -13036,7 +13025,7 @@ dependencies = [ [[package]] name = "pallet-scored-pool" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-support", "frame-system", @@ -13049,7 +13038,7 @@ dependencies = [ 
[[package]] name = "pallet-session" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-support", "frame-system", @@ -13071,7 +13060,7 @@ dependencies = [ [[package]] name = "pallet-session-benchmarking" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -13087,7 +13076,7 @@ dependencies = [ [[package]] name = "pallet-skip-feeless-payment" version = "3.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-support", "frame-system", @@ -13099,7 +13088,7 @@ dependencies = [ [[package]] name = "pallet-society" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -13116,7 +13105,7 @@ dependencies = [ [[package]] name = "pallet-staking" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-election-provider-support", @@ -13137,7 +13126,7 @@ dependencies = [ [[package]] name = 
"pallet-staking-async" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-election-provider-support", @@ -13161,7 +13150,7 @@ dependencies = [ [[package]] name = "pallet-staking-async-ah-client" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -13181,7 +13170,7 @@ dependencies = [ [[package]] name = "pallet-staking-async-rc-client" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-support", "frame-system", @@ -13198,7 +13187,7 @@ dependencies = [ [[package]] name = "pallet-staking-async-reward-fn" version = "19.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "log", "sp-arithmetic", @@ -13207,7 +13196,7 @@ dependencies = [ [[package]] name = "pallet-staking-async-runtime-api" version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "sp-api", @@ -13217,7 +13206,7 @@ dependencies = [ [[package]] name = 
"pallet-staking-reward-fn" version = "19.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "log", "sp-arithmetic", @@ -13226,7 +13215,7 @@ dependencies = [ [[package]] name = "pallet-staking-runtime-api" version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "sp-api", @@ -13236,7 +13225,7 @@ dependencies = [ [[package]] name = "pallet-state-trie-migration" version = "29.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -13252,7 +13241,7 @@ dependencies = [ [[package]] name = "pallet-statement" version = "10.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-support", "frame-system", @@ -13269,7 +13258,7 @@ dependencies = [ [[package]] name = "pallet-sudo" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "docify", "frame-benchmarking", @@ -13284,7 +13273,7 @@ dependencies = [ [[package]] name = "pallet-timestamp" version = "27.0.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "docify", "frame-benchmarking", @@ -13302,7 +13291,7 @@ dependencies = [ [[package]] name = "pallet-tips" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -13320,7 +13309,7 @@ dependencies = [ [[package]] name = "pallet-transaction-payment" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -13336,7 +13325,7 @@ dependencies = [ [[package]] name = "pallet-transaction-payment-rpc-runtime-api" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "pallet-transaction-payment", "parity-scale-codec", @@ -13348,7 +13337,7 @@ dependencies = [ [[package]] name = "pallet-transaction-storage" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -13367,7 +13356,7 @@ dependencies = [ [[package]] name = "pallet-treasury" version = "27.0.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "docify", "frame-benchmarking", @@ -13386,7 +13375,7 @@ dependencies = [ [[package]] name = "pallet-tx-pause" version = "9.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "docify", "parity-scale-codec", @@ -13397,7 +13386,7 @@ dependencies = [ [[package]] name = "pallet-uniques" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -13411,7 +13400,7 @@ dependencies = [ [[package]] name = "pallet-utility" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -13426,7 +13415,7 @@ dependencies = [ [[package]] name = "pallet-verify-signature" version = "1.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -13441,7 +13430,7 @@ dependencies = [ [[package]] name = "pallet-vesting" version = "28.0.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -13455,7 +13444,7 @@ dependencies = [ [[package]] name = "pallet-whitelist" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "polkadot-sdk-frame", @@ -13465,7 +13454,7 @@ dependencies = [ [[package]] name = "pallet-xcm" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "bounded-collections 0.3.2", "frame-benchmarking", @@ -13489,7 +13478,7 @@ dependencies = [ [[package]] name = "pallet-xcm-benchmarks" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -13506,7 +13495,7 @@ dependencies = [ [[package]] name = "pallet-xcm-bridge-hub" version = "0.2.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "bp-messages", "bp-runtime", @@ -13528,7 +13517,7 @@ dependencies = [ [[package]] name = "pallet-xcm-bridge-hub-router" version = "0.5.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "bp-xcm-bridge-hub-router", "frame-benchmarking", @@ -13548,7 +13537,7 @@ dependencies = [ [[package]] name = "pallet-xcm-precompiles" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-support", "pallet-revive", @@ -13562,7 +13551,7 @@ dependencies = [ [[package]] name = "parachains-common" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "cumulus-primitives-core", "cumulus-primitives-utility", @@ -13593,7 +13582,7 @@ dependencies = [ [[package]] name = "parachains-runtimes-test-utils" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "cumulus-pallet-parachain-system", "cumulus-pallet-xcmp-queue", @@ -14048,7 +14037,7 @@ dependencies = [ [[package]] name = "polkadot-core-primitives" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "scale-info", @@ -14059,7 +14048,7 @@ dependencies = [ [[package]] name = 
"polkadot-parachain-primitives" version = "6.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "array-bytes", "bounded-collections 0.3.2", @@ -14076,7 +14065,7 @@ dependencies = [ [[package]] name = "polkadot-primitives" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "bitvec", "bounded-collections 0.3.2", @@ -14105,7 +14094,7 @@ dependencies = [ [[package]] name = "polkadot-runtime-common" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "bitvec", "frame-benchmarking", @@ -14154,7 +14143,7 @@ dependencies = [ [[package]] name = "polkadot-runtime-metrics" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "bs58", "frame-benchmarking", @@ -14166,7 +14155,7 @@ dependencies = [ [[package]] name = "polkadot-runtime-parachains" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "bitflags 1.3.2", "bitvec", @@ -14213,7 +14202,7 @@ dependencies = [ [[package]] name = "polkadot-sdk" version = "0.1.0" 
-source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "asset-test-utils", "assets-common", @@ -14343,6 +14332,7 @@ dependencies = [ "pallet-referenda", "pallet-remark", "pallet-revive", + "pallet-revive-uapi", "pallet-root-offences", "pallet-root-testing", "pallet-safe-mode", @@ -14488,7 +14478,7 @@ dependencies = [ [[package]] name = "polkadot-sdk-frame" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "docify", "frame-benchmarking", @@ -15706,7 +15696,7 @@ dependencies = [ [[package]] name = "revive-dev-runtime" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "array-bytes", "parity-scale-codec", @@ -16064,7 +16054,7 @@ dependencies = [ [[package]] name = "rococo-runtime-constants" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-support", "polkadot-primitives", @@ -16577,7 +16567,7 @@ dependencies = [ [[package]] name = "sc-allocator" version = "23.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ 
"log", "sp-core", @@ -16588,7 +16578,7 @@ dependencies = [ [[package]] name = "sc-basic-authorship" version = "0.34.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "futures", "log", @@ -16610,7 +16600,7 @@ dependencies = [ [[package]] name = "sc-block-builder" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "sp-api", @@ -16625,7 +16615,7 @@ dependencies = [ [[package]] name = "sc-chain-spec" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "array-bytes", "docify", @@ -16651,7 +16641,7 @@ dependencies = [ [[package]] name = "sc-chain-spec-derive" version = "11.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "proc-macro-crate 3.3.0", "proc-macro2", @@ -16662,7 +16652,7 @@ dependencies = [ [[package]] name = "sc-cli" version = "0.36.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "array-bytes", "bip39", @@ -16704,7 +16694,7 @@ dependencies = [ [[package]] name = "sc-client-api" version = 
"28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "fnv", "futures", @@ -16730,7 +16720,7 @@ dependencies = [ [[package]] name = "sc-client-db" version = "0.35.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "hash-db", "kvdb", @@ -16758,7 +16748,7 @@ dependencies = [ [[package]] name = "sc-consensus" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "async-trait", "futures", @@ -16781,7 +16771,7 @@ dependencies = [ [[package]] name = "sc-consensus-aura" version = "0.34.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "async-trait", "fork-tree", @@ -16812,7 +16802,7 @@ dependencies = [ [[package]] name = "sc-consensus-babe" version = "0.34.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "async-trait", "fork-tree", @@ -16849,7 +16839,7 @@ dependencies = [ [[package]] name = "sc-consensus-epochs" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" 
+source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "fork-tree", "parity-scale-codec", @@ -16862,7 +16852,7 @@ dependencies = [ [[package]] name = "sc-consensus-manual-seal" version = "0.35.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "assert_matches", "async-trait", @@ -16897,7 +16887,7 @@ dependencies = [ [[package]] name = "sc-consensus-slots" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "async-trait", "futures", @@ -16920,7 +16910,7 @@ dependencies = [ [[package]] name = "sc-executor" version = "0.32.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "parking_lot 0.12.4", @@ -16943,7 +16933,7 @@ dependencies = [ [[package]] name = "sc-executor-common" version = "0.29.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "polkavm 0.26.0", "sc-allocator", @@ -16956,7 +16946,7 @@ dependencies = [ [[package]] name = "sc-executor-polkavm" version = "0.29.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "log", "polkavm 0.26.0", @@ -16967,7 +16957,7 @@ dependencies = [ [[package]] name = "sc-executor-wasmtime" version = "0.29.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "anyhow", "log", @@ -16983,7 +16973,7 @@ dependencies = [ [[package]] name = "sc-informant" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "console", "futures", @@ -16999,7 +16989,7 @@ dependencies = [ [[package]] name = "sc-keystore" version = "25.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "array-bytes", "parking_lot 0.12.4", @@ -17013,7 +17003,7 @@ dependencies = [ [[package]] name = "sc-mixnet" version = "0.4.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "array-bytes", "arrayvec 0.7.6", @@ -17041,7 +17031,7 @@ dependencies = [ [[package]] name = "sc-network" version = "0.34.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ 
"array-bytes", "async-channel 1.9.0", @@ -17091,7 +17081,7 @@ dependencies = [ [[package]] name = "sc-network-common" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "bitflags 1.3.2", "parity-scale-codec", @@ -17101,7 +17091,7 @@ dependencies = [ [[package]] name = "sc-network-light" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "array-bytes", "async-channel 1.9.0", @@ -17122,7 +17112,7 @@ dependencies = [ [[package]] name = "sc-network-sync" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "array-bytes", "async-channel 1.9.0", @@ -17157,7 +17147,7 @@ dependencies = [ [[package]] name = "sc-network-transactions" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "array-bytes", "futures", @@ -17176,7 +17166,7 @@ dependencies = [ [[package]] name = "sc-network-types" version = "0.10.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "bs58", "bytes", @@ -17197,7 +17187,7 @@ dependencies = [ 
[[package]] name = "sc-proposer-metrics" version = "0.17.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "log", "substrate-prometheus-endpoint", @@ -17206,7 +17196,7 @@ dependencies = [ [[package]] name = "sc-rpc" version = "29.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "futures", "jsonrpsee", @@ -17238,7 +17228,7 @@ dependencies = [ [[package]] name = "sc-rpc-api" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "jsonrpsee", "parity-scale-codec", @@ -17258,7 +17248,7 @@ dependencies = [ [[package]] name = "sc-rpc-server" version = "11.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "dyn-clone", "forwarded-header-value", @@ -17282,7 +17272,7 @@ dependencies = [ [[package]] name = "sc-rpc-spec-v2" version = "0.34.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "array-bytes", "futures", @@ -17315,7 +17305,7 @@ dependencies = [ [[package]] name = "sc-runtime-utilities" version = "0.1.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "sc-executor", @@ -17330,7 +17320,7 @@ dependencies = [ [[package]] name = "sc-service" version = "0.35.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "async-trait", "directories", @@ -17394,7 +17384,7 @@ dependencies = [ [[package]] name = "sc-state-db" version = "0.30.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "log", "parity-scale-codec", @@ -17405,7 +17395,7 @@ dependencies = [ [[package]] name = "sc-sysinfo" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "derive_more 0.99.20", "futures", @@ -17425,7 +17415,7 @@ dependencies = [ [[package]] name = "sc-telemetry" version = "15.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "chrono", "futures", @@ -17444,7 +17434,7 @@ dependencies = [ [[package]] name = "sc-tracing" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "chrono", "console", @@ -17472,7 +17462,7 @@ dependencies = [ [[package]] name = "sc-tracing-proc-macro" version = "11.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "proc-macro-crate 3.3.0", "proc-macro2", @@ -17483,7 +17473,7 @@ dependencies = [ [[package]] name = "sc-transaction-pool" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "async-trait", "futures", @@ -17514,7 +17504,7 @@ dependencies = [ [[package]] name = "sc-transaction-pool-api" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "async-trait", "futures", @@ -17531,7 +17521,7 @@ dependencies = [ [[package]] name = "sc-utils" version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "async-channel 1.9.0", "futures", @@ -18436,7 +18426,7 @@ checksum = "826167069c09b99d56f31e9ae5c99049e932a98c9dc2dac47645b08dbbf76ba7" [[package]] name = "slot-range-helper" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "enumn", "parity-scale-codec", @@ -18618,7 +18608,7 @@ dependencies = [ [[package]] name = "snowbridge-core" version = "0.2.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "bp-relayers", "frame-support", @@ -18853,7 +18843,7 @@ dependencies = [ [[package]] name = "sp-api" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "docify", "hash-db", @@ -18875,7 +18865,7 @@ dependencies = [ [[package]] name = "sp-api-proc-macro" version = "15.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "Inflector", "blake2 0.10.6", @@ -18889,7 +18879,7 @@ dependencies = [ [[package]] name = "sp-application-crypto" version = "30.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "scale-info", @@ -18901,7 +18891,7 @@ dependencies = [ [[package]] name = "sp-arithmetic" version = "23.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "docify", "integer-sqrt", @@ -18915,7 +18905,7 @@ dependencies = [ [[package]] name = "sp-authority-discovery" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "scale-info", @@ -18927,7 +18917,7 @@ dependencies = [ [[package]] name = "sp-block-builder" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "sp-api", "sp-inherents", @@ -18937,7 +18927,7 @@ dependencies = [ [[package]] name = "sp-blockchain" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "futures", "parity-scale-codec", @@ -18956,7 +18946,7 @@ dependencies = [ [[package]] name = "sp-consensus" version = "0.32.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "async-trait", "futures", @@ -18970,7 +18960,7 @@ dependencies = [ [[package]] name = "sp-consensus-aura" version = "0.32.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "async-trait", "parity-scale-codec", @@ -18986,7 +18976,7 @@ dependencies = [ [[package]] name = "sp-consensus-babe" version = "0.32.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "async-trait", "parity-scale-codec", @@ -19004,7 +18994,7 @@ dependencies = [ [[package]] name = "sp-consensus-beefy" version = "13.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "scale-info", @@ -19024,7 +19014,7 @@ dependencies = [ [[package]] name = "sp-consensus-grandpa" version = "13.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "finality-grandpa", "log", @@ -19041,7 +19031,7 @@ dependencies = [ [[package]] name = "sp-consensus-pow" version = "0.32.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "sp-api", @@ -19052,7 +19042,7 @@ dependencies = [ [[package]] name = "sp-consensus-slots" version = "0.32.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "scale-info", @@ -19063,7 +19053,7 @@ dependencies = [ [[package]] name = "sp-core" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "ark-vrf", "array-bytes", @@ -19110,7 +19100,7 @@ dependencies = [ [[package]] name = "sp-core-hashing" version = "15.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "sp-crypto-hashing 0.1.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", ] @@ -19118,7 +19108,7 @@ dependencies = [ [[package]] name = "sp-core-hashing-proc-macro" version = "15.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "sp-crypto-hashing-proc-macro", ] @@ -19126,20 +19116,20 @@ dependencies = [ [[package]] name = "sp-crypto-ec-utils" version = "0.10.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ - "ark-bls12-377", + "ark-bls12-377 0.5.0", "ark-bls12-377-ext", - "ark-bls12-381 0.4.0", + "ark-bls12-381 0.5.0", "ark-bls12-381-ext", "ark-bw6-761", "ark-bw6-761-ext", - "ark-ec 0.4.2", + "ark-ec 0.5.0", "ark-ed-on-bls12-377", "ark-ed-on-bls12-377-ext", - 
"ark-ed-on-bls12-381-bandersnatch 0.4.0", + "ark-ed-on-bls12-381-bandersnatch", "ark-ed-on-bls12-381-bandersnatch-ext", - "ark-scale 0.0.12", + "ark-scale", "sp-runtime-interface", ] @@ -19160,7 +19150,7 @@ dependencies = [ [[package]] name = "sp-crypto-hashing" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "blake2b_simd", "byteorder", @@ -19173,7 +19163,7 @@ dependencies = [ [[package]] name = "sp-crypto-hashing-proc-macro" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "quote", "sp-crypto-hashing 0.1.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", @@ -19183,7 +19173,7 @@ dependencies = [ [[package]] name = "sp-database" version = "10.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "kvdb", "parking_lot 0.12.4", @@ -19192,7 +19182,7 @@ dependencies = [ [[package]] name = "sp-debug-derive" version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "proc-macro2", "quote", @@ -19202,7 +19192,7 @@ dependencies = [ [[package]] name = "sp-externalities" version = "0.25.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" 
+source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "environmental", "parity-scale-codec", @@ -19212,7 +19202,7 @@ dependencies = [ [[package]] name = "sp-genesis-builder" version = "0.8.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "scale-info", @@ -19224,7 +19214,7 @@ dependencies = [ [[package]] name = "sp-inherents" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "async-trait", "impl-trait-for-tuples", @@ -19237,7 +19227,7 @@ dependencies = [ [[package]] name = "sp-io" version = "30.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "bytes", "docify", @@ -19263,7 +19253,7 @@ dependencies = [ [[package]] name = "sp-keyring" version = "31.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "sp-core", "sp-runtime", @@ -19273,7 +19263,7 @@ dependencies = [ [[package]] name = "sp-keystore" version = "0.34.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "parking_lot 0.12.4", @@ -19284,7 +19274,7 @@ dependencies = [ [[package]] name = "sp-maybe-compressed-blob" version = "11.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "thiserror 1.0.69", "zstd 0.12.4", @@ -19293,7 +19283,7 @@ dependencies = [ [[package]] name = "sp-metadata-ir" version = "0.6.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-metadata", "parity-scale-codec", @@ -19303,7 +19293,7 @@ dependencies = [ [[package]] name = "sp-mixnet" version = "0.4.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "scale-info", @@ -19314,7 +19304,7 @@ dependencies = [ [[package]] name = "sp-mmr-primitives" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "log", "parity-scale-codec", @@ -19331,7 +19321,7 @@ dependencies = [ [[package]] name = "sp-npos-elections" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "scale-info", @@ -19344,7 +19334,7 @@ dependencies = [ [[package]] name = "sp-offchain" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "sp-api", "sp-core", @@ -19354,7 +19344,7 @@ dependencies = [ [[package]] name = "sp-panic-handler" version = "13.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "backtrace", "regex", @@ -19363,7 +19353,7 @@ dependencies = [ [[package]] name = "sp-rpc" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "rustc-hash 1.1.0", "serde", @@ -19373,7 +19363,7 @@ dependencies = [ [[package]] name = "sp-runtime" version = "31.0.1" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "binary-merkle-tree", "bytes", @@ -19403,7 +19393,7 @@ dependencies = [ [[package]] name = "sp-runtime-interface" version = "24.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = 
[ "bytes", "impl-trait-for-tuples", @@ -19421,7 +19411,7 @@ dependencies = [ [[package]] name = "sp-runtime-interface-proc-macro" version = "17.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "Inflector", "expander", @@ -19434,7 +19424,7 @@ dependencies = [ [[package]] name = "sp-session" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "scale-info", @@ -19448,7 +19438,7 @@ dependencies = [ [[package]] name = "sp-staking" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "impl-trait-for-tuples", "parity-scale-codec", @@ -19461,7 +19451,7 @@ dependencies = [ [[package]] name = "sp-state-machine" version = "0.35.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "hash-db", "log", @@ -19481,7 +19471,7 @@ dependencies = [ [[package]] name = "sp-statement-store" version = "10.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "aes-gcm", "curve25519-dalek", @@ -19505,12 +19495,12 @@ dependencies = [ 
[[package]] name = "sp-std" version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" [[package]] name = "sp-storage" version = "19.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "impl-serde", "parity-scale-codec", @@ -19522,7 +19512,7 @@ dependencies = [ [[package]] name = "sp-timestamp" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "async-trait", "parity-scale-codec", @@ -19534,7 +19524,7 @@ dependencies = [ [[package]] name = "sp-tracing" version = "16.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "regex", @@ -19546,7 +19536,7 @@ dependencies = [ [[package]] name = "sp-transaction-pool" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "sp-api", "sp-runtime", @@ -19555,7 +19545,7 @@ dependencies = [ [[package]] name = "sp-transaction-storage-proof" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "async-trait", "parity-scale-codec", @@ -19569,7 +19559,7 @@ dependencies = [ [[package]] name = "sp-trie" version = "29.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "ahash", "foldhash 0.1.5", @@ -19594,7 +19584,7 @@ dependencies = [ [[package]] name = "sp-version" version = "29.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "impl-serde", "parity-scale-codec", @@ -19611,7 +19601,7 @@ dependencies = [ [[package]] name = "sp-version-proc-macro" version = "13.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "proc-macro-warning", @@ -19623,7 +19613,7 @@ dependencies = [ [[package]] name = "sp-wasm-interface" version = "20.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "anyhow", "impl-trait-for-tuples", @@ -19635,7 +19625,7 @@ dependencies = [ [[package]] name = "sp-weights" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "bounded-collections 0.3.2", "parity-scale-codec", @@ -19915,7 +19905,7 @@ dependencies = [ [[package]] name = "staging-parachain-info" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "cumulus-primitives-core", "frame-support", @@ -19928,7 +19918,7 @@ dependencies = [ [[package]] name = "staging-xcm" version = "7.0.1" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "array-bytes", "bounded-collections 0.3.2", @@ -19949,7 +19939,7 @@ dependencies = [ [[package]] name = "staging-xcm-builder" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "environmental", "frame-support", @@ -19973,7 +19963,7 @@ dependencies = [ [[package]] name = "staging-xcm-executor" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "environmental", "frame-benchmarking", @@ -20157,7 +20147,7 @@ dependencies = [ [[package]] name = "substrate-bip39" version = "0.4.7" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "hmac 0.12.1", "pbkdf2 0.12.2", @@ -20182,7 +20172,7 @@ dependencies = [ [[package]] name = "substrate-frame-rpc-support" version = "29.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-support", "jsonrpsee", @@ -20196,7 +20186,7 @@ dependencies = [ [[package]] name = "substrate-frame-rpc-system" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "docify", "frame-system-rpc-runtime-api", @@ -20216,7 +20206,7 @@ dependencies = [ [[package]] name = "substrate-prometheus-endpoint" version = "0.17.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "http-body-util", "hyper 1.6.0", @@ -20230,7 +20220,7 @@ dependencies = [ [[package]] name = "substrate-rpc-client" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "async-trait", "jsonrpsee", @@ -20254,7 +20244,7 @@ dependencies = [ [[package]] name = "substrate-wasm-builder" version = "17.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "build-helper", "cargo_metadata 0.15.4", @@ -20793,7 +20783,7 @@ checksum = "8f50febec83f5ee1df3015341d8bd429f2d1cc62bcba7ea2076759d315084683" [[package]] name = "testnet-parachains-constants" version = "1.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "cumulus-primitives-core", "frame-support", @@ -21857,7 +21847,7 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "225eaa083192400abfe78838e3089c539a361e0dd9b6884f61b5c6237676ec01" dependencies = [ - "ark-scale 0.0.13", + "ark-scale", "ark-serialize 0.5.0", "ark-vrf", "bounded-collections 0.1.9", @@ -21900,7 +21890,7 @@ version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6bfb937b3d12077654a9e43e32a4e9c20177dd9fea0f3aba673e7840bb54f32" dependencies = [ - "ark-bls12-377", + "ark-bls12-377 0.4.0", "ark-bls12-381 0.4.0", "ark-ec 0.4.2", "ark-ff 0.4.2", @@ -22666,7 +22656,7 @@ dependencies = [ [[package]] name = "westend-runtime-constants" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-support", "polkadot-primitives", @@ -23347,7 +23337,7 @@ dependencies = [ [[package]] name = "xcm-procedural" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "Inflector", 
"proc-macro2", @@ -23358,7 +23348,7 @@ dependencies = [ [[package]] name = "xcm-runtime-apis" version = "0.1.1" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-support", "parity-scale-codec", @@ -23372,7 +23362,7 @@ dependencies = [ [[package]] name = "xcm-simulator" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-support", "frame-system", diff --git a/Cargo.toml b/Cargo.toml index 7577bbb33cb6d..35bca7410ddae 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -216,6 +216,17 @@ revive-env = { path = "crates/revive-env" } revive-strategy = { path = "crates/revive-strategy" } revive-utils = { path = "crates/revive-utils" } +# polkadot-sdk +polkadot-sdk = { git = "https://github.com/paritytech/polkadot-sdk.git", branch = "master", features = [ + "experimental", + "runtime", + "polkadot-runtime-common", + "pallet-revive", + "pallet-balances", + "pallet-timestamp", + "pallet-revive-uapi", +]} + # solc & compilation utilities foundry-block-explorers = { version = "0.20.0", default-features = false } foundry-compilers = { version = "0.18.2", default-features = false } diff --git a/crates/anvil-polkadot/Cargo.toml b/crates/anvil-polkadot/Cargo.toml index cc845db6cf96e..c1abe8416719b 100644 --- a/crates/anvil-polkadot/Cargo.toml +++ b/crates/anvil-polkadot/Cargo.toml @@ -142,7 +142,7 @@ clap_complete_fig = "4" subxt = "0.43.0" subxt-signer = "0.43.0" tokio-stream = "0.1.17" -jsonrpsee = "0.24.9" +jsonrpsee = "0.24.10" sqlx = "0.8.6" revm.workspace = true diff --git a/crates/anvil-polkadot/src/api_server/mod.rs 
b/crates/anvil-polkadot/src/api_server/mod.rs index 38c4ac053fdaa..bda613cddce4f 100644 --- a/crates/anvil-polkadot/src/api_server/mod.rs +++ b/crates/anvil-polkadot/src/api_server/mod.rs @@ -15,6 +15,9 @@ pub mod error; pub mod revive_conversions; mod server; mod signer; +mod txpool_helpers; + +pub use txpool_helpers::TxpoolTransactionInfo; pub type ApiHandle = mpsc::Sender; diff --git a/crates/anvil-polkadot/src/api_server/server.rs b/crates/anvil-polkadot/src/api_server/server.rs index 59ae02862b794..1198dbe7ece09 100644 --- a/crates/anvil-polkadot/src/api_server/server.rs +++ b/crates/anvil-polkadot/src/api_server/server.rs @@ -7,17 +7,22 @@ use crate::{ ReviveFilter, SubstrateU256, convert_to_generic_transaction, }, signer::DevSigner, + txpool_helpers::{ + TxpoolTransactionInfo, extract_sender, extract_tx_info, extract_tx_summary, + transaction_matches_eth_hash, + }, }, logging::LoggingManager, macros::node_info, substrate_node::{ + host::recover_maybe_impersonated_address, impersonation::ImpersonationManager, in_mem_rpc::InMemoryRpcClient, mining_engine::MiningEngine, service::{ BackendError, BackendWithOverlay, Client, Service, TransactionPoolHandle, storage::{ - AccountType, ByteCodeType, CodeInfo, ContractInfo, ReviveAccountInfo, + AccountType, BytecodeType, CodeInfo, ContractInfo, ReviveAccountInfo, SystemAccountInfo, }, }, @@ -30,26 +35,29 @@ use alloy_primitives::{Address, B256, U64, U256}; use alloy_rpc_types::{ Filter, TransactionRequest, anvil::{Metadata as AnvilMetadata, MineOptions, NodeEnvironment, NodeInfo}, - txpool::TxpoolStatus, + txpool::{TxpoolContent, TxpoolInspect, TxpoolStatus}, }; use alloy_serde::WithOtherFields; use alloy_trie::{EMPTY_ROOT_HASH, KECCAK_EMPTY, TrieAccount}; use anvil_core::eth::{EthRequest, Params as MineParams}; use anvil_rpc::response::ResponseResult; -use codec::{Decode, DecodeLimit, Encode}; +use chrono::{DateTime, Datelike, Utc}; +use codec::{Decode, Encode}; use futures::{StreamExt, channel::mpsc}; use 
indexmap::IndexMap; use pallet_revive_eth_rpc::{ BlockInfoProvider, EthRpcError, ReceiptExtractor, ReceiptProvider, SubxtBlockInfoProvider, client::{Client as EthRpcClient, ClientError, SubscriptionType}, - subxt_client::{self, SrcChainConfig}, + subxt_client::{ + self, SrcChainConfig, runtime_types::bounded_collections::bounded_vec::BoundedVec, + }, }; use polkadot_sdk::{ pallet_revive::{ ReviveApi, evm::{ - Block, Bytes, FeeHistoryResult, FilterResults, ReceiptInfo, TransactionInfo, - TransactionSigned, + Block, BlockNumberOrTagOrHash, BlockTag, Bytes, FeeHistoryResult, FilterResults, + ReceiptInfo, TransactionInfo, TransactionSigned, }, }, parachains_common::{AccountId, Hash, Nonce}, @@ -57,15 +65,14 @@ use polkadot_sdk::{ sc_client_api::HeaderBackend, sc_service::{InPoolTransaction, SpawnTaskHandle, TransactionPool}, sp_api::{Metadata as _, ProvideRuntimeApi}, - sp_arithmetic::Permill, sp_blockchain::Info, sp_core::{self, Hasher, keccak_256}, - sp_runtime::traits::BlakeTwo256, + sp_runtime::{FixedU128, traits::BlakeTwo256}, }; use revm::primitives::hardfork::SpecId; use sqlx::sqlite::SqlitePoolOptions; use std::{collections::HashSet, sync::Arc, time::Duration}; -use substrate_runtime::{Balance, RuntimeCall, UncheckedExtrinsic}; +use substrate_runtime::{Balance, constants::NATIVE_TO_ETH_RATIO}; use subxt::{ Metadata as SubxtMetadata, OnlineClient, backend::rpc::RpcClient, client::RuntimeVersion as SubxtRuntimeVersion, config::substrate::H256, @@ -75,7 +82,6 @@ use subxt_signer::eth::Keypair; use tokio::try_join; pub const CLIENT_VERSION: &str = concat!("anvil-polkadot/v", env!("CARGO_PKG_VERSION")); -const MAX_EXTRINSIC_DEPTH: u32 = 256; pub struct ApiServer { eth_rpc_client: EthRpcClient, @@ -113,6 +119,7 @@ impl ApiServer { substrate_service.spawn_handle.clone(), ) .await?; + Ok(Self { block_provider, req_receiver, @@ -143,6 +150,17 @@ impl ApiServer { pub async fn execute(&mut self, req: EthRequest) -> ResponseResult { let res = match req.clone() { 
EthRequest::SetLogging(enabled) => self.set_logging(enabled).to_rpc_result(), + //------- Gas ----------- + EthRequest::SetNextBlockBaseFeePerGas(base_fee) => { + let latest_block = self.latest_block(); + // We inject in substrate storage an 1e18 denominated value after transforming it + // to a 1e12. + self.backend.inject_next_fee_multiplier( + latest_block, + FixedU128::from_rational(base_fee.to::(), NATIVE_TO_ETH_RATIO.into()), + ); + Ok(()).to_rpc_result() + } //------- Mining--------- EthRequest::Mine(blocks, interval) => self.mine(blocks, interval).await.to_rpc_result(), @@ -339,18 +357,18 @@ impl ApiServer { self.get_account_info(addr, block).await.to_rpc_result() } //------- Transaction Pool --------- - EthRequest::TxPoolStatus(_) => { - node_info!("txpool_status"); - self.txpool_status().await.to_rpc_result() - } + EthRequest::TxPoolStatus(_) => self.txpool_status().await.to_rpc_result(), + EthRequest::TxPoolInspect(_) => self.txpool_inspect().await.to_rpc_result(), + EthRequest::TxPoolContent(_) => self.txpool_content().await.to_rpc_result(), EthRequest::DropAllTransactions() => { - node_info!("anvil_dropAllTransactions"); self.anvil_drop_all_transactions().await.to_rpc_result() } EthRequest::DropTransaction(eth_hash) => { - node_info!("anvil_dropTransaction"); self.anvil_drop_transaction(eth_hash).await.to_rpc_result() } + EthRequest::RemovePoolTransactions(address) => { + self.anvil_remove_pool_transactions(address).await.to_rpc_result() + } // --- Metadata --- EthRequest::NodeInfo(_) => self.anvil_node_info().await.to_rpc_result(), EthRequest::AnvilMetadata(_) => self.anvil_metadata().await.to_rpc_result(), @@ -385,10 +403,17 @@ impl ApiServer { "The interval between blocks is too large".to_string(), )); } - self.mining_engine + + // Subscribe to new best blocks. 
+ let receiver = self.eth_rpc_client.block_notifier().map(|sender| sender.subscribe()); + + let awaited_hash = self + .mining_engine .mine(blocks.map(|b| b.to()), interval.map(|i| Duration::from_secs(i.to()))) .await - .map_err(Error::Mining) + .map_err(Error::Mining)?; + self.wait_for_hash(receiver, awaited_hash).await?; + Ok(()) } fn set_interval_mining(&self, interval: u64) -> Result<()> { @@ -420,7 +445,10 @@ impl ApiServer { async fn evm_mine(&self, mine: Option>>) -> Result { node_info!("evm_mine"); - self.mining_engine.evm_mine(mine.and_then(|p| p.params)).await?; + // Subscribe to new best blocks. + let receiver = self.eth_rpc_client.block_notifier().map(|sender| sender.subscribe()); + let awaited_hash = self.mining_engine.evm_mine(mine.and_then(|p| p.params)).await?; + self.wait_for_hash(receiver, awaited_hash).await?; Ok("0x0".to_string()) } @@ -429,7 +457,15 @@ impl ApiServer { mine: Option>>, ) -> Result> { node_info!("evm_mine_detailed"); - let mined_blocks = self.mining_engine.do_evm_mine(mine.and_then(|p| p.params)).await?; + + // Subscribe to new best blocks. 
+ let receiver = self.eth_rpc_client.block_notifier().map(|sender| sender.subscribe()); + + let (mined_blocks, awaited_hash) = + self.mining_engine.do_evm_mine(mine.and_then(|p| p.params)).await?; + + self.wait_for_hash(receiver, awaited_hash).await?; + let mut blocks = Vec::with_capacity(mined_blocks as usize); let last_block = self.client.info().best_number as u64; let starting = last_block - mined_blocks + 1; @@ -560,13 +596,21 @@ impl ApiServer { addr: Address, slot: U256, block: Option, - ) -> Result { + ) -> Result { node_info!("eth_getStorageAt"); let hash = self.get_block_hash_for_tag(block).await?; let runtime_api = self.eth_rpc_client.runtime_api(hash); - let bytes = - runtime_api.get_storage(ReviveAddress::from(addr).inner(), slot.to_be_bytes()).await?; - Ok(bytes.unwrap_or_default().into()) + let bytes: B256 = match runtime_api + .get_storage(ReviveAddress::from(addr).inner(), slot.to_be_bytes()) + .await + { + Ok(Some(bytes)) => bytes.as_slice().try_into().map_err(|_| { + Error::InternalError("Unable to convert value to 32-byte value".to_string()) + })?, + Ok(None) | Err(ClientError::ContractNotFound) => Default::default(), + Err(err) => return Err(Error::ReviveRpc(EthRpcError::ClientError(err))), + }; + Ok(bytes) } async fn get_code(&self, address: Address, block: Option) -> Result { @@ -593,34 +637,43 @@ impl ApiServer { return Ok(None); }; let block = self.eth_rpc_client.evm_block(block, hydrated_transactions).await; - Ok(Some(block)) + Ok(block) } async fn estimate_gas( &self, request: WithOtherFields, - block: Option, + block: Option, ) -> Result { node_info!("eth_estimateGas"); let hash = self.get_block_hash_for_tag(block).await?; let runtime_api = self.eth_rpc_client.runtime_api(hash); - let dry_run = - runtime_api.dry_run(convert_to_generic_transaction(request.into_inner())).await?; + let dry_run = runtime_api + .dry_run( + convert_to_generic_transaction(request.into_inner()), + ReviveBlockId::from(block).inner(), + ) + .await?; 
Ok(dry_run.eth_gas) } async fn call( &self, request: WithOtherFields, - block: Option, + block: Option, ) -> Result { node_info!("eth_call"); let hash = self.get_block_hash_for_tag(block).await?; + let runtime_api = self.eth_rpc_client.runtime_api(hash); - let dry_run = - runtime_api.dry_run(convert_to_generic_transaction(request.into_inner())).await?; + let dry_run = runtime_api + .dry_run( + convert_to_generic_transaction(request.into_inner()), + ReviveBlockId::from(block).inner(), + ) + .await?; Ok(dry_run.data.into()) } @@ -686,9 +739,11 @@ impl ApiServer { if transaction.gas_price.is_none() { transaction.gas_price = Some(self.gas_price().await?); } + if transaction.nonce.is_none() { transaction.nonce = Some(self.get_transaction_count(from, latest_block_id).await?); } + if transaction.chain_id.is_none() { transaction.chain_id = Some(sp_core::U256::from_big_endian(&self.chain_id(latest_block).to_be_bytes())); @@ -726,7 +781,7 @@ impl ApiServer { return Ok(None); }; let block = self.eth_rpc_client.evm_block(block, hydrated_transactions).await; - Ok(Some(block)) + Ok(block) } pub(crate) async fn snapshot(&mut self) -> Result { @@ -831,10 +886,13 @@ impl ApiServer { &self, block_number: BlockNumberOrTag, ) -> Result> { - let Some(block) = self.get_block_by_number(block_number, false).await? else { + let Some(hash) = + self.maybe_get_block_hash_for_tag(Some(BlockId::Number(block_number))).await? + else { return Ok(None); }; - Ok(self.eth_rpc_client.receipts_count_per_block(&block.hash).await.map(U256::from)) + + Ok(self.eth_rpc_client.receipts_count_per_block(&hash).await.map(U256::from)) } async fn get_transaction_by_block_hash_and_index( @@ -914,8 +972,9 @@ impl ApiServer { } async fn max_priority_fee_per_gas(&self) -> Result { - let gas_price = self.gas_price().await?; - Ok(Permill::from_percent(20).mul_ceil(gas_price)) + // We do not support tips. Hence the recommended priority fee is + // always zero. The effective gas price will always be the base price. 
+ Ok(Default::default()) } pub fn accounts(&self) -> Result> { @@ -990,15 +1049,58 @@ impl ApiServer { let latest_block = self.latest_block(); - let Some(ReviveAccountInfo { account_type: AccountType::Contract(contract_info), .. }) = - self.backend.read_revive_account_info(latest_block, address)? - else { - return Ok(()); + let account_id = self.get_account_id(latest_block, address)?; + + let maybe_system_account_info = + self.backend.read_system_account_info(latest_block, account_id.clone())?; + let nonce = maybe_system_account_info.as_ref().map(|info| info.nonce).unwrap_or_default(); + + if maybe_system_account_info.is_none() { + self.set_frame_system_balance( + latest_block, + account_id, + substrate_runtime::currency::DOLLARS, + )?; + } + + let trie_id = match self.backend.read_revive_account_info(latest_block, address)? { + // If the account doesn't exist, create one. + None => { + let contract_info = new_contract_info(&address, (*KECCAK_EMPTY).into(), nonce); + let trie_id = contract_info.trie_id.0.clone(); + + self.backend.inject_revive_account_info( + latest_block, + address, + ReviveAccountInfo { + account_type: AccountType::Contract(contract_info), + dust: 0, + }, + ); + + trie_id + } + // If the account is not already a contract account, make it one. + Some(ReviveAccountInfo { account_type: AccountType::EOA, dust }) => { + let contract_info = new_contract_info(&address, (*KECCAK_EMPTY).into(), nonce); + let trie_id = contract_info.trie_id.0.clone(); + + self.backend.inject_revive_account_info( + latest_block, + address, + ReviveAccountInfo { account_type: AccountType::Contract(contract_info), dust }, + ); + + trie_id + } + Some(ReviveAccountInfo { + account_type: AccountType::Contract(contract_info), .. 
+ }) => contract_info.trie_id.0, }; self.backend.inject_child_storage( latest_block, - contract_info.trie_id.to_vec(), + trie_id, key.to_be_bytes_vec(), value.to_vec(), ); @@ -1073,7 +1175,7 @@ impl ApiServer { let code_info = old_code_info .map(|mut code_info| { code_info.code_len = bytes.len() as u32; - code_info.code_type = ByteCodeType::Evm; + code_info.code_type = BytecodeType::Evm; code_info }) .unwrap_or_else(|| CodeInfo { @@ -1082,7 +1184,7 @@ impl ApiServer { refcount: 1, code_len: bytes.len() as u32, behaviour_version: 0, - code_type: ByteCodeType::Evm, + code_type: BytecodeType::Evm, }); self.backend.inject_pristine_code(latest_block, code_hash, Some(bytes)); @@ -1186,21 +1288,38 @@ impl ApiServer { self.update_block_provider_on_revert(&revert_info.info).await?; } - let hash = self - .get_block_hash_for_tag(Some(BlockId::Number(BlockNumberOrTag::Number( - revert_info.info.best_number.into(), - )))) - .await?; - self.update_time_on_revert(hash).await?; + self.update_time_on_revert(revert_info.info.best_hash).await?; Ok(()) } + async fn maybe_get_block_hash_for_tag( + &self, + block_id: Option, + ) -> Result> { + match ReviveBlockId::from(block_id).inner() { + BlockNumberOrTagOrHash::BlockHash(hash) => Ok(Some(hash)), + BlockNumberOrTagOrHash::BlockNumber(block_number) => { + let n = block_number.try_into().map_err(|_| { + Error::InvalidParams("Block number conversion failed".to_string()) + })?; + Ok(self.eth_rpc_client.get_block_hash(n).await?) 
+ } + BlockNumberOrTagOrHash::BlockTag(BlockTag::Finalized | BlockTag::Safe) => { + let block = self.eth_rpc_client.latest_finalized_block().await; + Ok(Some(block.hash())) + } + BlockNumberOrTagOrHash::BlockTag(_) => { + let block = self.eth_rpc_client.latest_block().await; + Ok(Some(block.hash())) + } + } + } + async fn get_block_hash_for_tag(&self, block_id: Option) -> Result { - self.eth_rpc_client - .block_hash_for_tag(ReviveBlockId::from(block_id).inner()) - .await - .map_err(Error::from) + self.maybe_get_block_hash_for_tag(block_id) + .await? + .ok_or(Error::InvalidParams("Block number not found".to_string())) } fn get_account_id(&self, block: Hash, address: Address) -> Result { @@ -1247,12 +1366,58 @@ impl ApiServer { /// Returns transaction pool status async fn txpool_status(&self) -> Result { + node_info!("txpool_status"); let pool_status = self.tx_pool.status(); Ok(TxpoolStatus { pending: pool_status.ready as u64, queued: pool_status.future as u64 }) } + /// Returns a summary of all transactions in the pool + async fn txpool_inspect(&self) -> Result { + node_info!("txpool_inspect"); + let mut inspect = TxpoolInspect::default(); + + for tx in self.tx_pool.ready() { + if let Some((sender, nonce, summary)) = extract_tx_summary(tx.data()) { + let entry = inspect.pending.entry(sender).or_default(); + entry.insert(nonce.to_string(), summary); + } + } + + for tx in self.tx_pool.futures() { + if let Some((sender, nonce, summary)) = extract_tx_summary(tx.data()) { + let entry = inspect.queued.entry(sender).or_default(); + entry.insert(nonce.to_string(), summary); + } + } + + Ok(inspect) + } + + /// Returns full transaction details for all transactions in the pool + async fn txpool_content(&self) -> Result> { + node_info!("txpool_content"); + let mut content = TxpoolContent::default(); + + for tx in self.tx_pool.ready() { + if let Some((sender, nonce, tx_info)) = extract_tx_info(tx.data()) { + let entry = content.pending.entry(sender).or_default(); + 
entry.insert(nonce.to_string(), tx_info); + } + } + + for tx in self.tx_pool.futures() { + if let Some((sender, nonce, tx_info)) = extract_tx_info(tx.data()) { + let entry = content.queued.entry(sender).or_default(); + entry.insert(nonce.to_string(), tx_info); + } + } + + Ok(content) + } + /// Drop all transactions from pool async fn anvil_drop_all_transactions(&self) -> Result<()> { + node_info!("anvil_dropAllTransactions"); let ready_txs = self.tx_pool.ready(); let future_txs = self.tx_pool.futures(); @@ -1273,7 +1438,7 @@ impl ApiServer { /// Drop a specific transaction from the pool by its ETH hash async fn anvil_drop_transaction(&self, eth_hash: B256) -> Result> { - // Search in ready transactions + node_info!("anvil_dropTransaction"); for tx in self.tx_pool.ready() { if transaction_matches_eth_hash(tx.data(), eth_hash) { let mut invalid_txs = IndexMap::new(); @@ -1283,7 +1448,6 @@ impl ApiServer { } } - // Search in future transactions for tx in self.tx_pool.futures() { if transaction_matches_eth_hash(tx.data(), eth_hash) { let mut invalid_txs = IndexMap::new(); @@ -1296,45 +1460,97 @@ impl ApiServer { // Transaction not found Ok(None) } -} -/// Helper function to check if transaction matches ETH hash -fn transaction_matches_eth_hash( - tx_data: &Arc, - target_eth_hash: B256, -) -> bool { - let encoded = tx_data.encode(); - let Ok(ext) = - UncheckedExtrinsic::decode_all_with_depth_limit(MAX_EXTRINSIC_DEPTH, &mut &encoded[..]) - else { - return false; - }; + /// Remove all transactions from a specific sender address + async fn anvil_remove_pool_transactions(&self, address: Address) -> Result<()> { + node_info!("anvil_removePoolTransactions"); + let mut invalid_txs = IndexMap::new(); - let polkadot_sdk::sp_runtime::generic::UncheckedExtrinsic { - function: RuntimeCall::Revive(polkadot_sdk::pallet_revive::Call::eth_transact { payload }), - .. 
- } = ext.0 - else { - return false; - }; + for tx in self.tx_pool.ready() { + if let Some(sender) = extract_sender(tx.data()) + && sender == address + { + invalid_txs.insert(*tx.hash(), None); + } + } + + for tx in self.tx_pool.futures() { + if let Some(sender) = extract_sender(tx.data()) + && sender == address + { + invalid_txs.insert(*tx.hash(), None); + } + } + + if !invalid_txs.is_empty() { + self.tx_pool.report_invalid(None, invalid_txs).await; + } + + Ok(()) + } + + async fn wait_for_hash( + &self, + receiver: Option>, + awaited_hash: H256, + ) -> Result<()> { + if let Some(mut receiver) = receiver { + tokio::time::timeout(Duration::from_secs(3), async { + loop { + if let Ok(block_hash) = receiver.recv().await { + if let Err(e) = self.log_mined_block(block_hash).await { + node_info!("Failed to log mined block {block_hash:?}: {e:?}"); + } + if block_hash == awaited_hash { + break; + } + } + } + }) + .await + .map_err(|e| { + Error::InternalError(format!( + "Was not notified about the new best block in time {e:?}." 
+ )) + })?; + } + Ok(()) + } + + async fn log_mined_block(&self, block_hash: H256) -> Result<()> { + let block_timestamp = self.backend.read_timestamp(block_hash)?; + let block_number = self.backend.read_block_number(block_hash)?; + let timestamp = utc_from_millis(block_timestamp)?; + node_info!(" Block Number: {}", block_number); + node_info!(" Block Hash: {:?}", block_hash); + if timestamp.year() > 9999 { + // rf2822 panics with more than 4 digits + node_info!(" Block Time: {:?}\n", timestamp.to_rfc3339()); + } else { + node_info!(" Block Time: {:?}\n", timestamp.to_rfc2822()); + } + Ok(()) + } +} - let tx_eth_hash = keccak_256(&payload); - B256::from_slice(&tx_eth_hash) == target_eth_hash +/// Returns the `Utc` datetime for the given seconds since unix epoch +fn utc_from_millis(millis: u64) -> Result> { + DateTime::from_timestamp_millis( + millis.try_into().map_err(|err| { + Error::InvalidParams(format!("Could not convert the timestamp: {err:?}")) + })?, + ) + .ok_or(Error::InvalidParams("Could not get the utc datetime 😭".to_string())) } fn new_contract_info(address: &Address, code_hash: H256, nonce: Nonce) -> ContractInfo { let address = H160::from_slice(address.as_slice()); - let trie_id = { - let buf = ("bcontract_trie_v1", address, nonce).using_encoded(BlakeTwo256::hash); - buf.as_ref() - .to_vec() - .try_into() - .expect("Runtime uses a reasonable hash size. 
Hence sizeof(T::Hash) <= 128; qed") - }; + let trie_id = + ("bcontract_trie_v1", address, nonce).using_encoded(BlakeTwo256::hash).as_ref().to_vec(); ContractInfo { - trie_id, + trie_id: BoundedVec(trie_id), code_hash, storage_bytes: 0, storage_items: 0, @@ -1437,16 +1653,7 @@ async fn create_revive_rpc_client( let receipt_extractor = ReceiptExtractor::new_with_custom_address_recovery( api.clone(), None, - Arc::new(|signed_tx: &TransactionSigned| { - let sig = signed_tx.raw_signature()?; - if sig[..12] == [0; 12] && sig[32..64] == [0; 32] { - let mut res = [0; 20]; - res.copy_from_slice(&sig[12..32]); - Ok(H160::from(res)) - } else { - signed_tx.recover_eth_address() - } - }), + Arc::new(recover_maybe_impersonated_address), ) .await .map_err(|err| Error::ReviveRpc(EthRpcError::ClientError(err)))?; @@ -1460,9 +1667,12 @@ async fn create_revive_rpc_client( .await .map_err(|err| Error::ReviveRpc(EthRpcError::ClientError(ClientError::SqlxError(err))))?; - let eth_rpc_client = EthRpcClient::new(api, rpc_client, rpc, block_provider, receipt_provider) - .await - .map_err(Error::from)?; + let mut eth_rpc_client = + EthRpcClient::new(api, rpc_client, rpc, block_provider, receipt_provider) + .await + .map_err(Error::from)?; + + eth_rpc_client.create_block_notifier(); let eth_rpc_client_clone = eth_rpc_client.clone(); task_spawn_handle.spawn("block-subscription", "None", async move { let eth_rpc_client = eth_rpc_client_clone; diff --git a/crates/anvil-polkadot/src/api_server/txpool_helpers.rs b/crates/anvil-polkadot/src/api_server/txpool_helpers.rs new file mode 100644 index 0000000000000..d22eb94a14e01 --- /dev/null +++ b/crates/anvil-polkadot/src/api_server/txpool_helpers.rs @@ -0,0 +1,200 @@ +//! Helper functions for txpool RPC methods +//! +//! This module contains utilities for extracting transaction information from +//! Substrate extrinsics, including support for impersonated transactions with +//! fake signatures. 
+ +use alloy_primitives::{Address, B256, U256, keccak256}; +use alloy_rpc_types::txpool::TxpoolInspectSummary; +use codec::{DecodeLimit, Encode}; +use polkadot_sdk::{ + pallet_revive::evm::TransactionSigned, + sp_core::{self, H256}, +}; +use serde::{Deserialize, Serialize}; +use std::sync::Arc; +use substrate_runtime::{RuntimeCall, UncheckedExtrinsic}; + +use crate::substrate_node::host::recover_maybe_impersonated_address; + +const MAX_EXTRINSIC_DEPTH: u32 = 256; + +/// Transaction info for txpool RPCs with Option fields to match Anvil's null values +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct TxpoolTransactionInfo { + pub hash: H256, + pub block_hash: Option, + pub block_number: Option, + pub transaction_index: Option, + pub from: sp_core::H160, + pub transaction_signed: TransactionSigned, +} + +/// Decode extrinsic into ETH transaction payload and signed transaction +pub(super) fn decode_eth_transaction( + tx_data: &Arc, +) -> Option<(Vec, TransactionSigned)> { + let encoded = tx_data.encode(); + let ext = + UncheckedExtrinsic::decode_all_with_depth_limit(MAX_EXTRINSIC_DEPTH, &mut &encoded[..]) + .ok()?; + + let polkadot_sdk::sp_runtime::generic::UncheckedExtrinsic { + function: RuntimeCall::Revive(polkadot_sdk::pallet_revive::Call::eth_transact { payload }), + .. 
+ } = ext.0 + else { + return None; + }; + + let signed_tx = TransactionSigned::decode(&payload).ok()?; + + Some((payload, signed_tx)) +} + +/// Check if transaction matches ETH hash +pub(super) fn transaction_matches_eth_hash( + tx_data: &Arc, + target_eth_hash: B256, +) -> bool { + let Some((payload, _signed_tx)) = decode_eth_transaction(tx_data) else { + return false; + }; + + let tx_eth_hash = keccak256(&payload); + B256::from_slice(tx_eth_hash.as_ref()) == target_eth_hash +} + +/// Fields extracted from an Ethereum transaction +pub(super) struct TransactionFields { + pub nonce: sp_core::U256, + pub to: Option, + pub value: sp_core::U256, + pub gas: sp_core::U256, + pub gas_price: sp_core::U256, +} + +/// Extract fields from ETH transaction +fn extract_tx_fields(signed_tx: &TransactionSigned) -> TransactionFields { + match signed_tx { + TransactionSigned::TransactionLegacySigned(tx) => { + let t = &tx.transaction_legacy_unsigned; + TransactionFields { + nonce: t.nonce, + to: t.to, + value: t.value, + gas: t.gas, + gas_price: t.gas_price, + } + } + TransactionSigned::Transaction2930Signed(tx) => { + let t = &tx.transaction_2930_unsigned; + TransactionFields { + nonce: t.nonce, + to: t.to, + value: t.value, + gas: t.gas, + gas_price: t.gas_price, + } + } + TransactionSigned::Transaction1559Signed(tx) => { + let t = &tx.transaction_1559_unsigned; + TransactionFields { + nonce: t.nonce, + to: t.to, + value: t.value, + gas: t.gas, + gas_price: t.max_fee_per_gas, + } + } + TransactionSigned::Transaction4844Signed(tx) => { + let t = &tx.transaction_4844_unsigned; + TransactionFields { + nonce: t.nonce, + to: Some(t.to), + value: t.value, + gas: t.gas, + gas_price: t.max_fee_per_gas, + } + } + TransactionSigned::Transaction7702Signed(tx) => { + let t = &tx.transaction_7702_unsigned; + TransactionFields { + nonce: t.nonce, + to: Some(t.to), + value: t.value, + gas: t.gas, + gas_price: t.max_fee_per_gas, + } + } + } +} + +/// Extract transaction summary from extrinsic 
+pub(super) fn extract_tx_summary( + tx_data: &Arc, +) -> Option<(Address, u64, TxpoolInspectSummary)> { + let (_payload, signed_tx) = decode_eth_transaction(tx_data)?; + + let from = recover_maybe_impersonated_address(&signed_tx).ok()?; + let sender = Address::from_slice(from.as_bytes()); + + let fields = extract_tx_fields(&signed_tx); + + let to_addr = fields.to.map(|addr| Address::from_slice(addr.as_bytes())); + let value_u256 = U256::from_limbs(fields.value.0); + let gas_u64 = fields.gas.as_u64(); + let gas_price_u128 = fields.gas_price.as_u128(); + let nonce_u64 = fields.nonce.as_u64(); + + Some(( + sender, + nonce_u64, + TxpoolInspectSummary { + to: to_addr, + value: value_u256, + gas: gas_u64, + gas_price: gas_price_u128, + }, + )) +} + +/// Extract full transaction info from extrinsic +pub(super) fn extract_tx_info( + tx_data: &Arc, +) -> Option<(Address, u64, TxpoolTransactionInfo)> { + let (payload, signed_tx) = decode_eth_transaction(tx_data)?; + + let eth_hash = keccak256(&payload); + let eth_hash_h256 = H256::from_slice(eth_hash.as_ref()); + + let from = recover_maybe_impersonated_address(&signed_tx).ok()?; + let sender = Address::from_slice(from.as_bytes()); + + let fields = extract_tx_fields(&signed_tx); + let nonce_u64 = fields.nonce.as_u64(); + + let tx_info = TxpoolTransactionInfo { + hash: eth_hash_h256, + block_hash: None, + block_number: None, + transaction_index: None, + from, + transaction_signed: signed_tx, + }; + + Some((sender, nonce_u64, tx_info)) +} + +/// Extract sender address from extrinsic as Alloy Address type. +/// Helper for `anvil_remove_pool_transactions` to compare sender addresses. +pub(super) fn extract_sender( + tx_data: &Arc, +) -> Option
{ + let (_payload, signed_tx) = decode_eth_transaction(tx_data)?; + + let from = recover_maybe_impersonated_address(&signed_tx).ok()?; + let sender = Address::from_slice(from.as_bytes()); + + Some(sender) +} diff --git a/crates/anvil-polkadot/src/cmd.rs b/crates/anvil-polkadot/src/cmd.rs index bab435dc64aaa..77aa9c337628d 100644 --- a/crates/anvil-polkadot/src/cmd.rs +++ b/crates/anvil-polkadot/src/cmd.rs @@ -1,15 +1,16 @@ use crate::config::{ - AccountGenerator, AnvilNodeConfig, CHAIN_ID, DEFAULT_MNEMONIC, SubstrateNodeConfig, + AccountGenerator, AnvilNodeConfig, CHAIN_ID, DEFAULT_MNEMONIC, ForkChoice, SubstrateNodeConfig, }; use alloy_genesis::Genesis; -use alloy_primitives::{U256, utils::Unit}; +use alloy_primitives::{B256, U256, utils::Unit}; use alloy_signer_local::coins_bip39::{English, Mnemonic}; use anvil_server::ServerConfig; use clap::Parser; +use core::fmt; use foundry_common::shell; use foundry_config::Chain; use rand_08::{SeedableRng, rngs::StdRng}; -use std::{net::IpAddr, path::PathBuf, time::Duration}; +use std::{net::IpAddr, path::PathBuf, str::FromStr, time::Duration}; #[derive(Clone, Debug, Parser)] pub struct NodeArgs { @@ -109,7 +110,6 @@ impl NodeArgs { let anvil_config = AnvilNodeConfig::default() .with_gas_limit(self.evm.gas_limit) .disable_block_gas_limit(self.evm.disable_block_gas_limit) - .with_gas_price(self.evm.gas_price) .with_blocktime(self.block_time) .with_no_mining(self.no_mining) .with_mixed_mining(self.mixed_mining, self.block_time) @@ -134,7 +134,20 @@ impl NodeArgs { .with_code_size_limit(self.evm.code_size_limit) .disable_code_size_limit(self.evm.disable_code_size_limit) .with_disable_default_create2_deployer(self.evm.disable_default_create2_deployer) - .with_memory_limit(self.evm.memory_limit); + .with_memory_limit(self.evm.memory_limit) + .with_fork_choice(match (self.evm.fork_block_number, self.evm.fork_transaction_hash) { + (Some(block), None) => Some(ForkChoice::Block(block)), + (None, Some(hash)) => 
Some(ForkChoice::Transaction(hash)), + _ => self + .evm + .fork_url + .as_ref() + .and_then(|f| f.block) + .map(|num| ForkChoice::Block(num as i128)), + }) + .with_eth_rpc_url(self.evm.fork_url.map(|fork| fork.url)) + .fork_request_timeout(self.evm.fork_request_timeout.map(Duration::from_millis)) + .fork_request_retries(self.evm.fork_request_retries); let substrate_node_config = SubstrateNodeConfig::new(&anvil_config); @@ -170,6 +183,56 @@ impl NodeArgs { #[derive(Clone, Debug, Parser)] #[command(next_help_heading = "EVM options")] pub struct AnvilEvmArgs { + /// Fetch state over a remote endpoint instead of starting from an empty state. + /// + /// If you want to fetch state from a specific block number, add a block number like `http://localhost:8545@1400000` or use the `--fork-block-number` argument. + #[arg( + long, + short, + visible_alias = "rpc-url", + value_name = "URL", + help_heading = "Fork config" + )] + pub fork_url: Option, + + /// Fetch state from a specific block number over a remote endpoint. + /// + /// If negative, the given value is subtracted from the `latest` block number. + /// + /// See --fork-url. + #[arg( + long, + requires = "fork_url", + value_name = "BLOCK", + help_heading = "Fork config", + allow_hyphen_values = true + )] + pub fork_block_number: Option, + + /// Fetch state from a specific transaction hash over a remote endpoint. + /// + /// See --fork-url. + #[arg( + long, + requires = "fork_url", + value_name = "TRANSACTION", + help_heading = "Fork config", + conflicts_with = "fork_block_number" + )] + pub fork_transaction_hash: Option, + + /// Timeout in ms for requests sent to remote JSON-RPC server in forking mode. 
+ /// + /// Default value 45000 + #[arg(id = "timeout", long = "timeout", help_heading = "Fork config", requires = "fork_url")] + pub fork_request_timeout: Option, + + /// Number of retry requests for spurious networks (timed out requests) + /// + /// Default value 5 + #[arg(id = "retries", long = "retries", help_heading = "Fork config", requires = "fork_url")] + pub fork_request_retries: Option, + /// The block gas limit. #[arg(long, alias = "block-gas-limit", help_heading = "Environment config")] pub gas_limit: Option, @@ -198,10 +261,6 @@ pub struct AnvilEvmArgs { )] pub disable_code_size_limit: bool, - /// The gas price. - #[arg(long, help_heading = "Environment config")] - pub gas_price: Option, - /// The base fee in a block. #[arg( long, @@ -245,6 +304,46 @@ pub struct AnvilEvmArgs { pub memory_limit: Option, } +/// Represents the input URL for a fork with an optional trailing block number: +/// `http://localhost:8545@1000000` +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct ForkUrl { + /// The endpoint url + pub url: String, + /// Optional trailing block + pub block: Option, +} + +impl fmt::Display for ForkUrl { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.url.fmt(f)?; + if let Some(block) = self.block { + write!(f, "@{block}")?; + } + Ok(()) + } +} + +impl FromStr for ForkUrl { + type Err = String; + + fn from_str(s: &str) -> Result { + if let Some((url, block)) = s.rsplit_once('@') { + if block == "latest" { + return Ok(Self { url: url.to_string(), block: None }); + } + // this will prevent false positives for auths `user:password@example.com` + if !block.is_empty() && !block.contains(':') && !block.contains('.') { + let block: u64 = block + .parse() + .map_err(|_| format!("Failed to parse block number: `{block}`"))?; + return Ok(Self { url: url.to_string(), block: Some(block) }); + } + } + Ok(Self { url: s.to_string(), block: None }) + } +} + /// Clap's value parser for genesis. Loads a genesis.json file. 
fn read_genesis_file(path: &str) -> Result { foundry_common::fs::read_json_file(path.as_ref()).map_err(|err| err.to_string()) diff --git a/crates/anvil-polkadot/src/config.rs b/crates/anvil-polkadot/src/config.rs index 13cee2f199f42..18882e90a8f36 100644 --- a/crates/anvil-polkadot/src/config.rs +++ b/crates/anvil-polkadot/src/config.rs @@ -1,6 +1,6 @@ -use crate::substrate_node::chain_spec::keypairs_from_private_keys; +use crate::api_server::revive_conversions::ReviveAddress; use alloy_genesis::Genesis; -use alloy_primitives::{U256, hex, map::HashMap, utils::Unit}; +use alloy_primitives::{Address, TxHash, U256, hex, map::HashMap, utils::Unit}; use alloy_signer::Signer; use alloy_signer_local::{ MnemonicBuilder, PrivateKeySigner, @@ -8,7 +8,7 @@ use alloy_signer_local::{ }; use anvil_server::ServerConfig; use eyre::{Context, Result}; -use foundry_common::{duration_since_unix_epoch, sh_println}; +use foundry_common::{REQUEST_TIMEOUT, duration_since_unix_epoch, sh_println}; use polkadot_sdk::{ pallet_revive::evm::Account, sc_cli::{ @@ -48,11 +48,9 @@ pub const DEFAULT_MNEMONIC: &str = "test test test test test test test test test pub const DEFAULT_IPC_ENDPOINT: &str = if cfg!(unix) { "/tmp/anvil.ipc" } else { r"\\.\pipe\anvil.ipc" }; -/// Initial base fee for EIP-1559 blocks. -pub const INITIAL_BASE_FEE: u64 = 1_000_000_000; - -/// Initial default gas price for the first block -pub const INITIAL_GAS_PRICE: u128 = 1_875_000_000; +/// In anvil this is `1_000_000_000`, in 1e18 denomination. However, +/// asset-hub-westend runtime sets it to `1_000_000`. 
+pub const INITIAL_BASE_FEE: u128 = 1_000_000; const BANNER: &str = r" _ _ @@ -272,10 +270,8 @@ pub struct AnvilNodeConfig { pub gas_limit: Option, /// If set to `true`, disables the block gas limit pub disable_block_gas_limit: bool, - /// Default gas price for all txs - pub gas_price: Option, /// Default base fee - pub base_fee: Option, + pub base_fee: Option, /// If set to `true`, disables the enforcement of a minimum suggested priority fee pub disable_min_priority_fee: bool, /// Signer accounts that will be initialised with `genesis_balance` in the genesis block @@ -328,6 +324,14 @@ pub struct AnvilNodeConfig { pub memory_limit: Option, /// Do not print log messages. pub silent: bool, + /// url of the rpc server that should be used for any rpc calls + pub eth_rpc_url: Option, + /// pins the block number or transaction hash for the state fork + pub fork_choice: Option, + /// Timeout in for requests sent to remote JSON-RPC server in forking mode + pub fork_request_timeout: Duration, + /// Number of request retries for spurious networks + pub fork_request_retries: u32, } impl AnvilNodeConfig { @@ -347,8 +351,12 @@ Available Accounts ); let balance = alloy_primitives::utils::format_ether(self.genesis_balance); for (idx, wallet) in self.genesis_accounts.iter().enumerate() { - write!(s, "\n({idx}) {} ({balance} ETH)", Account::from(wallet.clone()).address()) - .unwrap(); + write!( + s, + "\n({idx}) {} ({balance} ETH)", + Address::from(ReviveAddress::new(Account::from(wallet.clone()).address())) + ) + .unwrap(); } let _ = write!( @@ -478,7 +486,6 @@ Genesis Number "private_keys": private_keys, "wallet": wallet_description, "base_fee": format!("{}", self.get_base_fee()), - "gas_price": format!("{}", self.get_gas_price()), "gas_limit": gas_limit, "genesis_timestamp": format!("{}", self.get_genesis_timestamp()), }) @@ -517,7 +524,6 @@ impl Default for AnvilNodeConfig { chain_id: None, gas_limit: None, disable_block_gas_limit: false, - gas_price: None, signer_accounts: 
genesis_accounts.clone(), genesis_timestamp: None, genesis_block_number: None, @@ -547,6 +553,10 @@ impl Default for AnvilNodeConfig { disable_default_create2_deployer: false, memory_limit: None, silent: false, + eth_rpc_url: None, + fork_choice: None, + fork_request_timeout: REQUEST_TIMEOUT, + fork_request_retries: 5, } } } @@ -558,18 +568,19 @@ impl AnvilNodeConfig { self.memory_limit = mems_value; self } + /// Returns the base fee to use - pub fn get_base_fee(&self) -> u64 { + pub fn get_base_fee(&self) -> u128 { self.base_fee - .or_else(|| self.genesis.as_ref().and_then(|g| g.base_fee_per_gas.map(|g| g as u64))) + .or_else(|| { + self.genesis.as_ref().and_then(|g| { + // The base fee received via CLI will be transformed to 1e-12. + g.base_fee_per_gas + }) + }) .unwrap_or(INITIAL_BASE_FEE) } - /// Returns the base fee to use - pub fn get_gas_price(&self) -> u128 { - self.gas_price.unwrap_or(INITIAL_GAS_PRICE) - } - /// Sets a custom code size limit #[must_use] pub fn with_code_size_limit(mut self, code_size_limit: Option) -> Self { @@ -620,17 +631,10 @@ impl AnvilNodeConfig { self } - /// Sets the gas price - #[must_use] - pub fn with_gas_price(mut self, gas_price: Option) -> Self { - self.gas_price = gas_price; - self - } - /// Sets the base fee #[must_use] pub fn with_base_fee(mut self, base_fee: Option) -> Self { - self.base_fee = base_fee; + self.base_fee = base_fee.map(|bf| bf.into()); self } @@ -850,6 +854,96 @@ impl AnvilNodeConfig { self.silent = silent; self } + + /// Sets the `eth_rpc_url` to use when forking + #[must_use] + pub fn with_eth_rpc_url>(mut self, eth_rpc_url: Option) -> Self { + self.eth_rpc_url = eth_rpc_url.map(Into::into); + self + } + + /// Sets the `fork_choice` to use to fork off from based on a block number + #[must_use] + pub fn with_fork_block_number>(self, fork_block_number: Option) -> Self { + self.with_fork_choice(fork_block_number.map(Into::into)) + } + + /// Sets the `fork_choice` to use to fork off from based on a 
transaction hash + #[must_use] + pub fn with_fork_transaction_hash>( + self, + fork_transaction_hash: Option, + ) -> Self { + self.with_fork_choice(fork_transaction_hash.map(Into::into)) + } + + /// Sets the `fork_choice` to use to fork off from + #[must_use] + pub fn with_fork_choice>(mut self, fork_choice: Option) -> Self { + self.fork_choice = fork_choice.map(Into::into); + self + } + + /// Sets the `fork_request_timeout` to use for requests + #[must_use] + pub fn fork_request_timeout(mut self, fork_request_timeout: Option) -> Self { + if let Some(fork_request_timeout) = fork_request_timeout { + self.fork_request_timeout = fork_request_timeout; + } + self + } + + /// Sets the `fork_request_retries` to use for spurious networks + #[must_use] + pub fn fork_request_retries(mut self, fork_request_retries: Option) -> Self { + if let Some(fork_request_retries) = fork_request_retries { + self.fork_request_retries = fork_request_retries; + } + self + } +} + +/// Fork delimiter used to specify which block or transaction to fork from. +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum ForkChoice { + /// Block number to fork from. + /// + /// If negative, the given value is subtracted from the `latest` block number. + Block(i128), + /// Transaction hash to fork from. 
+ Transaction(TxHash), +} + +impl ForkChoice { + /// Returns the block number to fork from + pub fn block_number(&self) -> Option { + match self { + Self::Block(block_number) => Some(*block_number), + Self::Transaction(_) => None, + } + } + + /// Returns the transaction hash to fork from + pub fn transaction_hash(&self) -> Option { + match self { + Self::Block(_) => None, + Self::Transaction(transaction_hash) => Some(*transaction_hash), + } + } +} + +/// Convert a transaction hash into a ForkChoice +impl From for ForkChoice { + fn from(tx_hash: TxHash) -> Self { + Self::Transaction(tx_hash) + } +} + +/// Convert a decimal block number into a ForkChoice +impl From for ForkChoice { + fn from(block: u64) -> Self { + Self::Block(block as i128) + } } /// Can create dev accounts @@ -919,3 +1013,15 @@ impl AccountGenerator { Ok(wallets) } } + +fn keypairs_from_private_keys( + accounts: &[PrivateKeySigner], +) -> Result, subxt_signer::eth::Error> { + accounts + .iter() + .map(|signer| { + let key = Keypair::from_secret_key(signer.credential().to_bytes().into())?; + Ok(key) + }) + .collect() +} diff --git a/crates/anvil-polkadot/src/substrate_node/chain_spec.rs b/crates/anvil-polkadot/src/substrate_node/chain_spec.rs index 3c7fc16920ded..7cf58735cd0f5 100644 --- a/crates/anvil-polkadot/src/substrate_node/chain_spec.rs +++ b/crates/anvil-polkadot/src/substrate_node/chain_spec.rs @@ -1,16 +1,35 @@ use crate::substrate_node::genesis::GenesisConfig; -use alloy_signer_local::PrivateKeySigner; +use codec::{Decode, Encode}; use polkadot_sdk::{ - sc_chain_spec::{ChainSpec, GetExtension}, + sc_chain_spec::{ChainSpec, GetExtension, json_patch}, + sc_executor, sc_executor::HostFunctions, sc_network::config::MultiaddrWithPeerId, sc_service::{ChainType, GenericChainSpec, Properties}, sc_telemetry::TelemetryEndpoints, - sp_core::storage::Storage, + sp_core::{ + storage::Storage, + traits::{CallContext, CodeExecutor, Externalities, FetchRuntimeCode, RuntimeCode}, + }, + 
sp_genesis_builder::Result as BuildResult, + sp_io::{self, hashing::blake2_256}, sp_runtime::BuildStorage, + sp_state_machine::BasicExternalities, }; -use substrate_runtime::WASM_BINARY; -use subxt_signer::eth::Keypair; +use serde_json::Value; +use std::borrow::Cow; + +pub fn development_chain_spec( + genesis_config: GenesisConfig, +) -> Result { + let inner = GenericChainSpec::builder(&genesis_config.code, Default::default()) + .with_name("Development") + .with_id("dev") + .with_chain_type(ChainType::Development) + .with_properties(props()) + .build(); + Ok(DevelopmentChainSpec { inner, genesis_config }) +} /// This is a wrapper around the general Substrate ChainSpec type that allows manual changes to the /// genesis block. @@ -26,8 +45,20 @@ where GenericChainSpec: BuildStorage, { fn assimilate_storage(&self, storage: &mut Storage) -> Result<(), String> { - self.inner.assimilate_storage(storage)?; storage.top.extend(self.genesis_config.as_storage_key_value()); + + // We need to initialise the storage used when calling into the runtime for the genesis + // config, so that the customised items (like block number and timestamp) will be + // seen even in the code that processes the genesis config patch. + let temp_storage = storage.clone(); + + GenesisBuilderRuntimeCaller::::new(&self.genesis_config.code[..]) + .get_storage_for_patch( + self.genesis_config.runtime_genesis_config_patch(), + temp_storage, + )? 
+ .assimilate_storage(storage)?; + Ok(()) } } @@ -105,35 +136,108 @@ where fn props() -> Properties { let mut properties = Properties::new(); properties.insert("tokenDecimals".to_string(), 12.into()); - properties.insert("tokenSymbol".to_string(), "MINI".into()); + properties.insert("tokenSymbol".to_string(), "DOT".into()); properties } -pub fn development_chain_spec( - genesis_config: GenesisConfig, -) -> Result { - let inner = GenericChainSpec::builder( - WASM_BINARY.expect("Development wasm not available"), - Default::default(), - ) - .with_name("Development") - .with_id("dev") - .with_chain_type(ChainType::Development) - .with_genesis_config_patch(genesis_config.runtime_genesis_config_patch()) - .with_properties(props()) - .build(); - Ok(DevelopmentChainSpec { inner, genesis_config }) +// This mostly copies the upstream `GenesisConfigBuilderRuntimeCaller`, but with the ability of +// injecting genesis state even before the genesis config builders in the runtime are run via +// `GenesisBuilder_build_state` +struct GenesisBuilderRuntimeCaller<'a, EHF = ()> +where + EHF: HostFunctions, +{ + code: Cow<'a, [u8]>, + code_hash: Vec, + executor: sc_executor::WasmExecutor<(sp_io::SubstrateHostFunctions, EHF)>, } -pub fn keypairs_from_private_keys( - accounts: &[PrivateKeySigner], -) -> Result, subxt_signer::eth::Error> { - accounts - .iter() - .map(|signer| { - let key = - subxt_signer::eth::Keypair::from_secret_key(signer.credential().to_bytes().into())?; - Ok(key) - }) - .collect() +impl<'a, EHF> FetchRuntimeCode for GenesisBuilderRuntimeCaller<'a, EHF> +where + EHF: HostFunctions, +{ + fn fetch_runtime_code(&self) -> Option> { + Some(self.code.as_ref().into()) + } +} + +impl<'a, EHF> GenesisBuilderRuntimeCaller<'a, EHF> +where + EHF: HostFunctions, +{ + fn new(code: &'a [u8]) -> Self { + GenesisBuilderRuntimeCaller { + code: code.into(), + code_hash: blake2_256(code).to_vec(), + executor: sc_executor::WasmExecutor::<(sp_io::SubstrateHostFunctions, EHF)>::builder() + 
.with_allow_missing_host_functions(true) + .build(), + } + } + + fn get_storage_for_patch( + &self, + patch: Value, + genesis_storage: Storage, + ) -> core::result::Result { + let mut config = self.get_named_preset(None)?; + json_patch::merge(&mut config, patch); + self.get_storage_for_config(config, genesis_storage) + } + + fn call( + &self, + ext: &mut dyn Externalities, + method: &str, + data: &[u8], + ) -> sc_executor::error::Result> { + self.executor + .call( + ext, + &RuntimeCode { heap_pages: None, code_fetcher: self, hash: self.code_hash.clone() }, + method, + data, + CallContext::Offchain, + ) + .0 + } + + fn get_named_preset(&self, id: Option<&String>) -> core::result::Result { + let mut t = BasicExternalities::new_empty(); + let call_result = self + .call(&mut t, "GenesisBuilder_get_preset", &id.encode()) + .map_err(|e| format!("wasm call error {e}"))?; + + let named_preset = Option::>::decode(&mut &call_result[..]) + .map_err(|e| format!("scale codec error: {e}"))?; + + if let Some(named_preset) = named_preset { + Ok(serde_json::from_slice(&named_preset[..]).expect("returned value is json. qed.")) + } else { + Err(format!("The preset with name {id:?} is not available.")) + } + } + + fn get_storage_for_config( + &self, + config: Value, + genesis_storage: Storage, + ) -> core::result::Result { + // This is the key difference compared to the upstream variant, we don't initialise the + // storage as empty. + let mut ext = BasicExternalities::new(genesis_storage); + + let json_pretty_str = serde_json::to_string_pretty(&config) + .map_err(|e| format!("json to string failed: {e}"))?; + + let call_result = self + .call(&mut ext, "GenesisBuilder_build_state", &json_pretty_str.encode()) + .map_err(|e| format!("wasm call error {e}"))?; + + BuildResult::decode(&mut &call_result[..]) + .map_err(|e| format!("scale codec error: {e}"))? 
+ .map_err(|e| format!("{e} for blob:\n{json_pretty_str}"))?; + + Ok(ext.into_storages()) + } } diff --git a/crates/anvil-polkadot/src/substrate_node/genesis.rs b/crates/anvil-polkadot/src/substrate_node/genesis.rs index 98907edbc1614..0777218ea8af3 100644 --- a/crates/anvil-polkadot/src/substrate_node/genesis.rs +++ b/crates/anvil-polkadot/src/substrate_node/genesis.rs @@ -13,15 +13,16 @@ use polkadot_sdk::{ sc_client_api::{BlockImportOperation, backend::Backend}, sc_executor::RuntimeVersionOf, sp_blockchain, - sp_core::{H160, storage::Storage}, + sp_core::{self, H160, storage::Storage}, sp_runtime::{ - BuildStorage, + BuildStorage, FixedU128, traits::{Block as BlockT, Hash as HashT, HashingFor, Header as HeaderT}, }, }; use serde::{Deserialize, Serialize}; use serde_json::{Value, json}; use std::{collections::BTreeMap, marker::PhantomData, sync::Arc}; +use substrate_runtime::{WASM_BINARY, constants::NATIVE_TO_ETH_RATIO}; use subxt_signer::eth::Keypair; /// Genesis settings @@ -37,7 +38,7 @@ pub struct GenesisConfig { /// The initial number for the genesis block pub number: u32, /// The genesis header base fee - pub base_fee_per_gas: u64, + pub base_fee_per_gas: FixedU128, /// The genesis header gas limit. pub gas_limit: Option, /// Signer accounts from account_generator @@ -46,6 +47,8 @@ pub struct GenesisConfig { pub genesis_balance: U256, /// Coinbase address pub coinbase: Option
, + /// Substrate runtime code + pub code: Vec, } impl<'a> From<&'a AnvilNodeConfig> for GenesisConfig { @@ -62,11 +65,15 @@ impl<'a> From<&'a AnvilNodeConfig> for GenesisConfig { .get_genesis_number() .try_into() .expect("Genesis block number overflow"), - base_fee_per_gas: anvil_config.get_base_fee(), + base_fee_per_gas: FixedU128::from_rational( + anvil_config.get_base_fee(), + NATIVE_TO_ETH_RATIO.into(), + ), gas_limit: anvil_config.gas_limit, genesis_accounts: anvil_config.genesis_accounts.clone(), genesis_balance: anvil_config.genesis_balance, coinbase: anvil_config.genesis.as_ref().map(|g| g.coinbase), + code: WASM_BINARY.expect("Development wasm not available").to_vec(), } } } @@ -94,8 +101,8 @@ impl GenesisConfig { (well_known_keys::TIMESTAMP.to_vec(), self.timestamp.encode()), (well_known_keys::BLOCK_NUMBER_KEY.to_vec(), self.number.encode()), (well_known_keys::AURA_AUTHORITIES.to_vec(), vec![aura_authority_id].encode()), + (sp_core::storage::well_known_keys::CODE.to_vec(), self.code.clone()), ]; - // TODO: add other fields storage } @@ -151,6 +158,9 @@ impl GenesisConfig { "revive": { "accounts": revive_genesis_accounts, }, + "transactionPayment": { + "multiplier": self.base_fee_per_gas.into_inner().to_string(), + } }) } } @@ -185,7 +195,7 @@ impl, E: RuntimeVersionOf> ) } - pub fn new_with_storage( + fn new_with_storage( genesis_number: u64, genesis_storage: Storage, commit_genesis_state: bool, @@ -261,11 +271,13 @@ mod tests { let timestamp: u64 = 10; let chain_id: u64 = 42; let authority_id: [u8; 32] = [0xEE; 32]; + let base_fee_per_gas = FixedU128::from_rational(6_000_000, NATIVE_TO_ETH_RATIO.into()); let genesis_config = GenesisConfig { number: block_number, timestamp, chain_id, coinbase: Some(Address::from([0xEE; 20])), + base_fee_per_gas, ..Default::default() }; let genesis_storage = genesis_config.as_storage_key_value(); diff --git a/crates/anvil-polkadot/src/substrate_node/host.rs b/crates/anvil-polkadot/src/substrate_node/host.rs index 
6e43bacf516b8..434c06a78931d 100644 --- a/crates/anvil-polkadot/src/substrate_node/host.rs +++ b/crates/anvil-polkadot/src/substrate_node/host.rs @@ -28,10 +28,27 @@ use sp_runtime_interface::{ // The host functions in this module expect transactions // with fake signatures conforming the format checked in this function. -fn is_impersonated(sig: &[u8]) -> bool { +pub fn is_impersonated(sig: &[u8]) -> bool { sig[..12] == [0; 12] && sig[32..64] == [0; 32] } +/// Recover sender address from signed transaction, handling impersonated transactions. +/// For impersonated transactions (fake signatures), extracts the address embedded in the signature. +/// For normal transactions, performs standard ECDSA recovery. +#[allow(clippy::result_unit_err)] +pub fn recover_maybe_impersonated_address( + signed_tx: &polkadot_sdk::pallet_revive::evm::TransactionSigned, +) -> Result { + let sig = signed_tx.raw_signature()?; + if is_impersonated(&sig) { + let mut res = [0; 20]; + res.copy_from_slice(&sig[12..32]); + Ok(polkadot_sdk::sp_core::H160::from(res)) + } else { + signed_tx.recover_eth_address() + } +} + #[runtime_interface] pub trait Crypto { #[version(1)] diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/block_import_operation.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/block_import_operation.rs new file mode 100644 index 0000000000000..477d00fc33ff1 --- /dev/null +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/block_import_operation.rs @@ -0,0 +1,188 @@ +use super::{blockchain::StoredBlock, forked_lazy_backend::ForkedLazyBackend}; +use polkadot_sdk::{ + sc_client_api::backend, + sp_blockchain, + sp_runtime::{ + Justification, Justifications, StateVersion, Storage, + traits::{Block as BlockT, HashingFor}, + }, + sp_state_machine::{ + self, Backend, BackendTransaction, ChildStorageCollection, StorageCollection, + }, +}; +use serde::de::DeserializeOwned; + +pub(crate) struct PendingBlock { + pub(crate) block: 
StoredBlock, + pub(crate) state: backend::NewBlockState, +} + +pub struct BlockImportOperation { + pub(crate) pending_block: Option>, + pub(crate) old_state: ForkedLazyBackend, + pub(crate) new_state: Option>>, + pub(crate) aux: Vec<(Vec, Option>)>, + pub(crate) storage_updates: StorageCollection, + pub(crate) child_storage_updates: ChildStorageCollection, + pub(crate) finalized_blocks: Vec<(Block::Hash, Option)>, + pub(crate) set_head: Option, +} + +impl BlockImportOperation { + pub(crate) fn apply_storage( + &mut self, + storage: Storage, + commit: bool, + state_version: StateVersion, + ) -> sp_blockchain::Result { + check_genesis_storage(&storage)?; + + let child_delta = storage.children_default.values().map(|child_content| { + ( + &child_content.child_info, + child_content.data.iter().map(|(k, v)| (k.as_ref(), Some(v.as_ref()))), + ) + }); + + let (root, transaction) = self.old_state.full_storage_root( + storage.top.iter().map(|(k, v)| (k.as_ref(), Some(v.as_ref()))), + child_delta, + state_version, + ); + + if commit { + self.new_state = Some(transaction); + self.storage_updates = + storage + .top + .iter() + .map(|(k, v)| { + if v.is_empty() { (k.clone(), None) } else { (k.clone(), Some(v.clone())) } + }) + .collect(); + + self.child_storage_updates = storage + .children_default + .values() + .map(|child_content| { + let child_storage: StorageCollection = child_content + .data + .iter() + .map(|(k, v)| { + if v.is_empty() { + (k.clone(), None) + } else { + (k.clone(), Some(v.clone())) + } + }) + .collect(); + (child_content.child_info.storage_key().to_vec(), child_storage) + }) + .collect(); + } + Ok(root) + } +} + +impl backend::BlockImportOperation + for BlockImportOperation +{ + type State = ForkedLazyBackend; + + fn state(&self) -> sp_blockchain::Result> { + Ok(Some(&self.old_state)) + } + + fn set_block_data( + &mut self, + header: ::Header, + body: Option::Extrinsic>>, + _indexed_body: Option>>, + justifications: Option, + state: backend::NewBlockState, 
+ ) -> sp_blockchain::Result<()> { + assert!(self.pending_block.is_none(), "Only one block per operation is allowed"); + self.pending_block = + Some(PendingBlock { block: StoredBlock::new(header, body, justifications), state }); + Ok(()) + } + + fn update_db_storage( + &mut self, + update: BackendTransaction>, + ) -> sp_blockchain::Result<()> { + self.new_state = Some(update); + Ok(()) + } + + fn set_genesis_state( + &mut self, + storage: Storage, + commit: bool, + state_version: StateVersion, + ) -> sp_blockchain::Result { + self.apply_storage(storage, commit, state_version) + } + + fn reset_storage( + &mut self, + storage: Storage, + state_version: StateVersion, + ) -> sp_blockchain::Result { + self.apply_storage(storage, true, state_version) + } + + fn insert_aux(&mut self, ops: I) -> sp_blockchain::Result<()> + where + I: IntoIterator, Option>)>, + { + self.aux.append(&mut ops.into_iter().collect()); + Ok(()) + } + + fn update_storage( + &mut self, + update: StorageCollection, + child_update: ChildStorageCollection, + ) -> sp_blockchain::Result<()> { + self.storage_updates = update; + self.child_storage_updates = child_update; + Ok(()) + } + + fn mark_finalized( + &mut self, + hash: Block::Hash, + justification: Option, + ) -> sp_blockchain::Result<()> { + self.finalized_blocks.push((hash, justification)); + Ok(()) + } + + fn mark_head(&mut self, hash: Block::Hash) -> sp_blockchain::Result<()> { + assert!(self.pending_block.is_none(), "Only one set block per operation is allowed"); + self.set_head = Some(hash); + Ok(()) + } + + fn update_transaction_index( + &mut self, + _index: Vec, + ) -> sp_blockchain::Result<()> { + Ok(()) + } + + fn set_create_gap(&mut self, _create_gap: bool) {} +} + +pub(crate) fn check_genesis_storage(storage: &Storage) -> sp_blockchain::Result<()> { + if storage + .top + .keys() + .any(|k| polkadot_sdk::sp_core::storage::well_known_keys::is_child_storage_key(k)) + { + return Err(sp_blockchain::Error::InvalidState); + } + + Ok(()) +} 
diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/blockchain.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/blockchain.rs new file mode 100644 index 0000000000000..fd11c8b27e413 --- /dev/null +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/blockchain.rs @@ -0,0 +1,462 @@ +use crate::substrate_node::lazy_loading::{LAZY_LOADING_LOG_TARGET, rpc_client::RPCClient}; +use parking_lot::RwLock; +use polkadot_sdk::{ + sc_client_api::{ + backend::{self, NewBlockState}, + leaves::LeafSet, + }, + sp_blockchain::{self, BlockStatus, CachedHeaderMetadata, HeaderBackend, HeaderMetadata}, + sp_runtime::{ + Justification, Justifications, + generic::BlockId, + traits::{Block as BlockT, Header as HeaderT, NumberFor, Zero}, + }, +}; +use serde::de::DeserializeOwned; +use std::{collections::HashMap, sync::Arc}; + +#[derive(PartialEq, Eq, Clone)] +pub(crate) enum StoredBlock { + Header(B::Header, Option), + Full(B, Option), +} + +impl StoredBlock { + pub(crate) fn new( + header: B::Header, + body: Option>, + just: Option, + ) -> Self { + match body { + Some(body) => Self::Full(B::new(header, body), just), + None => Self::Header(header, just), + } + } + + pub(crate) fn header(&self) -> &B::Header { + match *self { + Self::Header(ref h, _) => h, + Self::Full(ref b, _) => b.header(), + } + } + + pub(crate) fn justifications(&self) -> Option<&Justifications> { + match *self { + Self::Header(_, ref j) | Self::Full(_, ref j) => j.as_ref(), + } + } + + pub(crate) fn extrinsics(&self) -> Option<&[B::Extrinsic]> { + match *self { + Self::Header(_, _) => None, + Self::Full(ref b, _) => Some(b.extrinsics()), + } + } + + pub(crate) fn into_inner( + self, + ) -> (B::Header, Option>, Option) { + match self { + Self::Header(header, just) => (header, None, just), + Self::Full(block, just) => { + let (header, body) = block.deconstruct(); + (header, Some(body), just) + } + } + } +} + +#[derive(Clone)] +pub(crate) struct BlockchainStorage { + 
pub(crate) blocks: HashMap>, + pub(crate) hashes: HashMap, Block::Hash>, + pub(crate) best_hash: Block::Hash, + pub(crate) best_number: NumberFor, + pub(crate) finalized_hash: Block::Hash, + pub(crate) finalized_number: NumberFor, + pub(crate) genesis_hash: Block::Hash, + pub(crate) leaves: LeafSet>, + pub(crate) aux: HashMap, Vec>, +} + +/// In-memory blockchain. Supports concurrent reads. +#[derive(Clone)] +pub struct Blockchain { + rpc_client: Option>>, + pub(crate) storage: Arc>>, +} + +impl Blockchain { + /// Create new in-memory blockchain storage. + pub(crate) fn new(rpc_client: Option>>) -> Self { + let storage = Arc::new(RwLock::new(BlockchainStorage { + blocks: HashMap::new(), + hashes: HashMap::new(), + best_hash: Default::default(), + best_number: Zero::zero(), + finalized_hash: Default::default(), + finalized_number: Zero::zero(), + genesis_hash: Default::default(), + leaves: LeafSet::new(), + aux: HashMap::new(), + })); + Self { rpc_client, storage } + } + + #[inline] + fn rpc(&self) -> Option<&dyn RPCClient> { + self.rpc_client.as_deref() + } + + /// Get header hash of given block. + pub fn id(&self, id: BlockId) -> Option { + match id { + BlockId::Hash(h) => Some(h), + BlockId::Number(n) => { + let block_hash = self.storage.read().hashes.get(&n).copied(); + + match block_hash { + None => { + let block_hash = + self.rpc().and_then(|rpc| rpc.block_hash(Some(n)).ok().flatten()); + if let Some(h) = block_hash { + self.storage.write().hashes.insert(n, h); + } + block_hash + } + block_hash => block_hash, + } + } + } + } + + /// Insert a block header and associated data. 
+ pub fn insert( + &self, + hash: Block::Hash, + header: ::Header, + justifications: Option, + body: Option::Extrinsic>>, + new_state: NewBlockState, + ) -> sp_blockchain::Result<()> { + let number = *header.number(); + + if new_state.is_best() { + self.apply_head(&header)?; + } + + let mut storage = self.storage.write(); + + // Always insert the block into blocks and hashes storage + storage.blocks.insert(hash, StoredBlock::new(header.clone(), body, justifications)); + storage.hashes.insert(number, hash); + + // Set genesis_hash only for the first block inserted + if storage.blocks.len() == 1 { + storage.genesis_hash = hash; + } + + // Update leaves for non-genesis blocks + if storage.blocks.len() > 1 { + storage.leaves.import(hash, number, *header.parent_hash()); + } + + // Finalize block only if explicitly requested via new_state + if let NewBlockState::Final = new_state { + storage.finalized_hash = hash; + storage.finalized_number = number; + } + + Ok(()) + } + + /// Set an existing block as head. + pub fn set_head(&self, hash: Block::Hash) -> sp_blockchain::Result<()> { + let header = self + .header(hash)? 
+ .ok_or_else(|| sp_blockchain::Error::UnknownBlock(format!("{hash:?}")))?; + + self.apply_head(&header) + } + + fn apply_head(&self, header: &::Header) -> sp_blockchain::Result<()> { + let mut storage = self.storage.write(); + + let hash = header.hash(); + let number = header.number(); + + storage.best_hash = hash; + storage.best_number = *number; + storage.hashes.insert(*number, hash); + + Ok(()) + } + + pub(crate) fn finalize_header( + &self, + block: Block::Hash, + justification: Option, + ) -> sp_blockchain::Result<()> { + let mut storage = self.storage.write(); + storage.finalized_hash = block; + + if justification.is_some() { + let block = storage + .blocks + .get_mut(&block) + .expect("hash was fetched from a block in the db; qed"); + + let block_justifications = match block { + StoredBlock::Header(_, j) | StoredBlock::Full(_, j) => j, + }; + + *block_justifications = justification.map(Justifications::from); + } + + Ok(()) + } + + pub(crate) fn append_justification( + &self, + hash: Block::Hash, + justification: Justification, + ) -> sp_blockchain::Result<()> { + let mut storage = self.storage.write(); + + let block = + storage.blocks.get_mut(&hash).expect("hash was fetched from a block in the db; qed"); + + let block_justifications = match block { + StoredBlock::Header(_, j) | StoredBlock::Full(_, j) => j, + }; + + if let Some(stored_justifications) = block_justifications { + if !stored_justifications.append(justification) { + return Err(sp_blockchain::Error::BadJustification( + "Duplicate consensus engine ID".into(), + )); + } + } else { + *block_justifications = Some(Justifications::from(justification)); + }; + + Ok(()) + } + + pub(crate) fn write_aux(&self, ops: Vec<(Vec, Option>)>) { + let mut storage = self.storage.write(); + for (k, v) in ops { + match v { + Some(v) => storage.aux.insert(k, v), + None => storage.aux.remove(&k), + }; + } + } +} + +impl HeaderBackend for Blockchain { + fn header( + &self, + hash: Block::Hash, + ) -> 
sp_blockchain::Result::Header>> { + // First, try to get the header from local storage + if let Some(header) = self.storage.read().blocks.get(&hash).map(|b| b.header().clone()) { + return Ok(Some(header)); + } + + // If not found in local storage, fetch from RPC client + let header = if let Some(rpc) = self.rpc() { + rpc.block(Some(hash)).ok().flatten().map(|full| { + let block = full.block.clone(); + self.storage + .write() + .blocks + .insert(hash, StoredBlock::Full(block.clone(), full.justifications)); + block.header().clone() + }) + } else { + None + }; + + if header.is_none() { + tracing::warn!( + target: LAZY_LOADING_LOG_TARGET, + "Expected block {:x?} to exist.", + &hash + ); + } + + Ok(header) + } + + fn info(&self) -> sp_blockchain::Info { + let storage = self.storage.read(); + let finalized_state = if storage.blocks.len() <= 1 { + None + } else { + Some((storage.finalized_hash, storage.finalized_number)) + }; + + sp_blockchain::Info { + best_hash: storage.best_hash, + best_number: storage.best_number, + genesis_hash: storage.genesis_hash, + finalized_hash: storage.finalized_hash, + finalized_number: storage.finalized_number, + finalized_state, + number_leaves: storage.leaves.count(), + block_gap: None, + } + } + + fn status(&self, hash: Block::Hash) -> sp_blockchain::Result { + // Check local storage first + if self.storage.read().blocks.contains_key(&hash) { + return Ok(BlockStatus::InChain); + } + + // If not in local storage, check RPC + if let Some(rpc) = self.rpc() { + match rpc.header(Some(hash)) { + Ok(Some(_)) => Ok(BlockStatus::InChain), + Ok(None) => Ok(BlockStatus::Unknown), + Err(_) => Ok(BlockStatus::Unknown), + } + } else { + Ok(BlockStatus::Unknown) + } + } + + fn number(&self, hash: Block::Hash) -> sp_blockchain::Result>> { + if let Some(b) = self.storage.read().blocks.get(&hash) { + return Ok(Some(*b.header().number())); + } + match self.rpc() { + Some(rpc) => match rpc.block(Some(hash)) { + Ok(Some(block)) => 
Ok(Some(*block.block.header().number())), + err => Err(sp_blockchain::Error::UnknownBlock(format!( + "Failed to fetch block number from RPC: {err:?}" + ))), + }, + None => Err(sp_blockchain::Error::UnknownBlock( + "RPC not configured to resolve block number".into(), + )), + } + } + + fn hash( + &self, + number: <::Header as HeaderT>::Number, + ) -> sp_blockchain::Result> { + Ok(self.id(BlockId::Number(number))) + } +} + +impl HeaderMetadata for Blockchain { + type Error = sp_blockchain::Error; + + fn header_metadata( + &self, + hash: Block::Hash, + ) -> Result, Self::Error> { + self.header(hash)?.map(|header| CachedHeaderMetadata::from(&header)).ok_or_else(|| { + sp_blockchain::Error::UnknownBlock(format!("header not found: {hash:?}")) + }) + } + + fn insert_header_metadata(&self, _hash: Block::Hash, _metadata: CachedHeaderMetadata) { + // No need to implement. + unimplemented!("insert_header_metadata") + } + fn remove_header_metadata(&self, _hash: Block::Hash) { + // No need to implement. 
+ unimplemented!("remove_header_metadata") + } +} + +impl sp_blockchain::Backend for Blockchain { + fn body( + &self, + hash: Block::Hash, + ) -> sp_blockchain::Result::Extrinsic>>> { + if let Some(xs) = + self.storage.read().blocks.get(&hash).and_then(|b| b.extrinsics().map(|x| x.to_vec())) + { + return Ok(Some(xs)); + } + let extrinsics = self.rpc().and_then(|rpc| { + rpc.block(Some(hash)).ok().flatten().map(|b| b.block.extrinsics().to_vec()) + }); + Ok(extrinsics) + } + + fn justifications(&self, hash: Block::Hash) -> sp_blockchain::Result> { + // Check local storage first + if let Some(justifications) = + self.storage.read().blocks.get(&hash).and_then(|b| b.justifications().cloned()) + { + return Ok(Some(justifications)); + } + + // If not in local storage, fetch from RPC + let justifications = self.rpc().and_then(|rpc| { + rpc.block(Some(hash)).ok().flatten().and_then(|full| full.justifications) + }); + + Ok(justifications) + } + + fn last_finalized(&self) -> sp_blockchain::Result { + let last_finalized = self.storage.read().finalized_hash; + + Ok(last_finalized) + } + + fn leaves(&self) -> sp_blockchain::Result> { + let leaves = self.storage.read().leaves.hashes(); + + Ok(leaves) + } + + fn children(&self, _parent_hash: Block::Hash) -> sp_blockchain::Result> { + unimplemented!("Not supported by the `lazy-loading` backend.") + } + + fn indexed_transaction(&self, _hash: Block::Hash) -> sp_blockchain::Result>> { + unimplemented!("Not supported by the `lazy-loading` backend.") + } + + fn block_indexed_body( + &self, + _hash: Block::Hash, + ) -> sp_blockchain::Result>>> { + unimplemented!("Not supported by the `lazy-loading` backend.") + } +} + +impl backend::AuxStore for Blockchain { + fn insert_aux< + 'a, + 'b: 'a, + 'c: 'a, + I: IntoIterator, + D: IntoIterator, + >( + &self, + insert: I, + delete: D, + ) -> sp_blockchain::Result<()> { + let mut storage = self.storage.write(); + for (k, v) in insert { + storage.aux.insert(k.to_vec(), v.to_vec()); + } + for k in 
delete { + storage.aux.remove(*k); + } + Ok(()) + } + + fn get_aux(&self, key: &[u8]) -> sp_blockchain::Result>> { + Ok(self.storage.read().aux.get(key).cloned()) + } +} diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/forked_lazy_backend.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/forked_lazy_backend.rs new file mode 100644 index 0000000000000..58597a0e120bd --- /dev/null +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/forked_lazy_backend.rs @@ -0,0 +1,436 @@ +use super::make_composite_child_key; +use crate::substrate_node::lazy_loading::{LAZY_LOADING_LOG_TARGET, rpc_client::RPCClient}; +use alloy_primitives::hex; +use parking_lot::RwLock; +use polkadot_sdk::{ + sc_client_api::StorageKey, + sp_core, + sp_runtime::{ + StateVersion, + traits::{Block as BlockT, HashingFor}, + }, + sp_state_machine::{ + self, BackendTransaction, InMemoryBackend, IterArgs, StorageCollection, StorageValue, + TrieBackend, backend::AsTrieBackend, + }, + sp_storage::ChildInfo, + sp_trie::{self, PrefixedMemoryDB}, +}; +use serde::de::DeserializeOwned; +use std::{ + collections::{HashMap, HashSet}, + sync::Arc, +}; + +/// DB-backed patricia trie state, transaction type is an overlay of changes to commit. +pub type DbState = TrieBackend>>, HashingFor>; + +/// Simple wrapper around the InMemoryBackend's RawIter that delegates all operations. 
+pub struct RawIter { + inner: > as sp_state_machine::Backend>>::RawIter, +} + +impl sp_state_machine::StorageIterator> + for RawIter +{ + type Backend = ForkedLazyBackend; + type Error = String; + + fn next_key( + &mut self, + backend: &Self::Backend, + ) -> Option> { + let db = backend.db.read(); + self.inner.next_key(&*db) + } + + fn next_pair( + &mut self, + backend: &Self::Backend, + ) -> Option> + { + let db = backend.db.read(); + self.inner.next_pair(&*db) + } + + fn was_complete(&self) -> bool { + self.inner.was_complete() + } +} + +#[derive(Debug, Clone)] +pub struct ForkedLazyBackend { + pub(crate) rpc_client: Option>>, + pub(crate) block_hash: Option, + pub(crate) fork_block: Option, + pub(crate) db: Arc>>>, + + /// Keys explicitly deleted after fork. Prevents RPC fallback from returning stale values + /// for deleted keys (distinguishes "not cached locally" from "deleted"). + pub(crate) removed_keys: Arc>>>, + + pub(crate) before_fork: bool, +} + +impl ForkedLazyBackend { + pub(crate) fn update_storage(&self, key: &[u8], value: &Option>) { + if let Some(val) = value { + let mut entries: HashMap, StorageCollection> = Default::default(); + entries.insert(None, vec![(key.to_vec(), Some(val.clone()))]); + + self.db.write().insert(entries, StateVersion::V1); + } + } + + pub(crate) fn update_child_storage( + &self, + child_info: &ChildInfo, + key: &[u8], + value: &Option>, + ) { + if let Some(val) = value { + let mut entries: HashMap, StorageCollection> = Default::default(); + entries.insert(Some(child_info.clone()), vec![(key.to_vec(), Some(val.clone()))]); + + self.db.write().insert(entries, StateVersion::V1); + } + } + + #[inline] + pub(crate) fn rpc(&self) -> Option<&dyn RPCClient> { + self.rpc_client.as_deref() + } +} + +impl sp_state_machine::Backend> + for ForkedLazyBackend +{ + type Error = as sp_state_machine::Backend>>::Error; + type TrieBackendStorage = PrefixedMemoryDB>; + type RawIter = RawIter; + + fn storage(&self, key: &[u8]) -> Result, 
Self::Error> { + let remote_fetch = |block: Option| -> Option> { + self.rpc() + .and_then(|rpc| rpc.storage(StorageKey(key.to_vec()), block).ok()) + .flatten() + .map(|v| v.0) + }; + + // When before_fork, try RPC first, then fall back to local DB + if self.before_fork { + if self.rpc().is_some() { + return Ok(remote_fetch(self.block_hash)); + } else { + // No RPC client, try to read from local DB + let readable_db = self.db.read(); + return Ok(readable_db.storage(key).ok().flatten()); + } + } + + let readable_db = self.db.read(); + let maybe_storage = readable_db.storage(key); + let value = match maybe_storage { + Ok(Some(data)) => Some(data), + _ if !self.removed_keys.read().contains(key) => { + let result = + if self.rpc().is_some() { remote_fetch(self.fork_block) } else { None }; + + drop(readable_db); + self.update_storage(key, &result); + + result + } + _ => None, + }; + + Ok(value) + } + + fn storage_hash( + &self, + key: &[u8], + ) -> Result as sp_core::Hasher>::Out>, Self::Error> { + let remote_fetch = |block: Option| -> Result< + Option< as sp_core::Hasher>::Out>, + Self::Error, + > { + match self.rpc() { + Some(rpc) => rpc + .storage_hash(StorageKey(key.to_vec()), block) + .map_err(|e| format!("Failed to fetch storage hash from RPC: {e:?}")), + None => Ok(None), + } + }; + + // When before_fork, try RPC first, then fall back to local DB + if self.before_fork { + if self.rpc().is_some() { + return remote_fetch(self.block_hash); + } else { + // No RPC client, try to read from local DB + return Ok(self.db.read().storage_hash(key).ok().flatten()); + } + } + + let storage_hash = self.db.read().storage_hash(key); + match storage_hash { + Ok(Some(hash)) => Ok(Some(hash)), + _ if !self.removed_keys.read().contains(key) => { + if self.rpc().is_some() { + remote_fetch(self.fork_block) + } else { + Ok(None) + } + } + _ => Ok(None), + } + } + + fn closest_merkle_value( + &self, + _key: &[u8], + ) -> Result< + Option as sp_core::Hasher>::Out>>, + Self::Error, + > { + 
unimplemented!("closest_merkle_value: unsupported feature for lazy loading") + } + + fn child_closest_merkle_value( + &self, + _child_info: &ChildInfo, + _key: &[u8], + ) -> Result< + Option as sp_core::Hasher>::Out>>, + Self::Error, + > { + unimplemented!("child_closest_merkle_value: unsupported feature for lazy loading") + } + + fn child_storage( + &self, + child_info: &ChildInfo, + key: &[u8], + ) -> Result, Self::Error> { + let remote_fetch = |block: Option| -> Option> { + self.rpc() + .and_then(|rpc| rpc.child_storage(child_info, StorageKey(key.to_vec()), block).ok()) + .flatten() + .map(|v| v.0) + }; + + // When before_fork, try RPC first, then fall back to local DB + if self.before_fork { + if self.rpc().is_some() { + return Ok(remote_fetch(self.block_hash)); + } else { + // No RPC client, try to read from local DB + let readable_db = self.db.read(); + return Ok(readable_db.child_storage(child_info, key).ok().flatten()); + } + } + + let readable_db = self.db.read(); + let maybe_storage = readable_db.child_storage(child_info, key); + + match maybe_storage { + Ok(Some(value)) => Ok(Some(value)), + Ok(None) => { + let composite_key = make_composite_child_key(child_info.storage_key(), key); + if self.removed_keys.read().contains(&composite_key) { + return Ok(None); + } + + if let Some(remote_value) = remote_fetch(self.fork_block) { + drop(readable_db); + self.update_child_storage(child_info, key, &Some(remote_value.clone())); + Ok(Some(remote_value)) + } else { + Ok(None) + } + } + Err(e) => Err(e), + } + } + + fn child_storage_hash( + &self, + child_info: &ChildInfo, + key: &[u8], + ) -> Result as sp_core::Hasher>::Out>, Self::Error> { + let remote_fetch = |block: Option| -> Option { + self.rpc() + .and_then(|rpc| { + rpc.child_storage_hash(child_info, StorageKey(key.to_vec()), block).ok() + }) + .flatten() + }; + + // When before_fork, try RPC first, then fall back to local DB + if self.before_fork { + if self.rpc().is_some() { + return 
Ok(remote_fetch(self.block_hash)); + } else { + let readable_db = self.db.read(); + return Ok(readable_db.child_storage_hash(child_info, key).ok().flatten()); + } + } + + let readable_db = self.db.read(); + let maybe_hash = readable_db.child_storage_hash(child_info, key); + + match maybe_hash { + Ok(Some(hash)) => Ok(Some(hash)), + Ok(None) => { + let composite_key = make_composite_child_key(child_info.storage_key(), key); + if self.removed_keys.read().contains(&composite_key) { + return Ok(None); + } + + Ok(remote_fetch(self.fork_block)) + } + Err(e) => Err(e), + } + } + + fn next_storage_key( + &self, + key: &[u8], + ) -> Result, Self::Error> { + let remote_fetch = |block: Option| { + let start_key = Some(StorageKey(key.to_vec())); + self.rpc() + .and_then(|rpc| rpc.storage_keys_paged(start_key.clone(), 2, None, block).ok()) + .and_then(|keys| keys.last().cloned()) + }; + + // When before_fork, try RPC first, then fall back to local DB + let maybe_next_key = if self.before_fork { + if self.rpc().is_some() { + remote_fetch(self.block_hash) + } else { + self.db.read().next_storage_key(key).ok().flatten() + } + } else { + let next_storage_key = self.db.read().next_storage_key(key); + match next_storage_key { + Ok(Some(next_key)) => Some(next_key), + _ if !self.removed_keys.read().contains(key) => { + if self.rpc().is_some() { + remote_fetch(self.fork_block) + } else { + None + } + } + // Otherwise, there's no next key + _ => None, + } + } + .filter(|next_key| next_key != key); + + tracing::trace!( + target: LAZY_LOADING_LOG_TARGET, + "next_storage_key: (key: {:?}, next_key: {:?})", + hex::encode(key), + maybe_next_key.clone().map(hex::encode) + ); + + Ok(maybe_next_key) + } + + fn next_child_storage_key( + &self, + child_info: &ChildInfo, + key: &[u8], + ) -> Result, Self::Error> { + let remote_fetch = |block: Option| { + let start_key = Some(StorageKey(key.to_vec())); + self.rpc() + .and_then(|rpc| { + rpc.child_storage_keys_paged(child_info, None, 2, 
start_key.clone(), block).ok() + }) + .and_then(|keys| keys.last().cloned()) + }; + + // When before_fork, try RPC first, then fall back to local DB + let maybe_next_key = if self.before_fork { + if self.rpc().is_some() { + remote_fetch(self.block_hash) + } else { + self.db.read().next_child_storage_key(child_info, key).ok().flatten() + } + } else { + let next_child_key = self.db.read().next_child_storage_key(child_info, key); + match next_child_key { + Ok(Some(next_key)) => Some(next_key), + // Otherwise, check removed_keys and try remote fetch if not removed + _ => { + let composite_key = make_composite_child_key(child_info.storage_key(), key); + if !self.removed_keys.read().contains(&composite_key) { + if self.rpc().is_some() { remote_fetch(self.fork_block) } else { None } + } else { + None + } + } + } + } + .filter(|next_key| next_key != key); + + tracing::trace!( + target: LAZY_LOADING_LOG_TARGET, + "next_child_storage_key: (child_info: {:?}, key: {:?}, next_key: {:?})", + child_info, + hex::encode(key), + maybe_next_key.clone().map(hex::encode) + ); + + Ok(maybe_next_key) + } + + fn storage_root<'a>( + &self, + delta: impl Iterator)>, + state_version: StateVersion, + ) -> ( as sp_core::Hasher>::Out, BackendTransaction>) + where + as sp_core::Hasher>::Out: Ord, + { + self.db.read().storage_root(delta, state_version) + } + + fn child_storage_root<'a>( + &self, + child_info: &ChildInfo, + delta: impl Iterator)>, + state_version: StateVersion, + ) -> ( as sp_core::Hasher>::Out, bool, BackendTransaction>) + where + as sp_core::Hasher>::Out: Ord, + { + self.db.read().child_storage_root(child_info, delta, state_version) + } + + fn raw_iter(&self, args: IterArgs<'_>) -> Result { + let inner = self.db.read().raw_iter(args)?; + Ok(RawIter { inner }) + } + + fn register_overlay_stats(&self, stats: &sp_state_machine::StateMachineStats) { + self.db.read().register_overlay_stats(stats) + } + + fn usage_info(&self) -> sp_state_machine::UsageInfo { + 
self.db.read().usage_info() + } +} + +impl AsTrieBackend> for ForkedLazyBackend { + type TrieBackendStorage = PrefixedMemoryDB>; + + fn as_trie_backend( + &self, + ) -> &sp_state_machine::TrieBackend> { + unimplemented!("`as_trie_backend` is not supported in lazy loading mode.") + } +} diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs new file mode 100644 index 0000000000000..9df04cae38788 --- /dev/null +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs @@ -0,0 +1,469 @@ +mod block_import_operation; +mod blockchain; +mod forked_lazy_backend; + +pub use block_import_operation::BlockImportOperation; +pub use blockchain::Blockchain; +pub use forked_lazy_backend::ForkedLazyBackend; + +#[cfg(test)] +mod tests; + +use parking_lot::RwLock; +use polkadot_sdk::{ + sc_client_api::{ + TrieCacheContext, UsageInfo, + backend::{self, AuxStore}, + }, + sp_blockchain, + sp_core::{H256, offchain::storage::InMemOffchainStorage}, + sp_runtime::{ + Justification, StateVersion, + traits::{Block as BlockT, Header as HeaderT, NumberFor, One, Saturating, Zero}, + }, +}; +use serde::de::DeserializeOwned; +use std::{ + collections::{HashMap, HashSet}, + sync::Arc, +}; + +use crate::substrate_node::lazy_loading::rpc_client::RPCClient; + +pub struct Backend { + pub(crate) rpc_client: Option>>, + pub(crate) fork_checkpoint: Option, + states: RwLock>>, + pub(crate) blockchain: Blockchain, + import_lock: RwLock<()>, + pinned_blocks: RwLock>, +} + +impl Backend { + fn new( + rpc_client: Option>>, + fork_checkpoint: Option, + ) -> Self { + Self { + rpc_client: rpc_client.clone(), + states: Default::default(), + blockchain: Blockchain::new(rpc_client), + import_lock: Default::default(), + pinned_blocks: Default::default(), + fork_checkpoint, + } + } + + #[inline] + pub fn rpc(&self) -> Option<&dyn RPCClient> { + self.rpc_client.as_deref() + } +} + +impl AuxStore for Backend 
{ + fn insert_aux< + 'a, + 'b: 'a, + 'c: 'a, + I: IntoIterator, + D: IntoIterator, + >( + &self, + _insert: I, + _delete: D, + ) -> sp_blockchain::Result<()> { + unimplemented!("`insert_aux` is not supported in lazy loading mode.") + } + + fn get_aux(&self, _key: &[u8]) -> sp_blockchain::Result>> { + unimplemented!("`get_aux` is not supported in lazy loading mode.") + } +} + +impl backend::Backend for Backend { + type BlockImportOperation = BlockImportOperation; + type Blockchain = Blockchain; + type State = ForkedLazyBackend; + type OffchainStorage = InMemOffchainStorage; + + fn begin_operation(&self) -> sp_blockchain::Result { + let old_state = self.state_at(Default::default(), TrieCacheContext::Trusted)?; + Ok(BlockImportOperation { + pending_block: None, + old_state, + new_state: None, + aux: Default::default(), + storage_updates: Default::default(), + child_storage_updates: Default::default(), + finalized_blocks: Default::default(), + set_head: None, + }) + } + + fn begin_state_operation( + &self, + operation: &mut Self::BlockImportOperation, + block: Block::Hash, + ) -> sp_blockchain::Result<()> { + operation.old_state = self.state_at(block, TrieCacheContext::Trusted)?; + Ok(()) + } + + fn commit_operation(&self, operation: Self::BlockImportOperation) -> sp_blockchain::Result<()> { + for (block, justification) in operation.finalized_blocks { + self.blockchain.finalize_header(block, justification)?; + } + + if let Some(pending_block) = operation.pending_block { + let old_state = &operation.old_state; + let (header, body, justification) = pending_block.block.into_inner(); + let hash = header.hash(); + + let storage_updates = operation.storage_updates.clone(); + let child_storage_updates = operation.child_storage_updates.clone(); + + let mut removed_keys = old_state.removed_keys.read().clone(); + for (key, value) in &storage_updates { + if value.is_some() { + removed_keys.remove(key); + } else { + removed_keys.insert(key.clone()); + } + } + + for (child_key, 
child_data) in &child_storage_updates { + for (key, value) in child_data { + let composite_key = make_composite_child_key(child_key, key); + if value.is_some() { + removed_keys.remove(&composite_key); + } else { + removed_keys.insert(composite_key); + } + } + } + + let new_removed_keys = Arc::new(RwLock::new(removed_keys)); + + let mut db_clone = old_state.db.read().clone(); + { + let mut entries = vec![(None, storage_updates.clone())]; + if !child_storage_updates.is_empty() { + entries.extend(child_storage_updates.iter().map(|(key, data)| { + (Some(polkadot_sdk::sp_storage::ChildInfo::new_default(key)), data.clone()) + })); + } + db_clone.insert(entries, StateVersion::V1); + } + let new_db = Arc::new(RwLock::new(db_clone)); + let fork_block = self.fork_checkpoint.as_ref().map(|checkpoint| checkpoint.hash()); + let new_state = ForkedLazyBackend { + rpc_client: self.rpc_client.clone(), + block_hash: Some(hash), + fork_block, + db: new_db, + removed_keys: new_removed_keys, + before_fork: false, + }; + self.states.write().insert(hash, new_state); + + self.blockchain.insert(hash, header, justification, body, pending_block.state)?; + } + + if !operation.aux.is_empty() { + self.blockchain.write_aux(operation.aux); + } + + if let Some(set_head) = operation.set_head { + self.blockchain.set_head(set_head)?; + } + + Ok(()) + } + + fn finalize_block( + &self, + hash: Block::Hash, + justification: Option, + ) -> sp_blockchain::Result<()> { + self.blockchain.finalize_header(hash, justification) + } + + fn append_justification( + &self, + hash: Block::Hash, + justification: Justification, + ) -> sp_blockchain::Result<()> { + self.blockchain.append_justification(hash, justification) + } + + fn blockchain(&self) -> &Self::Blockchain { + &self.blockchain + } + + fn usage_info(&self) -> Option { + None + } + + fn offchain_storage(&self) -> Option { + None + } + + fn state_at( + &self, + hash: Block::Hash, + _trie_cache_context: TrieCacheContext, + ) -> sp_blockchain::Result { + if 
hash == Default::default() { + let (fork_block, before_fork) = match &self.fork_checkpoint { + Some(checkpoint) => (Some(checkpoint.hash()), true), + None => (None, false), + }; + + return Ok(ForkedLazyBackend:: { + rpc_client: self.rpc_client.clone(), + block_hash: Some(hash), + fork_block, + db: Default::default(), + removed_keys: Default::default(), + before_fork, + }); + } + + let (backend, should_write) = self + .states + .read() + .get(&hash) + .cloned() + .map(|state| Ok((state, false))) + .unwrap_or_else(|| { + self.rpc() + .and_then(|rpc| rpc.header(Some(hash)).ok()) + .flatten() + .ok_or(sp_blockchain::Error::UnknownBlock(format!( + "Failed to fetch block header: {hash:?}" + ))) + .map(|header| { + let state = match &self.fork_checkpoint { + Some(checkpoint) => { + if header.number().gt(checkpoint.number()) { + let parent = self + .state_at(*header.parent_hash(), TrieCacheContext::Trusted) + .ok(); + + ForkedLazyBackend:: { + rpc_client: self.rpc_client.clone(), + block_hash: Some(hash), + fork_block: Some(checkpoint.hash()), + db: parent.clone().map_or(Default::default(), |p| p.db), + removed_keys: parent + .map_or(Default::default(), |p| p.removed_keys), + before_fork: false, + } + } else { + ForkedLazyBackend:: { + rpc_client: self.rpc_client.clone(), + block_hash: Some(hash), + fork_block: Some(checkpoint.hash()), + db: Default::default(), + removed_keys: Default::default(), + before_fork: true, + } + } + } + None => { + let parent = self + .state_at(*header.parent_hash(), TrieCacheContext::Trusted) + .ok(); + + ForkedLazyBackend:: { + rpc_client: self.rpc_client.clone(), + block_hash: Some(hash), + fork_block: None, + db: parent.clone().map_or(Default::default(), |p| p.db), + removed_keys: parent + .map_or(Default::default(), |p| p.removed_keys), + before_fork: false, + } + } + }; + + (state, true) + }) + })?; + + if should_write { + self.states.write().insert(hash, backend.clone()); + } + + Ok(backend) + } + + fn revert( + &self, + n: NumberFor, + 
revert_finalized: bool, + ) -> sp_blockchain::Result<(NumberFor, HashSet)> { + let mut storage = self.blockchain.storage.write(); + + if storage.blocks.is_empty() || n.is_zero() { + return Ok((Zero::zero(), HashSet::new())); + } + + let mut states = self.states.write(); + let pinned = self.pinned_blocks.read(); + + let mut target = n; + let original_finalized_number = storage.finalized_number; + + if !revert_finalized { + let revertible = storage.best_number.saturating_sub(storage.finalized_number); + if target > revertible { + target = revertible; + } + } + + let mut reverted = NumberFor::::zero(); + let mut reverted_up_to_finalized = HashSet::new(); + + let mut current_hash = storage.best_hash; + + while reverted < target { + // Stop if we've reached genesis or if there are no blocks in storage + if storage.blocks.is_empty() || current_hash == storage.genesis_hash { + break; + } + + if let Some(count) = pinned.get(¤t_hash) { + if *count > 0 { + break; + } + } + + let Some(block) = storage.blocks.get(¤t_hash) else { + break; + }; + + let header = block.header().clone(); + let number = *header.number(); + let parent_hash = header.parent_hash(); + + // If this is the genesis block, parent doesn't become a leaf + // Otherwise, check if any other block has the same parent + let parent_becomes_leaf = if current_hash == storage.genesis_hash { + false + } else { + !storage.blocks.iter().any(|(other_hash, stored)| { + *other_hash != current_hash && stored.header().parent_hash() == parent_hash + }) + }; + + let hash_to_remove = current_hash; + + storage.blocks.remove(&hash_to_remove); + if let Some(entry) = storage.hashes.get(&number) { + if *entry == hash_to_remove { + storage.hashes.remove(&number); + } + } + states.remove(&hash_to_remove); + + storage.leaves.remove( + hash_to_remove, + number, + parent_becomes_leaf.then_some(*parent_hash), + ); + + if number <= original_finalized_number { + reverted_up_to_finalized.insert(hash_to_remove); + } + + reverted = 
reverted.saturating_add(One::one()); + + current_hash = *parent_hash; + + storage.best_hash = current_hash; + storage.best_number = number.saturating_sub(One::one()); + } + + let best_hash_after = storage.best_hash; + let best_number_after = storage.best_number; + + let _ = storage.leaves.revert(best_hash_after, best_number_after); + + storage.hashes.insert(best_number_after, best_hash_after); + + if storage.finalized_number > best_number_after { + storage.finalized_number = best_number_after; + } + + // Get the genesis block number to use as lower bound + let genesis_number = storage + .blocks + .get(&storage.genesis_hash) + .map(|block| *block.header().number()) + .unwrap_or(Zero::zero()); + + // Decrement finalized_number until we find a valid block, but don't go below genesis + while storage.finalized_number > genesis_number + && !storage.hashes.contains_key(&storage.finalized_number) + { + storage.finalized_number = storage.finalized_number.saturating_sub(One::one()); + } + + if let Some(hash) = storage.hashes.get(&storage.finalized_number).copied() { + storage.finalized_hash = hash; + } else { + storage.finalized_hash = storage.genesis_hash; + } + + Ok((reverted, reverted_up_to_finalized)) + } + + fn remove_leaf_block(&self, _hash: Block::Hash) -> sp_blockchain::Result<()> { + // Not used + Ok(()) + } + + fn get_import_lock(&self) -> &RwLock<()> { + &self.import_lock + } + + fn requires_full_sync(&self) -> bool { + false + } + + fn pin_block(&self, hash: ::Hash) -> sp_blockchain::Result<()> { + let mut blocks = self.pinned_blocks.write(); + *blocks.entry(hash).or_default() += 1; + Ok(()) + } + + fn unpin_block(&self, hash: ::Hash) { + let mut blocks = self.pinned_blocks.write(); + blocks.entry(hash).and_modify(|counter| *counter -= 1).or_insert(-1); + } +} + +impl backend::LocalBackend for Backend {} + +pub fn new_backend( + rpc_client: Option>>, + checkpoint: Option, +) -> Result>, polkadot_sdk::sc_service::Error> +where + Block: BlockT + DeserializeOwned, + 
Block::Hash: From, +{ + let backend = Arc::new(Backend::new(rpc_client, checkpoint)); + Ok(backend) +} + +/// Creates a composite key for child storage by combining child_storage_key + key. +/// This ensures keys from different child storages don't collide. +#[inline] +pub fn make_composite_child_key(child_storage_key: &[u8], key: &[u8]) -> Vec { + let mut composite = Vec::with_capacity(child_storage_key.len() + key.len()); + composite.extend_from_slice(child_storage_key); + composite.extend_from_slice(key); + composite +} diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/tests.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/tests.rs new file mode 100644 index 0000000000000..92597a6dfafb3 --- /dev/null +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/tests.rs @@ -0,0 +1,725 @@ +use super::*; +use mock_rpc::{Rpc, TestBlock, TestHeader}; +use parking_lot::RwLock; +use polkadot_sdk::{ + sc_client_api::{Backend as BackendT, HeaderBackend, StateBackend}, + sp_runtime::{ + OpaqueExtrinsic, + traits::{BlakeTwo256, Header as HeaderT}, + }, + sp_state_machine, + sp_storage::{StorageData, StorageKey}, +}; +use std::{ + collections::BTreeMap, + sync::atomic::{AtomicUsize, Ordering}, +}; + +#[cfg(test)] +mod mock_rpc { + use super::*; + use polkadot_sdk::sp_runtime::{ + Justifications, + generic::{Block as GenericBlock, Header, SignedBlock}, + traits::Header as HeaderT, + }; + + pub type TestHashing = BlakeTwo256; + pub type TestHeader = Header; + pub type TestExtrinsic = OpaqueExtrinsic; + pub type TestBlock = GenericBlock, TestExtrinsic>; + + #[derive(Default, Debug)] + pub struct Counters { + pub storage_calls: AtomicUsize, + pub storage_hash_calls: AtomicUsize, + pub storage_keys_paged_calls: AtomicUsize, + pub header_calls: AtomicUsize, + pub block_calls: AtomicUsize, + } + + /// Mockable RPC with interior mutability. 
+ #[allow(clippy::type_complexity)] + #[derive(Clone, Default, Debug)] + pub struct Rpc { + pub counters: std::sync::Arc, + /// storage[(block_hash, key)] = value + pub storage: Arc>>, + /// storage_hash[(block_hash, key)] = hash + pub storage_hashes: Arc>>, + /// storage_keys_paged[(block_hash, (prefix,start))] = Vec + pub storage_keys_pages: Arc), Vec>>>, + /// child_storage[(block_hash, child_storage_key, key)] = value + pub child_storage: Arc, StorageKey), StorageData>>>, + /// child_storage_hashes[(block_hash, child_storage_key, key)] = hash + pub child_storage_hashes: + Arc, StorageKey), Block::Hash>>>, + /// child_storage_keys_pages[(block_hash, child_storage_key, prefix)] = Vec + pub child_storage_keys_pages: + Arc, Vec), Vec>>>, + /// headers[hash] = header + pub headers: Arc>>, + /// blocks[hash] = SignedBlock + pub blocks: Arc>>>, + } + + impl Rpc { + pub fn new() -> Self { + Self { + counters: std::sync::Arc::new(Counters::default()), + storage: std::sync::Arc::new(RwLock::new(BTreeMap::new())), + storage_hashes: std::sync::Arc::new(RwLock::new(BTreeMap::new())), + storage_keys_pages: std::sync::Arc::new(RwLock::new(BTreeMap::new())), + child_storage: std::sync::Arc::new(RwLock::new(BTreeMap::new())), + child_storage_hashes: std::sync::Arc::new(RwLock::new(BTreeMap::new())), + child_storage_keys_pages: std::sync::Arc::new(RwLock::new(BTreeMap::new())), + headers: std::sync::Arc::new(RwLock::new(BTreeMap::new())), + blocks: std::sync::Arc::new(RwLock::new(BTreeMap::new())), + } + } + + pub fn put_storage(&self, at: Block::Hash, key: StorageKey, val: StorageData) { + self.storage.write().insert((at, key), val); + } + pub fn put_header(&self, h: Block::Header) { + self.headers.write().insert(h.hash(), h); + } + pub fn put_block(&self, block: Block, just: Option) { + let full = SignedBlock { block, justifications: just }; + self.blocks.write().insert(full.block.header().hash(), full); + } + + pub fn put_child_storage( + &self, + at: Block::Hash, + 
child_storage_key: Vec, + key: StorageKey, + val: StorageData, + ) { + self.child_storage.write().insert((at, child_storage_key, key), val); + } + + pub fn put_child_storage_hash( + &self, + at: Block::Hash, + child_storage_key: Vec, + key: StorageKey, + hash: Block::Hash, + ) { + self.child_storage_hashes.write().insert((at, child_storage_key, key), hash); + } + + pub fn put_child_storage_keys_page( + &self, + at: Block::Hash, + child_storage_key: Vec, + prefix: Vec, + keys: Vec, + ) { + self.child_storage_keys_pages.write().insert((at, child_storage_key, prefix), keys); + } + } + + impl RPCClient for Rpc { + fn storage( + &self, + key: StorageKey, + at: Option, + ) -> Result, jsonrpsee::core::ClientError> { + self.counters.storage_calls.fetch_add(1, Ordering::Relaxed); + let map = self.storage.read(); + Ok(map.get(&(at.unwrap_or_default(), key)).cloned()) + } + + fn storage_hash( + &self, + key: StorageKey, + at: Option, + ) -> Result, jsonrpsee::core::ClientError> { + self.counters.storage_hash_calls.fetch_add(1, Ordering::Relaxed); + let bh = at.unwrap_or_default(); + let map = self.storage_hashes.read(); + Ok(map.get(&(bh, key)).copied()) + } + + fn storage_keys_paged( + &self, + key: Option, + count: u32, + start_key: Option, + at: Option, + ) -> Result, jsonrpsee::core::ClientError> { + self.counters.storage_keys_paged_calls.fetch_add(1, Ordering::Relaxed); + + use std::cmp::min; + + let bh = at.unwrap_or_default(); + let prefix = key.map(|k| k.0).unwrap_or_default(); + let start = start_key.map(|k| k.0); + + let map = self.storage_keys_pages.read(); + let mut all = map.get(&(bh, prefix.clone())).cloned().unwrap_or_default(); + + all.sort_by(|a, b| a.0.cmp(&b.0)); + + let mut filtered: Vec = + all.into_iter().filter(|k| k.0.starts_with(&prefix)).collect(); + + if let Some(s) = start { + if let Some(pos) = filtered.iter().position(|k| k.0 == s) { + filtered = filtered.into_iter().skip(pos + 1).collect(); + } else { + filtered.retain(|k| k.0 > s); + } + } + + 
let take = min(filtered.len(), count as usize); + Ok(filtered.into_iter().take(take).map(|k| k.0).collect()) + } + + fn header( + &self, + at: Option, + ) -> Result, jsonrpsee::core::ClientError> { + self.counters.header_calls.fetch_add(1, Ordering::Relaxed); + let key = at.unwrap_or_default(); + let raw = self.headers.read().get(&key).cloned(); + Ok(raw) + } + + fn block( + &self, + hash: Option, + ) -> Result< + Option>, + jsonrpsee::core::ClientError, + > { + self.counters.block_calls.fetch_add(1, Ordering::Relaxed); + let key = hash.unwrap_or_default(); + let raw = self.blocks.read().get(&key).cloned(); + Ok(raw) + } + + fn block_hash( + &self, + _num: Option>, + ) -> Result, jsonrpsee::core::ClientError> { + todo!() + } + + fn system_chain(&self) -> Result { + todo!() + } + + fn system_properties( + &self, + ) -> Result { + todo!() + } + + fn child_storage( + &self, + child_info: &polkadot_sdk::sp_storage::ChildInfo, + key: StorageKey, + at: Option, + ) -> Result, jsonrpsee::core::ClientError> { + let bh = at.unwrap_or_default(); + let child_storage_key = child_info.storage_key().to_vec(); + let map = self.child_storage.read(); + Ok(map.get(&(bh, child_storage_key, key)).cloned()) + } + + fn child_storage_hash( + &self, + child_info: &polkadot_sdk::sp_storage::ChildInfo, + key: StorageKey, + at: Option, + ) -> Result, jsonrpsee::core::ClientError> { + let bh = at.unwrap_or_default(); + let child_storage_key = child_info.storage_key().to_vec(); + let map = self.child_storage_hashes.read(); + Ok(map.get(&(bh, child_storage_key, key)).copied()) + } + + fn child_storage_keys_paged( + &self, + child_info: &polkadot_sdk::sp_storage::ChildInfo, + key: Option, + count: u32, + start_key: Option, + at: Option, + ) -> Result, jsonrpsee::core::ClientError> { + use std::cmp::min; + + let bh = at.unwrap_or_default(); + let child_storage_key = child_info.storage_key().to_vec(); + let prefix = key.map(|k| k.0).unwrap_or_default(); + let start = start_key.map(|k| k.0); + + let 
map = self.child_storage_keys_pages.read(); + let mut all = + map.get(&(bh, child_storage_key, prefix.clone())).cloned().unwrap_or_default(); + + all.sort_by(|a, b| a.0.cmp(&b.0)); + + let mut filtered: Vec = + all.into_iter().filter(|k| k.0.starts_with(&prefix)).collect(); + + if let Some(s) = start { + if let Some(pos) = filtered.iter().position(|k| k.0 == s) { + filtered = filtered.into_iter().skip(pos + 1).collect(); + } else { + filtered.retain(|k| k.0 > s); + } + } + + let take = min(filtered.len(), count as usize); + Ok(filtered.into_iter().take(take).map(|k| k.0).collect()) + } + } +} + +type N = u32; +type TestBlockT = TestBlock; + +fn make_header(number: N, parent: ::Hash) -> TestHeader { + TestHeader::new(number, Default::default(), Default::default(), parent, Default::default()) +} + +fn make_block( + number: N, + parent: ::Hash, + xts: Vec, +) -> TestBlock { + let header = make_header(number, parent); + TestBlock::new(header, xts) +} + +fn checkpoint(n: N) -> TestHeader { + make_header(n, Default::default()) +} + +#[test] +fn before_fork_reads_remote_only() { + let rpc = std::sync::Arc::new(Rpc::new()); + // fork checkpoint at #100 + let cp = checkpoint(100); + let backend = Backend::::new(Some(rpc.clone()), Some(cp)); + + // state_at(Default::default()) => before_fork=true + let state = backend.state_at(Default::default(), TrieCacheContext::Trusted).unwrap(); + + let key = b":foo".to_vec(); + // prepare remote value at "block_hash = Default::default()" + let at = Default::default(); + rpc.put_storage(at, StorageKey(key.clone()), StorageData(b"bar".to_vec())); + + // read storage + let v1 = state.storage(&key).unwrap(); + assert_eq!(v1, Some(b"bar".to_vec())); + + // not cached in DB: second read still goes to RPC + let v2 = state.storage(&key).unwrap(); + assert_eq!(v2, Some(b"bar".to_vec())); + assert!(rpc.counters.storage_calls.load(Ordering::Relaxed) >= 2); +} + +#[test] +fn after_fork_first_fetch_caches_subsequent_hits_local() { + let rpc = 
std::sync::Arc::new(Rpc::new()); + let cp = checkpoint(10); + let backend = Backend::::new(Some(rpc.clone()), Some(cp.clone())); + + // Build a block #11 > checkpoint (#10), with parent #10 + let parent = cp.hash(); + let b11 = make_block(11, parent, vec![]); + let h11 = b11.header.hash(); + + rpc.put_header(b11.header.clone()); + rpc.put_block(b11, None); + + // remote storage at fork block (checkpoint hash) + let fork_hash = cp.hash(); + let key = b":k".to_vec(); + rpc.put_storage(fork_hash, StorageKey(key.clone()), StorageData(b"v".to_vec())); + + // Grab state_at(#11): after_fork=false; local DB empty + let state = backend.state_at(h11, TrieCacheContext::Trusted).unwrap(); + + // First read fetches remote and caches + let v1 = state.storage(&key).unwrap(); + assert_eq!(v1, Some(b"v".to_vec())); + + // Mutate RPC to detect second call (remove remote value) + // If second read still tries RPC, it would return None; but it should come from cache. + // So we do not change the mock; instead, assert RPC call count increases only once. 
+ let calls_before = rpc.counters.storage_calls.load(Ordering::Relaxed); + let _ = state.storage(&key).unwrap(); + let calls_after = rpc.counters.storage_calls.load(Ordering::Relaxed); + assert_eq!(calls_before, calls_after, "second hit should be served from cache"); +} + +#[test] +fn removed_keys_prevents_remote_fetch() { + let rpc = std::sync::Arc::new(Rpc::new()); + let cp = checkpoint(5); + let backend = Backend::::new(Some(rpc.clone()), Some(cp.clone())); + + // make block #6 + let b6 = make_block(6, cp.hash(), vec![]); + rpc.put_header(b6.header.clone()); + rpc.put_block(b6.clone(), None); + let state = backend.state_at(b6.header.hash(), TrieCacheContext::Trusted).unwrap(); + + // mark key as removed + let key = b":dead".to_vec(); + state.removed_keys.write().insert(key.clone()); + + // Even if remote has a value, backend must not fetch it + rpc.put_storage(cp.hash(), StorageKey(key.clone()), StorageData(b"ghost".to_vec())); + let calls_before = rpc.counters.storage_calls.load(Ordering::Relaxed); + let v = state.storage(&key).unwrap(); + let calls_after = rpc.counters.storage_calls.load(Ordering::Relaxed); + + assert!(v.is_none()); + assert_eq!(calls_before, calls_after, "should not call RPC for removed keys"); +} + +#[test] +fn blockchain_header_and_number_are_cached() { + let rpc = std::sync::Arc::new(Rpc::new()); + let cp = checkpoint(3); + let backend = Backend::::new(Some(rpc.clone()), Some(cp.clone())); + let chain = backend.blockchain(); + + // prepare one block w/ extrinsics + let xts: Vec = vec![]; + let b4 = make_block(4, cp.hash(), xts); + let h4 = b4.header().hash(); + rpc.put_block(b4, None); + + // first header() fetches RPC and caches as Full + let h = chain.header(h4).unwrap().unwrap(); + assert_eq!(h.hash(), h4); + + // number() should now return from cache (no extra RPC needed) + let calls_before = rpc.counters.block_calls.load(Ordering::Relaxed); + let number = chain.number(h4).unwrap().unwrap(); + let calls_after = 
rpc.counters.block_calls.load(Ordering::Relaxed); + + assert_eq!(number, 4); + assert_eq!(calls_before, calls_after, "number() should be served from cache after header()"); +} + +#[test] +fn no_fork_mode_uses_local_db_only() { + let backend = Backend::::new(None, None); + let state = backend.state_at(Default::default(), TrieCacheContext::Trusted).unwrap(); + + assert!(!state.before_fork); + + let key = b":test_key".to_vec(); + let v1 = state.storage(&key).unwrap(); + assert_eq!(v1, None); + + state.update_storage(&key, &Some(b"local_value".to_vec())); + + let v2 = state.storage(&key).unwrap(); + assert_eq!(v2, Some(b"local_value".to_vec())); +} + +#[test] +fn no_fork_mode_state_at_default() { + let backend = Backend::::new(None, None); + let state = backend.state_at(Default::default(), TrieCacheContext::Trusted).unwrap(); + + assert!(!state.before_fork); + assert_eq!(state.fork_block, None); + assert!(state.rpc_client.is_none()); +} + +#[test] +fn no_fork_mode_storage_operations() { + let backend = Backend::::new(None, None); + let state = backend.state_at(Default::default(), TrieCacheContext::Trusted).unwrap(); + + let key1 = b":key1".to_vec(); + let key2 = b":key2".to_vec(); + let key3 = b":key3".to_vec(); + + state.update_storage(&key1, &Some(b"value1".to_vec())); + state.update_storage(&key2, &Some(b"value2".to_vec())); + + assert_eq!(state.storage(&key1).unwrap(), Some(b"value1".to_vec())); + assert_eq!(state.storage(&key2).unwrap(), Some(b"value2".to_vec())); + assert_eq!(state.storage(&key3).unwrap(), None); +} + +#[test] +fn child_storage_before_fork_reads_remote() { + use polkadot_sdk::sp_storage::ChildInfo; + + let rpc = std::sync::Arc::new(Rpc::new()); + let cp = checkpoint(100); + let backend = super::Backend::::new(Some(rpc.clone()), Some(cp)); + + let state = backend.state_at(Default::default(), TrieCacheContext::Trusted).unwrap(); + + let child_info = ChildInfo::new_default(b"child1"); + let key = b":child_key".to_vec(); + let at = 
Default::default(); + + // Put child storage in mock RPC + rpc.put_child_storage( + at, + child_info.storage_key().to_vec(), + StorageKey(key.clone()), + StorageData(b"child_value".to_vec()), + ); + + // Read child storage - should fetch from RPC + let v = state.child_storage(&child_info, &key).unwrap(); + assert_eq!(v, Some(b"child_value".to_vec())); +} + +#[test] +fn child_storage_after_fork_caches() { + use polkadot_sdk::sp_storage::ChildInfo; + + let rpc = std::sync::Arc::new(Rpc::new()); + let cp = checkpoint(10); + let backend = super::Backend::::new(Some(rpc.clone()), Some(cp.clone())); + + let parent = cp.hash(); + let b11 = make_block(11, parent, vec![]); + let h11 = b11.header.hash(); + + rpc.put_header(b11.header.clone()); + rpc.put_block(b11, None); + + let child_info = ChildInfo::new_default(b"child2"); + let key = b":child_key2".to_vec(); + let fork_hash = cp.hash(); + + rpc.put_child_storage( + fork_hash, + child_info.storage_key().to_vec(), + StorageKey(key.clone()), + StorageData(b"cached_value".to_vec()), + ); + + let state = backend.state_at(h11, TrieCacheContext::Trusted).unwrap(); + + // First read - should cache + let v1 = state.child_storage(&child_info, &key).unwrap(); + assert_eq!(v1, Some(b"cached_value".to_vec())); + + // Second read - should come from cache (we can verify by checking the value is still there) + let v2 = state.child_storage(&child_info, &key).unwrap(); + assert_eq!(v2, Some(b"cached_value".to_vec())); +} + +#[test] +fn child_storage_hash_reads_from_rpc() { + use polkadot_sdk::sp_storage::ChildInfo; + + let rpc = std::sync::Arc::new(Rpc::new()); + let cp = checkpoint(50); + let backend = super::Backend::::new(Some(rpc.clone()), Some(cp.clone())); + + let parent = cp.hash(); + let b51 = make_block(51, parent, vec![]); + let h51 = b51.header.hash(); + + rpc.put_header(b51.header.clone()); + rpc.put_block(b51, None); + + let child_info = ChildInfo::new_default(b"child3"); + let key = b":hash_key".to_vec(); + let fork_hash = 
cp.hash(); + let expected_hash = ::Hash::default(); + + rpc.put_child_storage_hash( + fork_hash, + child_info.storage_key().to_vec(), + StorageKey(key.clone()), + expected_hash, + ); + + let state = backend.state_at(h51, TrieCacheContext::Trusted).unwrap(); + + let hash = state.child_storage_hash(&child_info, &key).unwrap(); + assert_eq!(hash, Some(expected_hash)); +} + +#[test] +fn next_child_storage_key_uses_paged() { + use polkadot_sdk::sp_storage::ChildInfo; + + let rpc = std::sync::Arc::new(Rpc::new()); + let cp = checkpoint(20); + let backend = super::Backend::::new(Some(rpc.clone()), Some(cp.clone())); + + let parent = cp.hash(); + let b21 = make_block(21, parent, vec![]); + let h21 = b21.header.hash(); + + rpc.put_header(b21.header.clone()); + rpc.put_block(b21, None); + + let child_info = ChildInfo::new_default(b"child4"); + let fork_hash = cp.hash(); + + // Put a page of keys + rpc.put_child_storage_keys_page( + fork_hash, + child_info.storage_key().to_vec(), + vec![], + vec![StorageKey(b"key1".to_vec()), StorageKey(b"key2".to_vec())], + ); + + let state = backend.state_at(h21, TrieCacheContext::Trusted).unwrap(); + + // Get next key after "key1" should be "key2" + let next = state.next_child_storage_key(&child_info, b"key1").unwrap(); + assert_eq!(next, Some(b"key2".to_vec())); +} + +#[test] +fn blockchain_status_queries_rpc_when_not_local() { + use polkadot_sdk::sp_blockchain::{BlockStatus, HeaderBackend}; + + let rpc = std::sync::Arc::new(Rpc::new()); + let cp = checkpoint(20); + let backend = super::Backend::::new(Some(rpc.clone()), Some(cp.clone())); + + let parent = cp.hash(); + let b21 = make_block(21, parent, vec![]); + let h21 = b21.header.hash(); + + // Block is not in local storage yet, but exists in RPC + rpc.put_header(b21.header); + + // status() should query RPC and return InChain + let status = backend.blockchain().status(h21).unwrap(); + assert_eq!(status, BlockStatus::InChain); + + // Now query for a block that doesn't exist anywhere + 
let unknown_hash = make_block(999, parent, vec![]).header.hash(); + let status = backend.blockchain().status(unknown_hash).unwrap(); + assert_eq!(status, BlockStatus::Unknown); +} + +#[test] +fn blockchain_justifications_queries_rpc_when_not_local() { + use polkadot_sdk::sp_blockchain::Backend as BlockchainBackend; + + let rpc = std::sync::Arc::new(Rpc::new()); + let cp = checkpoint(20); + let backend = super::Backend::::new(Some(rpc.clone()), Some(cp.clone())); + + let parent = cp.hash(); + let b21 = make_block(21, parent, vec![]); + let h21 = b21.header.hash(); + + // Create justifications + let justifications = + polkadot_sdk::sp_runtime::Justifications::from((*b"TEST", vec![1, 2, 3, 4])); + + // Block is not in local storage yet, but exists in RPC with justifications + rpc.put_block(b21, Some(justifications.clone())); + + // justifications() should query RPC and return the justifications + let result = backend.blockchain().justifications(h21).unwrap(); + assert_eq!(result, Some(justifications)); + + // Now query for a block that doesn't exist anywhere + let unknown_hash = make_block(999, parent, vec![]).header.hash(); + let result = backend.blockchain().justifications(unknown_hash).unwrap(); + assert_eq!(result, None); +} + +#[test] +fn child_storage_removed_keys_uses_composite_key() { + use polkadot_sdk::sp_storage::ChildInfo; + + let rpc = std::sync::Arc::new(Rpc::new()); + let cp = checkpoint(5); + let backend = Backend::::new(Some(rpc.clone()), Some(cp.clone())); + + // make block #6 + let b6 = make_block(6, cp.hash(), vec![]); + rpc.put_header(b6.header.clone()); + rpc.put_block(b6.clone(), None); + let state = backend.state_at(b6.header.hash(), TrieCacheContext::Trusted).unwrap(); + + let child_info1 = ChildInfo::new_default(b"child1"); + let child_info2 = ChildInfo::new_default(b"child2"); + let key = b"same_key".to_vec(); + + // Put values in RPC for both child storages with the same key + rpc.put_child_storage( + cp.hash(), + 
child_info1.storage_key().to_vec(), + StorageKey(key.clone()), + StorageData(b"value1".to_vec()), + ); + rpc.put_child_storage( + cp.hash(), + child_info2.storage_key().to_vec(), + StorageKey(key.clone()), + StorageData(b"value2".to_vec()), + ); + + // Mark the key as removed only for child1 using composite key + let composite_key1 = super::make_composite_child_key(child_info1.storage_key(), &key); + state.removed_keys.write().insert(composite_key1); + + // child1 should return None (key is removed) + let v1 = state.child_storage(&child_info1, &key).unwrap(); + assert_eq!(v1, None, "child1 key should be removed"); + + // child2 should still fetch from RPC (different composite key) + let v2 = state.child_storage(&child_info2, &key).unwrap(); + assert_eq!(v2, Some(b"value2".to_vec()), "child2 key should still be accessible"); +} + +#[test] +fn child_storage_removed_keys_no_collision_with_main_storage() { + use polkadot_sdk::sp_storage::ChildInfo; + + let rpc = std::sync::Arc::new(Rpc::new()); + let cp = checkpoint(5); + let backend = Backend::::new(Some(rpc.clone()), Some(cp.clone())); + + // make block #6 + let b6 = make_block(6, cp.hash(), vec![]); + rpc.put_header(b6.header.clone()); + rpc.put_block(b6.clone(), None); + let state = backend.state_at(b6.header.hash(), TrieCacheContext::Trusted).unwrap(); + + let child_info = ChildInfo::new_default(b"child1"); + let key = b"test_key".to_vec(); + + // Put value in main storage + rpc.put_storage(cp.hash(), StorageKey(key.clone()), StorageData(b"main_value".to_vec())); + + // Put value in child storage with the same key + rpc.put_child_storage( + cp.hash(), + child_info.storage_key().to_vec(), + StorageKey(key.clone()), + StorageData(b"child_value".to_vec()), + ); + + // Mark the key as removed in main storage (just the raw key) + state.removed_keys.write().insert(key.clone()); + + // Main storage should return None + let v_main = state.storage(&key).unwrap(); + assert_eq!(v_main, None, "main storage key should be 
removed"); + + // Child storage should still work (uses composite key) + let v_child = state.child_storage(&child_info, &key).unwrap(); + assert_eq!(v_child, Some(b"child_value".to_vec()), "child storage key should not be affected"); +} diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/mod.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/mod.rs new file mode 100644 index 0000000000000..1a95184f949b0 --- /dev/null +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/mod.rs @@ -0,0 +1,4 @@ +pub mod backend; +pub mod rpc_client; + +pub const LAZY_LOADING_LOG_TARGET: &str = "lazy-loading"; diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/rpc_client.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/rpc_client.rs new file mode 100644 index 0000000000000..5dcaafc291652 --- /dev/null +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/rpc_client.rs @@ -0,0 +1,73 @@ +use jsonrpsee::core::ClientError; +use polkadot_sdk::{ + sc_chain_spec, + sp_api::__private::HeaderT, + sp_runtime::{generic::SignedBlock, traits::Block as BlockT}, + sp_state_machine, + sp_storage::{ChildInfo, StorageData, StorageKey}, +}; +use serde::de::DeserializeOwned; + +pub trait RPCClient: Send + Sync + std::fmt::Debug { + fn system_chain(&self) -> Result; + + fn system_properties(&self) -> Result; + + fn block( + &self, + hash: Option, + ) -> Result>, jsonrpsee::core::ClientError>; + + fn block_hash( + &self, + block_number: Option<::Number>, + ) -> Result, jsonrpsee::core::ClientError>; + + fn header( + &self, + hash: Option, + ) -> Result, jsonrpsee::core::ClientError>; + + fn storage( + &self, + key: StorageKey, + at: Option, + ) -> Result, jsonrpsee::core::ClientError>; + + fn storage_hash( + &self, + key: StorageKey, + at: Option, + ) -> Result, jsonrpsee::core::ClientError>; + + fn storage_keys_paged( + &self, + key: Option, + count: u32, + start_key: Option, + at: Option, + ) -> Result, ClientError>; + + fn child_storage( + 
&self, + child_info: &ChildInfo, + key: StorageKey, + at: Option, + ) -> Result, ClientError>; + + fn child_storage_hash( + &self, + child_info: &ChildInfo, + key: StorageKey, + at: Option, + ) -> Result, ClientError>; + + fn child_storage_keys_paged( + &self, + child_info: &ChildInfo, + key: Option, + count: u32, + start_key: Option, + at: Option, + ) -> Result, ClientError>; +} diff --git a/crates/anvil-polkadot/src/substrate_node/mining_engine.rs b/crates/anvil-polkadot/src/substrate_node/mining_engine.rs index 4b61c21d4e1a6..1070a1644a1fa 100644 --- a/crates/anvil-polkadot/src/substrate_node/mining_engine.rs +++ b/crates/anvil-polkadot/src/substrate_node/mining_engine.rs @@ -11,7 +11,7 @@ use parking_lot::RwLock; use polkadot_sdk::{ sc_consensus_manual_seal::{CreatedBlock, EngineCommand, Error as BlockProducingError}, sc_service::TransactionPool, - sp_core, + sp_core::{self, H256}, }; use std::{pin::Pin, sync::Arc}; use substrate_runtime::Hash; @@ -134,21 +134,22 @@ impl MiningEngine { /// * `interval` - Optional time to advance between blocks (in seconds) /// /// # Returns - /// * `Ok(())` - All blocks were mined successfully + /// * `Ok(H256)` - The hash of the last block mined successfully. /// * `Err(MiningError)` - Block production failed pub async fn mine( &self, num_blocks: Option, interval: Option, - ) -> Result<(), MiningError> { + ) -> Result { let blocks = num_blocks.unwrap_or(1); + let mut last_hash = H256::zero(); for _ in 0..blocks { if let Some(interval) = interval { self.time_manager.increase_time(interval.as_secs()); } - seal_now(&self.seal_command_sender).await?; + last_hash = seal_now(&self.seal_command_sender).await?.hash; } - Ok(()) + Ok(last_hash) } /// Ethereum-compatible block mining RPC method. @@ -161,10 +162,10 @@ impl MiningEngine { /// * `opts` - Optional mining parameters including timestamp and block count /// /// # Returns - /// * `Ok(())` - Success response + /// * `Ok(H256)` - The hash of the last block mined successfully. 
/// * `Err(MiningError)` - Mining operation failed - pub async fn evm_mine(&self, opts: Option) -> Result<(), MiningError> { - self.do_evm_mine(opts).await.map(|_| ()) + pub async fn evm_mine(&self, opts: Option) -> Result { + self.do_evm_mine(opts).await.map(|res| res.1) } /// Configure interval-based mining mode. @@ -311,8 +312,9 @@ impl MiningEngine { self.waker.wake(); } - pub async fn do_evm_mine(&self, opts: Option) -> Result { + pub async fn do_evm_mine(&self, opts: Option) -> Result<(u64, H256), MiningError> { let mut blocks_to_mine = 1u64; + let mut last_hash = H256::zero(); if let Some(opts) = opts { let timestamp = match opts { @@ -333,10 +335,10 @@ impl MiningEngine { } for _ in 0..blocks_to_mine { - seal_now(&self.seal_command_sender).await?; + last_hash = seal_now(&self.seal_command_sender).await?.hash; } - Ok(blocks_to_mine) + Ok((blocks_to_mine, last_hash)) } } diff --git a/crates/anvil-polkadot/src/substrate_node/mod.rs b/crates/anvil-polkadot/src/substrate_node/mod.rs index 28ae7c067acc7..a4df2de2b63a9 100644 --- a/crates/anvil-polkadot/src/substrate_node/mod.rs +++ b/crates/anvil-polkadot/src/substrate_node/mod.rs @@ -3,6 +3,7 @@ pub mod genesis; pub mod host; pub mod impersonation; pub mod in_mem_rpc; +mod lazy_loading; pub mod mining_engine; pub mod rpc; pub mod service; diff --git a/crates/anvil-polkadot/src/substrate_node/service/backend.rs b/crates/anvil-polkadot/src/substrate_node/service/backend.rs index 331bedf7b748c..c768ffb891dd3 100644 --- a/crates/anvil-polkadot/src/substrate_node/service/backend.rs +++ b/crates/anvil-polkadot/src/substrate_node/service/backend.rs @@ -1,6 +1,9 @@ -use crate::substrate_node::service::{ - Backend, - storage::{CodeInfo, ReviveAccountInfo, SystemAccountInfo, well_known_keys}, +use crate::substrate_node::{ + lazy_loading::backend::Blockchain, + service::{ + Backend, + storage::{CodeInfo, ReviveAccountInfo, SystemAccountInfo, well_known_keys}, + }, }; use alloy_primitives::{Address, Bytes}; use 
codec::{Decode, Encode}; @@ -9,10 +12,10 @@ use parking_lot::Mutex; use polkadot_sdk::{ parachains_common::{AccountId, Hash, opaque::Block}, sc_client_api::{Backend as BackendT, StateBackend, TrieCacheContext}, - sc_client_db::BlockchainDb, sp_blockchain, sp_core::{H160, H256}, sp_io::hashing::blake2_256, + sp_runtime::FixedU128, sp_state_machine::{StorageKey, StorageValue}, }; use std::{collections::HashMap, num::NonZeroUsize, sync::Arc}; @@ -30,6 +33,10 @@ pub enum BackendError { MissingAuraAuthorities, #[error("Could not find timestamp in the state")] MissingTimestamp, + #[error("Could not find the next fee multiplier in the state")] + MissingNextFeeMultiplier, + #[error("Could not find block number in the state")] + MissingBlockNumber, #[error("Unable to decode total issuance {0}")] DecodeTotalIssuance(codec::Error), #[error("Unable to decode chain id {0}")] @@ -44,8 +51,12 @@ pub enum BackendError { DecodeCodeInfo(codec::Error), #[error("Unable to decode timestamp: {0}")] DecodeTimestamp(codec::Error), + #[error("Unable to decode blockNumber: {0}")] + DecodeBlockNumber(codec::Error), #[error("Unable to decode aura authorities: {0}")] DecodeAuraAuthorities(codec::Error), + #[error("Unable to decode the next fee multiplier: {0}")] + DecodeNextFeeMultiplier(codec::Error), } type Result = std::result::Result; @@ -60,7 +71,7 @@ impl BackendWithOverlay { Self { backend, overrides } } - pub fn blockchain(&self) -> &BlockchainDb { + pub fn blockchain(&self) -> &Blockchain { self.backend.blockchain() } @@ -72,6 +83,13 @@ impl BackendWithOverlay { u64::decode(&mut &value[..]).map_err(BackendError::DecodeTimestamp) } + pub fn read_block_number(&self, hash: Hash) -> Result { + let key = well_known_keys::BLOCK_NUMBER_KEY; + let value = + self.read_top_state(hash, key.to_vec())?.ok_or(BackendError::MissingBlockNumber)?; + u32::decode(&mut &value[..]).map_err(BackendError::DecodeBlockNumber) + } + pub fn read_chain_id(&self, hash: Hash) -> Result { let key = 
well_known_keys::CHAIN_ID; @@ -164,6 +182,11 @@ impl BackendWithOverlay { overrides.set_coinbase(at, aura_authority); } + pub fn inject_next_fee_multiplier(&self, at: Hash, next_fee_multiplier: FixedU128) { + let mut overrides = self.overrides.lock(); + overrides.set_next_fee_multiplier(at, next_fee_multiplier); + } + pub fn inject_total_issuance(&self, at: Hash, value: Balance) { let mut overrides = self.overrides.lock(); overrides.set_total_issuance(at, value); @@ -263,6 +286,16 @@ impl StorageOverrides { self.add(latest_block, changeset); } + fn set_next_fee_multiplier(&mut self, latest_block: Hash, next_fee_multiplier: FixedU128) { + let mut changeset = BlockOverrides::default(); + changeset.top.insert( + well_known_keys::NEXT_FEE_MULTIPLIER.to_vec(), + Some(next_fee_multiplier.encode()), + ); + + self.add(latest_block, changeset); + } + fn set_system_account_info( &mut self, latest_block: Hash, diff --git a/crates/anvil-polkadot/src/substrate_node/service/client.rs b/crates/anvil-polkadot/src/substrate_node/service/client.rs index c334c4a17c79f..3834a3883a3be 100644 --- a/crates/anvil-polkadot/src/substrate_node/service/client.rs +++ b/crates/anvil-polkadot/src/substrate_node/service/client.rs @@ -1,5 +1,6 @@ use crate::substrate_node::{ genesis::DevelopmentGenesisBlockBuilder, + lazy_loading::backend::new_backend as new_lazy_loading_backend, service::{ Backend, backend::StorageOverrides, @@ -11,7 +12,7 @@ use polkadot_sdk::{ parachains_common::opaque::Block, sc_chain_spec::get_extension, sc_client_api::{BadBlocks, ForkBlocks, execution_extensions::ExecutionExtensions}, - sc_service::{self, KeystoreContainer, LocalCallExecutor, TaskManager, new_db_backend}, + sc_service::{self, KeystoreContainer, LocalCallExecutor, TaskManager}, sp_keystore::KeystorePtr, }; use std::{collections::HashMap, sync::Arc}; @@ -25,7 +26,7 @@ pub fn new_client( executor: WasmExecutor, storage_overrides: Arc>, ) -> Result<(Arc, Arc, KeystorePtr, TaskManager), sc_service::error::Error> 
{ - let backend = new_db_backend(config.db_config())?; + let backend = new_lazy_loading_backend(None, None)?; let genesis_block_builder = DevelopmentGenesisBlockBuilder::new( genesis_block_number, diff --git a/crates/anvil-polkadot/src/substrate_node/service/mod.rs b/crates/anvil-polkadot/src/substrate_node/service/mod.rs index fa9a36f1cfa6e..e8f7c95acc2f8 100644 --- a/crates/anvil-polkadot/src/substrate_node/service/mod.rs +++ b/crates/anvil-polkadot/src/substrate_node/service/mod.rs @@ -1,6 +1,7 @@ use crate::{ AnvilNodeConfig, substrate_node::{ + lazy_loading::backend::Backend as LazyLoadingBackend, mining_engine::{MiningEngine, MiningMode, run_mining_engine}, rpc::spawn_rpc_server, service::consensus::SameSlotConsensusDataProvider, @@ -30,7 +31,7 @@ mod consensus; mod executor; pub mod storage; -pub type Backend = sc_service::TFullBackend; +pub type Backend = LazyLoadingBackend; pub type TransactionPoolHandle = sc_transaction_pool::TransactionPoolHandle; diff --git a/crates/anvil-polkadot/src/substrate_node/service/storage.rs b/crates/anvil-polkadot/src/substrate_node/service/storage.rs index abca913a8a392..a6553cead52c8 100644 --- a/crates/anvil-polkadot/src/substrate_node/service/storage.rs +++ b/crates/anvil-polkadot/src/substrate_node/service/storage.rs @@ -1,54 +1,10 @@ -use codec::{Decode, Encode}; -use polkadot_sdk::{ - frame_support::BoundedVec, - frame_system, - pallet_balances::AccountData, - parachains_common::{AccountId, Nonce}, - sp_core::ConstU32, -}; -use substrate_runtime::{Balance, Hash}; - -#[derive(Encode, Decode)] -pub struct ReviveAccountInfo { - pub account_type: AccountType, - pub dust: u32, -} - -#[derive(Encode, Decode)] -pub enum AccountType { - Contract(ContractInfo), - EOA, -} - -#[derive(Encode, Decode)] -pub struct ContractInfo { - pub trie_id: BoundedVec>, - pub code_hash: Hash, - pub storage_bytes: u32, - pub storage_items: u32, - pub storage_byte_deposit: Balance, - pub storage_item_deposit: Balance, - pub storage_base_deposit: 
Balance, - pub immutable_data_len: u32, -} +use polkadot_sdk::{frame_system, pallet_balances::AccountData, parachains_common::Nonce}; +use substrate_runtime::Balance; -#[derive(Encode, Decode)] -pub struct CodeInfo { - pub owner: AccountId, - #[codec(compact)] - pub deposit: Balance, - #[codec(compact)] - pub refcount: u64, - pub code_len: u32, - pub code_type: ByteCodeType, - pub behaviour_version: u32, -} - -#[derive(Encode, Decode)] -pub enum ByteCodeType { - Pvm, - Evm, -} +pub use pallet_revive_eth_rpc::subxt_client::runtime_types::pallet_revive::{ + storage::{AccountInfo as ReviveAccountInfo, AccountType, ContractInfo}, + vm::{BytecodeType, CodeInfo}, +}; pub type SystemAccountInfo = frame_system::AccountInfo>; @@ -92,6 +48,12 @@ pub mod well_known_keys { 154, 166, 12, 2, 190, 154, 220, 201, 138, 13, 29, ]; + //twox_128(b"TransactionPayment" + b"NextFeeMultiplier") + pub const NEXT_FEE_MULTIPLIER: [u8; 32] = [ + 63, 20, 103, 160, 150, 188, 215, 26, 91, 106, 12, 129, 85, 226, 8, 16, 63, 46, 223, 59, + 223, 56, 29, 235, 227, 49, 171, 116, 70, 173, 223, 220, + ]; + pub fn system_account_info(account_id: AccountId) -> Vec { let mut key = Vec::new(); key.extend_from_slice(&twox_128("System".as_bytes())); diff --git a/crates/anvil-polkadot/substrate-runtime/Cargo.toml b/crates/anvil-polkadot/substrate-runtime/Cargo.toml index 130e21313a834..f3e1b561a3c3b 100644 --- a/crates/anvil-polkadot/substrate-runtime/Cargo.toml +++ b/crates/anvil-polkadot/substrate-runtime/Cargo.toml @@ -21,6 +21,7 @@ polkadot-sdk = { git = "https://github.com/paritytech/polkadot-sdk.git", branch "pallet-transaction-payment", "pallet-transaction-payment-rpc-runtime-api", "parachains-common", + "polkadot-runtime-common", "runtime", "sp-consensus-aura", "with-tracing", diff --git a/crates/anvil-polkadot/substrate-runtime/src/lib.rs b/crates/anvil-polkadot/substrate-runtime/src/lib.rs index 9dcb200b5a2f0..8fc4409422450 100644 --- a/crates/anvil-polkadot/substrate-runtime/src/lib.rs +++ 
b/crates/anvil-polkadot/substrate-runtime/src/lib.rs @@ -22,12 +22,13 @@ use pallet_revive::{ runtime::EthExtra, }, }; -use pallet_transaction_payment::{ConstFeeMultiplier, FeeDetails, Multiplier, RuntimeDispatchInfo}; +use pallet_transaction_payment::{FeeDetails, RuntimeDispatchInfo}; use polkadot_sdk::{ parachains_common::{ AccountId, AssetHubPolkadotAuraId as AuraId, BlockNumber, Hash as CommonHash, Header, Nonce, Signature, }, + polkadot_runtime_common::SlowAdjustingFeeUpdate, polkadot_sdk_frame::{ deps::sp_genesis_builder, runtime::{apis, prelude::*}, @@ -41,6 +42,11 @@ use polkadot_sdk::{ pub use polkadot_sdk::parachains_common::Balance; use sp_weights::ConstantMultiplier; +pub mod constants { + /// DOT precision (1e12) to ETH precision (1e18) ratio. + pub const NATIVE_TO_ETH_RATIO: u32 = 1_000_000; +} + pub mod currency { use super::Balance; pub const DOLLARS: Balance = 1_000_000_000_000; @@ -257,17 +263,27 @@ impl pallet_sudo::Config for Runtime {} impl pallet_timestamp::Config for Runtime {} parameter_types! { - pub const TransactionByteFee: Balance = 10 * MILLICENTS; - pub FeeMultiplier: Multiplier = Multiplier::one(); + // That's how asset-hub-westend sets this. + pub const TransactionByteFee: Balance = MILLICENTS; } +// That's how asset-hub-westend sets this. +pub type WeightToFee = BlockRatioFee< + // p + CENTS, + // q + { 100 * ExtrinsicBaseWeight::get().ref_time() as u128 }, + Runtime, +>; + // Implements the types required for the transaction payment pallet. #[derive_impl(pallet_transaction_payment::config_preludes::TestDefaultConfig)] impl pallet_transaction_payment::Config for Runtime { type OnChargeTransaction = pallet_transaction_payment::FungibleAdapter; - type WeightToFee = BlockRatioFee<1, 1, Self>; + type WeightToFee = WeightToFee; type LengthToFee = ConstantMultiplier; - type FeeMultiplierUpdate = ConstFeeMultiplier; + // That's how asset-hub-westend sets this. + type FeeMultiplierUpdate = SlowAdjustingFeeUpdate; } parameter_types! 
{ @@ -299,7 +315,7 @@ impl pallet_revive::Config for Runtime { // `forking` feature. type FindAuthor = BlockAuthor; type Balance = Balance; - type NativeToEthRatio = ConstU32<1_000_000>; + type NativeToEthRatio = ConstU32<{ constants::NATIVE_TO_ETH_RATIO }>; type UploadOrigin = EnsureSigned; type InstantiateOrigin = EnsureSigned; type Time = Timestamp; diff --git a/crates/anvil-polkadot/test-data/genesis.json b/crates/anvil-polkadot/test-data/genesis.json new file mode 100644 index 0000000000000..239c5304d7abf --- /dev/null +++ b/crates/anvil-polkadot/test-data/genesis.json @@ -0,0 +1,21 @@ +{ + "config": { + "chainId": 42420 + }, + "timestamp": "0x120925", + "number": 7, + "coinbase": "0xee00000000000000000000000000000000000007", + "alloc": { + "71562b71999873db5b286df957af199ec94617f7": { + "balance": "0x1bc16d674ec80000", + "nonce": "0x01" + }, + "821a038b8787187299554cc87ec442cdcd824e65": { + "balance": "0x4563918244f40000", + "code": "0x0101010101010101010101010101010101010101", + "storage": { + "0x00": "0x01f4" + } + } + } +} \ No newline at end of file diff --git a/crates/anvil-polkadot/tests/it/contract_isolation.rs b/crates/anvil-polkadot/tests/it/contract_isolation.rs new file mode 100644 index 0000000000000..9b218a578cf80 --- /dev/null +++ b/crates/anvil-polkadot/tests/it/contract_isolation.rs @@ -0,0 +1,107 @@ +use std::time::Duration; + +use crate::{ + abi::SimpleStorage, + utils::{TestNode, get_contract_code, unwrap_response}, +}; +use alloy_primitives::{Address, U256}; +use alloy_rpc_types::{TransactionInput, TransactionRequest}; +use alloy_sol_types::SolCall; +use anvil_core::eth::EthRequest; +use anvil_polkadot::{ + api_server::revive_conversions::ReviveAddress, config::{AnvilNodeConfig, SubstrateNodeConfig}, +}; +use polkadot_sdk::pallet_revive::evm::Account; + +/// Tests that multiple contract instances maintain independent state +#[tokio::test(flavor = "multi_thread")] +async fn test_multiple_contract_instances_independent_storage() { + let 
anvil_node_config = AnvilNodeConfig::test_config(); + let substrate_node_config = SubstrateNodeConfig::new(&anvil_node_config); + let mut node = TestNode::new(anvil_node_config.clone(), substrate_node_config).await.unwrap(); + + let alith = Account::from(subxt_signer::eth::dev::alith()); + let alith_address = ReviveAddress::new(alith.address()); + let contract_code = get_contract_code("SimpleStorage"); + + // Deploy 3 instances of SimpleStorage contract (nonces 0, 1, 2) + let contract1_tx = node.deploy_contract(&contract_code.init, alith.address(), None).await; + unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); + tokio::time::sleep(Duration::from_millis(500)).await; + + let contract2_tx = node.deploy_contract(&contract_code.init, alith.address(), None).await; + unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); + tokio::time::sleep(Duration::from_millis(500)).await; + + let contract3_tx = node.deploy_contract(&contract_code.init, alith.address(), None).await; + unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); + tokio::time::sleep(Duration::from_millis(500)).await; + + let receipt1 = node.get_transaction_receipt(contract1_tx).await; + let contract1_address = receipt1.contract_address.unwrap(); + let receipt2 = node.get_transaction_receipt(contract2_tx).await; + let contract2_address = receipt2.contract_address.unwrap(); + let receipt3 = node.get_transaction_receipt(contract3_tx).await; + let contract3_address = receipt3.contract_address.unwrap(); + + // Verify all contracts are deployed successfully + assert_eq!(receipt1.status, Some(polkadot_sdk::pallet_revive::U256::from(1))); + assert_eq!(receipt2.status, Some(polkadot_sdk::pallet_revive::U256::from(1))); + assert_eq!(receipt3.status, Some(polkadot_sdk::pallet_revive::U256::from(1))); + + // Set different values for each contract (Contract 1: 100, Contract 2: 200, Contract 3: 300) + let 
set_value1 = SimpleStorage::setValueCall::new((U256::from(100),)).abi_encode(); + let call_tx1 = TransactionRequest::default() + .from(Address::from(alith_address)) + .to(Address::from(ReviveAddress::new(contract1_address))) + .input(TransactionInput::both(set_value1.into())); + node.send_transaction(call_tx1, None).await.unwrap(); + unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); + tokio::time::sleep(Duration::from_millis(500)).await; + + let set_value2 = SimpleStorage::setValueCall::new((U256::from(200),)).abi_encode(); + let call_tx2 = TransactionRequest::default() + .from(Address::from(alith_address)) + .to(Address::from(ReviveAddress::new(contract2_address))) + .input(TransactionInput::both(set_value2.into())); + node.send_transaction(call_tx2, None).await.unwrap(); + unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); + tokio::time::sleep(Duration::from_millis(500)).await; + + let set_value3 = SimpleStorage::setValueCall::new((U256::from(300),)).abi_encode(); + let call_tx3 = TransactionRequest::default() + .from(Address::from(alith_address)) + .to(Address::from(ReviveAddress::new(contract3_address))) + .input(TransactionInput::both(set_value3.into())); + node.send_transaction(call_tx3, None).await.unwrap(); + unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); + tokio::time::sleep(Duration::from_millis(500)).await; + + // Verify each contract maintains its own independent storage + let value1 = node.get_storage_at(U256::from(0), contract1_address).await; + let value2 = node.get_storage_at(U256::from(0), contract2_address).await; + let value3 = node.get_storage_at(U256::from(0), contract3_address).await; + + assert_eq!(value1, 100, "Contract 1 should have value 100"); + assert_eq!(value2, 200, "Contract 2 should have value 200"); + assert_eq!(value3, 300, "Contract 3 should have value 300"); + + // Update contract 2's value to 999 and 
verify others are unaffected + let update_value2 = SimpleStorage::setValueCall::new((U256::from(999),)).abi_encode(); + let update_tx2 = TransactionRequest::default() + .from(Address::from(alith_address)) + .to(Address::from(ReviveAddress::new(contract2_address))) + .input(TransactionInput::both(update_value2.into())); + node.send_transaction(update_tx2, None).await.unwrap(); + unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); + tokio::time::sleep(Duration::from_millis(500)).await; + + // Verify only contract 2 changed + let value1_after = node.get_storage_at(U256::from(0), contract1_address).await; + let value2_after = node.get_storage_at(U256::from(0), contract2_address).await; + let value3_after = node.get_storage_at(U256::from(0), contract3_address).await; + + assert_eq!(value1_after, 100, "Contract 1 value should remain 100"); + assert_eq!(value2_after, 999, "Contract 2 value should be updated to 999"); + assert_eq!(value3_after, 300, "Contract 3 value should remain 300"); +} diff --git a/crates/anvil-polkadot/tests/it/gas.rs b/crates/anvil-polkadot/tests/it/gas.rs new file mode 100644 index 0000000000000..24ee03aaddd69 --- /dev/null +++ b/crates/anvil-polkadot/tests/it/gas.rs @@ -0,0 +1,171 @@ +use std::time::Duration; + +use crate::utils::{TestNode, unwrap_response}; +use alloy_primitives::{Address, U256}; +use alloy_rpc_types::TransactionRequest; +use anvil_core::eth::EthRequest; +use anvil_polkadot::config::{AnvilNodeConfig, INITIAL_BASE_FEE, SubstrateNodeConfig}; +use polkadot_sdk::pallet_revive::evm::Account; +use rstest::rstest; +use std::ops::Not; + +#[tokio::test(flavor = "multi_thread")] +#[rstest] +#[case(false)] +#[case(true)] +async fn test_set_next_fee_multiplier(#[case] rpc_driven: bool) { + // 1e18 denomination. 
+ let new_base_fee = U256::from(6_000_000); + let anvil_node_config = AnvilNodeConfig::test_config() + .with_base_fee(rpc_driven.not().then_some(new_base_fee.to::())); + let substrate_node_config = SubstrateNodeConfig::new(&anvil_node_config); + let mut node = TestNode::new(anvil_node_config.clone(), substrate_node_config).await.unwrap(); + + let gas_price = + unwrap_response::(node.eth_rpc(EthRequest::EthGasPrice(())).await.unwrap()).unwrap(); + + if rpc_driven { + assert_eq!(gas_price.to::(), INITIAL_BASE_FEE); + unwrap_response::<()>( + node.eth_rpc(EthRequest::SetNextBlockBaseFeePerGas(new_base_fee)).await.unwrap(), + ) + .unwrap(); + } else { + assert_eq!(gas_price, new_base_fee); + } + + // Currently the gas_price returned from evm is equivalent to the base_fee. + let gas_price = + unwrap_response::(node.eth_rpc(EthRequest::EthGasPrice(())).await.unwrap()).unwrap(); + assert_eq!(gas_price, new_base_fee); + + // We send a regular eth transfer to check the associated effective gas price used by the + // transaction, after it will be included in a next block. We're interested especially in + // the tx effective gas price to validate that the base_fee_per_gas set previously is also + // considered when computing the fees for the tx execution. + // We could have checked the `base_fee_per_gas` after querying the latest eth block mined + // (which could have been empty too) after setting a new base fee, but it will not report the + // correct base fee because of: https://github.com/paritytech/polkadot-sdk/issues/10177. 
+ let alith = Account::from(subxt_signer::eth::dev::alith()); + let baltathar = Account::from(subxt_signer::eth::dev::baltathar()); + let alith_initial_balance = node.get_balance(alith.address(), None).await; + let baltathar_initial_balance = node.get_balance(baltathar.address(), None).await; + let transfer_amount = U256::from_str_radix("100000000000000000", 10).unwrap(); + let transaction = TransactionRequest::default() + .value(transfer_amount) + .from(Address::from(alith.address().to_fixed_bytes())) + .to(Address::from(baltathar.address().to_fixed_bytes())); + let tx_hash = node.send_transaction(transaction, None).await.unwrap(); + unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); + node.wait_for_block_with_timeout(1, Duration::from_millis(400)).await.unwrap(); + tokio::time::sleep(Duration::from_millis(400)).await; + let transaction_receipt = node.get_transaction_receipt(tx_hash).await; + let effective_gas_price = + U256::from_be_bytes(transaction_receipt.effective_gas_price.to_big_endian()); + let gas_used = U256::from_be_bytes(transaction_receipt.gas_used.to_big_endian()); + assert_eq!(effective_gas_price, new_base_fee); + let alith_final_balance = node.get_balance(alith.address(), None).await; + let baltathar_final_balance = node.get_balance(baltathar.address(), None).await; + assert_eq!( + baltathar_final_balance, + baltathar_initial_balance + transfer_amount, + "Baltathar's balance should have changed" + ); + assert_eq!( + alith_final_balance, + alith_initial_balance - transfer_amount - effective_gas_price * gas_used, + "Alith's balance should have changed" + ); + + let block1_hash = node.block_hash_by_number(1).await.unwrap(); + let block1 = node.get_block_by_hash(block1_hash).await; + // This will fail ideally once we update to a polkadot-sdk version that includes a fix for + // https://github.com/paritytech/polkadot-sdk/issues/10177. The reported base_fer_per_gas + // should be the previously set `new_base_fee`. 
+ assert_eq!(U256::from_be_bytes(block1.base_fee_per_gas.to_big_endian()), U256::from(5999888)); + + // Mining a second block should update the base fee according to the logic that determines + // the base_fee in relation to how congested the network is. + unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); + node.wait_for_block_with_timeout(2, Duration::from_millis(500)).await.unwrap(); + let block2_hash = node.block_hash_by_number(2).await.unwrap(); + let block2 = node.get_block_by_hash(block2_hash).await; + + // This will fail ideally once we update to a polkadot-sdk version that includes a fix for + // https://github.com/paritytech/polkadot-sdk/issues/10177. + assert_eq!(U256::from_be_bytes(block2.base_fee_per_gas.to_big_endian()), 5999775); +} + +#[tokio::test(flavor = "multi_thread")] +async fn test_next_fee_multiplier_minimum() { + // 1e18 denomination. + let new_base_fee = U256::from(50_123); + let anvil_node_config = + AnvilNodeConfig::test_config().with_base_fee(Some(new_base_fee.to::())); + let substrate_node_config = SubstrateNodeConfig::new(&anvil_node_config); + let mut node = TestNode::new(anvil_node_config.clone(), substrate_node_config).await.unwrap(); + + // Currently the gas_price returned from evm is equivalent to the base_fee. + let gas_price = + unwrap_response::(node.eth_rpc(EthRequest::EthGasPrice(())).await.unwrap()).unwrap(); + assert_eq!(gas_price, new_base_fee); + + // We send a regular eth transfer to check the associated effective gas price used by the + // transaction, after it will be included in a next block. We're interested especially in + // the tx effective gas price to validate that the base_fee_per_gas set previously is also + // considered when computing the fees for the tx execution. 
+ // We could have checked the `base_fee_per_gas` after querying the latest eth block mined + // (which could have been empty too) after setting a new base fee, but it will not report the + // correct base fee because of: https://github.com/paritytech/polkadot-sdk/issues/10177. + let alith = Account::from(subxt_signer::eth::dev::alith()); + let baltathar = Account::from(subxt_signer::eth::dev::baltathar()); + let alith_initial_balance = node.get_balance(alith.address(), None).await; + let baltathar_initial_balance = node.get_balance(baltathar.address(), None).await; + let transfer_amount = U256::from_str_radix("100000000000000000", 10).unwrap(); + let transaction = TransactionRequest::default() + .value(transfer_amount) + .from(Address::from(alith.address().to_fixed_bytes())) + .to(Address::from(baltathar.address().to_fixed_bytes())); + let tx_hash = node.send_transaction(transaction, None).await.unwrap(); + unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); + node.wait_for_block_with_timeout(1, Duration::from_millis(400)).await.unwrap(); + tokio::time::sleep(Duration::from_millis(400)).await; + let transaction_receipt = node.get_transaction_receipt(tx_hash).await; + let effective_gas_price = + U256::from_be_bytes(transaction_receipt.effective_gas_price.to_big_endian()); + let gas_used = U256::from_be_bytes(transaction_receipt.gas_used.to_big_endian()); + assert_eq!(effective_gas_price, new_base_fee); + let alith_final_balance = node.get_balance(alith.address(), None).await; + let baltathar_final_balance = node.get_balance(baltathar.address(), None).await; + assert_eq!( + baltathar_final_balance, + baltathar_initial_balance + transfer_amount, + "Baltathar's balance should have changed" + ); + assert_eq!( + alith_final_balance, + alith_initial_balance - transfer_amount - effective_gas_price * gas_used, + "Alith's balance should have changed" + ); + + let block1_hash = node.block_hash_by_number(1).await.unwrap(); + let block1 = 
node.get_block_by_hash(block1_hash).await; + + // The anvil-polkadot substrate-runtime is configured similarly to the assethub runtimes in + // terms of the minimum NextFeeMultiplier value that can be reached. The minimum is the one + // configured in the runtime, which in our case is the same as for asset-hub-westend. This + // assert should fail once https://github.com/paritytech/polkadot-sdk/issues/10177 is fixed. + // The actual value should be the previously set base_fee. + assert_eq!(U256::from_be_bytes(block1.base_fee_per_gas.to_big_endian()), U256::from(100_000)); + + // Mining a second block should update the base fee according to the logic that determines + // the base_fee in relation to how congested the network is. + unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); + node.wait_for_block_with_timeout(2, Duration::from_millis(500)).await.unwrap(); + let block2_hash = node.block_hash_by_number(2).await.unwrap(); + let block2 = node.get_block_by_hash(block2_hash).await; + + // However, since the previously set base_fee is lower than the minimum, this should be set + // right away to the minimum. 
+ assert_eq!(U256::from_be_bytes(block2.base_fee_per_gas.to_big_endian()), U256::from(100_000)); +} diff --git a/crates/anvil-polkadot/tests/it/genesis.rs b/crates/anvil-polkadot/tests/it/genesis.rs index f44b0f1a9cd2b..29b270c20f086 100644 --- a/crates/anvil-polkadot/tests/it/genesis.rs +++ b/crates/anvil-polkadot/tests/it/genesis.rs @@ -10,9 +10,12 @@ use alloy_primitives::{Address, B256, Bytes, U256}; use alloy_rpc_types::{BlockId, TransactionInput, TransactionRequest}; use alloy_sol_types::SolCall; use anvil_core::eth::EthRequest; -use anvil_polkadot::config::{AnvilNodeConfig, SubstrateNodeConfig}; +use anvil_polkadot::{ + api_server::revive_conversions::ReviveAddress, + config::{AnvilNodeConfig, SubstrateNodeConfig}, +}; use polkadot_sdk::pallet_revive::{self, evm::Account}; -use std::{collections::BTreeMap, time::Duration}; +use std::{collections::BTreeMap, path::PathBuf}; use subxt::utils::H160; #[tokio::test(flavor = "multi_thread")] @@ -29,11 +32,16 @@ async fn test_genesis_params() { // Check that block number, timestamp, and chain id are set correctly at genesis assert_eq!(node.best_block_number().await, genesis_block_number); + assert_eq!(node.eth_best_block().await.number.as_u32(), genesis_block_number); + let genesis_hash = node.block_hash_by_number(genesis_block_number).await.unwrap(); // Anvil genesis timestamp is in seconds, while Substrate timestamp is in milliseconds. 
let genesis_timestamp = anvil_genesis_timestamp.checked_mul(1000).unwrap(); let actual_genesis_timestamp = node.get_decoded_timestamp(Some(genesis_hash)).await; assert_eq!(actual_genesis_timestamp, genesis_timestamp); + let eth_genesis_timestamp = node.get_eth_timestamp(Some(genesis_hash)).await; + assert_eq!(anvil_genesis_timestamp, eth_genesis_timestamp); + let current_chain_id_hex = unwrap_response::(node.eth_rpc(EthRequest::EthChainId(())).await.unwrap()).unwrap(); assert_eq!(current_chain_id_hex, to_hex_string(chain_id)); @@ -46,8 +54,13 @@ async fn test_genesis_params() { let latest_block_number = node.best_block_number().await; assert_eq!(latest_block_number, genesis_block_number + 2); + assert_eq!(node.eth_best_block().await.number.as_u32(), genesis_block_number + 2); + let hash2 = node.block_hash_by_number(genesis_block_number + 2).await.unwrap(); let timestamp2 = node.get_decoded_timestamp(Some(hash2)).await; + let eth_timestamp2 = node.get_eth_timestamp(Some(hash2)).await; + assert_eq!(eth_timestamp2, timestamp2 / 1000); + assert_with_tolerance( timestamp2.saturating_sub(genesis_timestamp), 2000, @@ -194,17 +207,7 @@ async fn test_genesis_alloc() { assert_eq!(contract_code_result, runtime_bytecode, "Genesis contract code should match"); // Test contract storage - let result = node - .eth_rpc(EthRequest::EthGetStorageAt( - Address::from(test_contract_bytes), - U256::from(0), - None, - )) - .await - .unwrap(); - let hex_string = unwrap_response::(result).unwrap(); - let hex_value = hex_string.strip_prefix("0x").unwrap_or(&hex_string); - let stored_value = U256::from_str_radix(hex_value, 16).unwrap(); + let stored_value = node.get_storage_at(U256::from(0), test_contract_address).await; assert_eq!(stored_value, 511, "Storage slot 0 of genesis contract should contain value 511"); // Test contract functionality by calling getValue() @@ -230,13 +233,12 @@ async fn test_coinbase_genesis() { .with_genesis(Some(Genesis { coinbase: genesis_coinbase, 
..Default::default() })); let substrate_node_config = SubstrateNodeConfig::new(&anvil_node_config); let mut node = TestNode::new(anvil_node_config.clone(), substrate_node_config).await.unwrap(); - unwrap_response::<()>(node.eth_rpc(EthRequest::SetAutomine(true)).await.unwrap()).unwrap(); // Deploy multicall contract let alith = Account::from(subxt_signer::eth::dev::alith()); let contract_code = get_contract_code("Multicall"); - let tx_hash = node.deploy_contract(&contract_code.init, alith.address(), Some(1)).await; - tokio::time::sleep(Duration::from_millis(400)).await; + let tx_hash = node.deploy_contract(&contract_code.init, alith.address(), None).await; + let _ = node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap(); // Get contract address. let receipt = node.get_transaction_receipt(tx_hash).await; @@ -253,3 +255,128 @@ async fn test_coinbase_genesis() { genesis_coinbase, ); } + +#[tokio::test(flavor = "multi_thread")] +async fn test_genesis_json() { + // Load genesis.json file from test-data directory + let genesis_json_path = + PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data").join("genesis.json"); + let genesis_file = std::fs::File::open(&genesis_json_path) + .unwrap_or_else(|_| panic!("Failed to open genesis.json at {genesis_json_path:?}")); + let genesis: Genesis = serde_json::from_reader(genesis_file) + .unwrap_or_else(|e| panic!("Failed to parse genesis.json: {e}")); + + // Expected values from genesis.json + let expected_chain_id = genesis.config.chain_id; + let expected_timestamp = genesis.timestamp; + let expected_block_number = genesis.number.unwrap_or_default(); + let expected_coinbase = genesis.coinbase; + let alloc_accounts = genesis.alloc.iter(); + + // Create node config with genesis from file + let anvil_node_config = AnvilNodeConfig::test_config().with_genesis(Some(genesis.clone())); + let substrate_node_config = SubstrateNodeConfig::new(&anvil_node_config); + let mut node = TestNode::new(anvil_node_config, 
substrate_node_config).await.unwrap(); + + // Test chain ID + let chain_id_hex = + unwrap_response::(node.eth_rpc(EthRequest::EthChainId(())).await.unwrap()).unwrap(); + assert_eq!( + chain_id_hex, + to_hex_string(expected_chain_id), + "Chain ID should match the one in genesis.json" + ); + + // Test block number + let genesis_block_number = node.best_block_number().await; + assert_eq!( + genesis_block_number as u64, expected_block_number, + "Genesis block number should match the one in genesis.json" + ); + + assert_eq!(node.eth_best_block().await.number.as_u64(), expected_block_number); + + // Test timestamp + let genesis_hash = node.block_hash_by_number(genesis_block_number).await.unwrap(); + // Anvil genesis timestamp is in seconds, while Substrate timestamp is in milliseconds + let expected_timestamp_ms = expected_timestamp.checked_mul(1000).unwrap(); + let actual_timestamp = node.get_decoded_timestamp(Some(genesis_hash)).await; + assert_eq!( + actual_timestamp, expected_timestamp_ms, + "Genesis timestamp should match the one in genesis.json" + ); + + let eth_genesis_timestamp = node.get_eth_timestamp(Some(genesis_hash)).await; + assert_eq!(expected_timestamp, eth_genesis_timestamp); + + // Test coinbase + let coinbase = + unwrap_response::
(node.eth_rpc(EthRequest::EthCoinbase(())).await.unwrap()) + .unwrap(); + assert_eq!(coinbase, expected_coinbase, "Coinbase should match the one in genesis.json"); + + // Scan through all accounts in the genesis alloc and test their balances, nonces, codes, and + // storage. + for (&account_addr, account_info) in alloc_accounts { + let account_balance_actual = + node.get_balance(H160::from_slice(account_addr.as_slice()), None).await; + let expected_balance = account_info.balance; + assert_eq!( + account_balance_actual, expected_balance, + "Account balance should match the one in genesis.json" + ); + let account_nonce_actual = node.get_nonce(account_addr).await; + let expected_nonce = account_info.nonce.unwrap_or_default(); + assert_eq!( + account_nonce_actual, expected_nonce, + "Account nonce should match the one in genesis.json" + ); + if account_info.code.is_none() { + let code_actual = unwrap_response::( + node.eth_rpc(EthRequest::EthGetCodeAt( + account_addr, + Some(BlockId::number(genesis_block_number.into())), + )) + .await + .unwrap(), + ) + .unwrap(); + assert!( + code_actual.is_empty(), + "Genesis account should have no code as in genesis.json" + ); + } else { + let code_actual = unwrap_response::( + node.eth_rpc(EthRequest::EthGetCodeAt( + account_addr, + Some(BlockId::number(genesis_block_number.into())), + )) + .await + .unwrap(), + ) + .unwrap(); + assert!( + !code_actual.is_empty(), + "Genesis account should have non-empty code as in genesis.json" + ); + assert_eq!( + code_actual, + account_info.code.clone().unwrap(), + "Genesis account code should match the one in genesis.json" + ); + for (storage_key, storage_value) in &account_info.storage.clone().unwrap_or_default() { + let storage_value_actual = node + .get_storage_at( + U256::from_be_bytes(storage_key.0), + ReviveAddress::from(account_addr).inner(), + ) + .await; + let expected_storage_value = U256::from_be_bytes(storage_value.0); + assert_eq!( + storage_value_actual, expected_storage_value, + 
"Genesis account storage value should match the one in genesis.json" + ); + } + } + } +} diff --git a/crates/anvil-polkadot/tests/it/main.rs b/crates/anvil-polkadot/tests/it/main.rs index cb7c29087c063..48a587e1bfce8 100644 --- a/crates/anvil-polkadot/tests/it/main.rs +++ b/crates/anvil-polkadot/tests/it/main.rs @@ -1,4 +1,6 @@ mod abi; +mod contract_isolation; +mod gas; mod genesis; mod impersonation; mod mining; diff --git a/crates/anvil-polkadot/tests/it/sign.rs b/crates/anvil-polkadot/tests/it/sign.rs index c7fe1a7be6906..19553e974a92a 100644 --- a/crates/anvil-polkadot/tests/it/sign.rs +++ b/crates/anvil-polkadot/tests/it/sign.rs @@ -12,7 +12,6 @@ use polkadot_sdk::{ pallet_revive::evm::{Account, TransactionSigned}, sp_core::{H256, U256}, }; -use std::time::Duration; #[tokio::test(flavor = "multi_thread")] async fn can_sign_transaction() { @@ -78,7 +77,6 @@ async fn can_sign_transaction() { ) .unwrap(); unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); - tokio::time::sleep(Duration::from_millis(400)).await; let transaction_receipt = node.get_transaction_receipt(tx_hash).await; assert_eq!(transaction_receipt.from, alith.address()); diff --git a/crates/anvil-polkadot/tests/it/snapshot.rs b/crates/anvil-polkadot/tests/it/snapshot.rs index 754eb6a40f0ec..b676433c8e63c 100644 --- a/crates/anvil-polkadot/tests/it/snapshot.rs +++ b/crates/anvil-polkadot/tests/it/snapshot.rs @@ -2,13 +2,10 @@ use std::time::Duration; use crate::{ abi::Multicall, - utils::{ - BlockWaitTimeout, EXISTENTIAL_DEPOSIT, TestNode, assert_with_tolerance, get_contract_code, - unwrap_response, - }, + utils::{TestNode, assert_with_tolerance, get_contract_code, unwrap_response}, }; use alloy_primitives::{Address, Bytes, U256}; -use alloy_rpc_types::{TransactionInput, TransactionRequest}; +use alloy_rpc_types::{TransactionInput, TransactionRequest, txpool::TxpoolInspect}; use alloy_serde::WithOtherFields; use alloy_sol_types::SolCall; use 
anvil_core::eth::EthRequest; @@ -31,18 +28,17 @@ async fn assert_block_number_is_best_and_finalized( assert_eq!(std::convert::Into::::into(node.best_block_number().await), n); if let Some(duration) = wait_for_block_provider { tokio::time::sleep(duration).await; - let best_block = unwrap_response::( - node.eth_rpc(EthRequest::EthGetBlockByNumber( - alloy_eips::BlockNumberOrTag::Latest, - false, - )) + } + let best_block = unwrap_response::( + node.eth_rpc(EthRequest::EthGetBlockByNumber(alloy_eips::BlockNumberOrTag::Latest, false)) .await .unwrap(), - ) - .unwrap(); - let n_as_u256 = pallet_revive::U256::from(n); - assert_eq!(best_block.number, n_as_u256); + ) + .unwrap(); + let n_as_u256 = pallet_revive::U256::from(n); + assert_eq!(best_block.number, n_as_u256); + for _ in 0..3 { let finalized_block = unwrap_response::( node.eth_rpc(EthRequest::EthGetBlockByNumber( alloy_eips::BlockNumberOrTag::Finalized, @@ -52,8 +48,12 @@ async fn assert_block_number_is_best_and_finalized( .unwrap(), ) .unwrap(); - assert_eq!(finalized_block.number, n_as_u256); + if finalized_block.number == n_as_u256 { + return; + } + tokio::time::sleep(Duration::from_millis(400)).await; } + panic!("Could not reach the desired finalized block number after 3 retries."); } async fn snapshot(node: &mut TestNode, expected_snapshot_id: U256) -> U256 { @@ -69,18 +69,12 @@ async fn snapshot(node: &mut TestNode, expected_snapshot_id: U256) -> U256 { id } -async fn mine_blocks( - node: &mut TestNode, - blocks: u64, - assert_best_block: u64, - wait_for_block_provider: Option, -) { +async fn mine_blocks(node: &mut TestNode, blocks: u64, assert_best_block: u64) { unwrap_response::<()>( node.eth_rpc(EthRequest::Mine(Some(U256::from(blocks)), None)).await.unwrap(), ) .unwrap(); - assert_block_number_is_best_and_finalized(node, assert_best_block, wait_for_block_provider) - .await; + assert_block_number_is_best_and_finalized(node, assert_best_block, None).await; } async fn revert( @@ -103,7 +97,7 @@ async fn 
do_transfer( from: Address, to: Option
, amount: U256, - block_wait_timeout: Option, + block_number: Option, ) -> (H256, Option) { let tx_hash = if let Some(to) = to { let transaction = TransactionRequest::default().value(amount).from(from).to(to); @@ -114,8 +108,8 @@ async fn do_transfer( tx_hash }; - if let Some(BlockWaitTimeout { block_number, timeout }) = block_wait_timeout { - mine_blocks(node, 1, block_number.into(), Some(timeout)).await; + if let Some(block_number) = block_number { + mine_blocks(node, 1, block_number).await; return (tx_hash, Some(node.get_transaction_receipt(tx_hash).await)); } @@ -168,26 +162,26 @@ async fn test_best_block_after_evm_revert() { let zero = snapshot(&mut node, U256::ZERO).await; // Mine 5 blocks and assert on the new best block. - mine_blocks(&mut node, 5, 5, Some(Duration::from_millis(500))).await; + mine_blocks(&mut node, 5, 5).await; // Snapshot at block number 5. let one = snapshot(&mut node, U256::ONE).await; // Mine 5 more blocks. - mine_blocks(&mut node, 5, 10, Some(Duration::from_millis(500))).await; + mine_blocks(&mut node, 5, 10).await; // Snapshot again at block number 10. let two = snapshot(&mut node, U256::from(2)).await; assert_block_number_is_best_and_finalized(&mut node, 10, None).await; // Mine 5 more blocks. - mine_blocks(&mut node, 5, 15, Some(Duration::from_millis(500))).await; + mine_blocks(&mut node, 5, 15).await; // Revert to the second snapshot and assert best block number is 10. revert(&mut node, two, 10, true, None).await; // Check mining works fine after reverting. - mine_blocks(&mut node, 10, 20, Some(Duration::from_millis(500))).await; + mine_blocks(&mut node, 10, 20).await; // Revert immediatelly after a snapshot (same best number is expected after the revert). let id = snapshot(&mut node, U256::from(3)).await; @@ -211,10 +205,12 @@ async fn test_balances_and_txs_index_after_evm_revert() { assert_block_number_is_best_and_finalized(&mut node, 0, None).await; // Mine 5 blocks and assert on the new best block. 
- mine_blocks(&mut node, 5, 5, Some(Duration::from_millis(500))).await; + mine_blocks(&mut node, 5, 5).await; // Snapshot at block number 5. let zero = snapshot(&mut node, U256::ZERO).await; + let initial_gas_price = + unwrap_response::(node.eth_rpc(EthRequest::EthGasPrice(())).await.unwrap()).unwrap(); // Get known accounts initial balances. let (alith_addr, alith_account) = alith(); @@ -224,14 +220,8 @@ async fn test_balances_and_txs_index_after_evm_revert() { // Initialize a random account. Assume its initial balance is 0. let transfer_amount = U256::from(16e17); - let (_, receipt_info) = do_transfer( - &mut node, - alith_addr, - None, - transfer_amount, - Some(BlockWaitTimeout { block_number: 6, timeout: Duration::from_millis(500) }), - ) - .await; + let (_, receipt_info) = + do_transfer(&mut node, alith_addr, None, transfer_amount, Some(6)).await; let receipt_info = receipt_info.unwrap(); let dest_h160 = receipt_info.to.unwrap(); @@ -241,8 +231,7 @@ async fn test_balances_and_txs_index_after_evm_revert() { alith_balance_after_tx0, alith_initial_balance - AlloyU256::from(receipt_info.effective_gas_price * receipt_info.gas_used).inner() - - transfer_amount - - U256::from(EXISTENTIAL_DEPOSIT), + - transfer_amount, "alith's balance should have changed" ); assert_eq!(dest_balance, transfer_amount, "dest's balance should have changed"); @@ -251,14 +240,8 @@ async fn test_balances_and_txs_index_after_evm_revert() { // Make another regular transfer between known accounts. 
let transfer_amount = U256::from(1e17); - let (_, receipt_info) = do_transfer( - &mut node, - baltathar_addr, - Some(alith_addr), - transfer_amount, - Some(BlockWaitTimeout { block_number: 7, timeout: Duration::from_millis(500) }), - ) - .await; + let (_, receipt_info) = + do_transfer(&mut node, baltathar_addr, Some(alith_addr), transfer_amount, Some(7)).await; let receipt_info = receipt_info.unwrap(); assert_eq!(receipt_info.block_number, pallet_revive::U256::from(7)); @@ -278,9 +261,13 @@ async fn test_balances_and_txs_index_after_evm_revert() { "Alith's balance should have changed" ); - // Revert to a block before the transactions have been mined. + // Revert to a block before the transactions have been included. revert(&mut node, zero, 5, true, Some(Duration::from_millis(500))).await; + let after_revert_gas_price = + unwrap_response::(node.eth_rpc(EthRequest::EthGasPrice(())).await.unwrap()).unwrap(); + assert_eq!(initial_gas_price, after_revert_gas_price); + // Assert on accounts balances to be the initial balances. let dest_addr = Address::from(dest_h160.to_fixed_bytes()); let alith_balance = node.get_balance(alith_account.address(), None).await; @@ -293,17 +280,11 @@ async fn test_balances_and_txs_index_after_evm_revert() { assert_eq!(node.get_nonce(baltathar_addr).await, U256::ZERO); assert_eq!(node.get_nonce(dest_addr).await, U256::ZERO); - // Remine the 6th block with same txs above. + // Remine the 6th block with the same txs but included in a single block. 
let (tx_hash1, _) = do_transfer(&mut node, alith_addr, Some(dest_addr), U256::from(16e17), None).await; - let (tx_hash2, receipt_info2) = do_transfer( - &mut node, - baltathar_addr, - Some(alith_addr), - U256::from(1e17), - Some(BlockWaitTimeout { block_number: 6, timeout: Duration::from_millis(500) }), - ) - .await; + let (tx_hash2, receipt_info2) = + do_transfer(&mut node, baltathar_addr, Some(alith_addr), U256::from(1e17), Some(6)).await; let receipt_info2 = receipt_info2.unwrap(); let receipt_info = node.get_transaction_receipt(tx_hash1).await; let mut tx_indices = @@ -343,7 +324,7 @@ async fn test_evm_revert_and_timestamp() { let zero = snapshot(&mut node, U256::ZERO).await; // Assert on first best block number. - mine_blocks(&mut node, 1, 1, None).await; + mine_blocks(&mut node, 1, 1).await; let first_timestamp = node.get_decoded_timestamp(None).await; assert_with_tolerance( first_timestamp.saturating_div(1000), @@ -366,7 +347,7 @@ async fn test_evm_revert_and_timestamp() { ); // Mine 1 blocks and assert on the new best block. - mine_blocks(&mut node, 1, 2, None).await; + mine_blocks(&mut node, 1, 2).await; let second_timestamp = node.get_decoded_timestamp(None).await; assert_with_tolerance( second_timestamp.saturating_sub(first_timestamp), @@ -392,7 +373,7 @@ async fn test_evm_revert_and_timestamp() { "Wrong offset 2", ); - mine_blocks(&mut node, 1, 3, None).await; + mine_blocks(&mut node, 1, 3).await; let third_timestamp = node.get_decoded_timestamp(None).await; assert_with_tolerance( third_timestamp.saturating_sub(second_timestamp), @@ -414,7 +395,7 @@ async fn test_evm_revert_and_timestamp() { // Mine again 1 block and check again the timestamp. We should have the next block timestamp // with 1 second later than the second block timestamp. 
tokio::time::sleep(Duration::from_secs(1)).await; - mine_blocks(&mut node, 1, 3, None).await; + mine_blocks(&mut node, 1, 3).await; let remined_third_block_ts = node.get_decoded_timestamp(None).await; assert_with_tolerance( remined_third_block_ts.saturating_sub(second_timestamp), @@ -436,7 +417,7 @@ async fn test_evm_revert_and_timestamp() { // Mine 1 block and check the timestamp. We don't check on a specific // timestamp, but expect the time has increased a bit since the revert, which set the time back // to genesis timestamp. - mine_blocks(&mut node, 1, 1, None).await; + mine_blocks(&mut node, 1, 1).await; assert_eq!(node.best_block_number().await, 1); let remined_first_block_ts = node.get_decoded_timestamp(None).await; // Here assert that the time is increasing. @@ -453,14 +434,14 @@ async fn test_rollback() { assert_block_number_is_best_and_finalized(&mut node, 0, None).await; // Mine 5 blocks and assert on the new best block. - mine_blocks(&mut node, 5, 5, Some(Duration::from_millis(500))).await; + mine_blocks(&mut node, 5, 5).await; // Rollback 2 blocks. unwrap_response::<()>(node.eth_rpc(EthRequest::Rollback(Some(2))).await.unwrap()).unwrap(); assert_block_number_is_best_and_finalized(&mut node, 3, Some(Duration::from_millis(500))).await; // Check mining works fine after reverting. - mine_blocks(&mut node, 10, 13, Some(Duration::from_millis(500))).await; + mine_blocks(&mut node, 10, 13).await; // Rollback 1 block. unwrap_response::<()>(node.eth_rpc(EthRequest::Rollback(None)).await.unwrap()).unwrap(); @@ -478,11 +459,11 @@ async fn test_mine_with_txs_in_mempool_before_revert() { assert_block_number_is_best_and_finalized(&mut node, 0, None).await; // Mine 5 blocks and assert on the new best block. - mine_blocks(&mut node, 5, 5, Some(Duration::from_millis(500))).await; + mine_blocks(&mut node, 5, 5).await; // Snapshot at block number 5. 
let zero = snapshot(&mut node, U256::ZERO).await; - mine_blocks(&mut node, 5, 10, None).await; + mine_blocks(&mut node, 5, 10).await; // Get known accounts. let (alith_addr, _) = alith(); @@ -490,7 +471,7 @@ async fn test_mine_with_txs_in_mempool_before_revert() { // Initialize a random account. let transfer_amount = U256::from(16e17); - let (dest_addr, _) = + let _ = node.eth_transfer_to_unitialized_random_account(alith_addr, transfer_amount, None).await; // Make another regular transfer between known accounts. @@ -501,26 +482,29 @@ async fn test_mine_with_txs_in_mempool_before_revert() { // Revert to a block before the transactions have been sent. revert(&mut node, zero, 5, true, None).await; - let one = snapshot(&mut node, U256::ONE).await; + let inspect: TxpoolInspect = + unwrap_response(node.eth_rpc(EthRequest::TxPoolInspect(())).await.unwrap()).unwrap(); + assert_eq!(inspect.pending.len(), 2); - mine_blocks(&mut node, 1, 6, Some(Duration::from_millis(500))).await; + mine_blocks(&mut node, 1, 6).await; - let txs_in_block = unwrap_response::( - node.eth_rpc(EthRequest::EthGetTransactionCountByNumber( - alloy_eips::BlockNumberOrTag::Latest, - )) - .await - .unwrap(), - ) - .unwrap(); - assert_eq!(txs_in_block, U256::from(2)); + // Get current block to verify gas_price < base_fee_per_gas + let block_number = node.best_block_number().await; + let block_hash = node.block_hash_by_number(block_number).await.unwrap(); + let block = node.get_block_by_hash(block_hash).await; + let base_fee = block.base_fee_per_gas.as_u128(); + + let pending_alith_txs = inspect.pending.get(&alith_addr).unwrap(); + let pending_baltathar_txs = inspect.pending.get(&baltathar_addr).unwrap(); + + assert_eq!(pending_alith_txs.len(), 1); + assert_eq!(pending_baltathar_txs.len(), 1); + + let summary_alith = pending_alith_txs.get("0").unwrap(); + assert!(summary_alith.gas_price < base_fee); - // Now make two more txs again with same senders, with different nonces than the actual - // accounts 
nonces at block 5. - let transfer_amount = U256::from(1e15); - do_transfer(&mut node, baltathar_addr, Some(alith_addr), transfer_amount, None).await; - do_transfer(&mut node, alith_addr, Some(dest_addr), transfer_amount, None).await; - revert(&mut node, one, 5, true, None).await; + let summary_baltathar = pending_baltathar_txs.get("0").unwrap(); + assert!(summary_baltathar.gas_price < base_fee); let txs_in_block = unwrap_response::( node.eth_rpc(EthRequest::EthGetTransactionCountByNumber( @@ -530,6 +514,8 @@ async fn test_mine_with_txs_in_mempool_before_revert() { .unwrap(), ) .unwrap(); + // Previous txs are not included in the block because they have + // a gas_price smaller than the current block's base_fee_per_gas. assert_eq!(txs_in_block, U256::ZERO); } @@ -546,7 +532,7 @@ async fn test_timestmap_in_contract_after_revert() { let alith = Account::from(subxt_signer::eth::dev::alith()); let contract_code = get_contract_code("Multicall"); let tx_hash = node.deploy_contract(&contract_code.init, alith.address(), None).await; - mine_blocks(&mut node, 1, 1, Some(Duration::from_millis(500))).await; + mine_blocks(&mut node, 1, 1).await; let first_timestamp = node.get_decoded_timestamp(None).await; assert_with_tolerance( @@ -589,7 +575,7 @@ async fn test_timestmap_in_contract_after_revert() { assert_eq!(timestamp, U256::from(first_timestamp.saturating_div(1000))); // Mine 1 block again and expect on the set timestamp. 
- mine_blocks(&mut node, 1, 2, Some(Duration::from_millis(500))).await; + mine_blocks(&mut node, 1, 2).await; let second_timestamp = node.get_decoded_timestamp(None).await; assert_with_tolerance( second_timestamp.saturating_sub(first_timestamp), diff --git a/crates/anvil-polkadot/tests/it/standard_rpc.rs b/crates/anvil-polkadot/tests/it/standard_rpc.rs index 39ad1bf3aa5e6..ad410326203e1 100644 --- a/crates/anvil-polkadot/tests/it/standard_rpc.rs +++ b/crates/anvil-polkadot/tests/it/standard_rpc.rs @@ -164,7 +164,6 @@ async fn test_estimate_gas() { .unwrap(); let tx_hash = node.send_transaction(transaction, None).await.unwrap(); unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); - tokio::time::sleep(Duration::from_millis(400)).await; let receipt = node.get_transaction_receipt(tx_hash).await; // https://github.com/paritytech/polkadot-sdk/blob/b21cbb58ab50d5d10371393967537f6f221bb92f/substrate/frame/revive/src/primitives.rs#L76 // eth_gas that is returned by estimate_gas holds both the storage deposit and @@ -274,8 +273,8 @@ async fn test_eth_get_transaction_count() { .unwrap(), ) .unwrap_err(); - assert_eq!(err.code, ErrorCode::InternalError); - assert_eq!(err.message, "Revive call failed: Client error: hash not found"); + assert_eq!(err.code, ErrorCode::InvalidParams); + assert_eq!(err.message, "Block number not found"); assert_eq!( unwrap_response::( @@ -299,7 +298,6 @@ async fn test_eth_get_transaction_count() { ))); let _tx_hash0 = node.send_transaction(transaction.clone(), None).await.unwrap(); unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); - tokio::time::sleep(Duration::from_millis(400)).await; assert_eq!( unwrap_response::( node.eth_rpc(EthRequest::EthGetTransactionCount( @@ -355,7 +353,6 @@ async fn test_get_transaction_count_by_hash_number() { U256::from(0) ); unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); - 
tokio::time::sleep(Duration::from_millis(400)).await; assert_eq!( unwrap_response::>( node.eth_rpc(EthRequest::EthGetTransactionCountByHash(B256::from_slice( @@ -388,7 +385,6 @@ async fn test_get_code_at() { let anvil_node_config = AnvilNodeConfig::test_config(); let substrate_node_config = SubstrateNodeConfig::new(&anvil_node_config); let mut node = TestNode::new(anvil_node_config.clone(), substrate_node_config).await.unwrap(); - unwrap_response::<()>(node.eth_rpc(EthRequest::SetAutomine(true)).await.unwrap()).unwrap(); // Check random address let code = unwrap_response::( @@ -399,8 +395,8 @@ async fn test_get_code_at() { assert!(code.is_empty(), "Contract code should be empty"); let alith = Account::from(subxt_signer::eth::dev::alith()); let contract_code = get_contract_code("SimpleStorage"); - let tx_hash = node.deploy_contract(&contract_code.init, alith.address(), Some(1)).await; - tokio::time::sleep(Duration::from_millis(400)).await; + let tx_hash = node.deploy_contract(&contract_code.init, alith.address(), None).await; + let _ = node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap(); let receipt = node.get_transaction_receipt(tx_hash).await; assert_eq!(receipt.status, Some(pallet_revive::U256::from(1))); let contract_address = receipt.contract_address.unwrap(); @@ -458,7 +454,6 @@ async fn test_get_transaction_by_hash_and_index() { .await .unwrap(); unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); - tokio::time::sleep(Duration::from_millis(400)).await; assert_eq!( unwrap_response::>( node.eth_rpc(EthRequest::EthGetTransactionByBlockHashAndIndex( @@ -494,11 +489,12 @@ async fn test_get_transaction_by_hash_and_index() { .unwrap() .unwrap(); - assert_eq!(first_hash, transaction_info_1.block_hash); + let eth_first_hash = node.resolve_ethereum_hash(first_hash).unwrap(); + assert_eq!(eth_first_hash, transaction_info_1.block_hash); assert_eq!(transaction_info_1.from, alith.address()); assert_eq!(tx_hash0, 
transaction_info_1.hash); - assert_eq!(first_hash, transaction_info_2.block_hash); + assert_eq!(eth_first_hash, transaction_info_2.block_hash); assert_eq!(transaction_info_2.from, baltathar.address()); assert_eq!(tx_hash1, transaction_info_2.hash); } @@ -527,7 +523,6 @@ async fn test_get_transaction_by_number_and_index() { .await .unwrap(); unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); - tokio::time::sleep(Duration::from_millis(400)).await; let transaction_info_1 = unwrap_response::>( node.eth_rpc(EthRequest::EthGetTransactionByBlockNumberAndIndex( @@ -550,7 +545,7 @@ async fn test_get_transaction_by_number_and_index() { .unwrap() .unwrap(); - let first_hash = node.block_hash_by_number(1).await.unwrap(); + let first_hash = node.eth_block_hash_by_number(1).await.unwrap(); assert_eq!(first_hash, transaction_info_1.block_hash); assert_eq!(transaction_info_1.from, alith.address()); assert_eq!(tx_hash0, transaction_info_1.hash); @@ -575,8 +570,6 @@ async fn test_get_transaction_by_hash() { .to(Address::from(ReviveAddress::new(baltathar.address()))); let tx_hash0 = node.send_transaction(transaction.clone(), None).await.unwrap(); unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); - tokio::time::sleep(Duration::from_millis(400)).await; - let transaction_info = unwrap_response::>( node.eth_rpc(EthRequest::EthGetTransactionByHash(B256::from_slice(tx_hash0.as_ref()))) .await @@ -585,7 +578,7 @@ async fn test_get_transaction_by_hash() { .unwrap() .unwrap(); - let first_hash = node.block_hash_by_number(1).await.unwrap(); + let first_hash = node.eth_block_hash_by_number(1).await.unwrap(); assert_eq!(first_hash, transaction_info.block_hash); assert_eq!(transaction_info.from, alith.address()); assert_eq!(tx_hash0, transaction_info.hash); @@ -599,6 +592,16 @@ async fn test_get_storage() { unwrap_response::<()>(node.eth_rpc(EthRequest::SetAutomine(true)).await.unwrap()).unwrap(); let alith = 
Account::from(subxt_signer::eth::dev::alith()); + // Test retrieving the storage of an EOA account (alith) + let stored_value = node.get_storage_at(U256::from(0), alith.address()).await; + assert_eq!(stored_value, 0); + + // Test retrieving the storage of a non-existant account. + let random_addr = Address::random(); + let stored_value = + node.get_storage_at(U256::from(0), ReviveAddress::from(random_addr).inner()).await; + assert_eq!(stored_value, 0); + let contract_code = get_contract_code("SimpleStorage"); let tx_hash = node.deploy_contract(&contract_code.init, alith.address(), Some(1)).await; tokio::time::sleep(Duration::from_millis(400)).await; @@ -684,7 +687,13 @@ async fn test_fee_history() { ) .unwrap(); assert_eq!(fee_history.gas_used_ratio.len(), 10); - assert!(fee_history.base_fee_per_gas.iter().all(|&v| v == pallet_revive::U256::from(1000000))); + // The `SlowAdjustingFeeUpdate` logic decreases the base_fee block by block if the + // activity contained within them is low. + let base_fees = + [999981, 999962, 999944, 999925, 999906, 999888, 999869, 999851, 999832, 999813, 999813]; + for (idx, base_fee) in fee_history.base_fee_per_gas.into_iter().enumerate() { + assert_eq!(base_fee, pallet_revive::U256::from(base_fees[idx])); + } } #[tokio::test(flavor = "multi_thread")] @@ -694,7 +703,7 @@ async fn test_max_fee_per_gas() { let mut node = TestNode::new(anvil_node_config.clone(), substrate_node_config).await.unwrap(); assert_eq!( - "0x30d40", + "0x0", unwrap_response::( node.eth_rpc(EthRequest::EthMaxPriorityFeePerGas(())).await.unwrap() ) @@ -737,7 +746,6 @@ async fn test_get_logs() { let contract_code = get_contract_code("SimpleStorage"); let tx_hash = node.deploy_contract(&contract_code.init, alith.address(), None).await; unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); - tokio::time::sleep(Duration::from_millis(500)).await; let receipt = node.get_transaction_receipt(tx_hash).await; let contract_address = 
receipt.contract_address.unwrap(); @@ -752,7 +760,6 @@ async fn test_get_logs() { let _call_tx_hash = node.send_transaction(call_tx, None).await.unwrap(); } unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); - tokio::time::sleep(Duration::from_millis(400)).await; let filter = alloy_rpc_types::Filter::new() .address(Address::from(ReviveAddress::new(contract_address))) @@ -928,7 +935,6 @@ async fn test_anvil_node_info() { // Mine some blocks and check that node_info updates unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(Some(U256::from(3)), None)).await.unwrap()) .unwrap(); - tokio::time::sleep(Duration::from_millis(400)).await; let node_info_after = unwrap_response::(node.eth_rpc(EthRequest::NodeInfo(())).await.unwrap()).unwrap(); @@ -987,7 +993,6 @@ async fn test_anvil_metadata() { // Mine some blocks and check that metadata updates unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(Some(U256::from(5)), None)).await.unwrap()) .unwrap(); - tokio::time::sleep(Duration::from_millis(400)).await; let metadata_after_mining = unwrap_response::( node.eth_rpc(EthRequest::AnvilMetadata(())).await.unwrap(), diff --git a/crates/anvil-polkadot/tests/it/state_injector.rs b/crates/anvil-polkadot/tests/it/state_injector.rs index 69b2930423fe0..b0e1ff50a6bfb 100644 --- a/crates/anvil-polkadot/tests/it/state_injector.rs +++ b/crates/anvil-polkadot/tests/it/state_injector.rs @@ -14,7 +14,6 @@ use anvil_polkadot::{ use anvil_rpc::error::{ErrorCode, RpcError}; use assert_matches::assert_matches; use polkadot_sdk::pallet_revive::{self, evm::Account}; -use std::time::Duration; use subxt::utils::H160; #[tokio::test(flavor = "multi_thread")] @@ -79,8 +78,6 @@ async fn test_set_chain_id() { let tx_hash = node.send_transaction(tx, None).await.unwrap(); unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); - tokio::time::sleep(Duration::from_secs(1)).await; - let transaction_receipt = 
node.get_transaction_receipt(tx_hash).await; assert_eq!(transaction_receipt.block_number, pallet_revive::U256::from(2)); @@ -108,7 +105,6 @@ async fn test_set_nonce() { assert_eq!(node.get_nonce(address).await, U256::from(10)); unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); - tokio::time::sleep(Duration::from_secs(1)).await; assert_eq!(node.get_nonce(address).await, U256::from(10)); @@ -131,8 +127,6 @@ async fn test_set_nonce() { unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); - tokio::time::sleep(Duration::from_secs(1)).await; - let transaction_receipt = node.get_transaction_receipt(tx_hash).await; assert_eq!(transaction_receipt.block_number, pallet_revive::U256::from(2)); @@ -149,7 +143,6 @@ async fn test_set_nonce() { let tx_hash = node.send_transaction(tx, None).await.unwrap(); unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); - tokio::time::sleep(Duration::from_secs(1)).await; let transaction_receipt = node.get_transaction_receipt(tx_hash).await; @@ -173,7 +166,6 @@ async fn test_set_nonce() { assert_eq!(node.get_nonce(address).await, U256::from(1)); unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); - tokio::time::sleep(Duration::from_secs(1)).await; assert_eq!(node.get_nonce(address).await, U256::from(1)); } @@ -205,12 +197,10 @@ async fn test_set_balance() { assert_eq!(node.get_balance(alith, None).await, new_balance); unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); - tokio::time::sleep(Duration::from_secs(1)).await; assert_eq!(node.get_balance(alith, None).await, new_balance); - // Send 2 dollars to another account. We'll actually send 3, to cover for the existential - // deposit of 1 dollar. + // Send 2 dollars to another account. 
let charleth = Account::from(subxt_signer::eth::dev::charleth()); let tx = TransactionRequest::default() .value(U256::from(2e18)) @@ -220,14 +210,13 @@ async fn test_set_balance() { let tx_hash = node.send_transaction(tx, None).await.unwrap(); unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); - tokio::time::sleep(Duration::from_secs(1)).await; let transaction_receipt = node.get_transaction_receipt(tx_hash).await; assert_eq!(transaction_receipt.block_number, pallet_revive::U256::from(2)); assert_eq!(transaction_receipt.transaction_hash, tx_hash); - let alith_new_balance = U256::from(2e18) + let alith_new_balance = U256::from(3e18) - AlloyU256::from(transaction_receipt.effective_gas_price * transaction_receipt.gas_used) .inner(); assert_eq!(node.get_balance(alith, None).await, alith_new_balance); @@ -271,7 +260,6 @@ async fn test_set_balance() { assert_eq!(node.get_balance(baltathar, None).await, new_balance); unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); - tokio::time::sleep(Duration::from_secs(1)).await; assert_eq!(node.get_balance(baltathar, None).await, new_balance); @@ -290,7 +278,6 @@ async fn test_set_balance() { assert_eq!(node.get_balance(random_addr, None).await, new_balance); unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); - tokio::time::sleep(Duration::from_secs(1)).await; assert_eq!(node.get_balance(random_addr, None).await, new_balance); } @@ -315,7 +302,6 @@ async fn test_set_code_existing_contract() { .await; unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); - tokio::time::sleep(Duration::from_secs(1)).await; let receipt = node.get_transaction_receipt(tx_hash).await; let contract_address = Address::from(ReviveAddress::new(receipt.contract_address.unwrap())); @@ -344,7 +330,6 @@ async fn test_set_code_existing_contract() { let tx_hash = node.send_transaction(tx, None).await.unwrap(); 
unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); - tokio::time::sleep(Duration::from_secs(1)).await; let _receipt = node.get_transaction_receipt(tx_hash).await; @@ -391,7 +376,6 @@ async fn test_set_code_existing_contract() { let tx_hash = node.send_transaction(tx, None).await.unwrap(); unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); - tokio::time::sleep(Duration::from_secs(1)).await; let _receipt = node.get_transaction_receipt(tx_hash).await; @@ -466,7 +450,6 @@ async fn test_set_code_new() { assert_eq!(code, Bytes::from(runtime_bytecode.clone())); unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); - tokio::time::sleep(Duration::from_secs(1)).await; assert_eq!(code, Bytes::from(runtime_bytecode)); @@ -479,7 +462,6 @@ async fn test_set_code_new() { let tx_hash = node.send_transaction(tx, None).await.unwrap(); unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); - tokio::time::sleep(Duration::from_secs(1)).await; let _receipt = node.get_transaction_receipt(tx_hash).await; @@ -555,7 +537,6 @@ async fn test_set_code_of_regular_account() { assert_eq!(code, Bytes::from(runtime_bytecode.clone())); unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); - tokio::time::sleep(Duration::from_secs(1)).await; assert_eq!(code, Bytes::from(runtime_bytecode)); @@ -568,7 +549,6 @@ async fn test_set_code_of_regular_account() { let tx_hash = node.send_transaction(tx, None).await.unwrap(); unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); - tokio::time::sleep(Duration::from_secs(1)).await; let _receipt = node.get_transaction_receipt(tx_hash).await; @@ -595,51 +575,80 @@ async fn test_set_storage() { let mut node = TestNode::new(anvil_node_config.clone(), substrate_node_config).await.unwrap(); let alith = 
Account::from(subxt_signer::eth::dev::alith()); - let contract_code = get_contract_code("SimpleStorage"); - let tx_hash = node.deploy_contract(&contract_code.init, alith.address(), None).await; - unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); - tokio::time::sleep(std::time::Duration::from_millis(400)).await; - let receipt = node.get_transaction_receipt(tx_hash).await; - let contract_address = receipt.contract_address.unwrap(); - - // Check the default value for slot 0. - let result = node - .eth_rpc(EthRequest::EthGetStorageAt( - Address::from(ReviveAddress::new(contract_address)), - U256::from(0), - None, - )) - .await + // Set storage of a new random account. + { + let random_addr = Address::random(); + + let stored_value = + node.get_storage_at(U256::from(0), ReviveAddress::from(random_addr).inner()).await; + assert_eq!(stored_value, 0); + + // Set a new value for the slot 0. + unwrap_response::<()>( + node.eth_rpc(EthRequest::SetStorageAt( + random_addr, + U256::from(0), + B256::from(U256::from(511)), + )) + .await + .unwrap(), + ) .unwrap(); - let hex_string = unwrap_response::(result).unwrap(); - let hex_value = hex_string.strip_prefix("0x").unwrap_or(&hex_string); - let stored_value = U256::from_str_radix(hex_value, 16).unwrap(); - assert_eq!(stored_value, 0); - - // Set a new value for the slot 0. 
- unwrap_response::<()>( - node.eth_rpc(EthRequest::SetStorageAt( - Address::from(ReviveAddress::new(contract_address)), - U256::from(0), - B256::from(U256::from(511)), - )) - .await - .unwrap(), - ) - .unwrap(); + // Check that the value was updated + let stored_value = + node.get_storage_at(U256::from(0), ReviveAddress::from(random_addr).inner()).await; + assert_eq!(stored_value, 511); + } + + // Update the storage of an existing account + { + let contract_code = get_contract_code("SimpleStorage"); + let tx_hash = node.deploy_contract(&contract_code.init, alith.address(), None).await; + unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); + let receipt = node.get_transaction_receipt(tx_hash).await; + let contract_address = receipt.contract_address.unwrap(); + + // Check the default value for slot 0. + let stored_value = node.get_storage_at(U256::from(0), contract_address).await; + assert_eq!(stored_value, 0); + + // Set a new value for the slot 0. + unwrap_response::<()>( + node.eth_rpc(EthRequest::SetStorageAt( + Address::from(ReviveAddress::new(contract_address)), + U256::from(0), + B256::from(U256::from(511)), + )) + .await + .unwrap(), + ) + .unwrap(); - // Check that the value was updated - let result = node - .eth_rpc(EthRequest::EthGetStorageAt( - Address::from(ReviveAddress::new(contract_address)), - U256::from(0), - None, - )) - .await + // Check that the value was updated + let stored_value = node.get_storage_at(U256::from(0), contract_address).await; + assert_eq!(stored_value, 511); + } + + // Set storage for a EOA account (Alith). + { + let stored_value = node.get_storage_at(U256::from(0), alith.address()).await; + assert_eq!(stored_value, 0); + + // Set a new value for the slot 0. 
+ unwrap_response::<()>( + node.eth_rpc(EthRequest::SetStorageAt( + Address::from(ReviveAddress::new(alith.address())), + U256::from(0), + B256::from(U256::from(511)), + )) + .await + .unwrap(), + ) .unwrap(); - let hex_string = unwrap_response::(result).unwrap(); - let hex_value = hex_string.strip_prefix("0x").unwrap_or(&hex_string); - let stored_value = U256::from_str_radix(hex_value, 16).unwrap(); - assert_eq!(stored_value, 511); + + // Check that the value was updated + let stored_value = node.get_storage_at(U256::from(0), alith.address()).await; + assert_eq!(stored_value, 511); + } } diff --git a/crates/anvil-polkadot/tests/it/txpool.rs b/crates/anvil-polkadot/tests/it/txpool.rs index 42c6d6cffe210..6ba5c51ec336a 100644 --- a/crates/anvil-polkadot/tests/it/txpool.rs +++ b/crates/anvil-polkadot/tests/it/txpool.rs @@ -1,9 +1,12 @@ use crate::utils::{TestNode, unwrap_response}; use alloy_primitives::{Address, B256, U256}; -use alloy_rpc_types::{TransactionRequest, txpool::TxpoolStatus}; +use alloy_rpc_types::{ + TransactionRequest, + txpool::{TxpoolContent, TxpoolInspect, TxpoolStatus}, +}; use anvil_core::eth::EthRequest; use anvil_polkadot::{ - api_server::revive_conversions::ReviveAddress, + api_server::{TxpoolTransactionInfo, revive_conversions::ReviveAddress}, config::{AnvilNodeConfig, SubstrateNodeConfig}, }; use polkadot_sdk::pallet_revive::evm::Account; @@ -146,3 +149,286 @@ async fn test_drop_all_transactions() { assert_eq!(status.pending, 0); assert_eq!(status.queued, 0); } + +#[tokio::test(flavor = "multi_thread")] +async fn test_txpool_inspect() { + let anvil_node_config = AnvilNodeConfig::test_config(); + let substrate_node_config = SubstrateNodeConfig::new(&anvil_node_config); + let mut node = TestNode::new(anvil_node_config, substrate_node_config).await.unwrap(); + + let alith = Account::from(subxt_signer::eth::dev::alith()); + let alith_addr = Address::from(ReviveAddress::new(alith.address())); + let recipient_addr = Address::repeat_byte(0x42); + 
+ let inspect: TxpoolInspect = + unwrap_response(node.eth_rpc(EthRequest::TxPoolInspect(())).await.unwrap()).unwrap(); + assert!(inspect.pending.is_empty()); + assert!(inspect.queued.is_empty()); + + for i in 0..3 { + let tx = TransactionRequest::default() + .from(alith_addr) + .to(recipient_addr) + .value(U256::from(1000 * (i + 1))) + .nonce(i); + node.send_transaction(tx, None).await.unwrap(); + } + + let tx_future = TransactionRequest::default() + .from(alith_addr) + .to(recipient_addr) + .value(U256::from(5000)) + .nonce(5); + node.send_transaction(tx_future, None).await.unwrap(); + + let inspect: TxpoolInspect = + unwrap_response(node.eth_rpc(EthRequest::TxPoolInspect(())).await.unwrap()).unwrap(); + + assert_eq!(inspect.pending.len(), 1); + assert_eq!(inspect.queued.len(), 1); + + // Get current block to verify gas_price >= base_fee_per_gas + let block_number = node.best_block_number().await; + let block_hash = node.block_hash_by_number(block_number).await.unwrap(); + let block = node.get_block_by_hash(block_hash).await; + let base_fee = block.base_fee_per_gas.as_u128(); + + let pending_txs = inspect.pending.get(&alith_addr).unwrap(); + assert_eq!(pending_txs.len(), 3); + + for i in 0..3 { + let summary = pending_txs.get(&i.to_string()).unwrap(); + assert_eq!(summary.to.unwrap(), recipient_addr); + assert_eq!(summary.value, U256::from(1000 * (i + 1))); + assert!(summary.gas > 0); + assert!(summary.gas_price >= base_fee); + } + + let queued_txs = inspect.queued.get(&alith_addr).unwrap(); + assert_eq!(queued_txs.len(), 1); + + let summary = queued_txs.get("5").unwrap(); + assert_eq!(summary.to.unwrap(), recipient_addr); + assert_eq!(summary.value, U256::from(5000)); + assert!(summary.gas > 0); + assert!(summary.gas_price >= base_fee); +} + +#[tokio::test(flavor = "multi_thread")] +async fn test_txpool_content() { + let anvil_node_config = AnvilNodeConfig::test_config(); + let substrate_node_config = SubstrateNodeConfig::new(&anvil_node_config); + let mut node = 
TestNode::new(anvil_node_config, substrate_node_config).await.unwrap(); + + let alith = Account::from(subxt_signer::eth::dev::alith()); + let alith_addr = Address::from(ReviveAddress::new(alith.address())); + let recipient_addr = Address::repeat_byte(0x42); + + let content: TxpoolContent = + unwrap_response(node.eth_rpc(EthRequest::TxPoolContent(())).await.unwrap()).unwrap(); + assert!(content.pending.is_empty()); + assert!(content.queued.is_empty()); + + let mut pending_hashes = vec![]; + for i in 0..3 { + let tx = TransactionRequest::default() + .from(alith_addr) + .to(recipient_addr) + .value(U256::from(1000 * (i + 1))) + .nonce(i); + let hash = node.send_transaction(tx, None).await.unwrap(); + pending_hashes.push(hash); + } + + let tx_future = TransactionRequest::default() + .from(alith_addr) + .to(recipient_addr) + .value(U256::from(5000)) + .nonce(5); + let queued_hash = node.send_transaction(tx_future, None).await.unwrap(); + + let content: TxpoolContent = + unwrap_response(node.eth_rpc(EthRequest::TxPoolContent(())).await.unwrap()).unwrap(); + + assert_eq!(content.pending.len(), 1); + assert_eq!(content.queued.len(), 1); + + let pending_txs = content.pending.get(&alith_addr).unwrap(); + assert_eq!(pending_txs.len(), 3); + + for i in 0..3 { + let tx_info = pending_txs.get(&i.to_string()).unwrap(); + let from_addr = Address::from_slice(tx_info.from.as_bytes()); + assert_eq!(from_addr, alith_addr); + + let expected_hash = B256::from_slice(pending_hashes[i as usize].0.as_ref()); + let actual_hash = B256::from_slice(tx_info.hash.as_ref()); + assert_eq!(actual_hash, expected_hash); + + // Pending transactions should have None for block-related fields + assert_eq!(tx_info.block_hash, None); + assert_eq!(tx_info.block_number, None); + assert_eq!(tx_info.transaction_index, None); + } + + let queued_txs = content.queued.get(&alith_addr).unwrap(); + assert_eq!(queued_txs.len(), 1); + + let tx_info = queued_txs.get("5").unwrap(); + let from_addr = 
Address::from_slice(tx_info.from.as_bytes()); + assert_eq!(from_addr, alith_addr); + + let expected_hash = B256::from_slice(queued_hash.0.as_ref()); + let actual_hash = B256::from_slice(tx_info.hash.as_ref()); + assert_eq!(actual_hash, expected_hash); + + // Queued transactions should also have None for block-related fields + assert_eq!(tx_info.block_hash, None); + assert_eq!(tx_info.block_number, None); + assert_eq!(tx_info.transaction_index, None); +} + +#[tokio::test(flavor = "multi_thread")] +async fn test_remove_pool_transactions() { + let anvil_node_config = AnvilNodeConfig::test_config(); + let substrate_node_config = SubstrateNodeConfig::new(&anvil_node_config); + let mut node = TestNode::new(anvil_node_config, substrate_node_config).await.unwrap(); + + let alith = Account::from(subxt_signer::eth::dev::alith()); + let alith_addr = Address::from(ReviveAddress::new(alith.address())); + + let baltathar = Account::from(subxt_signer::eth::dev::baltathar()); + let baltathar_addr = Address::from(ReviveAddress::new(baltathar.address())); + + let recipient_addr = Address::repeat_byte(0x42); + + // Send 3 transactions from Alith + for i in 0..3 { + let tx = TransactionRequest::default() + .from(alith_addr) + .to(recipient_addr) + .value(U256::from(1000 * (i + 1))) + .nonce(i); + node.send_transaction(tx, None).await.unwrap(); + } + + // Send 2 transactions from Baltathar + for i in 0..2 { + let tx = TransactionRequest::default() + .from(baltathar_addr) + .to(recipient_addr) + .value(U256::from(2000 * (i + 1))) + .nonce(i); + node.send_transaction(tx, None).await.unwrap(); + } + + let status: TxpoolStatus = + unwrap_response(node.eth_rpc(EthRequest::TxPoolStatus(())).await.unwrap()).unwrap(); + assert_eq!(status.pending, 5); + assert_eq!(status.queued, 0); + + // Remove all transactions from Alith + unwrap_response::<()>( + node.eth_rpc(EthRequest::RemovePoolTransactions(alith_addr)).await.unwrap(), + ) + .unwrap(); + + let status: TxpoolStatus = + 
unwrap_response(node.eth_rpc(EthRequest::TxPoolStatus(())).await.unwrap()).unwrap(); + assert_eq!(status.pending, 2); + assert_eq!(status.queued, 0); + + // Verify only Baltathar's transactions remain + let content: TxpoolContent = + unwrap_response(node.eth_rpc(EthRequest::TxPoolContent(())).await.unwrap()).unwrap(); + + assert_eq!(content.pending.len(), 1); + assert!(content.pending.contains_key(&baltathar_addr)); + assert!(!content.pending.contains_key(&alith_addr)); + + let baltathar_txs = content.pending.get(&baltathar_addr).unwrap(); + assert_eq!(baltathar_txs.len(), 2); +} + +#[tokio::test(flavor = "multi_thread")] +async fn test_txpool_with_impersonated_transactions() { + let anvil_node_config = AnvilNodeConfig::test_config(); + let substrate_node_config = SubstrateNodeConfig::new(&anvil_node_config); + let mut node = TestNode::new(anvil_node_config, substrate_node_config).await.unwrap(); + + let alith = Account::from(subxt_signer::eth::dev::alith()); + let alith_addr = Address::from(ReviveAddress::new(alith.address())); + + let dorothy = Account::from(subxt_signer::eth::dev::dorothy()); + let impersonated_addr = Address::from(ReviveAddress::new(dorothy.address())); + let recipient_addr = Address::repeat_byte(0x42); + + // Fund dorothy account (dorothy is not initialized in genesis) + let fund_tx = TransactionRequest::default() + .from(alith_addr) + .to(impersonated_addr) + .value(U256::from(10000000000000000000u64)); + node.send_transaction(fund_tx, None).await.unwrap(); + + // Mine the funding transaction + unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); + + unwrap_response::<()>( + node.eth_rpc(EthRequest::ImpersonateAccount(impersonated_addr)).await.unwrap(), + ) + .unwrap(); + + for i in 0..3 { + let tx = TransactionRequest::default() + .from(impersonated_addr) + .to(recipient_addr) + .value(U256::from(1000 * (i + 1))) + .nonce(i); + node.send_unsigned_transaction(tx, None).await.unwrap(); + } + + let status: 
TxpoolStatus = + unwrap_response(node.eth_rpc(EthRequest::TxPoolStatus(())).await.unwrap()).unwrap(); + assert_eq!(status.pending, 3); + assert_eq!(status.queued, 0); + + // Test txpool_inspect (uses extract_tx_summary with impersonation support) + let inspect: TxpoolInspect = + unwrap_response(node.eth_rpc(EthRequest::TxPoolInspect(())).await.unwrap()).unwrap(); + assert_eq!(inspect.pending.len(), 1); + assert!(inspect.pending.contains_key(&impersonated_addr)); + + let impersonated_txs = inspect.pending.get(&impersonated_addr).unwrap(); + assert_eq!(impersonated_txs.len(), 3); + + // Test txpool_content (uses extract_tx_info with impersonation support) + let content: TxpoolContent = + unwrap_response(node.eth_rpc(EthRequest::TxPoolContent(())).await.unwrap()).unwrap(); + assert_eq!(content.pending.len(), 1); + + let pending_txs = content.pending.get(&impersonated_addr).unwrap(); + assert_eq!(pending_txs.len(), 3); + + for i in 0..3 { + let tx_info = pending_txs.get(&i.to_string()).unwrap(); + let from_addr = Address::from_slice(tx_info.from.as_bytes()); + assert_eq!(from_addr, impersonated_addr); + assert!(tx_info.hash != Default::default()); + } + + // Test anvil_removePoolTransactions (uses extract_sender with impersonation support) + unwrap_response::<()>( + node.eth_rpc(EthRequest::RemovePoolTransactions(impersonated_addr)).await.unwrap(), + ) + .unwrap(); + + let status: TxpoolStatus = + unwrap_response(node.eth_rpc(EthRequest::TxPoolStatus(())).await.unwrap()).unwrap(); + assert_eq!(status.pending, 0); + assert_eq!(status.queued, 0); + + unwrap_response::<()>( + node.eth_rpc(EthRequest::StopImpersonatingAccount(impersonated_addr)).await.unwrap(), + ) + .unwrap(); +} diff --git a/crates/anvil-polkadot/tests/it/utils.rs b/crates/anvil-polkadot/tests/it/utils.rs index 73a25c73397b0..b34669401de0b 100644 --- a/crates/anvil-polkadot/tests/it/utils.rs +++ b/crates/anvil-polkadot/tests/it/utils.rs @@ -27,11 +27,15 @@ use codec::Decode; use eyre::{Result, WrapErr}; 
use futures::{StreamExt, channel::oneshot}; use polkadot_sdk::{ - pallet_revive::evm::{Block, HashesOrTransactionInfos, ReceiptInfo}, + pallet_revive::{ + ReviveApi, + evm::{Block, HashesOrTransactionInfos, ReceiptInfo}, + }, polkadot_sdk_frame::traits::Header, sc_cli::CliConfiguration, sc_client_api::{BlockBackend, BlockchainEvents}, sc_service::TaskManager, + sp_api::ProvideRuntimeApi, sp_core::H256, sp_state_machine::StorageKey, }; @@ -42,9 +46,6 @@ use tempfile::TempDir; use crate::abi::Multicall; -const NATIVE_TO_ETH_RATIO: u128 = 1000000; -pub const EXISTENTIAL_DEPOSIT: u128 = substrate_runtime::currency::DOLLARS * NATIVE_TO_ETH_RATIO; - pub struct BlockWaitTimeout { pub block_number: u32, pub timeout: Duration, @@ -120,6 +121,12 @@ impl TestNode { .ok_or_else(|| eyre::eyre!("no hash for block {}", n)) } + pub async fn eth_block_hash_by_number(&self, n: u32) -> eyre::Result { + let substrate_block_hash = self.block_hash_by_number(n).await?; + + self.resolve_ethereum_hash(substrate_block_hash) + } + /// Execute an ethereum transaction. 
pub async fn send_transaction( &mut self, @@ -177,6 +184,16 @@ impl TestNode { Decode::decode(&mut input).unwrap() } + pub async fn get_eth_timestamp(&mut self, at: Option) -> u64 { + if let Some(hash) = at { + self.get_block_by_hash(hash).await + } else { + self.eth_best_block().await + } + .timestamp + .as_u64() + } + pub async fn get_nonce(&mut self, address: Address) -> U256 { unwrap_response::( self.eth_rpc(EthRequest::EthGetTransactionCount(address, None)).await.unwrap(), @@ -196,6 +213,18 @@ impl TestNode { u32::from_str_radix(num.trim_start_matches("0x"), 16).unwrap() } + pub async fn eth_best_block(&mut self) -> Block { + unwrap_response::( + self.eth_rpc(EthRequest::EthGetBlockByNumber( + alloy_eips::BlockNumberOrTag::Latest, + false, + )) + .await + .unwrap(), + ) + .unwrap() + } + pub async fn wait_for_block_with_timeout( &self, n: u32, @@ -281,8 +310,7 @@ impl TestNode { from_initial_balance - AlloyU256::from(receipt_info.effective_gas_price * receipt_info.gas_used) .inner() - - transfer_amount - - U256::from(EXISTENTIAL_DEPOSIT), + - transfer_amount, "signer's balance should have changed" ); assert_eq!( @@ -325,6 +353,10 @@ impl TestNode { U256::from_str_radix(hex_value, 16).unwrap() } + pub fn resolve_ethereum_hash(&self, substrate_hash: H256) -> eyre::Result { + Ok(self.service.client.runtime_api().eth_block(substrate_hash)?.hash) + } + async fn wait_for_block_with_number(&self, n: u32) { let mut import_stream = self.service.client.import_notification_stream(); diff --git a/crates/cheatcodes/src/evm.rs b/crates/cheatcodes/src/evm.rs index 243cddc261cf9..c5300184ec1fe 100644 --- a/crates/cheatcodes/src/evm.rs +++ b/crates/cheatcodes/src/evm.rs @@ -39,7 +39,7 @@ use serde::Serialize; mod fork; pub(crate) mod mapping; -pub(crate) mod mock; +pub mod mock; pub(crate) mod prank; /// Records storage slots reads and writes. 
diff --git a/crates/cheatcodes/src/inspector.rs b/crates/cheatcodes/src/inspector.rs index 1a9354e382062..3d55d29922e8e 100644 --- a/crates/cheatcodes/src/inspector.rs +++ b/crates/cheatcodes/src/inspector.rs @@ -939,6 +939,11 @@ impl Cheatcodes { self.strategy.runner.revive_remove_duplicate_account_access(self); } + // Tells whether PVM is enabled or not. + pub fn is_pvm_enabled(&mut self) -> bool { + self.strategy.runner.is_pvm_enabled(self) + } + pub fn call_with_executor( &mut self, ecx: Ecx, @@ -1022,40 +1027,51 @@ impl Cheatcodes { } } - // Handle mocked calls - if let Some(mocks) = self.mocked_calls.get_mut(&call.bytecode_address) { - let ctx = MockCallDataContext { - calldata: call.input.bytes(ecx), - value: call.transfer_value(), - }; + // Do not handle mocked calls if PVM is enabled and let the revive call handle it. + // There is literally no problem with handling mocked calls with PVM enabled here as well, + // but the downside is that if call a mocked call from the test it will not exercise the + // paths in revive that handle mocked calls and only nested mocks will be handle by the + // revive specific calls. + // This is undesirable because conformity tests could accidentally pass and the revive code + // paths be broken. 
+ if !self.is_pvm_enabled() { + // Handle mocked calls + if let Some(mocks) = self.mocked_calls.get_mut(&call.bytecode_address) { + let ctx = MockCallDataContext { + calldata: call.input.bytes(ecx), + value: call.transfer_value(), + }; - if let Some(return_data_queue) = match mocks.get_mut(&ctx) { - Some(queue) => Some(queue), - None => mocks - .iter_mut() - .find(|(mock, _)| { - call.input.bytes(ecx).get(..mock.calldata.len()) == Some(&mock.calldata[..]) - && mock.value.is_none_or(|value| Some(value) == call.transfer_value()) - }) - .map(|(_, v)| v), - } && let Some(return_data) = if return_data_queue.len() == 1 { - // If the mocked calls stack has a single element in it, don't empty it - return_data_queue.front().map(|x| x.to_owned()) - } else { - // Else, we pop the front element - return_data_queue.pop_front() - } { - return Some(CallOutcome { - result: InterpreterResult { - result: return_data.ret_type, - output: return_data.data, - gas, - }, - memory_offset: call.return_memory_offset.clone(), - }); + if let Some(return_data_queue) = match mocks.get_mut(&ctx) { + Some(queue) => Some(queue), + None => mocks + .iter_mut() + .find(|(mock, _)| { + call.input.bytes(ecx).get(..mock.calldata.len()) + == Some(&mock.calldata[..]) + && mock + .value + .is_none_or(|value| Some(value) == call.transfer_value()) + }) + .map(|(_, v)| v), + } && let Some(return_data) = if return_data_queue.len() == 1 { + // If the mocked calls stack has a single element in it, don't empty it + return_data_queue.front().map(|x| x.to_owned()) + } else { + // Else, we pop the front element + return_data_queue.pop_front() + } { + return Some(CallOutcome { + result: InterpreterResult { + result: return_data.ret_type, + output: return_data.data, + gas, + }, + memory_offset: call.return_memory_offset.clone(), + }); + } } } - // Apply our prank if let Some(prank) = &self.get_prank(curr_depth) { // Apply delegate call, `call.caller`` will not equal `prank.prank_caller` diff --git 
a/crates/cheatcodes/src/lib.rs b/crates/cheatcodes/src/lib.rs index 5fe73d1aaafae..cad78b79a6038 100644 --- a/crates/cheatcodes/src/lib.rs +++ b/crates/cheatcodes/src/lib.rs @@ -57,6 +57,7 @@ mod script; pub use script::{Broadcast, Wallets, WalletsInner}; mod strategy; +pub use evm::mock::{MockCallDataContext, MockCallReturnData}; pub use strategy::{ CheatcodeInspectorStrategy, CheatcodeInspectorStrategyContext, CheatcodeInspectorStrategyExt, CheatcodeInspectorStrategyRunner, CheatcodesStrategy, EvmCheatcodeInspectorStrategyRunner, diff --git a/crates/cheatcodes/src/strategy.rs b/crates/cheatcodes/src/strategy.rs index 2623c3fd04893..e286d66c0fc1b 100644 --- a/crates/cheatcodes/src/strategy.rs +++ b/crates/cheatcodes/src/strategy.rs @@ -238,6 +238,10 @@ impl Clone for CheatcodeInspectorStrategy { /// Defined in revive-strategy pub trait CheatcodeInspectorStrategyExt { + fn is_pvm_enabled(&self, _state: &mut crate::Cheatcodes) -> bool { + false + } + fn revive_try_create( &self, _state: &mut crate::Cheatcodes, diff --git a/crates/evm/evm/src/executors/fuzz/mod.rs b/crates/evm/evm/src/executors/fuzz/mod.rs index bf5004bd497bb..93fef3eeb0fc8 100644 --- a/crates/evm/evm/src/executors/fuzz/mod.rs +++ b/crates/evm/evm/src/executors/fuzz/mod.rs @@ -107,9 +107,11 @@ impl FuzzedExecutor { return Err(TestCaseError::fail(TEST_TIMEOUT)); } self.executor.strategy.runner.checkpoint(); - let fuzz_res = self.single_fuzz(address, calldata)?; + let fuzz_res = self.single_fuzz(address, calldata); self.executor.strategy.runner.reload_checkpoint(); + let fuzz_res = fuzz_res?; + // If running with progress then increment current run. 
if let Some(progress) = progress { progress.inc(1); diff --git a/crates/evm/evm/src/executors/invariant/mod.rs b/crates/evm/evm/src/executors/invariant/mod.rs index 54a73cf2733b8..3c284a4a0a70d 100644 --- a/crates/evm/evm/src/executors/invariant/mod.rs +++ b/crates/evm/evm/src/executors/invariant/mod.rs @@ -392,9 +392,10 @@ impl<'a> InvariantExecutor<'a> { .last() .ok_or_else(|| eyre!("no input generated to call fuzzed target."))?; + self.executor.strategy.runner.checkpoint(); // Execute call from the randomly generated sequence without committing state. // State is committed only if call is not a magic assume. - let mut call_result = current_run + let call_result = current_run .executor .call_raw( tx.sender, @@ -402,7 +403,10 @@ impl<'a> InvariantExecutor<'a> { tx.call_details.calldata.clone(), U256::ZERO, ) - .map_err(|e| eyre!(format!("Could not make raw evm call: {e}")))?; + .map_err(|e| eyre!(format!("Could not make raw evm call: {e}"))); + self.executor.strategy.runner.reload_checkpoint(); + + let mut call_result = call_result?; let discarded = call_result.result.as_ref() == MAGIC_ASSUME; if self.config.show_metrics { diff --git a/crates/evm/evm/src/inspectors/stack.rs b/crates/evm/evm/src/inspectors/stack.rs index 652d990b3314d..7daed9fbfae3a 100644 --- a/crates/evm/evm/src/inspectors/stack.rs +++ b/crates/evm/evm/src/inspectors/stack.rs @@ -914,8 +914,19 @@ impl Inspector> for InspectorStackRefMut<'_> ); if let Some(cheatcodes) = self.cheatcodes.as_deref_mut() { + let is_pvm_enabled = cheatcodes.is_pvm_enabled(); // Handle mocked functions, replace bytecode address with mock if matched. - if let Some(mocks) = cheatcodes.mocked_functions.get(&call.target_address) { + + // Do not handle mocked functions if PVM is enabled and let the revive call handle it. 
+ // There is literally no problem with handling mocked functions with PVM enabled here as + // well, but the downside is that if we call a mocked functions from the test it + // will not exercise the paths in revive that handle mocked calls and only + // nested mocks will be handle by the revive specific calls. + // This is undesirable because conformity tests could accidentally pass and the revive + // code paths be broken. + if let Some(mocks) = cheatcodes.mocked_functions.get(&call.target_address) + && !is_pvm_enabled + { // Check if any mock function set for call data or if catch-all mock function set // for selector. if let Some(target) = mocks.get(&call.input.bytes(ecx)).or_else(|| { diff --git a/crates/forge/src/runner.rs b/crates/forge/src/runner.rs index aa5a2fecb037b..7f50d54c4e4ee 100644 --- a/crates/forge/src/runner.rs +++ b/crates/forge/src/runner.rs @@ -558,7 +558,11 @@ impl<'a> FunctionRunner<'a> { /// test ends, similar to `eth_call`. fn run_unit_test(mut self, func: &Function) -> TestResult { // Prepare unit test execution. 
+ self.executor.strategy.runner.checkpoint(); + if self.prepare_test(func).is_err() { + self.executor.strategy.runner.reload_checkpoint(); + return self.result; } let mut binding = self.executor.clone(); @@ -576,10 +580,14 @@ impl<'a> FunctionRunner<'a> { Err(EvmError::Execution(err)) => (err.raw, Some(err.reason)), Err(EvmError::Skip(reason)) => { self.result.single_skip(reason); + self.executor.strategy.runner.reload_checkpoint(); + return self.result; } Err(err) => { self.result.single_fail(Some(err.to_string())); + self.executor.strategy.runner.reload_checkpoint(); + return self.result; } }; @@ -587,6 +595,8 @@ impl<'a> FunctionRunner<'a> { let success = self.executor.is_raw_call_mut_success(self.address, &mut raw_call_result, false); self.result.single_result(success, reason, raw_call_result); + self.executor.strategy.runner.reload_checkpoint(); + self.result } diff --git a/crates/forge/tests/cli/revive_vm.rs b/crates/forge/tests/cli/revive_vm.rs index ef6d68101a6d1..60f91b8c8cd87 100644 --- a/crates/forge/tests/cli/revive_vm.rs +++ b/crates/forge/tests/cli/revive_vm.rs @@ -472,53 +472,53 @@ Compiler run successful! Ran 2 tests for src/CounterTest.t.sol:CounterTest [PASS] test_Increment() ([GAS]) Traces: - [765075403] CounterTest::setUp() - ├─ [262294819] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC - │ └─ ← [Return] 7404 bytes of code - ├─ [0] VM::expectEmit() + [..] CounterTest::setUp() + ├─ [..] → new @0x34A1D3fff3958843C43aD80F30b94c510645C316 + │ └─ ← [Return] [..] bytes of code + ├─ [..] VM::expectEmit() │ └─ ← [Return] ├─ emit SetNumber(result: 5) - ├─ [385250826] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::setNumber(5) + ├─ [..] 0x34A1D3fff3958843C43aD80F30b94c510645C316::setNumber(5) │ ├─ emit SetNumber(result: 5) │ └─ ← [Stop] - ├─ [117489011] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::number() [staticcall] + ├─ [..] 
0x34A1D3fff3958843C43aD80F30b94c510645C316::number() [staticcall] │ └─ ← [Return] 5 └─ ← [Stop] - [737726031] CounterTest::test_Increment() - ├─ [117489011] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::number() [staticcall] + [..] CounterTest::test_Increment() + ├─ [..] 0x34A1D3fff3958843C43aD80F30b94c510645C316::number() [staticcall] │ └─ ← [Return] 5 - ├─ [385250826] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::setNumber(55) + ├─ [..] 0x34A1D3fff3958843C43aD80F30b94c510645C316::setNumber(55) │ ├─ emit SetNumber(result: 55) │ └─ ← [Stop] - ├─ [117489011] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::number() [staticcall] + ├─ [..] 0x34A1D3fff3958843C43aD80F30b94c510645C316::number() [staticcall] │ └─ ← [Return] 55 - ├─ [0] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::increment() + ├─ [..] 0x34A1D3fff3958843C43aD80F30b94c510645C316::increment() │ ├─ emit Increment(result: 56) │ └─ ← [Stop] - ├─ [117489011] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::number() [staticcall] + ├─ [..] 0x34A1D3fff3958843C43aD80F30b94c510645C316::number() [staticcall] │ └─ ← [Return] 56 └─ ← [Stop] [PASS] test_expectRevert() ([GAS]) Traces: - [765075403] CounterTest::setUp() - ├─ [262294819] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC - │ └─ ← [Return] 7404 bytes of code - ├─ [0] VM::expectEmit() + [..] CounterTest::setUp() + ├─ [..] → new @0x34A1D3fff3958843C43aD80F30b94c510645C316 + │ └─ ← [Return] [..] bytes of code + ├─ [..] VM::expectEmit() │ └─ ← [Return] ├─ emit SetNumber(result: 5) - ├─ [385250826] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::setNumber(5) + ├─ [..] 0x34A1D3fff3958843C43aD80F30b94c510645C316::setNumber(5) │ ├─ emit SetNumber(result: 5) │ └─ ← [Stop] - ├─ [117489011] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::number() [staticcall] + ├─ [..] 
0x34A1D3fff3958843C43aD80F30b94c510645C316::number() [staticcall] │ └─ ← [Return] 5 └─ ← [Stop] - [56930227] CounterTest::test_expectRevert() - ├─ [0] VM::expectRevert(custom error 0xf28dceb3: 0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000006456941a80000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000076661696c7572650000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000) + [..] CounterTest::test_expectRevert() + ├─ [..] VM::expectRevert(custom error 0xf28dceb3: 0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000006456941a80000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000076661696c7572650000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000) │ └─ ← [Return] - ├─ [56921388] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::failed_call() [staticcall] + ├─ [..] 0x34A1D3fff3958843C43aD80F30b94c510645C316::failed_call() [staticcall] │ └─ ← [Revert] Revert("failure") └─ ← [Stop] @@ -654,51 +654,51 @@ Compiler run successful! Ran 2 tests for src/Test.t.sol:RecordTest [PASS] testRecordAccess() ([GAS]) Traces: - [961089406] RecordTest::testRecordAccess() - ├─ [16788608] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC - │ └─ ← [Return] 4095 bytes of code - ├─ [16788608] → new @0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f - │ └─ ← [Return] 2182 bytes of code - ├─ [0] VM::record() + [..] RecordTest::testRecordAccess() + ├─ [..] → new @0x34A1D3fff3958843C43aD80F30b94c510645C316 + │ └─ ← [Return] [..] bytes of code + ├─ [..] → new @0x90193C961A926261B756D1E5bb255e67ff9498A1 + │ └─ ← [Return] [..] bytes of code + ├─ [..] 
VM::record() │ └─ ← [Return] - ├─ [927440089] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::record(0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f) - │ ├─ [0] 0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f::record() + ├─ [..] 0x34A1D3fff3958843C43aD80F30b94c510645C316::record(0x90193C961A926261B756D1E5bb255e67ff9498A1) + │ ├─ [..] 0x90193C961A926261B756D1E5bb255e67ff9498A1::record() │ │ └─ ← [Return] │ └─ ← [Stop] - ├─ [0] VM::accesses(0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC) + ├─ [..] VM::accesses(0x34A1D3fff3958843C43aD80F30b94c510645C316) │ └─ ← [Return] [0x0000000000000000000000000000000000000000000000000000000000000001, 0x0000000000000000000000000000000000000000000000000000000000000001], [0x0000000000000000000000000000000000000000000000000000000000000001] - ├─ [0] VM::accesses(0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f) + ├─ [..] VM::accesses(0x90193C961A926261B756D1E5bb255e67ff9498A1) │ └─ ← [Return] [0x0000000000000000000000000000000000000000000000000000000000000002, 0x0000000000000000000000000000000000000000000000000000000000000002], [0x0000000000000000000000000000000000000000000000000000000000000002] └─ ← [Stop] [PASS] testStopRecordAccess() ([GAS]) Traces: - [961093272] RecordTest::testStopRecordAccess() - ├─ [16788608] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC - │ └─ ← [Return] 4095 bytes of code - ├─ [16788608] → new @0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f - │ └─ ← [Return] 2182 bytes of code - ├─ [0] VM::record() + [..] RecordTest::testStopRecordAccess() + ├─ [..] → new @0x34A1D3fff3958843C43aD80F30b94c510645C316 + │ └─ ← [Return] [..] bytes of code + ├─ [..] → new @0x90193C961A926261B756D1E5bb255e67ff9498A1 + │ └─ ← [Return] [..] bytes of code + ├─ [..] VM::record() │ └─ ← [Return] - ├─ [927440089] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::record(0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f) - │ ├─ [0] 0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f::record() + ├─ [..] 
0x34A1D3fff3958843C43aD80F30b94c510645C316::record(0x90193C961A926261B756D1E5bb255e67ff9498A1) + │ ├─ [..] 0x90193C961A926261B756D1E5bb255e67ff9498A1::record() │ │ └─ ← [Return] │ └─ ← [Stop] - ├─ [0] VM::accesses(0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC) + ├─ [..] VM::accesses(0x34A1D3fff3958843C43aD80F30b94c510645C316) │ └─ ← [Return] [0x0000000000000000000000000000000000000000000000000000000000000001, 0x0000000000000000000000000000000000000000000000000000000000000001], [0x0000000000000000000000000000000000000000000000000000000000000001] - ├─ [0] VM::stopRecord() + ├─ [..] VM::stopRecord() │ └─ ← [Return] - ├─ [0] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::record(0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f) - │ ├─ [0] 0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f::record() + ├─ [..] 0x34A1D3fff3958843C43aD80F30b94c510645C316::record(0x90193C961A926261B756D1E5bb255e67ff9498A1) + │ ├─ [..] 0x90193C961A926261B756D1E5bb255e67ff9498A1::record() │ │ └─ ← [Return] │ └─ ← [Stop] - ├─ [0] VM::accesses(0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC) + ├─ [..] VM::accesses(0x34A1D3fff3958843C43aD80F30b94c510645C316) │ └─ ← [Return] [0x0000000000000000000000000000000000000000000000000000000000000001, 0x0000000000000000000000000000000000000000000000000000000000000001], [0x0000000000000000000000000000000000000000000000000000000000000001] - ├─ [0] VM::record() + ├─ [..] VM::record() │ └─ ← [Return] - ├─ [0] VM::stopRecord() + ├─ [..] VM::stopRecord() │ └─ ← [Return] - ├─ [0] VM::accesses(0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC) + ├─ [..] VM::accesses(0x34A1D3fff3958843C43aD80F30b94c510645C316) │ └─ ← [Return] [], [] └─ ← [Stop] @@ -971,38 +971,38 @@ Compiler run successful! Ran 7 tests for src/Test.t.sol:RecordLogsTest [PASS] testEmitRecordEmit() ([GAS]) Traces: - [16868742] RecordLogsTest::setUp() - ├─ [16830999] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC - │ └─ ← [Return] 12583 bytes of code + [..] RecordLogsTest::setUp() + ├─ [..] 
→ new @0x34A1D3fff3958843C43aD80F30b94c510645C316 + │ └─ ← [Return] [..] bytes of code └─ ← [Stop] - [357757177] RecordLogsTest::testEmitRecordEmit() - ├─ [183812741] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(1, 2, 0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c169350) + [..] RecordLogsTest::testEmitRecordEmit() + ├─ [..] 0x34A1D3fff3958843C43aD80F30b94c510645C316::emitEvent(1, 2, 0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c169350) │ ├─ emit LogTopic12(topic1: 1, topic2: 2, data: 0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c169350) │ └─ ← [Stop] - ├─ [0] VM::recordLogs() + ├─ [..] VM::recordLogs() │ └─ ← [Return] - ├─ [173888857] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(3, 0x2e38edeff9493e0004540e975027a429) + ├─ [..] 0x34A1D3fff3958843C43aD80F30b94c510645C316::emitEvent(3, 0x2e38edeff9493e0004540e975027a429) │ ├─ emit LogTopic1(topic1: 3, data: 0x2e38edeff9493e0004540e975027a429) │ └─ ← [Stop] - ├─ [0] VM::getRecordedLogs() - │ └─ ← [Return] [([0x7c7d81fafce31d4330303f05da0ccb9d970101c475382b40aa072986ee4caaad, 0x0000000000000000000000000000000000000000000000000000000000000003], 0x000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000102e38edeff9493e0004540e975027a42900000000000000000000000000000000, 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC)] + ├─ [..] 
VM::getRecordedLogs() + │ └─ ← [Return] [([0x7c7d81fafce31d4330303f05da0ccb9d970101c475382b40aa072986ee4caaad, 0x0000000000000000000000000000000000000000000000000000000000000003], 0x000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000102e38edeff9493e0004540e975027a42900000000000000000000000000000000, 0x34A1D3fff3958843C43aD80F30b94c510645C316)] ├─ storage changes: │ @ 1: 0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c169350 → 0x2e38edeff9493e0004540e975027a429ee666d1289f2c7a4232d03ee63e14e30 └─ ← [Stop] [PASS] testRecordOffGetsNothing() ([GAS]) Traces: - [16868742] RecordLogsTest::setUp() - ├─ [16830999] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC - │ └─ ← [Return] 12583 bytes of code + [..] RecordLogsTest::setUp() + ├─ [..] → new @0x34A1D3fff3958843C43aD80F30b94c510645C316 + │ └─ ← [Return] [..] bytes of code └─ ← [Stop] - [202674284] RecordLogsTest::testRecordOffGetsNothing() - ├─ [202625294] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(1, 2, 3, 0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c1693502e38edeff9493e0004540e975027a429) + [..] RecordLogsTest::testRecordOffGetsNothing() + ├─ [..] 0x34A1D3fff3958843C43aD80F30b94c510645C316::emitEvent(1, 2, 3, 0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c1693502e38edeff9493e0004540e975027a429) │ ├─ emit LogTopic123(topic1: 1, topic2: 2, topic3: 3, data: 0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c1693502e38edeff9493e0004540e975027a429) │ └─ ← [Stop] - ├─ [0] VM::getRecordedLogs() + ├─ [..] 
VM::getRecordedLogs() │ └─ ← [Return] [] ├─ storage changes: │ @ 1: 0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c169350 → 0x2e38edeff9493e0004540e975027a429ee666d1289f2c7a4232d03ee63e14e30 @@ -1010,114 +1010,114 @@ Traces: [PASS] testRecordOnEmitDifferentDepths() ([GAS]) Traces: - [16868742] RecordLogsTest::setUp() - ├─ [16830999] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC - │ └─ ← [Return] 12583 bytes of code + [..] RecordLogsTest::setUp() + ├─ [..] → new @0x34A1D3fff3958843C43aD80F30b94c510645C316 + │ └─ ← [Return] [..] bytes of code └─ ← [Stop] - [999237291] RecordLogsTest::testRecordOnEmitDifferentDepths() - ├─ [0] VM::recordLogs() + [..] RecordLogsTest::testRecordOnEmitDifferentDepths() + ├─ [..] VM::recordLogs() │ └─ ← [Return] ├─ emit LogTopic(topic1: 1, data: 0x43a26051362b8040b289abe93334a5e3) - ├─ [180758801] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(2, 3, 0x43a26051362b8040b289abe93334a5e3662751aa) + ├─ [..] 0x34A1D3fff3958843C43aD80F30b94c510645C316::emitEvent(2, 3, 0x43a26051362b8040b289abe93334a5e3662751aa) │ ├─ emit LogTopic12(topic1: 2, topic2: 3, data: 0x43a26051362b8040b289abe93334a5e3662751aa) │ └─ ← [Stop] - ├─ [818371229] → new @0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f - │ └─ ← [Return] 10554 bytes of code - ├─ [0] 0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f::emitEvent(4, 5, 6, 0x43a26051362b8040b289abe93334a5e3662751aa691185ae) - │ ├─ [0] 0x104fBc016F4bb334D775a19E8A6510109AC63E00::emitEvent(4, 5, 6, 0x43a26051362b8040b289abe93334a5e3662751aa691185ae) + ├─ [..] → new @0x90193C961A926261B756D1E5bb255e67ff9498A1 + │ └─ ← [Return] [..] bytes of code + ├─ [..] 0x90193C961A926261B756D1E5bb255e67ff9498A1::emitEvent(4, 5, 6, 0x43a26051362b8040b289abe93334a5e3662751aa691185ae) + │ ├─ [..] 
0xd04404bcf6d969FC0Ec22021b4736510CAcec492::emitEvent(4, 5, 6, 0x43a26051362b8040b289abe93334a5e3662751aa691185ae) │ │ ├─ emit LogTopic123(topic1: 4, topic2: 5, topic3: 6, data: 0x43a26051362b8040b289abe93334a5e3662751aa691185ae) │ │ └─ ← [Return] │ └─ ← [Stop] - ├─ [0] VM::getRecordedLogs() - │ └─ ← [Return] [([0x61fb7db3625c10432927a76bb32400c33a94e9bb6374137c4cd59f6e465bfdcb, 0x0000000000000000000000000000000000000000000000000000000000000001], 0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000001043a26051362b8040b289abe93334a5e300000000000000000000000000000000, 0x7FA9385bE102ac3EAc297483Dd6233D62b3e1496), ([0x7af92d5e3102a27d908bb1859fdef71b723f3c438e5d84f3af49dab68e18dc6d, 0x0000000000000000000000000000000000000000000000000000000000000002, 0x0000000000000000000000000000000000000000000000000000000000000003], 0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000001443a26051362b8040b289abe93334a5e3662751aa000000000000000000000000, 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC), ([0xb6d650e5d0bbc0e92ff784e346ada394e49aa2d74a5cee8b099fa1a469bdc452, 0x0000000000000000000000000000000000000000000000000000000000000004, 0x0000000000000000000000000000000000000000000000000000000000000005, 0x0000000000000000000000000000000000000000000000000000000000000006], 0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000001843a26051362b8040b289abe93334a5e3662751aa691185ae0000000000000000, 0x104fBc016F4bb334D775a19E8A6510109AC63E00)] - ├─ [0] 0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f::getEmitterAddr() [staticcall] - │ └─ ← [Return] 0x104fBc016F4bb334D775a19E8A6510109AC63E00 + ├─ [..] 
VM::getRecordedLogs() + │ └─ ← [Return] [([0x61fb7db3625c10432927a76bb32400c33a94e9bb6374137c4cd59f6e465bfdcb, 0x0000000000000000000000000000000000000000000000000000000000000001], 0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000001043a26051362b8040b289abe93334a5e300000000000000000000000000000000, 0x7FA9385bE102ac3EAc297483Dd6233D62b3e1496), ([0x7af92d5e3102a27d908bb1859fdef71b723f3c438e5d84f3af49dab68e18dc6d, 0x0000000000000000000000000000000000000000000000000000000000000002, 0x0000000000000000000000000000000000000000000000000000000000000003], 0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000001443a26051362b8040b289abe93334a5e3662751aa000000000000000000000000, 0x34A1D3fff3958843C43aD80F30b94c510645C316), ([0xb6d650e5d0bbc0e92ff784e346ada394e49aa2d74a5cee8b099fa1a469bdc452, 0x0000000000000000000000000000000000000000000000000000000000000004, 0x0000000000000000000000000000000000000000000000000000000000000005, 0x0000000000000000000000000000000000000000000000000000000000000006], 0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000001843a26051362b8040b289abe93334a5e3662751aa691185ae0000000000000000, 0xd04404bcf6d969FC0Ec22021b4736510CAcec492)] + ├─ [..] 0x90193C961A926261B756D1E5bb255e67ff9498A1::getEmitterAddr() [staticcall] + │ └─ ← [Return] 0xd04404bcf6d969FC0Ec22021b4736510CAcec492 └─ ← [Stop] [PASS] testRecordOnNoLogs() ([GAS]) Traces: - [16868742] RecordLogsTest::setUp() - ├─ [16830999] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC - │ └─ ← [Return] 12583 bytes of code + [..] RecordLogsTest::setUp() + ├─ [..] → new @0x34A1D3fff3958843C43aD80F30b94c510645C316 + │ └─ ← [Return] [..] bytes of code └─ ← [Stop] - [4118] RecordLogsTest::testRecordOnNoLogs() - ├─ [0] VM::recordLogs() + [..] RecordLogsTest::testRecordOnNoLogs() + ├─ [..] 
VM::recordLogs() │ └─ ← [Return] - ├─ [0] VM::getRecordedLogs() + ├─ [..] VM::getRecordedLogs() │ └─ ← [Return] [] └─ ← [Stop] [PASS] testRecordOnSingleLog() ([GAS]) Traces: - [16868742] RecordLogsTest::setUp() - ├─ [16830999] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC - │ └─ ← [Return] 12583 bytes of code + [..] RecordLogsTest::setUp() + ├─ [..] → new @0x34A1D3fff3958843C43aD80F30b94c510645C316 + │ └─ ← [Return] [..] bytes of code └─ ← [Stop] - [187093023] RecordLogsTest::testRecordOnSingleLog() - ├─ [0] VM::recordLogs() + [..] RecordLogsTest::testRecordOnSingleLog() + ├─ [..] VM::recordLogs() │ └─ ← [Return] - ├─ [187077066] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(1, 2, 3, 0x4576656e74204461746120696e20537472696e67) + ├─ [..] 0x34A1D3fff3958843C43aD80F30b94c510645C316::emitEvent(1, 2, 3, 0x4576656e74204461746120696e20537472696e67) │ ├─ emit LogTopic123(topic1: 1, topic2: 2, topic3: 3, data: 0x4576656e74204461746120696e20537472696e67) │ └─ ← [Stop] - ├─ [0] VM::getRecordedLogs() - │ └─ ← [Return] [([0xb6d650e5d0bbc0e92ff784e346ada394e49aa2d74a5cee8b099fa1a469bdc452, 0x0000000000000000000000000000000000000000000000000000000000000001, 0x0000000000000000000000000000000000000000000000000000000000000002, 0x0000000000000000000000000000000000000000000000000000000000000003], 0x000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000144576656e74204461746120696e20537472696e67000000000000000000000000, 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC)] + ├─ [..] 
VM::getRecordedLogs() + │ └─ ← [Return] [([0xb6d650e5d0bbc0e92ff784e346ada394e49aa2d74a5cee8b099fa1a469bdc452, 0x0000000000000000000000000000000000000000000000000000000000000001, 0x0000000000000000000000000000000000000000000000000000000000000002, 0x0000000000000000000000000000000000000000000000000000000000000003], 0x000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000144576656e74204461746120696e20537472696e67000000000000000000000000, 0x34A1D3fff3958843C43aD80F30b94c510645C316)] └─ ← [Stop] [PASS] testRecordOnSingleLogTopic0() ([GAS]) Traces: - [16868742] RecordLogsTest::setUp() - ├─ [16830999] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC - │ └─ ← [Return] 12583 bytes of code + [..] RecordLogsTest::setUp() + ├─ [..] → new @0x34A1D3fff3958843C43aD80F30b94c510645C316 + │ └─ ← [Return] [..] bytes of code └─ ← [Stop] - [184656340] RecordLogsTest::testRecordOnSingleLogTopic0() - ├─ [0] VM::recordLogs() + [..] RecordLogsTest::testRecordOnSingleLogTopic0() + ├─ [..] VM::recordLogs() │ └─ ← [Return] - ├─ [184603101] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c1693502e38edeff9493e0004540e975027a429) + ├─ [..] 0x34A1D3fff3958843C43aD80F30b94c510645C316::emitEvent(0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c1693502e38edeff9493e0004540e975027a429) │ ├─ emit LogTopic0(data: 0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c1693502e38edeff9493e0004540e975027a429) │ └─ ← [Stop] - ├─ [0] VM::getRecordedLogs() - │ └─ ← [Return] [([0x0a28c6fad56bcbad1788721e440963b3b762934a3134924733eaf8622cb44279], 0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000003043a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c1693502e38edeff9493e0004540e975027a42900000000000000000000000000000000, 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC)] + ├─ [..] 
VM::getRecordedLogs() + │ └─ ← [Return] [([0x0a28c6fad56bcbad1788721e440963b3b762934a3134924733eaf8622cb44279], 0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000003043a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c1693502e38edeff9493e0004540e975027a42900000000000000000000000000000000, 0x34A1D3fff3958843C43aD80F30b94c510645C316)] ├─ storage changes: │ @ 1: 0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c169350 → 0x2e38edeff9493e0004540e975027a429ee666d1289f2c7a4232d03ee63e14e30 └─ ← [Stop] [PASS] testRecordsConsumednAsRead() ([GAS]) Traces: - [16868742] RecordLogsTest::setUp() - ├─ [16830999] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC - │ └─ ← [Return] 12583 bytes of code + [..] RecordLogsTest::setUp() + ├─ [..] → new @0x34A1D3fff3958843C43aD80F30b94c510645C316 + │ └─ ← [Return] [..] bytes of code └─ ← [Stop] - [903065419] RecordLogsTest::testRecordsConsumednAsRead() - ├─ [173888857] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(1, 0x43a26051362b8040b289abe93334a5e3) + [..] RecordLogsTest::testRecordsConsumednAsRead() + ├─ [..] 0x34A1D3fff3958843C43aD80F30b94c510645C316::emitEvent(1, 0x43a26051362b8040b289abe93334a5e3) │ ├─ emit LogTopic1(topic1: 1, data: 0x43a26051362b8040b289abe93334a5e3) │ └─ ← [Stop] - ├─ [0] VM::recordLogs() + ├─ [..] VM::recordLogs() │ └─ ← [Return] - ├─ [0] VM::getRecordedLogs() + ├─ [..] VM::getRecordedLogs() │ └─ ← [Return] [] - ├─ [181776781] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(2, 3, 0x43a26051362b8040b289abe93334a5e3662751aa691185ae) + ├─ [..] 
0x34A1D3fff3958843C43aD80F30b94c510645C316::emitEvent(2, 3, 0x43a26051362b8040b289abe93334a5e3662751aa691185ae) │ ├─ emit LogTopic12(topic1: 2, topic2: 3, data: 0x43a26051362b8040b289abe93334a5e3662751aa691185ae) │ └─ ← [Stop] - ├─ [0] VM::getRecordedLogs() - │ └─ ← [Return] [([0x7af92d5e3102a27d908bb1859fdef71b723f3c438e5d84f3af49dab68e18dc6d, 0x0000000000000000000000000000000000000000000000000000000000000002, 0x0000000000000000000000000000000000000000000000000000000000000003], 0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000001843a26051362b8040b289abe93334a5e3662751aa691185ae0000000000000000, 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC)] - ├─ [187077066] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(4, 5, 6, 0x43a26051362b8040b289abe93334a5e3662751aa) + ├─ [..] VM::getRecordedLogs() + │ └─ ← [Return] [([0x7af92d5e3102a27d908bb1859fdef71b723f3c438e5d84f3af49dab68e18dc6d, 0x0000000000000000000000000000000000000000000000000000000000000002, 0x0000000000000000000000000000000000000000000000000000000000000003], 0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000001843a26051362b8040b289abe93334a5e3662751aa691185ae0000000000000000, 0x34A1D3fff3958843C43aD80F30b94c510645C316)] + ├─ [..] 0x34A1D3fff3958843C43aD80F30b94c510645C316::emitEvent(4, 5, 6, 0x43a26051362b8040b289abe93334a5e3662751aa) │ ├─ emit LogTopic123(topic1: 4, topic2: 5, topic3: 6, data: 0x43a26051362b8040b289abe93334a5e3662751aa) │ └─ ← [Stop] - ├─ [172108813] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c169350) + ├─ [..] 
0x34A1D3fff3958843C43aD80F30b94c510645C316::emitEvent(0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c169350) │ ├─ emit LogTopic0(data: 0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c169350) │ └─ ← [Stop] - ├─ [0] VM::getRecordedLogs() - │ └─ ← [Return] [([0xb6d650e5d0bbc0e92ff784e346ada394e49aa2d74a5cee8b099fa1a469bdc452, 0x0000000000000000000000000000000000000000000000000000000000000004, 0x0000000000000000000000000000000000000000000000000000000000000005, 0x0000000000000000000000000000000000000000000000000000000000000006], 0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000001443a26051362b8040b289abe93334a5e3662751aa000000000000000000000000, 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC), ([0x0a28c6fad56bcbad1788721e440963b3b762934a3134924733eaf8622cb44279], 0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000002043a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c169350, 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC)] - ├─ [188095046] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(7, 8, 9, 0x2e38edeff9493e0004540e975027a429ee666d1289f2c7a4) + ├─ [..] 
VM::getRecordedLogs() + │ └─ ← [Return] [([0xb6d650e5d0bbc0e92ff784e346ada394e49aa2d74a5cee8b099fa1a469bdc452, 0x0000000000000000000000000000000000000000000000000000000000000004, 0x0000000000000000000000000000000000000000000000000000000000000005, 0x0000000000000000000000000000000000000000000000000000000000000006], 0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000001443a26051362b8040b289abe93334a5e3662751aa000000000000000000000000, 0x34A1D3fff3958843C43aD80F30b94c510645C316), ([0x0a28c6fad56bcbad1788721e440963b3b762934a3134924733eaf8622cb44279], 0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000002043a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c169350, 0x34A1D3fff3958843C43aD80F30b94c510645C316)] + ├─ [..] 0x34A1D3fff3958843C43aD80F30b94c510645C316::emitEvent(7, 8, 9, 0x2e38edeff9493e0004540e975027a429ee666d1289f2c7a4) │ ├─ emit LogTopic123(topic1: 7, topic2: 8, topic3: 9, data: 0x2e38edeff9493e0004540e975027a429ee666d1289f2c7a4) │ └─ ← [Stop] - ├─ [0] VM::getRecordedLogs() - │ └─ ← [Return] [([0xb6d650e5d0bbc0e92ff784e346ada394e49aa2d74a5cee8b099fa1a469bdc452, 0x0000000000000000000000000000000000000000000000000000000000000007, 0x0000000000000000000000000000000000000000000000000000000000000008, 0x0000000000000000000000000000000000000000000000000000000000000009], 0x000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000182e38edeff9493e0004540e975027a429ee666d1289f2c7a40000000000000000, 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC)] + ├─ [..] 
VM::getRecordedLogs() + │ └─ ← [Return] [([0xb6d650e5d0bbc0e92ff784e346ada394e49aa2d74a5cee8b099fa1a469bdc452, 0x0000000000000000000000000000000000000000000000000000000000000007, 0x0000000000000000000000000000000000000000000000000000000000000008, 0x0000000000000000000000000000000000000000000000000000000000000009], 0x000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000182e38edeff9493e0004540e975027a429ee666d1289f2c7a40000000000000000, 0x34A1D3fff3958843C43aD80F30b94c510645C316)] ├─ storage changes: │ @ 1: 0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c169350 → 0x2e38edeff9493e0004540e975027a429ee666d1289f2c7a4232d03ee63e14e30 └─ ← [Stop] @@ -1267,58 +1267,58 @@ Compiler run successful! Ran 3 tests for src/Test.t.sol:StateDiffTest [PASS] testCallProxyaccesses() ([GAS]) Traces: - [585251161] StateDiffTest::setUp() - ├─ [292049387] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC - │ └─ ← [Return] 5531 bytes of code - ├─ [293109162] → new @0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f - │ └─ ← [Return] 6405 bytes of code + [..] StateDiffTest::setUp() + ├─ [..] → new @0x34A1D3fff3958843C43aD80F30b94c510645C316 + │ └─ ← [Return] [..] bytes of code + ├─ [..] → new @0x90193C961A926261B756D1E5bb255e67ff9498A1 + │ └─ ← [Return] [..] bytes of code └─ ← [Stop] - [728077974] StateDiffTest::testCallProxyaccesses() - ├─ [0] VM::startStateDiffRecording() + [..] StateDiffTest::testCallProxyaccesses() + ├─ [..] VM::startStateDiffRecording() │ └─ ← [Return] - ├─ [728040641] 0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f::proxyCall(55) - │ ├─ [0] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::setter(55) + ├─ [..] 0x90193C961A926261B756D1E5bb255e67ff9498A1::proxyCall(55) + │ ├─ [..] 
0x34A1D3fff3958843C43aD80F30b94c510645C316::setter(55) │ │ └─ ← [Return] │ └─ ← [Stop] - ├─ [0] VM::stopAndReturnStateDiff() - │ └─ ← [Return] [((0, 31337 [3.133e4]), 0, 0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f, 0x7FA9385bE102ac3EAc297483Dd6233D62b3e1496, true, 0, 1000000000000000000 [1e18], 0x, 0, 0xac1b14ff0000000000000000000000000000000000000000000000000000000000000037, false, [(0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f, 0x0000000000000000000000000000000000000000000000000000000000000000, false, 0x0000000000000000000000007d8cb8f412b3ee9ac79558791333f41d2b1ccdac, 0x0000000000000000000000007d8cb8f412b3ee9ac79558791333f41d2b1ccdac, false)], 1), ((0, 31337 [3.133e4]), 0, 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC, 0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f, true, 1000000000000000000 [1e18], 1000000000000000000 [1e18], 0x, 0, 0xd423740b0000000000000000000000000000000000000000000000000000000000000037, false, [(0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC, 0x0000000000000000000000000000000000000000000000000000000000000001, false, 0x0000000000000000000000000000000000000000000000000000000000000064, 0x0000000000000000000000000000000000000000000000000000000000000064, false), (0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC, 0x0000000000000000000000000000000000000000000000000000000000000001, true, 0x0000000000000000000000000000000000000000000000000000000000000064, 0x0000000000000000000000000000000000000000000000000000000000000037, false)], 2)] + ├─ [..] 
VM::stopAndReturnStateDiff() + │ └─ ← [Return] [((0, 31337 [3.133e4]), 0, 0x90193C961A926261B756D1E5bb255e67ff9498A1, 0x7FA9385bE102ac3EAc297483Dd6233D62b3e1496, true, 0, 1000000000000000000 [1e18], 0x, 0, 0xac1b14ff0000000000000000000000000000000000000000000000000000000000000037, false, [(0x90193C961A926261B756D1E5bb255e67ff9498A1, 0x0000000000000000000000000000000000000000000000000000000000000000, false, 0x00000000000000000000000034a1d3fff3958843c43ad80f30b94c510645c316, 0x00000000000000000000000034a1d3fff3958843c43ad80f30b94c510645c316, false)], 1), ((0, 31337 [3.133e4]), 0, 0x34A1D3fff3958843C43aD80F30b94c510645C316, 0x90193C961A926261B756D1E5bb255e67ff9498A1, true, 1000000000000000000 [1e18], 1000000000000000000 [1e18], 0x, 0, 0xd423740b0000000000000000000000000000000000000000000000000000000000000037, false, [(0x34A1D3fff3958843C43aD80F30b94c510645C316, 0x0000000000000000000000000000000000000000000000000000000000000001, false, 0x0000000000000000000000000000000000000000000000000000000000000064, 0x0000000000000000000000000000000000000000000000000000000000000064, false), (0x34A1D3fff3958843C43aD80F30b94c510645C316, 0x0000000000000000000000000000000000000000000000000000000000000001, true, 0x0000000000000000000000000000000000000000000000000000000000000064, 0x0000000000000000000000000000000000000000000000000000000000000037, false)], 2)] └─ ← [Stop] [PASS] testCallaccesses() ([GAS]) Traces: - [585251161] StateDiffTest::setUp() - ├─ [292049387] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC - │ └─ ← [Return] 5531 bytes of code - ├─ [293109162] → new @0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f - │ └─ ← [Return] 6405 bytes of code + [..] StateDiffTest::setUp() + ├─ [..] → new @0x34A1D3fff3958843C43aD80F30b94c510645C316 + │ └─ ← [Return] [..] bytes of code + ├─ [..] → new @0x90193C961A926261B756D1E5bb255e67ff9498A1 + │ └─ ← [Return] [..] bytes of code └─ ← [Stop] - [276825754] StateDiffTest::testCallaccesses() - ├─ [0] VM::startStateDiffRecording() + [..] 
StateDiffTest::testCallaccesses() + ├─ [..] VM::startStateDiffRecording() │ └─ ← [Return] - ├─ [276796934] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::setter(55) + ├─ [..] 0x34A1D3fff3958843C43aD80F30b94c510645C316::setter(55) │ └─ ← [Stop] - ├─ [0] VM::stopAndReturnStateDiff() - │ └─ ← [Return] [((0, 31337 [3.133e4]), 0, 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC, 0x7FA9385bE102ac3EAc297483Dd6233D62b3e1496, true, 1000000000000000000 [1e18], 1000000000000000000 [1e18], 0x, 0, 0xd423740b0000000000000000000000000000000000000000000000000000000000000037, false, [(0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC, 0x0000000000000000000000000000000000000000000000000000000000000001, false, 0x0000000000000000000000000000000000000000000000000000000000000064, 0x0000000000000000000000000000000000000000000000000000000000000064, false), (0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC, 0x0000000000000000000000000000000000000000000000000000000000000001, true, 0x0000000000000000000000000000000000000000000000000000000000000064, 0x0000000000000000000000000000000000000000000000000000000000000037, false)], 1)] + ├─ [..] 
VM::stopAndReturnStateDiff() + │ └─ ← [Return] [((0, 31337 [3.133e4]), 0, 0x34A1D3fff3958843C43aD80F30b94c510645C316, 0x7FA9385bE102ac3EAc297483Dd6233D62b3e1496, true, 1000000000000000000 [1e18], 1000000000000000000 [1e18], 0x, 0, 0xd423740b0000000000000000000000000000000000000000000000000000000000000037, false, [(0x34A1D3fff3958843C43aD80F30b94c510645C316, 0x0000000000000000000000000000000000000000000000000000000000000001, false, 0x0000000000000000000000000000000000000000000000000000000000000064, 0x0000000000000000000000000000000000000000000000000000000000000064, false), (0x34A1D3fff3958843C43aD80F30b94c510645C316, 0x0000000000000000000000000000000000000000000000000000000000000001, true, 0x0000000000000000000000000000000000000000000000000000000000000064, 0x0000000000000000000000000000000000000000000000000000000000000037, false)], 1)] └─ ← [Stop] [PASS] testCreateaccesses() ([GAS]) Traces: - [585251161] StateDiffTest::setUp() - ├─ [292049387] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC - │ └─ ← [Return] 5531 bytes of code - ├─ [293109162] → new @0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f - │ └─ ← [Return] 6405 bytes of code + [..] StateDiffTest::setUp() + ├─ [..] → new @0x34A1D3fff3958843C43aD80F30b94c510645C316 + │ └─ ← [Return] [..] bytes of code + ├─ [..] → new @0x90193C961A926261B756D1E5bb255e67ff9498A1 + │ └─ ← [Return] [..] bytes of code └─ ← [Stop] - [292103665] StateDiffTest::testCreateaccesses() - ├─ [0] VM::startStateDiffRecording() + [..] StateDiffTest::testCreateaccesses() + ├─ [..] 
VM::startStateDiffRecording() │ └─ ← [Return] - ├─ [292049387] → new @0x2e234DAe75C793f67A35089C9d99245E1C58470b - │ └─ ← [Return] 5531 bytes of code - ├─ [0] VM::stopAndReturnStateDiff() - │ └─ ← [Return] [((0, 31337 [3.133e4]), 4, 0x2e234DAe75C793f67A35089C9d99245E1C58470b, 0x7FA9385bE102ac3EAc297483Dd6233D62b3e1496, true, 0, 1000000000000000000 [1e18], 0x, 1000000000000000000 [1e18], 0x0000000000000000000000000000000000000000000000000000000000000064, false, [(0x2e234DAe75C793f67A35089C9d99245E1C58470b, 0x0000000000000000000000000000000000000000000000000000000000000001, false, 0x0000000000000000000000000000000000000000000000000000000000000000, 0x0000000000000000000000000000000000000000000000000000000000000000, false), (0x2e234DAe75C793f67A35089C9d99245E1C58470b, 0x0000000000000000000000000000000000000000000000000000000000000001, true, 0x0000000000000000000000000000000000000000000000000000000000000000, 0x0000000000000000000000000000000000000000000000000000000000000064, false)], 1)] + ├─ [..] → new @0xA8452Ec99ce0C64f20701dB7dD3abDb607c00496 + │ └─ ← [Return] [..] bytes of code + ├─ [..] VM::stopAndReturnStateDiff() + │ └─ ← [Return] [((0, 31337 [3.133e4]), 4, 0xA8452Ec99ce0C64f20701dB7dD3abDb607c00496, 0x7FA9385bE102ac3EAc297483Dd6233D62b3e1496, true, 0, 1000000000000000000 [1e18], 0x, 1000000000000000000 [1e18], 0x0000000000000000000000000000000000000000000000000000000000000064, false, [(0xA8452Ec99ce0C64f20701dB7dD3abDb607c00496, 0x0000000000000000000000000000000000000000000000000000000000000001, false, 0x0000000000000000000000000000000000000000000000000000000000000000, 0x0000000000000000000000000000000000000000000000000000000000000000, false), (0xA8452Ec99ce0C64f20701dB7dD3abDb607c00496, 0x0000000000000000000000000000000000000000000000000000000000000001, true, 0x0000000000000000000000000000000000000000000000000000000000000000, 0x0000000000000000000000000000000000000000000000000000000000000064, false)], 1)] └─ ← [Stop] Suite result: ok. 
3 passed; 0 failed; 0 skipped; [ELAPSED] diff --git a/crates/forge/tests/it/revive/cheat_etch.rs b/crates/forge/tests/it/revive/cheat_etch.rs new file mode 100644 index 0000000000000..87b043e58d63b --- /dev/null +++ b/crates/forge/tests/it/revive/cheat_etch.rs @@ -0,0 +1,16 @@ +use crate::{config::*, test_helpers::TEST_DATA_REVIVE}; +use foundry_test_utils::Filter; +use revive_strategy::ReviveRuntimeMode; +use revm::primitives::hardfork::SpecId; +use rstest::rstest; + +#[rstest] +#[case::pvm_mode_with_any_etched_evm_code(ReviveRuntimeMode::Pvm)] +#[case::evm_mode_with_any_etched_evm_code(ReviveRuntimeMode::Evm)] +#[tokio::test(flavor = "multi_thread")] +async fn test_etch(#[case] runtime_mode: ReviveRuntimeMode) { + let runner: forge::MultiContractRunner = TEST_DATA_REVIVE.runner_revive(runtime_mode); + let filter = Filter::new(".*", "EtchTest", ".*/revive/EtchTest.t.sol"); + + TestConfig::with_filter(runner, filter).spec_id(SpecId::PRAGUE).run().await; +} diff --git a/crates/forge/tests/it/revive/cheat_gas_metering.rs b/crates/forge/tests/it/revive/cheat_gas_metering.rs new file mode 100644 index 0000000000000..6eb4f9c12287e --- /dev/null +++ b/crates/forge/tests/it/revive/cheat_gas_metering.rs @@ -0,0 +1,49 @@ +use crate::{config::*, test_helpers::TEST_DATA_REVIVE}; +use foundry_test_utils::Filter; +use revive_strategy::ReviveRuntimeMode; +use revm::primitives::hardfork::SpecId; +use rstest::rstest; + +#[rstest] +#[case::pvm(ReviveRuntimeMode::Pvm)] +#[case::evm(ReviveRuntimeMode::Evm)] +#[tokio::test(flavor = "multi_thread")] +async fn test_pause_gas_metering_with_pvm_call(#[case] runtime_mode: ReviveRuntimeMode) { + let runner: forge::MultiContractRunner = TEST_DATA_REVIVE.runner_revive(runtime_mode); + let filter = Filter::new("testPauseGasMeteringWithPvmCall", "GasMetering", ".*/revive/.*"); + + TestConfig::with_filter(runner, filter).spec_id(SpecId::PRAGUE).run().await; +} + +#[rstest] +#[case::pvm(ReviveRuntimeMode::Pvm)] 
+#[case::evm(ReviveRuntimeMode::Evm)] +#[tokio::test(flavor = "multi_thread")] +async fn test_resume_gas_metering_with_pvm_call(#[case] runtime_mode: ReviveRuntimeMode) { + let runner: forge::MultiContractRunner = TEST_DATA_REVIVE.runner_revive(runtime_mode); + let filter = Filter::new("testResumeGasMeteringWithPvmCall", "GasMetering", ".*/revive/.*"); + + TestConfig::with_filter(runner, filter).spec_id(SpecId::PRAGUE).run().await; +} + +#[rstest] +#[case::pvm(ReviveRuntimeMode::Pvm)] +#[case::evm(ReviveRuntimeMode::Evm)] +#[tokio::test(flavor = "multi_thread")] +async fn test_reset_gas_metering_with_pvm_call(#[case] runtime_mode: ReviveRuntimeMode) { + let runner: forge::MultiContractRunner = TEST_DATA_REVIVE.runner_revive(runtime_mode); + let filter = Filter::new("testResetGasMeteringWithPvmCall", "GasMetering", ".*/revive/.*"); + + TestConfig::with_filter(runner, filter).spec_id(SpecId::PRAGUE).run().await; +} + +#[rstest] +#[case::pvm(ReviveRuntimeMode::Pvm)] +#[case::evm(ReviveRuntimeMode::Evm)] +#[tokio::test(flavor = "multi_thread")] +async fn test_create_during_paused_metering(#[case] runtime_mode: ReviveRuntimeMode) { + let runner: forge::MultiContractRunner = TEST_DATA_REVIVE.runner_revive(runtime_mode); + let filter = Filter::new("testCreateDuringPausedMetering", "GasMetering", ".*/revive/.*"); + + TestConfig::with_filter(runner, filter).spec_id(SpecId::PRAGUE).run().await; +} diff --git a/crates/forge/tests/it/revive/cheat_mock_call.rs b/crates/forge/tests/it/revive/cheat_mock_call.rs new file mode 100644 index 0000000000000..d291db31cc65a --- /dev/null +++ b/crates/forge/tests/it/revive/cheat_mock_call.rs @@ -0,0 +1,16 @@ +use crate::{config::*, test_helpers::TEST_DATA_REVIVE}; +use foundry_test_utils::Filter; +use revive_strategy::ReviveRuntimeMode; +use revm::primitives::hardfork::SpecId; +use rstest::rstest; + +#[rstest] +#[case::pvm(ReviveRuntimeMode::Pvm)] +#[case::evm(ReviveRuntimeMode::Evm)] +#[tokio::test(flavor = "multi_thread")] +async fn 
test_mock_call(#[case] runtime_mode: ReviveRuntimeMode) { + let runner = TEST_DATA_REVIVE.runner_revive(runtime_mode); + let filter = Filter::new(".*", "MockCall", ".*/revive/MockCall.t.sol"); + + TestConfig::with_filter(runner, filter).spec_id(SpecId::PRAGUE).run().await; +} diff --git a/crates/forge/tests/it/revive/cheat_mock_calls.rs b/crates/forge/tests/it/revive/cheat_mock_calls.rs new file mode 100644 index 0000000000000..a4fa94f31ce41 --- /dev/null +++ b/crates/forge/tests/it/revive/cheat_mock_calls.rs @@ -0,0 +1,16 @@ +use crate::{config::*, test_helpers::TEST_DATA_REVIVE}; +use foundry_test_utils::Filter; +use revive_strategy::ReviveRuntimeMode; +use revm::primitives::hardfork::SpecId; +use rstest::rstest; + +#[rstest] +#[case::pvm(ReviveRuntimeMode::Pvm)] +#[case::evm(ReviveRuntimeMode::Evm)] +#[tokio::test(flavor = "multi_thread")] +async fn test_mock_calls(#[case] runtime_mode: ReviveRuntimeMode) { + let runner = TEST_DATA_REVIVE.runner_revive(runtime_mode); + let filter = Filter::new(".*", ".*", ".*/revive/MockCalls.t.sol"); + + TestConfig::with_filter(runner, filter).spec_id(SpecId::PRAGUE).run().await; +} diff --git a/crates/forge/tests/it/revive/cheat_mock_functions.rs b/crates/forge/tests/it/revive/cheat_mock_functions.rs new file mode 100644 index 0000000000000..f31e5ed17e91c --- /dev/null +++ b/crates/forge/tests/it/revive/cheat_mock_functions.rs @@ -0,0 +1,43 @@ +use crate::{config::*, test_helpers::TEST_DATA_REVIVE}; +use foundry_test_utils::Filter; +use revive_strategy::ReviveRuntimeMode; +use revm::primitives::hardfork::SpecId; +use rstest::rstest; + +#[rstest] +#[case::pvm(ReviveRuntimeMode::Pvm)] +#[case::evm(ReviveRuntimeMode::Evm)] +#[tokio::test(flavor = "multi_thread")] +async fn test_mockx_function(#[case] runtime_mode: ReviveRuntimeMode) { + let runner = TEST_DATA_REVIVE.runner_revive(runtime_mode); + let filter = Filter::new("test_mockx_function", "MockFunction", ".*/revive/MockFunction.t.sol"); + + TestConfig::with_filter(runner, 
filter).spec_id(SpecId::PRAGUE).run().await; +} + +#[rstest] +#[case::pvm(ReviveRuntimeMode::Pvm)] +#[case::evm(ReviveRuntimeMode::Evm)] +#[tokio::test(flavor = "multi_thread")] +async fn test_mock_function_concrete_args(#[case] runtime_mode: ReviveRuntimeMode) { + let runner = TEST_DATA_REVIVE.runner_revive(runtime_mode); + let filter = Filter::new( + "test_mock_function_concrete_args", + "MockFunction", + ".*/revive/MockFunction.t.sol", + ); + + TestConfig::with_filter(runner, filter).spec_id(SpecId::PRAGUE).run().await; +} + +#[rstest] +#[case::pvm(ReviveRuntimeMode::Pvm)] +#[case::evm(ReviveRuntimeMode::Evm)] +#[tokio::test(flavor = "multi_thread")] +async fn test_mock_function_all_args(#[case] runtime_mode: ReviveRuntimeMode) { + let runner = TEST_DATA_REVIVE.runner_revive(runtime_mode); + let filter = + Filter::new("test_mock_function_all_args", "MockFunction", ".*/revive/MockFunction.t.sol"); + + TestConfig::with_filter(runner, filter).spec_id(SpecId::PRAGUE).run().await; +} diff --git a/crates/forge/tests/it/revive/cheat_prank.rs b/crates/forge/tests/it/revive/cheat_prank.rs new file mode 100644 index 0000000000000..31c850d07b0ef --- /dev/null +++ b/crates/forge/tests/it/revive/cheat_prank.rs @@ -0,0 +1,16 @@ +use crate::{config::*, test_helpers::TEST_DATA_REVIVE}; +use foundry_test_utils::Filter; +use revive_strategy::ReviveRuntimeMode; +use revm::primitives::hardfork::SpecId; +use rstest::rstest; + +#[rstest] +#[case::pvm(ReviveRuntimeMode::Pvm)] +#[case::evm(ReviveRuntimeMode::Evm)] +#[tokio::test(flavor = "multi_thread")] +async fn test_revive_prank(#[case] runtime_mode: ReviveRuntimeMode) { + let runner = TEST_DATA_REVIVE.runner_revive(runtime_mode); + let filter = Filter::new(".*", ".*", ".*/revive/Prank.t.sol.*"); + + TestConfig::with_filter(runner, filter).spec_id(SpecId::PRAGUE).run().await; +} diff --git a/crates/forge/tests/it/revive/migration.rs b/crates/forge/tests/it/revive/migration.rs index b9a4620058a36..4907e39e9088b 100644 --- 
a/crates/forge/tests/it/revive/migration.rs +++ b/crates/forge/tests/it/revive/migration.rs @@ -39,7 +39,7 @@ async fn test_revive_bytecode_migration(#[case] runtime_mode: ReviveRuntimeMode) #[rstest] #[case::pvm(ReviveRuntimeMode::Pvm)] -// TODO: Add Evm test when pallet-revive will allow for Evm bytecode upload +#[case::evm(ReviveRuntimeMode::Evm)] #[tokio::test(flavor = "multi_thread")] async fn test_revive_bytecode_migration_to_revive(#[case] runtime_mode: ReviveRuntimeMode) { let runner = TEST_DATA_REVIVE.runner_revive(runtime_mode); @@ -67,3 +67,23 @@ async fn test_revive_timestamp_migration(#[case] runtime_mode: ReviveRuntimeMode let filter = Filter::new("testTimestampMigration", "EvmReviveMigrationTest", ".*/revive/.*"); TestConfig::with_filter(runner, filter).spec_id(SpecId::PRAGUE).run().await; } + +#[rstest] +#[case::pvm(ReviveRuntimeMode::Pvm)] +#[case::evm(ReviveRuntimeMode::Evm)] +#[tokio::test(flavor = "multi_thread")] +async fn test_revive_immutables_migration(#[case] runtime_mode: ReviveRuntimeMode) { + let runner = TEST_DATA_REVIVE.runner_revive(runtime_mode); + let filter = Filter::new("testImmutablesMigration", "EvmReviveMigrationTest", ".*/revive/.*"); + TestConfig::with_filter(runner, filter).spec_id(SpecId::PRAGUE).run().await; +} + +#[rstest] +#[case::pvm(ReviveRuntimeMode::Pvm)] +#[case::evm(ReviveRuntimeMode::Evm)] +#[tokio::test(flavor = "multi_thread")] +async fn test_revive_callback_from_revive(#[case] runtime_mode: ReviveRuntimeMode) { + let runner = TEST_DATA_REVIVE.runner_revive(runtime_mode); + let filter = Filter::new("testCallbackFromRevive", "EvmReviveMigrationTest", ".*/revive/.*"); + TestConfig::with_filter(runner, filter).spec_id(SpecId::PRAGUE).run().await; +} diff --git a/crates/forge/tests/it/revive/mod.rs b/crates/forge/tests/it/revive/mod.rs index dd8e7248c9892..2d929b8d534d8 100644 --- a/crates/forge/tests/it/revive/mod.rs +++ b/crates/forge/tests/it/revive/mod.rs @@ -1,4 +1,11 @@ //! 
Revive strategy tests +pub mod cheat_etch; +pub mod cheat_gas_metering; +pub mod cheat_mock_call; +pub mod cheat_mock_calls; +pub mod cheat_mock_functions; +pub mod cheat_prank; pub mod cheat_store; pub mod migration; +pub mod tx_gas_price; diff --git a/crates/forge/tests/it/revive/tx_gas_price.rs b/crates/forge/tests/it/revive/tx_gas_price.rs new file mode 100644 index 0000000000000..1485b4c42c5b9 --- /dev/null +++ b/crates/forge/tests/it/revive/tx_gas_price.rs @@ -0,0 +1,60 @@ +use crate::{config::*, test_helpers::TEST_DATA_REVIVE}; +use foundry_test_utils::Filter; +use revive_strategy::ReviveRuntimeMode; +use revm::primitives::hardfork::SpecId; +use rstest::rstest; + +#[rstest] +#[case::pvm(ReviveRuntimeMode::Pvm)] +#[case::evm(ReviveRuntimeMode::Evm)] +#[tokio::test(flavor = "multi_thread")] +async fn test_tx_gas_price_works(#[case] runtime_mode: ReviveRuntimeMode) { + let runner: forge::MultiContractRunner = TEST_DATA_REVIVE.runner_revive(runtime_mode); + let filter = Filter::new("testTxGasPriceWorks", "TxGasPrice", ".*/revive/.*"); + + TestConfig::with_filter(runner, filter).spec_id(SpecId::PRAGUE).run().await; +} + +#[rstest] +#[case::pvm(ReviveRuntimeMode::Pvm)] +#[case::evm(ReviveRuntimeMode::Evm)] +#[tokio::test(flavor = "multi_thread")] +async fn test_tx_gas_price_works_with_zero(#[case] runtime_mode: ReviveRuntimeMode) { + let runner: forge::MultiContractRunner = TEST_DATA_REVIVE.runner_revive(runtime_mode); + let filter = Filter::new("testTxGasPriceWorksWithZero", "TxGasPrice", ".*/revive/.*"); + + TestConfig::with_filter(runner, filter).spec_id(SpecId::PRAGUE).run().await; +} + +#[rstest] +#[case::pvm(ReviveRuntimeMode::Pvm)] +#[case::evm(ReviveRuntimeMode::Evm)] +#[tokio::test(flavor = "multi_thread")] +async fn test_tx_gas_price_works_with_large_value(#[case] runtime_mode: ReviveRuntimeMode) { + let runner: forge::MultiContractRunner = TEST_DATA_REVIVE.runner_revive(runtime_mode); + let filter = Filter::new("testTxGasPriceWorksWithLargeValue", 
"TxGasPrice", ".*/revive/.*"); + + TestConfig::with_filter(runner, filter).spec_id(SpecId::PRAGUE).run().await; +} + +#[rstest] +#[case::pvm(ReviveRuntimeMode::Pvm)] +#[case::evm(ReviveRuntimeMode::Evm)] +#[tokio::test(flavor = "multi_thread")] +async fn test_tx_gas_price_works_in_both_modes(#[case] runtime_mode: ReviveRuntimeMode) { + let runner: forge::MultiContractRunner = TEST_DATA_REVIVE.runner_revive(runtime_mode); + let filter = Filter::new("testTxGasPriceWorksInBothModes", "TxGasPrice", ".*/revive/.*"); + + TestConfig::with_filter(runner, filter).spec_id(SpecId::PRAGUE).run().await; +} + +#[rstest] +#[case::pvm(ReviveRuntimeMode::Pvm)] +#[case::evm(ReviveRuntimeMode::Evm)] +#[tokio::test(flavor = "multi_thread")] +async fn test_tx_gas_price_preserved_in_pvm_contract(#[case] runtime_mode: ReviveRuntimeMode) { + let runner: forge::MultiContractRunner = TEST_DATA_REVIVE.runner_revive(runtime_mode); + let filter = Filter::new("testTxGasPricePreservedInPvmContract", "TxGasPrice", ".*/revive/.*"); + + TestConfig::with_filter(runner, filter).spec_id(SpecId::PRAGUE).run().await; +} diff --git a/crates/revive-env/Cargo.toml b/crates/revive-env/Cargo.toml index f02c3fb552718..379242b4204be 100644 --- a/crates/revive-env/Cargo.toml +++ b/crates/revive-env/Cargo.toml @@ -16,14 +16,7 @@ std = ["polkadot-sdk/std"] codec = { version = "3.7.5", default-features = false, package = "parity-scale-codec" } scale-info = { version = "2.11.6", default-features = false } -polkadot-sdk = { git = "https://github.com/paritytech/polkadot-sdk.git", branch = "master", features = [ - "experimental", - "runtime", - "polkadot-runtime-common", - "pallet-revive", - "pallet-balances", - "pallet-timestamp" -]} +polkadot-sdk.workspace = true [lints] workspace = true diff --git a/crates/revive-env/src/runtime.rs b/crates/revive-env/src/runtime.rs index a3e09e7cb0992..417000c927f80 100644 --- a/crates/revive-env/src/runtime.rs +++ b/crates/revive-env/src/runtime.rs @@ -87,6 +87,7 @@ impl 
pallet_revive::Config for Runtime { type ChainId = ConstU64<420_420_420>; type NativeToEthRatio = ConstU32<1_000_000_000>; type FindAuthor = Self; + type DebugEnabled = ConstBool; } impl FindAuthor<::AccountId> for Runtime { diff --git a/crates/revive-strategy/Cargo.toml b/crates/revive-strategy/Cargo.toml index a6141c95fe9e3..1ac668581d30a 100644 --- a/crates/revive-strategy/Cargo.toml +++ b/crates/revive-strategy/Cargo.toml @@ -36,15 +36,7 @@ semver.workspace = true codec = { version = "3.7.5", default-features = false, package = "parity-scale-codec" } scale-info = { version = "2.11.6", default-features = false } -polkadot-sdk = { git = "https://github.com/paritytech/polkadot-sdk.git", branch = "master", features = [ - "experimental", - "runtime", - "polkadot-runtime-common", - "pallet-revive", - "pallet-balances", - "pallet-timestamp" -]} - +polkadot-sdk.workspace = true [lints] workspace = true diff --git a/crates/revive-strategy/src/cheatcodes/mock_handler.rs b/crates/revive-strategy/src/cheatcodes/mock_handler.rs new file mode 100644 index 0000000000000..a81a8d0cf2d06 --- /dev/null +++ b/crates/revive-strategy/src/cheatcodes/mock_handler.rs @@ -0,0 +1,211 @@ +use std::{ + cell::RefCell, + collections::{BTreeMap, VecDeque}, + rc::Rc, +}; + +use alloy_primitives::{Address, Bytes, map::foldhash::HashMap, ruint::aliases::U256}; +use foundry_cheatcodes::{Ecx, MockCallDataContext, MockCallReturnData}; +use polkadot_sdk::{ + frame_system, + pallet_revive::{ + self, AddressMapper, DelegateInfo, ExecOrigin, ExecReturnValue, Pallet, mock::MockHandler, + }, + pallet_revive_uapi::ReturnFlags, + polkadot_sdk_frame::prelude::OriginFor, + sp_core::H160, +}; +use revive_env::{AccountId, Runtime}; + +use revm::{context::JournalTr, interpreter::InstructionResult}; + +// Implementation object that holds the mock state and implements the MockHandler trait for Revive. 
+// Its only purpose is to make transferring the mock state into the Revive EVM easier and then +// synchronize whatever mocks got consumed back into the Cheatcodes state after the call. +#[derive(Clone)] +pub(crate) struct MockHandlerImpl { + inner: Rc<RefCell<MockHandlerInner>>, + pub _prank_enabled: bool, +} + +impl MockHandlerImpl { + /// Creates a new MockHandlerImpl from the given Ecx and Cheatcodes state. + pub(crate) fn new( + ecx: &Ecx<'_, '_, '_>, + caller: &Address, + target_address: Option<&Address>, + callee: Option<&Address>, + state: &mut foundry_cheatcodes::Cheatcodes, + ) -> Self { + let (inject_env, prank_enabled) = + MockHandlerInner::new(ecx, caller, target_address, callee, state); + Self { inner: Rc::new(RefCell::new(inject_env)), _prank_enabled: prank_enabled } + } + + /// Updates the given Cheatcodes state with the current mock state. + /// This is used to synchronize the mock state after a call has been executed in Revive + pub(crate) fn update_state_mocks(&self, state: &mut foundry_cheatcodes::Cheatcodes) { + let mock_inner = self.inner.borrow(); + state.mocked_calls = mock_inner.mocked_calls.clone(); + state.mocked_functions = mock_inner.mocked_functions.clone(); + } + + pub(crate) fn fund_pranked_accounts(&self, account: Address) { + // Fuzzed prank addresses have no balance, so they won't exist in revive, and + // calls will fail, this is not a problem when running in REVM. + // TODO: Figure out why this is still needed.
+ let balance = Pallet::::evm_balance(&H160::from_slice(account.as_slice())); + if balance == 0.into() { + Pallet::::set_evm_balance( + &H160::from_slice(account.as_slice()), + u128::MAX.into(), + ) + .expect("Could not fund pranked account"); + } + } +} + +impl MockHandler for MockHandlerImpl { + fn mock_call( + &self, + callee: H160, + call_data: &[u8], + value_transferred: polkadot_sdk::pallet_revive::U256, + ) -> Option { + let mut mock_inner = self.inner.borrow_mut(); + let ctx = MockCallDataContext { + calldata: call_data.to_vec().into(), + value: Some(U256::from_limbs(value_transferred.0)), + }; + + // Use the same logic as in inspect.rs to find the correct mocked call and consume some of + // them. https://github.com/paritytech/foundry-polkadot/blob/26eda0de53ac03f7ac9b6a6023d8243101cffaf1/crates/cheatcodes/src/inspector.rs#L1013 + if let Some(mock_data) = + mock_inner.mocked_calls.get_mut(&Address::from_slice(callee.as_bytes())) + { + if let Some(return_data_queue) = match mock_data.get_mut(&ctx) { + Some(found) => Some(found), + None => mock_data + .iter_mut() + .find(|(key, _)| { + ctx.calldata.starts_with(&key.calldata) + && (key.value.is_none() + || ctx.value == key.value + || (ctx.value == Some(U256::ZERO) && key.value.is_none())) + }) + .map(|(_, v)| v), + } && let Some(return_data) = if return_data_queue.len() == 1 { + // If the mocked calls stack has a single element in it, don't empty it + return_data_queue.front().map(|x| x.to_owned()) + } else { + // Else, we pop the front element + return_data_queue.pop_front() + } { + return Some(ExecReturnValue { + flags: if matches!(return_data.ret_type, InstructionResult::Revert) { + ReturnFlags::REVERT + } else { + ReturnFlags::default() + }, + data: return_data.data.0.to_vec(), + }); + } + }; + None + } + + fn mock_caller(&self, frames_len: usize) -> Option> { + let mock_inner = self.inner.borrow(); + if frames_len == 0 && mock_inner.delegated_caller.is_none() { + return Some(mock_inner.caller.clone()); + 
} + None + } + + fn mock_delegated_caller( + &self, + dest: H160, + input_data: &[u8], + ) -> Option> { + let mock_inner = self.inner.borrow(); + + // Mocked functions are implemented by making use of the hooks for delegated calls. + if let Some(mocked_function) = + mock_inner.mocked_functions.get(&Address::from_slice(dest.as_bytes())) + { + let input_data = Bytes::from(input_data.to_vec()); + if let Some(target) = mocked_function + .get(&input_data) + .or_else(|| input_data.get(..4).and_then(|selector| mocked_function.get(selector))) + { + return Some(DelegateInfo { + caller: + ExecOrigin::::from_runtime_origin(OriginFor::::signed( + ::AddressMapper::to_account_id(&dest), + )).ok()?, + callee: H160::from_slice(target.as_slice()) + } + ); + } + } + + mock_inner.delegated_caller.as_ref().and_then(|delegate_caller| { + Some(DelegateInfo { + caller: ExecOrigin::::from_runtime_origin(delegate_caller.clone()).ok()?, + callee: mock_inner.callee, + }) + }) + } +} + +// Internal struct that holds the mock state. It is wrapped in an Arc> in MockHandlerImpl +// to make it easier to transfer the state into Revive and back and be able to mutate it from the +// MockHandler trait methods. +#[derive(Clone)] +struct MockHandlerInner { + pub caller: OriginFor, + pub delegated_caller: Option>, + pub callee: H160, + + pub mocked_calls: HashMap>>, + pub mocked_functions: HashMap>, +} + +impl MockHandlerInner { + /// Creates a new MockHandlerInner from the given Ecx and Cheatcodes state. + /// Also returns whether a prank is currently enabled. 
+ fn new( + ecx: &Ecx<'_, '_, '_>, + caller: &Address, + target_address: Option<&Address>, + callee: Option<&Address>, + state: &mut foundry_cheatcodes::Cheatcodes, + ) -> (Self, bool) { + let curr_depth = ecx.journaled_state.depth(); + let mut prank_enabled = false; + let pranked_caller = OriginFor::::signed(AccountId::to_fallback_account_id( + &H160::from_slice(caller.as_slice()), + )); + + let delegated_caller = target_address.map(|addr| { + OriginFor::::signed(AccountId::to_fallback_account_id(&H160::from_slice( + addr.as_slice(), + ))) + }); + + let state_inject = Self { + caller: pranked_caller, + delegated_caller, + mocked_calls: state.mocked_calls.clone(), + callee: callee.map(|addr| H160::from_slice(addr.as_slice())).unwrap_or_default(), + mocked_functions: state.mocked_functions.clone(), + }; + if let Some(prank) = &state.get_prank(curr_depth) { + if curr_depth >= prank.depth { + prank_enabled = true; + } + } + (state_inject, prank_enabled) + } +} diff --git a/crates/revive-strategy/src/cheatcodes/mod.rs b/crates/revive-strategy/src/cheatcodes/mod.rs index b31660617ae7f..1d70b7399a4a3 100644 --- a/crates/revive-strategy/src/cheatcodes/mod.rs +++ b/crates/revive-strategy/src/cheatcodes/mod.rs @@ -1,3 +1,5 @@ +mod mock_handler; + use alloy_primitives::{Address, B256, Bytes, Log, hex, ruint::aliases::U256}; use alloy_rpc_types::BlobTransactionSidecar; use alloy_sol_types::SolValue; @@ -6,11 +8,12 @@ use foundry_cheatcodes::{ CheatcodeInspectorStrategyContext, CheatcodeInspectorStrategyRunner, CheatsConfig, CheatsCtxt, CommonCreateInput, DealRecord, Ecx, Error, EvmCheatcodeInspectorStrategyRunner, Result, Vm::{ - dealCall, getNonce_0Call, loadCall, pvmCall, resetNonceCall, rollCall, setNonceCall, - setNonceUnsafeCall, storeCall, warpCall, + dealCall, etchCall, getNonce_0Call, loadCall, pvmCall, resetNonceCall, rollCall, + setNonceCall, setNonceUnsafeCall, storeCall, warpCall, }, journaled_account, precompile_error, }; + use 
foundry_compilers::resolc::dual_compiled_contracts::DualCompiledContracts; use revive_env::{AccountId, Runtime, System, Timestamp}; use std::{ @@ -20,28 +23,30 @@ use std::{ }; use tracing::warn; +use alloy_eips::eip7702::SignedAuthorization; use polkadot_sdk::{ pallet_revive::{ - self, AccountInfo, AddressMapper, BalanceOf, Code, ContractInfo, ExecConfig, Pallet, - evm::CallTrace, + self, AccountInfo, AddressMapper, BalanceOf, BytecodeType, Code, ContractInfo, + DebugSettings, ExecConfig, Executable, Pallet, evm::CallTrace, }, polkadot_sdk_frame::prelude::OriginFor, - sp_core::{self, H160}, + sp_core::{self, H160, H256}, + sp_io, sp_weights::Weight, }; use crate::{ + cheatcodes::mock_handler::MockHandlerImpl, execute_with_externalities, tracing::{Tracer, storage_tracer::AccountAccess}, }; use foundry_cheatcodes::Vm::{AccountAccess as FAccountAccess, ChainInfo}; -use alloy_eips::eip7702::SignedAuthorization; use revm::{ bytecode::opcode as op, context::{CreateScheme, JournalTr}, interpreter::{ - CallInputs, CallOutcome, CreateOutcome, Gas, InstructionResult, Interpreter, + CallInputs, CallOutcome, CallScheme, CreateOutcome, Gas, InstructionResult, Interpreter, InterpreterResult, interpreter_types::Jumps, }, state::Bytecode, @@ -196,6 +201,49 @@ fn set_block_number(new_height: U256, ecx: Ecx<'_, '_, '_>) { }); } +// Implements the `etch` cheatcode for PVM. 
+fn etch_call(target: &Address, new_runtime_code: &Bytes, ecx: Ecx<'_, '_, '_>) -> Result { + let origin_address = H160::from_slice(ecx.tx.caller.as_slice()); + let origin_account = AccountId::to_fallback_account_id(&origin_address); + + execute_with_externalities(|externalities| { + externalities.execute_with(|| { + let code = new_runtime_code.to_vec(); + let code_type = + if code.starts_with(b"PVM\0") { BytecodeType::Pvm } else { BytecodeType::Evm }; + let contract_blob = Pallet::::try_upload_code( + origin_account.clone(), + code, + code_type, + BalanceOf::::MAX, + &ExecConfig::new_substrate_tx(), + ) + .map_err(|_| <&str as Into>::into("Could not upload PVM code"))? + .0; + + let mut contract_info = if let Some(contract_info) = + AccountInfo::::load_contract(&H160::from_slice(target.as_slice())) + { + contract_info + } else { + ContractInfo::::new( + &origin_address, + System::account_nonce(origin_account), + *contract_blob.code_hash(), + ) + .map_err(|_| <&str as Into>::into("Could not create contract info"))? + }; + contract_info.code_hash = *contract_blob.code_hash(); + AccountInfo::::insert_contract( + &H160::from_slice(target.as_slice()), + contract_info, + ); + Ok::<(), Error>(()) + }) + })?; + Ok(Default::default()) +} + fn set_timestamp(new_timestamp: U256, ecx: Ecx<'_, '_, '_>) { // Set timestamp in EVM context (seconds). 
ecx.block.timestamp = new_timestamp; @@ -389,6 +437,12 @@ impl CheatcodeInspectorStrategyRunner for PvmCheatcodeInspectorStrategyRunner { Ok(Default::default()) } + t if using_pvm && is::(t) => { + let etchCall { target, newRuntimeBytecode } = + cheatcode.as_any().downcast_ref().unwrap(); + etch_call(target, newRuntimeBytecode, ccx.ecx)?; + Ok(Default::default()) + } t if using_pvm && is::(t) => { tracing::info!(cheatcode = ?cheatcode.as_debug() , using_pvm = ?using_pvm); let &loadCall { target, slot } = cheatcode.as_any().downcast_ref().unwrap(); @@ -559,9 +613,7 @@ fn select_revive(ctx: &mut PvmCheatcodeInspectorStrategyContext, data: Ecx<'_, ' System::set_block_number(block_number.saturating_to()); Timestamp::set_timestamp(timestamp.saturating_to::() * 1000); - let test_contract = data.journaled_state.database.get_test_contract_address(); let persistent_accounts = data.journaled_state.database.persistent_accounts().clone(); - for address in persistent_accounts.into_iter().chain([data.tx.caller]) { let acc = data.journaled_state.load_account(address).expect("failed to load account"); let amount = acc.data.info.balance; @@ -577,57 +629,75 @@ fn select_revive(ctx: &mut PvmCheatcodeInspectorStrategyContext, data: Ecx<'_, ' a.nonce = nonce.min(u32::MAX.into()).try_into().expect("shouldn't happen"); }); - // TODO handle immutables - // Migrate bytecode for deployed contracts (skip test contract) - if test_contract != Some(address) - && let Some(bytecode) = acc.data.info.code.as_ref() { - + if let Some(bytecode) = acc.data.info.code.as_ref() { let account_h160 = H160::from_slice(address.as_slice()); // Skip if contract already exists in pallet-revive if AccountInfo::::load_contract(&account_h160).is_none() { - // Determine which bytecode to upload based on runtime mode - let bytecode_to_upload = ctx.dual_compiled_contracts - .find_by_evm_deployed_bytecode_with_immutables(bytecode.original_byte_slice()) - .and_then(|(_, contract)| { - match ctx.runtime_mode { - 
crate::ReviveRuntimeMode::Pvm => contract.resolc_bytecode.as_bytes().map(|b| b.to_vec()), - crate::ReviveRuntimeMode::Evm => None, - // TODO: We do not have method to upload the EVM bytecode to pallet-revive - //contract.evm_bytecode.as_bytes().map(|b| b.to_vec()) + // Find the matching dual-compiled contract by EVM bytecode + if let Some((_, contract)) = ctx.dual_compiled_contracts + .find_by_evm_deployed_bytecode_with_immutables(bytecode.original_byte_slice()) + { + let (code_bytes, immutable_data, code_type) = match ctx.runtime_mode { + crate::ReviveRuntimeMode::Pvm => { + let immutable_data = contract.evm_immutable_references + .as_ref() + .map(|immutable_refs| { + let evm_bytecode = bytecode.original_byte_slice(); + + // Collect all immutable bytes from their scattered offsets + immutable_refs + .values() + .flatten() + .flat_map(|offset| { + let start = offset.start as usize; + let end = start + offset.length as usize; + evm_bytecode.get(start..end).unwrap_or_else(|| panic!("Immutable offset out of bounds: address={:?}, offset={}..{}, bytecode_len={}", + address, start, end, evm_bytecode.len())) + }) + .copied() + .collect::>() + }); + (contract.resolc_deployed_bytecode.as_bytes().map(|b| b.to_vec()),immutable_data, BytecodeType::Pvm) + }, + crate::ReviveRuntimeMode::Evm => (contract.evm_deployed_bytecode.as_bytes().map(|b| b.to_vec()), None, BytecodeType::Evm), + }; + + if let Some(code_bytes) = code_bytes { + let upload_result = Pallet::::try_upload_code( + Pallet::::account_id(), + code_bytes.clone(), + code_type, + u64::MAX.into(), + &ExecConfig::new_substrate_tx(), + ); + match upload_result { + Ok(_) => { + let code_hash = H256(sp_io::hashing::keccak_256(&code_bytes)); + let contract_info = ContractInfo::::new(&account_h160, nonce as u32, code_hash) + .expect("Failed to create contract info"); + AccountInfo::::insert_contract(&account_h160, contract_info); + if let Some(data) = immutable_data.and_then(|immutables| immutables.try_into().ok()) + { + 
Pallet::::set_immutables(account_h160, data).expect("Failed to migrate immutables"); } - }); - - if let Some(code_bytes) = bytecode_to_upload { - let origin = OriginFor::::signed(Pallet::::account_id()); - let upload_result = Pallet::::bare_upload_code( - origin, - code_bytes.clone(), - BalanceOf::::MAX, - ); - - match upload_result { - Ok(result) => { - let code_hash = result.code_hash; - let contract_info = ContractInfo::::new(&account_h160, nonce as u32, code_hash) - .expect("Failed to create contract info"); - AccountInfo::::insert_contract(&account_h160, contract_info); - } - Err(err) => { - tracing::warn!( - address = ?address, - runtime_mode = ?ctx.runtime_mode, - bytecode_len = code_bytes.len(), - error = ?err, - "Failed to upload bytecode to pallet-revive, skipping migration" - ); + } + Err(err) => { + tracing::warn!( + address = ?address, + runtime_mode = ?ctx.runtime_mode, + bytecode_len = code_bytes.len(), + error = ?err, + "Failed to upload bytecode to pallet-revive, skipping migration" + ); + } } + } else { + tracing::info!( + address = ?address, + "no PVM equivalent found for EVM bytecode, skipping migration" + ); } - } else { - tracing::info!( - address = ?address, - "no PVM equivalent found for EVM bytecode, skipping migration" - ); } } } @@ -709,6 +779,12 @@ fn select_evm(ctx: &mut PvmCheatcodeInspectorStrategyContext, data: Ecx<'_, '_, } impl foundry_cheatcodes::CheatcodeInspectorStrategyExt for PvmCheatcodeInspectorStrategyRunner { + fn is_pvm_enabled(&self, state: &mut foundry_cheatcodes::Cheatcodes) -> bool { + let ctx = get_context_ref_mut(state.strategy.context.as_mut()); + + ctx.using_pvm + } + /// Try handling the `CREATE` within PVM. 
/// /// If `Some` is returned then the result must be returned immediately, else the call must be @@ -720,7 +796,10 @@ impl foundry_cheatcodes::CheatcodeInspectorStrategyExt for PvmCheatcodeInspector input: &dyn CommonCreateInput, executor: &mut dyn foundry_cheatcodes::CheatcodesExecutor, ) -> Option { - let ctx = get_context_ref_mut(state.strategy.context.as_mut()); + let mock_handler = MockHandlerImpl::new(&ecx, &input.caller(), None, None, state); + + let ctx: &mut PvmCheatcodeInspectorStrategyContext = + get_context_ref_mut(state.strategy.context.as_mut()); if !ctx.using_pvm { return None; @@ -772,15 +851,32 @@ impl foundry_cheatcodes::CheatcodeInspectorStrategyExt for PvmCheatcodeInspector } }; + let gas_price_pvm = + sp_core::U256::from_little_endian(&U256::from(ecx.tx.gas_price).as_le_bytes()); let mut tracer = Tracer::new(true); let res = execute_with_externalities(|externalities| { externalities.execute_with(|| { tracer.trace(|| { let origin = OriginFor::::signed(AccountId::to_fallback_account_id( - &H160::from_slice(input.caller().as_slice()), + &H160::from_slice(ecx.tx.caller.as_slice()), )); let evm_value = sp_core::U256::from_little_endian(&input.value().as_le_bytes()); + mock_handler.fund_pranked_accounts(ecx.tx.caller); + + // Pre-Dispatch Increments the nonce of the origin, so let's make sure we do + // that here too to replicate the same address generation. 
+ System::inc_account_nonce(AccountId::to_fallback_account_id( + &H160::from_slice(ecx.tx.caller.as_slice()), + )); + + let exec_config = ExecConfig { + bump_nonce: true, + collect_deposit_from_hold: None, + effective_gas_price: Some(gas_price_pvm), + mock_handler: Some(Box::new(mock_handler.clone())), + is_dry_run: None, + }; let code = Code::Upload(code_bytes.clone()); let data = constructor_args; let salt = match input.scheme() { @@ -795,6 +891,13 @@ impl foundry_cheatcodes::CheatcodeInspectorStrategyExt for PvmCheatcodeInspector _ => None, }; + // If limits are set to max, enable debug mode to bypass them in revive + if ecx.cfg.limit_contract_code_size == Some(usize::MAX) + || ecx.cfg.limit_contract_initcode_size == Some(usize::MAX) + { + let debug_settings = DebugSettings::new(true); + debug_settings.write_to_storage::(); + } Pallet::::bare_instantiate( origin, evm_value, @@ -804,20 +907,25 @@ impl foundry_cheatcodes::CheatcodeInspectorStrategyExt for PvmCheatcodeInspector code, data, salt, - ExecConfig::new_substrate_tx(), + exec_config, ) }) }) }); - let mut gas = Gas::new(input.gas_limit()); if res.result.as_ref().is_ok_and(|r| !r.result.did_revert()) { self.append_recorded_accesses(state, ecx, tracer.get_recorded_accesses()); } post_exec(state, ecx, executor, &mut tracer, false); + mock_handler.update_state_mocks(state); + match &res.result { Ok(result) => { - let _ = gas.record_cost(res.gas_required.ref_time()); + // Only record gas cost if gas metering is not paused. + // When paused, the gas counter should remain frozen. 
+ if !state.gas_metering.paused { + let _ = gas.record_cost(res.gas_required.ref_time()); + } let outcome = if result.result.did_revert() { CreateOutcome { @@ -869,6 +977,10 @@ impl foundry_cheatcodes::CheatcodeInspectorStrategyExt for PvmCheatcodeInspector executor: &mut dyn foundry_cheatcodes::CheatcodesExecutor, ) -> Option { let ctx = get_context_ref_mut(state.strategy.context.as_mut()); + let target_address = match call.scheme { + CallScheme::DelegateCall => Some(call.target_address), + _ => None, + }; if !ctx.using_pvm { return None; @@ -878,7 +990,7 @@ impl foundry_cheatcodes::CheatcodeInspectorStrategyExt for PvmCheatcodeInspector .journaled_state .database .get_test_contract_address() - .map(|addr| call.bytecode_address == addr) + .map(|addr| call.bytecode_address == addr || call.target_address == addr) .unwrap_or_default() { tracing::info!( @@ -889,18 +1001,44 @@ impl foundry_cheatcodes::CheatcodeInspectorStrategyExt for PvmCheatcodeInspector } tracing::info!("running call on pallet-revive with {} {:#?}", ctx.runtime_mode, call); + + let gas_price_pvm = + sp_core::U256::from_little_endian(&U256::from(ecx.tx.gas_price).as_le_bytes()); + let mock_handler = MockHandlerImpl::new( + &ecx, + &call.caller, + target_address.as_ref(), + Some(&call.bytecode_address), + state, + ); + let mut tracer = Tracer::new(true); let res = execute_with_externalities(|externalities| { externalities.execute_with(|| { tracer.trace(|| { let origin = OriginFor::::signed(AccountId::to_fallback_account_id( - &H160::from_slice(call.caller.as_slice()), + &H160::from_slice(ecx.tx.caller.as_slice()), )); + mock_handler.fund_pranked_accounts(ecx.tx.caller); + let evm_value = sp_core::U256::from_little_endian(&call.call_value().as_le_bytes()); - let target = H160::from_slice(call.target_address.as_slice()); + let exec_config = ExecConfig { + bump_nonce: true, + collect_deposit_from_hold: None, + effective_gas_price: Some(gas_price_pvm), + mock_handler: Some(Box::new(mock_handler.clone())), 
+ is_dry_run: None, + }; + // If limits are set to max, enable debug mode to bypass them in revive + if ecx.cfg.limit_contract_code_size == Some(usize::MAX) + || ecx.cfg.limit_contract_initcode_size == Some(usize::MAX) + { + let debug_settings = DebugSettings::new(true); + debug_settings.write_to_storage::(); + } Pallet::::bare_call( origin, @@ -910,12 +1048,12 @@ impl foundry_cheatcodes::CheatcodeInspectorStrategyExt for PvmCheatcodeInspector // TODO: fixing. BalanceOf::::MAX, call.input.bytes(ecx).to_vec(), - ExecConfig::new_substrate_tx(), + exec_config, ) }) }) }); - + mock_handler.update_state_mocks(state); let mut gas = Gas::new(call.gas_limit); if res.result.as_ref().is_ok_and(|r| !r.did_revert()) { self.append_recorded_accesses(state, ecx, tracer.get_recorded_accesses()); @@ -923,7 +1061,11 @@ impl foundry_cheatcodes::CheatcodeInspectorStrategyExt for PvmCheatcodeInspector post_exec(state, ecx, executor, &mut tracer, call.is_static); match res.result { Ok(result) => { - let _ = gas.record_cost(res.gas_required.ref_time()); + // Only record gas cost if gas metering is not paused. + // When paused, the gas counter should remain frozen. 
+ if !state.gas_metering.paused { + let _ = gas.record_cost(res.gas_required.ref_time()); + } let outcome = if result.did_revert() { tracing::info!("Contract call reverted"); diff --git a/crates/revive-strategy/src/lib.rs b/crates/revive-strategy/src/lib.rs index 711040cd4a417..5ce172f2bfce5 100644 --- a/crates/revive-strategy/src/lib.rs +++ b/crates/revive-strategy/src/lib.rs @@ -23,7 +23,7 @@ mod cheatcodes; mod executor; mod tracing; -pub use cheatcodes::PvmStartupMigration; +pub use cheatcodes::{PvmCheatcodeInspectorStrategyBuilder, PvmStartupMigration}; /// Runtime backend mode for pallet-revive #[derive(Debug, Clone, Copy, PartialEq, Eq, Default)] diff --git a/crates/revive-strategy/tests/gas_metering.rs b/crates/revive-strategy/tests/gas_metering.rs new file mode 100644 index 0000000000000..1873e71a6518d --- /dev/null +++ b/crates/revive-strategy/tests/gas_metering.rs @@ -0,0 +1,78 @@ +//! Tests for pauseGasMetering, resumeGasMetering, and resetGasMetering cheatcodes +//! +//! ## Test Strategy +//! +//! These tests verify that gas metering cheatcodes work correctly in both EVM and PVM modes. +//! Gas metering operates at the EVM interpreter level (for testing/debugging), affecting: +//! - How gas is tracked during test execution +//! - The ability to pause/resume/reset gas consumption counting +//! 
- Gas reporting for test analysis + +use foundry_cheatcodes::Cheatcodes; +use foundry_compilers::resolc::dual_compiled_contracts::DualCompiledContracts; +use revive_strategy::{PvmCheatcodeInspectorStrategyBuilder, ReviveRuntimeMode}; + +#[test] +fn evm_pause_gas_metering_sets_flag() { + let state = Cheatcodes::default(); + + assert!(!state.gas_metering.paused, "Gas metering should not be paused initially"); +} + +#[test] +fn evm_gas_metering_initial_state() { + let state = Cheatcodes::default(); + + assert!(!state.gas_metering.paused); + assert!(!state.gas_metering.touched); + assert!(!state.gas_metering.reset); + assert!(state.gas_metering.paused_frames.is_empty()); + assert!(state.gas_metering.last_call_gas.is_none()); +} + +#[test] +fn pvm_cheatcodes_state_initializes() { + use foundry_cheatcodes::CheatcodeInspectorStrategy; + + let mut state = Cheatcodes::default(); + state.strategy = CheatcodeInspectorStrategy::new_pvm( + DualCompiledContracts::default(), + ReviveRuntimeMode::Pvm, + ); + + assert!(!state.gas_metering.paused, "Gas metering should not be paused initially in PVM"); +} + +#[test] +fn pvm_gas_metering_state_structure() { + use foundry_cheatcodes::CheatcodeInspectorStrategy; + + let mut state = Cheatcodes::default(); + state.strategy = CheatcodeInspectorStrategy::new_pvm( + DualCompiledContracts::default(), + ReviveRuntimeMode::Pvm, + ); + + assert!(!state.gas_metering.paused); + assert!(!state.gas_metering.touched); + assert!(!state.gas_metering.reset); + assert!(state.gas_metering.paused_frames.is_empty()); +} + +#[test] +fn gas_metering_is_independent_of_mode() { + use foundry_cheatcodes::CheatcodeInspectorStrategy; + + let evm_state = Cheatcodes::default(); + + let mut pvm_state = Cheatcodes::default(); + pvm_state.strategy = CheatcodeInspectorStrategy::new_pvm( + DualCompiledContracts::default(), + ReviveRuntimeMode::Pvm, + ); + + assert_eq!( + evm_state.gas_metering.paused, pvm_state.gas_metering.paused, + "Gas metering state should be 
identical in EVM and PVM modes initially" + ); +} diff --git a/crates/revive-utils/Cargo.toml b/crates/revive-utils/Cargo.toml index 80e74221d3922..bbc084ff525e0 100644 --- a/crates/revive-utils/Cargo.toml +++ b/crates/revive-utils/Cargo.toml @@ -14,14 +14,7 @@ exclude.workspace = true [dependencies] foundry-evm-core.workspace = true foundry-evm-traces.workspace = true -polkadot-sdk = { git = "https://github.com/paritytech/polkadot-sdk.git", branch = "master", features = [ - "experimental", - "runtime", - "polkadot-runtime-common", - "pallet-revive", - "pallet-balances", - "pallet-timestamp" -]} +polkadot-sdk.workspace = true revive-env.workspace = true alloy-primitives.workspace = true diff --git a/crates/script/src/progress.rs b/crates/script/src/progress.rs index 09329ad6b05ab..2f97d895fd141 100644 --- a/crates/script/src/progress.rs +++ b/crates/script/src/progress.rs @@ -171,7 +171,7 @@ impl ScriptProgress { progress } - /// Traverses a set of pendings and either finds receipts, or clears them from + /// Traverses a set of pending transactions and either finds receipts, or clears them from /// the deployment sequence. /// /// For each `tx_hash`, we check if it has confirmed. 
If it has diff --git a/testdata/default/revive/EtchTest.t.sol b/testdata/default/revive/EtchTest.t.sol new file mode 100644 index 0000000000000..d470f9080230f --- /dev/null +++ b/testdata/default/revive/EtchTest.t.sol @@ -0,0 +1,132 @@ +import "ds-test/test.sol"; +import "cheats/Vm.sol"; +import "../../default/logs/console.sol"; + +contract Adder { + function add(uint256 a, uint256 b) public pure returns (uint256) { + return a * b; // Intentional bug to verify etch works + } +} + +contract NestedAdder { + uint256 public inner_a; + uint256 public inner_b; + + constructor(uint256 a, uint256 b) { + inner_a = a; + inner_b = b; + } + + function nested_call(address target) public returns (uint256) { + // Perform the add call on the target contract address + (bool success, bytes memory data) = + target.call(abi.encodeWithSignature("add(uint256,uint256)", inner_a, inner_b)); + require(success, "Nested call failed"); + uint256 result = abi.decode(data, (uint256)); + assert(success); + return result; + } +} + +contract EtchTest is DSTest { + Vm constant vm = Vm(address(bytes20(uint160(uint256(keccak256("hevm cheat code")))))); + // This is the bytecode for the correct Adder contract above compiled with resolc. 
+ bytes constant code = + hex"50564d00008c0c000000000000010700c13000c00080047106000000000e0000001c0000002a000000350000004700000063616c6c5f646174615f636f707963616c6c5f646174615f6c6f616463616c6c5f646174615f73697a657365616c5f72657475726e7365745f696d6d757461626c655f6461746176616c75655f7472616e7366657272656405110287920463616c6c879e066465706c6f79068bec25028a531a001f004b007c00a500ae00c100cd00fa005e01630192019701bb0212036003b103c0033c048d04b2046c05ec05f5050106180627063d0660061a0769079e07ab07c607ca07080850099511f07b10087b158475010a02013d0700000251050750100209501004b3009511807b10787b15707b1668951580008411e04911384911304911208000330740951820491128501006e6084911584911504911484911408317400a0501821750821858821948821a40d49808d4a707d4870752072e6417501008ec008217188218108219088216d49707d48609d47909989920d48707977720d497075107090050100a61056467330850100c75098377330833090a283e076467330850100e62098378330733093300100a03019511a07b10587b15507b16489515608411e064164916384916304916208000330740956820491628501012370839070000025317045f9517e08477e07b67186471837733080a010182671880771c977720140800000000f7021677ab8736330014951120fe7b10d8017b15d0017b16c8019515e0018411e04921b8014921b0014921a8014921a0018317a0010a05288d02501016a3049511c07b10387b15307b16289515408411f0647664173300189511f07b10087b156475330820330740330048288f0850121a3a063200828910828a18828b088288d4ba0ad4980bd4ab0b98bb20d4a909979920d4b9095209449511c07b10387b15307b16289515408411e06476838883170a01821718821810821908821a7b67187b68107b69087b6a9551c08210388215308216289511403200009511e0fe7b1018017b1510017b160801951520018411f0828310828208829a08828c829b7b1b20829410d32a067b1638d82a06d8cb00821b38dab006828b188298187b1838c93409c969087b1828d869087b1330d8340664b4821938c9b909c96909c98909c92a08c908087b1c38821a20c9ca0a8e8b88aa4085aa01db8b0a8f98821c288ecbdb980bd49c08db8b0a510ace0064768217387b17c0007b127b12c8008217307b17d0009517e0009518c0007b14087b14d80050101ce3fe8217e8007b17288217e0007b17208217f8007b17188217f0007b1710821838958720d88708821ac88a09d8a90ada880a82
1830c88a0ad88a08821b08c88b0b7b1a707b19687b1760951780009518607b1b7850101e8cfe821790008218980082198000821a88007b67307b68387b69207b6a288217107b67108217187b67188217207b678217287b67089551e0fe821018018215100182160801951120013200501020a602828a10828b18828c088289d4cb0bd4a908d4b808988820d4ba0a97aa20d4a80852083f9511d07b10287b15209515308411f0827a18827810827b0882777b177b1b087b181064187b1a18649750102280059551d08210288215209511303200008218108217087b87088217187b877b861082177b87189551808210788215708216689511800032008217b0018218b8018219a801821aa001d49808d4a707d487075207e70138070000024921380149213001492128017b1720014921580149215001492140010495176001951840019519200149214801501026c7fd82126001821768018218700182197801821b8001821c880182169001821a98017b1ad8007b16d0007b1cc8007b1bc0007b19f8007b18f0007b17e800951700019518e0009519c0007b12e000501028d801821700017b1738821708017b1730821710017b1728821718017b17209517a00050102ab6fc8217b8007b17188216b0008218a8007b1810821aa0007b1a088219207b19588219287b19508219307b19488219387b19407b17787b16707b1868951780009518609519407b1a6033002c9511807b10787b15707b1668951580008411f07b171082878292828b08829308957a207b1a18d87a06c86b0a7b1a08d8ba0cda660c828a10828818829410829918c8ca06d8a60cc88c0c7b1c7b18387b1a307b1b287b17207b19587b14507b13489517409518207b124033002428f7fd821918821b10821008d49b07d46008d47808988820d46707977720d4870752075d646482178800821898007b183882138000821a9000d3b706d8b70cd80308da680cc94a06c9c602d8c606d84a0a821c38c99c0cc9ac0cc96c0cc9b707c98707c90306d4c707d42608d47808d42707988820977720d487075107080050102e19640764685010302e048378836933073300320a03019511f87b103308100002838833070133093300340a03013308491718491710491708490732004911184911104901113307046418491108501038f402390804000256183f0b200304000240013308100002838833070133092433003a0a03019511e87b10107b15087b16828b188294188282828c08829a088295828610829810c8ca09c82503d85305c85909d3a900d8a90ada050ac86805c85a0ad85a00c8b404d88508c84808c88000d86a05d3b008d8b00bda850bd3a606d46808d3c906d8c90cd82305db6c05db8b0552051b7b737b7908
7b7a107b70188210108215088216951118320033003c9511b07b10487b15409515508411f0491130491128491120140700000000717b484e9518207b173833073300362815029511807b10787b15707b1668951580008411f08282828308828410828818829a829b08829c1082991864767b19187b1c107b1b087b1a7b18387b14307b132895174095182064197b122050103efcfe821750821858821940821a487b67107b68187b697b6a089551808210788215708216689511800032009511f87b10330750104067f89511f87b103307015010425af89511c07b10387b15307b16289515408411f064766417501044a2f95012460632008217108218188219821a087b67107b68187b697b6a089551c0821038821530821628951140320239080800025108c0f8330730000383770a0428b3f87c78017c797c7a027c7b03978808d4980897aa1097bb18d4ba0ad4a8087c79057c7a047c7b067c7c07979908d4a90997bb1097cc18d4cb0bd4b909979920d489027c79097c7a087c7b0a7c7c0b979908d4a90997bb1097cc18d4cb0bd4b9097c7a0d7c7b0c7c7c0e7c780f97aa08d4ba0a97cc10978818d4c808d4a808978820d498037c78117c7a107c7b127c7c13978808d4a80897bb1097cc18d4cb0bd4b8087c7a157c7b147c7c167c791797aa08d4ba0a97cc10979918d4c909d4a909979920d4890a7c78197c79187c7b1a7c7c1b978808d4980897bb1097cc18d4cb0bd4b8087c791d7c7b1c7c7c1e7c771f979908d4b90997cc10977718d4c707d49707977720d487076f776fa86f396f2a7b5a187b59107b58087b57821008821595111032009511d87b10207b15187b161082897b19088289087b1982851082861833082050104ad3006f686f59821a6faa821b086fbb787b18787a10787908787898bc38787c1f98bc30787c1e98bc28787c1d98bc20787c1c98bc18787c1b98bc10787c1a98bb08787b1998ab38787b1798ab30787b1698ab28787b1598ab20787b1498ab18787b1398ab10787b1298aa08787a11989a38787a0f989a30787a0e989a28787a0d989a20787a0c989a18787a0b989a10787a0a98990878790998893878790798893078790698892878790598892078790498891878790398891078790298880878780182102082151882161095112832008b7910520931c8780883881f8488e05638000001253309040002390a040002ae8a093d080400020133081000028377c887073200004969488424892421494892344992a490a4244992a1423515aa3449929290248448523549004944442422224a4892a4925492849294244992244955929294246944442449254992244992a424499294a494244a1489884488442291242549922489244444244992a424
22220a494a9224499224a9a41492a449524a924411114922444444848888101111111111111111112112894422442212499224491249922491549224499224494992244992244992a4884424499224499224499251a14a5328a525a9242921443094a4494a4a4a922449922449922449929224a52449920490942449a4242529494992129224294912a5424a4892a4429494a424490a112a294992244992244992244992244992244992244992244992244992244992244992aa24a5942425250955294949922449922449922449922449922449922449922449529224082161280500"; + bytes constant code_evm = + hex"608060405234801561000f575f5ffd5b5060043610610029575f3560e01c8063771602f71461002d575b5f5ffd5b610047600480360381019061004291906100a9565b61005d565b60405161005491906100f6565b60405180910390f35b5f818361006a919061013c565b905092915050565b5f5ffd5b5f819050919050565b61008881610076565b8114610092575f5ffd5b50565b5f813590506100a38161007f565b92915050565b5f5f604083850312156100bf576100be610072565b5b5f6100cc85828601610095565b92505060206100dd85828601610095565b9150509250929050565b6100f081610076565b82525050565b5f6020820190506101095f8301846100e7565b92915050565b7f4e487b71000000000000000000000000000000000000000000000000000000005f52601160045260245ffd5b5f61014682610076565b915061015183610076565b92508282019050808211156101695761016861010f565b5b9291505056fea2646970667358221220bde9424751d367d702063695cb7d0afb42c0a83d370954296d07e2b1684208fb64736f6c634300081e0033"; + + // Test etching code into an existing contract instance works correctly. 
+ function testEtchExistingContractPvmCode() public { + vm.pvm(true); + Adder adder = new Adder(); + + // Without etch, the add function is broken + uint256 buggy_result = adder.add(1, 2); + assertEq(buggy_result, 2); + + // Etch the correct bytecode into the existing contract + vm.etch(address(adder), code); + uint256 result = adder.add(1, 2); + assertEq(result, 3); + + // Verify that nested calls also work correctly after etch + uint256 nested_call_result = (new NestedAdder(1, 2)).nested_call(address(adder)); + assertEq(nested_call_result, 3); + } + + // Test etching code into an existing contract instance works correctly. + function testEtchExistingContractEvmCode() public { + vm.pvm(true); + Adder adder = new Adder(); + + // Without etch, the add function is broken + uint256 buggy_result = adder.add(1, 2); + assertEq(buggy_result, 2); + + // Etch the correct bytecode into the existing contract + vm.etch(address(adder), code_evm); + uint256 result = adder.add(1, 2); + assertEq(result, 3); + + // Verify that nested calls also work correctly after etch + uint256 nested_call_result = (new NestedAdder(1, 2)).nested_call(address(adder)); + assertEq(nested_call_result, 3); + } + + // Test etching code into any arbitrary address works correctly. 
+ function testEtchAnyContractPvmCode() public { + vm.pvm(true); + // Etch the correct bytecode into an arbitrary address + address target = address(7070707); + vm.etch(target, code); + (bool success, bytes memory output) = target.call(abi.encodeWithSignature("add(uint256,uint256)", 1, 2)); + uint256 result1 = abi.decode(output, (uint256)); + + assert(success); + assertEq(result1, 3); + + uint256 nested_call_result = (new NestedAdder(1, 2)).nested_call(address(target)); + assertEq(nested_call_result, 3); + + // Etch into the zero address as well to verify it works for reserved addresses + address target2 = address(0); + vm.etch(target2, code); + (bool success2, bytes memory output2) = target2.call(abi.encodeWithSignature("add(uint256,uint256)", 1, 2)); + uint256 result2 = abi.decode(output2, (uint256)); + + assert(success2); + assertEq(result2, 3); + + uint256 nested_call_result2 = (new NestedAdder(1, 2)).nested_call(address(target2)); + assertEq(nested_call_result2, 3); + } + + // Test etching code into any arbitrary address works correctly. 
+ function testEtchAnyContractEvmCode() public { + vm.pvm(true); + // Etch the correct bytecode into an arbitrary address + address target = address(7070707); + vm.etch(target, code); + (bool success, bytes memory output) = target.call(abi.encodeWithSignature("add(uint256,uint256)", 1, 2)); + uint256 result1 = abi.decode(output, (uint256)); + + assert(success); + assertEq(result1, 3); + + uint256 nested_call_result = (new NestedAdder(1, 2)).nested_call(address(target)); + assertEq(nested_call_result, 3); + + // Etch into the zero address as well to verify it works for reserved addresses + address target2 = address(0); + vm.etch(target2, code_evm); + (bool success2, bytes memory output2) = target2.call(abi.encodeWithSignature("add(uint256,uint256)", 1, 2)); + uint256 result2 = abi.decode(output2, (uint256)); + + assert(success2); + assertEq(result2, 3); + + uint256 nested_call_result2 = (new NestedAdder(1, 2)).nested_call(address(target2)); + assertEq(nested_call_result2, 3); + } +} diff --git a/testdata/default/revive/EvmToReviveMigration.t.sol b/testdata/default/revive/EvmToReviveMigration.t.sol index cd11baca0b5a9..8e7cd8c3e06a6 100644 --- a/testdata/default/revive/EvmToReviveMigration.t.sol +++ b/testdata/default/revive/EvmToReviveMigration.t.sol @@ -16,6 +16,67 @@ contract SimpleStorage { } } +contract StorageWithImmutables { + uint256 public immutable deployedAt; + address public immutable deployer; + uint256 public immutable magicNumber; + + constructor(uint256 _magicNumber) { + deployedAt = block.timestamp; + deployer = msg.sender; + magicNumber = _magicNumber; + } + + function getDeployedAt() public view returns (uint256) { + return deployedAt; + } + + function getDeployer() public view returns (address) { + return deployer; + } + + function getMagicNumber() public view returns (uint256) { + return magicNumber; + } +} + +interface IAuthorizationCallback { + function onAuthorization(address caller, uint256 value) external returns (bool); +} + +contract 
CallbackContract { + address public owner; + uint256 public lastValue; + address public lastCaller; + + constructor() { + owner = msg.sender; + } + + // This function calls back to the caller to verify authorization + // Similar to how Morpho calls back to verify permissions + function executeWithCallback(uint256 value) public returns (bool) { + // Call back to the msg.sender to verify authorization + bool authorized = IAuthorizationCallback(msg.sender).onAuthorization(msg.sender, value); + + if (authorized) { + lastValue = value; + lastCaller = msg.sender; + return true; + } + + return false; + } + + function getLastValue() public view returns (uint256) { + return lastValue; + } + + function getLastCaller() public view returns (address) { + return lastCaller; + } +} + contract EvmReviveMigrationTest is DSTest { Vm constant vm = Vm(address(uint160(uint256(keccak256("hevm cheat code"))))); address alice = address(0x1111); @@ -137,4 +198,49 @@ contract EvmReviveMigrationTest is DSTest { uint256 finalReviveTimestamp = block.timestamp; assertEq(finalReviveTimestamp, newEvmTimestamp, "Timestamp should migrate from EVM to Revive"); } + + function testImmutablesMigration() public { + vm.pvm(false); + + uint256 deploymentTimestamp = 1234567890; + vm.warp(deploymentTimestamp); + uint256 magicNumber = 0x42424242; + StorageWithImmutables immutableContract = new StorageWithImmutables(magicNumber); + + vm.makePersistent(address(immutableContract)); + + assertEq(immutableContract.getDeployedAt(), deploymentTimestamp, "Deployed timestamp should match in EVM"); + assertEq(immutableContract.getDeployer(), address(this), "Deployer should match in EVM"); + assertEq(immutableContract.getMagicNumber(), magicNumber, "Magic number should match in EVM"); + + vm.pvm(true); + + assertEq( + immutableContract.getDeployedAt(), deploymentTimestamp, "Deployed timestamp should be preserved in Revive" + ); + assertEq(immutableContract.getDeployer(), address(this), "Deployer should be preserved in 
Revive"); + assertEq(immutableContract.getMagicNumber(), magicNumber, "Magic number should be preserved in Revive"); + } + + // Implement the authorization callback interface + function onAuthorization(address caller, uint256 value) external returns (bool) { + // Simple authorization: allow if value is less than 1000 + return value < 1000; + } + + function testCallbackFromRevive() public { + CallbackContract callbackContract = new CallbackContract(); + // Try to execute with authorized value (should succeed) + uint256 authorizedValue = 500; + bool result = callbackContract.executeWithCallback(authorizedValue); + assertTrue(result, "Authorized callback should succeed"); + assertEq(callbackContract.getLastValue(), authorizedValue, "Last value should be updated"); + assertEq(callbackContract.getLastCaller(), address(this), "Last caller should be test contract"); + + // Try to execute with unauthorized value (should fail) + uint256 unauthorizedValue = 1500; + bool result2 = callbackContract.executeWithCallback(unauthorizedValue); + assertTrue(!result2, "Unauthorized callback should fail"); + assertEq(callbackContract.getLastValue(), authorizedValue, "Last value should not be updated"); + } } diff --git a/testdata/default/revive/GasMetering.t.sol b/testdata/default/revive/GasMetering.t.sol new file mode 100644 index 0000000000000..4328464e3d3a0 --- /dev/null +++ b/testdata/default/revive/GasMetering.t.sol @@ -0,0 +1,91 @@ +// SPDX-License-Identifier: MIT OR Apache-2.0 +pragma solidity ^0.8.18; + +import "ds-test/test.sol"; +import "cheats/Vm.sol"; + +contract Worker { + uint256 public result; + + function doWork() public returns (uint256) { + uint256 sum = 0; + for (uint256 i = 0; i < 100; i++) { + sum += i; + } + result = sum; + return sum; + } + + function expensiveWork() public returns (uint256) { + uint256 sum = 0; + for (uint256 i = 0; i < 1000; i++) { + sum += i; + } + result = sum; + return sum; + } +} + +contract GasMeteringTest is DSTest { + Vm constant vm = 
Vm(HEVM_ADDRESS); + Worker public worker; + + function setUp() public { + vm.pvm(true); + worker = new Worker(); + } + + function testPauseGasMeteringWithPvmCall() public { + uint256 gasStart = gasleft(); + worker.doWork(); + uint256 gasUsedNormal = gasStart - gasleft(); + + vm.pauseGasMetering(); + uint256 gasPausedStart = gasleft(); + worker.doWork(); + uint256 gasUsedPaused = gasPausedStart - gasleft(); + vm.resumeGasMetering(); + + assertTrue(gasUsedNormal > 0); + assertEq(gasUsedPaused, 0); + } + + function testResumeGasMeteringWithPvmCall() public { + vm.pauseGasMetering(); + worker.doWork(); + vm.resumeGasMetering(); + + uint256 gasStart = gasleft(); + worker.doWork(); + uint256 gasUsed = gasStart - gasleft(); + + assertTrue(gasUsed > 0); + } + + function testResetGasMeteringWithPvmCall() public { + uint256 gasStart = gasleft(); + worker.expensiveWork(); + uint256 gasAfterWork = gasleft(); + uint256 gasConsumed = gasStart - gasAfterWork; + + vm.resetGasMetering(); + uint256 gasAfterReset = gasleft(); + + assertTrue(gasAfterReset > gasAfterWork); + uint256 gasRecovered = gasAfterReset - gasAfterWork; + assertTrue(gasRecovered > gasConsumed / 2); + } + + function testCreateDuringPausedMetering() public { + vm.pauseGasMetering(); + uint256 gasStart = gasleft(); + + Worker newWorker = new Worker(); + newWorker.doWork(); + + uint256 gasUsed = gasStart - gasleft(); + vm.resumeGasMetering(); + + assertEq(gasUsed, 0); + } +} diff --git a/testdata/default/revive/MockCall.t.sol b/testdata/default/revive/MockCall.t.sol new file mode 100644 index 0000000000000..43b04d5d5c29b --- /dev/null +++ b/testdata/default/revive/MockCall.t.sol @@ -0,0 +1,422 @@ +// SPDX-License-Identifier: MIT OR Apache-2.0 +pragma solidity ^0.8.18; + +import "ds-test/test.sol"; +import "cheats/Vm.sol"; +import "../../default/logs/console.sol"; + +contract Mock { + uint256 state = 0; + + function numberA() public pure returns (uint256) { + return 1; + } + + function numberB() public pure returns 
(uint256) { + return 2; + } + + function numberBPayable() public payable returns (uint256) { + return 2; + } + + function add(uint256 a, uint256 b) public pure returns (uint256) { + return a + b; + } + + function pay(uint256 a) public payable returns (uint256) { + return a; + } + + function noReturnValue() public { + // Does nothing of value, but also ensures that Solidity will 100% + // generate an `extcodesize` check. + state += 1; + } +} + +contract NestedMock { + Mock private inner; + + constructor(Mock _inner) { + inner = _inner; + } + + function sum() public view returns (uint256) { + return inner.numberA() + inner.numberB(); + } + + function sumPay() public returns (uint256) { + return inner.numberA() + inner.numberBPayable{value: 10}(); + } +} + +contract NestedMockDelegateCall { + Mock private inner; + + constructor(Mock _inner) { + inner = _inner; + } + + function sum() public returns (uint256) { + (, bytes memory dataA) = address(inner).delegatecall(abi.encodeWithSelector(Mock.numberA.selector)); + (, bytes memory dataB) = address(inner).delegatecall(abi.encodeWithSelector(Mock.numberB.selector)); + return abi.decode(dataA, (uint256)) + abi.decode(dataB, (uint256)); + } +} + +contract MockCallTest is DSTest { + Vm constant vm = Vm(HEVM_ADDRESS); + + function testMockGetters() public { + vm.pvm(true); + Mock target = new Mock(); + + // pre-mock + assertEq(target.numberA(), 1); + assertEq(target.numberB(), 2); + + vm.mockCall(address(target), abi.encodeWithSelector(target.numberB.selector), abi.encode(10)); + + // post-mock + assertEq(target.numberA(), 1); + assertEq(target.numberB(), 10); + } + + function testMockNestedSimple() public { + vm.pvm(true); + + Mock inner = new Mock(); + NestedMock target = new NestedMock(inner); + + // pre-mock + assertEq(target.sum(), 3); + console.log("SUM BEFORE MOCK", address(inner)); + vm.mockCall(address(inner), abi.encodeWithSelector(inner.numberB.selector), abi.encode(9)); + + // post-mock + assertEq(target.sum(), 
10); + } + + function testMockNestedEmptyAccount() public { + vm.pvm(true); + + Mock inner = Mock(address(100)); + NestedMock target = new NestedMock(inner); + + vm.mockCall(address(inner), abi.encodeWithSelector(inner.numberB.selector), abi.encode(9)); + vm.mockCall(address(inner), abi.encodeWithSelector(inner.numberA.selector), abi.encode(1)); + + // post-mock + assertEq(target.sum(), 10); + } + + function testMockNestedPayDoesntTransfer() public { + vm.pvm(true); + + Mock inner = new Mock(); + NestedMock target = new NestedMock(inner); + + vm.mockCall(address(inner), abi.encodeWithSelector(inner.numberBPayable.selector), abi.encode(9)); + // Check balance of inner before and after call to ensure no ETH was transferred + uint256 balance_before = address(inner).balance; + assertEq(target.sumPay(), 10); + uint256 balance_after = address(inner).balance; + assertEq(balance_before, balance_after); + } + + // Ref: https://github.com/foundry-rs/foundry/issues/8066 + function testMockNestedDelegate() public { + vm.pvm(true); + + Mock inner = new Mock(); + NestedMockDelegateCall target = new NestedMockDelegateCall(inner); + + assertEq(target.sum(), 3); + + vm.mockCall(address(inner), abi.encodeWithSelector(inner.numberB.selector), abi.encode(9)); + + assertEq(target.sum(), 10); + } + + function testMockSelector() public { + vm.pvm(true); + + Mock target = new Mock(); + assertEq(target.add(5, 5), 10); + + vm.mockCall(address(target), abi.encodeWithSelector(target.add.selector), abi.encode(11)); + + assertEq(target.add(5, 5), 11); + } + + function testMockCalldata() public { + vm.pvm(true); + + Mock target = new Mock(); + assertEq(target.add(5, 5), 10); + assertEq(target.add(6, 4), 10); + + vm.mockCall(address(target), abi.encodeWithSelector(target.add.selector, 5, 5), abi.encode(11)); + + assertEq(target.add(5, 5), 11); + assertEq(target.add(6, 4), 10); + } + + function testClearMockedCalls() public { + vm.pvm(true); + + Mock target = new Mock(); + + 
vm.mockCall(address(target), abi.encodeWithSelector(target.numberB.selector), abi.encode(10)); + + assertEq(target.numberA(), 1); + assertEq(target.numberB(), 10); + + vm.clearMockedCalls(); + + assertEq(target.numberA(), 1); + assertEq(target.numberB(), 2); + } + + function testMockCallMultiplePartialMatch() public { + vm.pvm(true); + + Mock mock = new Mock(); + + vm.mockCall(address(mock), abi.encodeWithSelector(mock.add.selector), abi.encode(10)); + vm.mockCall(address(mock), abi.encodeWithSelector(mock.add.selector, 2), abi.encode(20)); + vm.mockCall(address(mock), abi.encodeWithSelector(mock.add.selector, 2, 3), abi.encode(30)); + + assertEq(mock.add(1, 2), 10); + assertEq(mock.add(2, 2), 20); + assertEq(mock.add(2, 3), 30); + } + + function testMockCallWithValue() public { + vm.pvm(true); + Mock mock = new Mock(); + + vm.mockCall(address(mock), 10, abi.encodeWithSelector(mock.pay.selector), abi.encode(10)); + + assertEq(mock.pay{value: 10}(1), 10); + assertEq(mock.pay(1), 1); + + for (uint256 i = 0; i < 100; i++) { + vm.mockCall(address(mock), i, abi.encodeWithSelector(mock.pay.selector), abi.encode(i * 2)); + } + + assertEq(mock.pay(1), 0); + assertEq(mock.pay{value: 10}(1), 20); + assertEq(mock.pay{value: 50}(1), 100); + } + + function testMockCallWithValueCalldataPrecedence() public { + vm.pvm(true); + + Mock mock = new Mock(); + + vm.mockCall(address(mock), 10, abi.encodeWithSelector(mock.pay.selector), abi.encode(10)); + vm.mockCall(address(mock), abi.encodeWithSelector(mock.pay.selector, 2), abi.encode(2)); + + assertEq(mock.pay{value: 10}(1), 10); + assertEq(mock.pay{value: 10}(2), 2); + assertEq(mock.pay(2), 2); + } + + function testMockCallEmptyAccount() public { + vm.pvm(true); + + Mock mock = Mock(address(100)); + + vm.mockCall(address(mock), abi.encodeWithSelector(mock.add.selector), abi.encode(10)); + vm.mockCall(address(mock), mock.noReturnValue.selector, abi.encode()); + + assertEq(mock.add(1, 2), 10); + mock.noReturnValue(); + } +} + +contract 
MockCallRevertTest is DSTest { + Vm constant vm = Vm(HEVM_ADDRESS); + + error TestError(bytes msg); + + bytes constant ERROR_MESSAGE = "ERROR_MESSAGE"; + + function testMockGettersRevert() public { + vm.pvm(true); + + Mock target = new Mock(); + + // pre-mock + assertEq(target.numberA(), 1); + assertEq(target.numberB(), 2); + + vm.mockCallRevert(address(target), target.numberB.selector, ERROR_MESSAGE); + + // post-mock + assertEq(target.numberA(), 1); + try target.numberB() { + revert(); + } catch (bytes memory err) { + require(keccak256(err) == keccak256(ERROR_MESSAGE)); + } + } + + function testMockRevertWithCustomError() public { + vm.pvm(true); + + Mock target = new Mock(); + + assertEq(target.numberA(), 1); + assertEq(target.numberB(), 2); + + bytes memory customError = abi.encodeWithSelector(TestError.selector, ERROR_MESSAGE); + + vm.mockCallRevert(address(target), abi.encodeWithSelector(target.numberB.selector), customError); + + assertEq(target.numberA(), 1); + try target.numberB() { + revert(); + } catch (bytes memory err) { + require(keccak256(err) == keccak256(customError)); + } + } + + function testMockNestedRevert() public { + vm.pvm(true); + Mock inner = new Mock(); + NestedMock target = new NestedMock(inner); + + assertEq(target.sum(), 3); + + vm.mockCallRevert(address(inner), abi.encodeWithSelector(inner.numberB.selector), ERROR_MESSAGE); + + try target.sum() { + revert(); + } catch (bytes memory err) { + require(keccak256(err) == keccak256(ERROR_MESSAGE)); + } + } + + function testMockCalldataRevert() public { + vm.pvm(true); + + Mock target = new Mock(); + assertEq(target.add(5, 5), 10); + assertEq(target.add(6, 4), 10); + + vm.mockCallRevert(address(target), abi.encodeWithSelector(target.add.selector, 5, 5), ERROR_MESSAGE); + + assertEq(target.add(6, 4), 10); + + try target.add(5, 5) { + revert(); + } catch (bytes memory err) { + require(keccak256(err) == keccak256(ERROR_MESSAGE)); + } + } + + function testClearMockRevertedCalls() public { + 
vm.pvm(true); + + Mock target = new Mock(); + + vm.mockCallRevert(address(target), abi.encodeWithSelector(target.numberB.selector), ERROR_MESSAGE); + + vm.clearMockedCalls(); + + assertEq(target.numberA(), 1); + assertEq(target.numberB(), 2); + } + + function testMockCallRevertPartialMatch() public { + vm.pvm(true); + + Mock mock = new Mock(); + + vm.mockCallRevert(address(mock), abi.encodeWithSelector(mock.add.selector, 2), ERROR_MESSAGE); + + assertEq(mock.add(1, 2), 3); + + try mock.add(2, 3) { + revert(); + } catch (bytes memory err) { + require(keccak256(err) == keccak256(ERROR_MESSAGE)); + } + } + + function testMockCallRevertWithValue() public { + vm.pvm(true); + + Mock mock = new Mock(); + + vm.mockCallRevert(address(mock), 10, abi.encodeWithSelector(mock.pay.selector), ERROR_MESSAGE); + + assertEq(mock.pay(1), 1); + assertEq(mock.pay(2), 2); + + try mock.pay{value: 10}(1) { + revert(); + } catch (bytes memory err) { + require(keccak256(err) == keccak256(ERROR_MESSAGE)); + } + } + + function testMockCallResetsMockCallRevert() public { + vm.pvm(true); + + Mock mock = new Mock(); + + vm.mockCallRevert(address(mock), abi.encodeWithSelector(mock.add.selector), ERROR_MESSAGE); + + vm.mockCall(address(mock), abi.encodeWithSelector(mock.add.selector), abi.encode(5)); + assertEq(mock.add(2, 3), 5); + } + + function testMockCallRevertResetsMockCall() public { + vm.pvm(true); + + Mock mock = new Mock(); + + vm.mockCall(address(mock), abi.encodeWithSelector(mock.add.selector), abi.encode(5)); + assertEq(mock.add(2, 3), 5); + + vm.mockCallRevert(address(mock), abi.encodeWithSelector(mock.add.selector), ERROR_MESSAGE); + + try mock.add(2, 3) { + revert(); + } catch (bytes memory err) { + require(keccak256(err) == keccak256(ERROR_MESSAGE)); + } + } + + function testMockCallRevertWithCall() public { + vm.pvm(true); + + Mock mock = new Mock(); + + bytes memory customError = abi.encodeWithSelector(TestError.selector, ERROR_MESSAGE); + + vm.mockCallRevert(address(mock), 
abi.encodeWithSelector(mock.add.selector), customError); + + (bool success, bytes memory data) = address(mock).call(abi.encodeWithSelector(Mock.add.selector, 2, 3)); + assertEq(success, false); + assertEq(data, customError); + } + + function testMockCallEmptyAccountRevert() public { + vm.pvm(true); + + Mock mock = Mock(address(100)); + + vm.mockCallRevert(address(mock), abi.encodeWithSelector(mock.add.selector), ERROR_MESSAGE); + + try mock.add(2, 3) { + revert(); + } catch (bytes memory err) { + require(keccak256(err) == keccak256(ERROR_MESSAGE)); + } + } +} diff --git a/testdata/default/revive/MockCalls.t.sol b/testdata/default/revive/MockCalls.t.sol new file mode 100644 index 0000000000000..445279db832e5 --- /dev/null +++ b/testdata/default/revive/MockCalls.t.sol @@ -0,0 +1,65 @@ +// SPDX-License-Identifier: MIT OR Apache-2.0 +pragma solidity ^0.8.18; + +import "ds-test/test.sol"; +import "cheats/Vm.sol"; +import "../../default/logs/console.sol"; + +contract MockCallsTest is DSTest { + Vm constant vm = Vm(HEVM_ADDRESS); + + function testMockCallsLastShouldPersist() public { + vm.pvm(true); + address mockUser = vm.addr(vm.randomUint()); + address mockErc20 = vm.addr(vm.randomUint()); + bytes memory data = abi.encodeWithSignature("balanceOf(address)", mockUser); + bytes[] memory mocks = new bytes[](2); + mocks[0] = abi.encode(2 ether); + mocks[1] = abi.encode(7.219 ether); + vm.mockCalls(mockErc20, data, mocks); + (, bytes memory ret1) = mockErc20.call(data); + assertEq(abi.decode(ret1, (uint256)), 2 ether); + (, bytes memory ret2) = mockErc20.call(data); + assertEq(abi.decode(ret2, (uint256)), 7.219 ether); + (, bytes memory ret3) = mockErc20.call(data); + assertEq(abi.decode(ret3, (uint256)), 7.219 ether); + } + + function testMockCallsWithValue() public { + vm.pvm(true); + + address mockUser = vm.addr(vm.randomUint()); + address mockErc20 = vm.addr(vm.randomUint()); + bytes memory data = abi.encodeWithSignature("balanceOf(address)", mockUser); + bytes[] memory 
mocks = new bytes[](3); + mocks[0] = abi.encode(2 ether); + mocks[1] = abi.encode(1 ether); + mocks[2] = abi.encode(6.423 ether); + vm.mockCalls(mockErc20, 1 ether, data, mocks); + (, bytes memory ret1) = mockErc20.call{value: 1 ether}(data); + assertEq(abi.decode(ret1, (uint256)), 2 ether); + (, bytes memory ret2) = mockErc20.call{value: 1 ether}(data); + assertEq(abi.decode(ret2, (uint256)), 1 ether); + (, bytes memory ret3) = mockErc20.call{value: 1 ether}(data); + assertEq(abi.decode(ret3, (uint256)), 6.423 ether); + } + + function testMockCalls() public { + vm.pvm(true); + + address mockUser = vm.addr(vm.randomUint()); + address mockErc20 = vm.addr(vm.randomUint()); + bytes memory data = abi.encodeWithSignature("balanceOf(address)", mockUser); + bytes[] memory mocks = new bytes[](3); + mocks[0] = abi.encode(2 ether); + mocks[1] = abi.encode(1 ether); + mocks[2] = abi.encode(6.423 ether); + vm.mockCalls(mockErc20, data, mocks); + (, bytes memory ret1) = mockErc20.call(data); + assertEq(abi.decode(ret1, (uint256)), 2 ether); + (, bytes memory ret2) = mockErc20.call(data); + assertEq(abi.decode(ret2, (uint256)), 1 ether); + (, bytes memory ret3) = mockErc20.call(data); + assertEq(abi.decode(ret3, (uint256)), 6.423 ether); + } +} diff --git a/testdata/default/revive/MockFunction.t.sol b/testdata/default/revive/MockFunction.t.sol new file mode 100644 index 0000000000000..c3b99ad812215 --- /dev/null +++ b/testdata/default/revive/MockFunction.t.sol @@ -0,0 +1,75 @@ +pragma solidity ^0.8.18; + +import "ds-test/test.sol"; +import "cheats/Vm.sol"; +import "../../default/logs/console.sol"; + +contract MockFunctionContract { + uint256 public a; + + function mocked_function() public { + a = 321; + } + + function mocked_args_function(uint256 x) public { + a = 321 + x; + } +} + +contract ModelMockFunctionContract { + uint256 public a; + + function mocked_function() public { + a = 123; + } + + function mocked_args_function(uint256 x) public { + a = 123 + x; + } +} + +contract 
MockFunctionTest is DSTest { + MockFunctionContract my_contract; + ModelMockFunctionContract model_contract; + Vm vm = Vm(HEVM_ADDRESS); + + function setUp() public { + vm.pvm(true); + my_contract = new MockFunctionContract(); + model_contract = new ModelMockFunctionContract(); + } + + function test_mockx_function() public { + vm.mockFunction( + address(my_contract), + address(model_contract), + abi.encodeWithSelector(MockFunctionContract.mocked_function.selector) + ); + my_contract.mocked_function(); + assertEq(my_contract.a(), 123); + } + + function test_mock_function_concrete_args() public { + vm.mockFunction( + address(my_contract), + address(model_contract), + abi.encodeWithSelector(MockFunctionContract.mocked_args_function.selector, 456) + ); + my_contract.mocked_args_function(456); + assertEq(my_contract.a(), 123 + 456); + my_contract.mocked_args_function(567); + assertEq(my_contract.a(), 321 + 567); + } + + function test_mock_function_all_args() public { + vm.mockFunction( + address(my_contract), + address(model_contract), + abi.encodeWithSelector(MockFunctionContract.mocked_args_function.selector) + ); + my_contract.mocked_args_function(678); + assertEq(my_contract.a(), 123 + 678); + my_contract.mocked_args_function(789); + assertEq(my_contract.a(), 123 + 789); + } +} diff --git a/testdata/default/revive/Prank.t.sol b/testdata/default/revive/Prank.t.sol new file mode 100644 index 0000000000000..45383c1b62824 --- /dev/null +++ b/testdata/default/revive/Prank.t.sol @@ -0,0 +1,701 @@ +// SPDX-License-Identifier: MIT OR Apache-2.0 +pragma solidity ^0.8.18; + +import "ds-test/test.sol"; +import "cheats/Vm.sol"; +import "../../default/logs/console.sol"; + +contract Victim { + function assertCallerAndOrigin( + address expectedSender, + string memory senderMessage, + address expectedOrigin, + string memory originMessage + ) public view { + require(msg.sender == expectedSender, senderMessage); + require(tx.origin == expectedOrigin, originMessage); + } +} + 
+contract ConstructorVictim is Victim { + constructor( + address expectedSender, + string memory senderMessage, + address expectedOrigin, + string memory originMessage + ) { + require(msg.sender == expectedSender, senderMessage); + require(tx.origin == expectedOrigin, originMessage); + } +} + +contract NestedVictim { + Victim innerVictim; + + constructor(Victim victim) { + innerVictim = victim; + } + + function assertCallerAndOrigin( + address expectedSender, + string memory senderMessage, + address expectedOrigin, + string memory originMessage + ) public view { + require(msg.sender == expectedSender, senderMessage); + require(tx.origin == expectedOrigin, originMessage); + innerVictim.assertCallerAndOrigin( + address(this), + "msg.sender was incorrectly set for nested victim", + expectedOrigin, + "tx.origin was incorrectly set for nested victim" + ); + } +} + +contract NestedPranker { + Vm constant vm = Vm(address(bytes20(uint160(uint256(keccak256("hevm cheat code")))))); + + address newSender; + address newOrigin; + address oldOrigin; + + constructor(address _newSender, address _newOrigin) { + newSender = _newSender; + newOrigin = _newOrigin; + oldOrigin = tx.origin; + } + + function incompletePrank() public { + vm.startPrank(newSender, newOrigin); + } + + function completePrank(NestedVictim victim) public { + vm.pvm(true); + + victim.assertCallerAndOrigin( + newSender, "msg.sender was not set in nested prank", newOrigin, "tx.origin was not set in nested prank" + ); + + vm.pvm(false); + + vm.stopPrank(); + + vm.pvm(true); + + // Ensure we cleaned up correctly + victim.assertCallerAndOrigin( + address(this), + "msg.sender was not cleaned up in nested prank", + oldOrigin, + "tx.origin was not cleaned up in nested prank" + ); + } +} + +contract ImplementationTest { + uint256 public num; + address public sender; + + function assertCorrectCaller(address expectedSender) public { + require(msg.sender == expectedSender); + } + + function assertCorrectOrigin(address 
expectedOrigin) public { + require(tx.origin == expectedOrigin); + } + + function setNum(uint256 _num) public { + num = _num; + } +} + +contract ProxyTest { + uint256 public num; + address public sender; +} + +contract PrankTest is DSTest { + Vm constant vm = Vm(HEVM_ADDRESS); + + function testPrankDelegateCallPrank2() public { + vm.pvm(true); + ProxyTest proxy = new ProxyTest(); + ImplementationTest impl = new ImplementationTest(); + vm.prank(address(proxy), true); + // console.log("Proxy address:", address(proxy)); + // console.log("Impl address:", address(impl)); + // console.log("THIS address:", address(this)); + // Assert correct `msg.sender` + (bool success,) = + address(impl).delegatecall(abi.encodeWithSignature("assertCorrectCaller(address)", address(proxy))); + + require(success, "prank2: delegate call failed assertCorrectCaller"); + + // Assert storage updates + uint256 num = 42; + vm.prank(address(proxy), true); + (bool successTwo,) = address(impl).delegatecall(abi.encodeWithSignature("setNum(uint256)", num)); + require(successTwo, "prank2: delegate call failed setNum"); + require(proxy.num() == num, "prank2: proxy's storage was not set correctly"); + vm.stopPrank(); + } + + function testPrankDelegateCallStartPrank2() public { + vm.pvm(true); + ProxyTest proxy = new ProxyTest(); + ImplementationTest impl = new ImplementationTest(); + vm.startPrank(address(proxy), true); + + // Assert correct `msg.sender` + (bool success,) = + address(impl).delegatecall(abi.encodeWithSignature("assertCorrectCaller(address)", address(proxy))); + require(success, "startPrank2: delegate call failed assertCorrectCaller"); + + // Assert storage updates + uint256 num = 42; + (bool successTwo,) = address(impl).delegatecall(abi.encodeWithSignature("setNum(uint256)", num)); + require(successTwo, "startPrank2: delegate call failed setNum"); + require(proxy.num() == num, "startPrank2: proxy's storage was not set correctly"); + vm.stopPrank(); + } + + function 
testPrankDelegateCallPrank3() public { + address origin = address(999); + vm.assume(isNotReserved(origin)); + vm.pvm(true); + ProxyTest proxy = new ProxyTest(); + ImplementationTest impl = new ImplementationTest(); + vm.prank(address(proxy), origin, true); + + // Assert correct `msg.sender` + (bool success,) = + address(impl).delegatecall(abi.encodeWithSignature("assertCorrectCaller(address)", address(proxy))); + require(success, "prank3: delegate call failed assertCorrectCaller"); + + // Assert correct `tx.origin` + vm.prank(address(proxy), origin, true); + (bool successTwo,) = address(impl).delegatecall(abi.encodeWithSignature("assertCorrectOrigin(address)", origin)); + require(successTwo, "prank3: delegate call failed assertCorrectOrigin"); + + // Assert storage updates + uint256 num = 42; + vm.prank(address(proxy), address(origin), true); + (bool successThree,) = address(impl).delegatecall(abi.encodeWithSignature("setNum(uint256)", num)); + require(successThree, "prank3: delegate call failed setNum"); + require(proxy.num() == num, "prank3: proxy's storage was not set correctly"); + vm.stopPrank(); + } + + function testPrankDelegateCallStartPrank3(address origin) public { + vm.assume(isNotReserved(origin)); + vm.pvm(true); + + ProxyTest proxy = new ProxyTest(); + ImplementationTest impl = new ImplementationTest(); + vm.startPrank(address(proxy), origin, true); + + // Assert correct `msg.sender` + (bool success,) = + address(impl).delegatecall(abi.encodeWithSignature("assertCorrectCaller(address)", address(proxy))); + require(success, "startPrank3: delegate call failed assertCorrectCaller"); + + // Assert correct `tx.origin` + (bool successTwo,) = address(impl).delegatecall(abi.encodeWithSignature("assertCorrectOrigin(address)", origin)); + require(successTwo, "startPrank3: delegate call failed assertCorrectOrigin"); + + // Assert storage updates + uint256 num = 42; + (bool successThree,) = address(impl).delegatecall(abi.encodeWithSignature("setNum(uint256)", 
num)); + require(successThree, "startPrank3: delegate call failed setNum"); + require(proxy.num() == num, "startPrank3: proxy's storage was not set correctly"); + vm.stopPrank(); + } + + /// forge-config: default.allow_internal_expect_revert = true + function testRevertIfPrankDelegateCalltoEOA() public { + uint256 privateKey = uint256(keccak256(abi.encodePacked("alice"))); + address alice = vm.addr(privateKey); + ImplementationTest impl = new ImplementationTest(); + vm.expectRevert("vm.prank: cannot `prank` delegate call from an EOA"); + vm.prank(alice, true); + // Should fail when EOA pranked with delegatecall. + address(impl).delegatecall(abi.encodeWithSignature("assertCorrectCaller(address)", alice)); + } + + function testPrankSender(address sender) public { + vm.assume(isNotReserved(sender)); + // Perform the prank + vm.pvm(true); + + Victim victim = new Victim(); + vm.prank(sender); + victim.assertCallerAndOrigin( + sender, "msg.sender was not set during prank", tx.origin, "tx.origin invariant failed" + ); + + // Ensure we cleaned up correctly + victim.assertCallerAndOrigin( + address(this), "msg.sender was not cleaned up", tx.origin, "tx.origin invariant failed" + ); + } + + function testPrankOrigin(address sender, address origin) public { + vm.assume(isNotReserved(sender)); + vm.assume(isNotReserved(origin)); + address oldOrigin = tx.origin; + vm.pvm(true); + + // Perform the prank + Victim victim = new Victim(); + vm.prank(sender, origin); + victim.assertCallerAndOrigin( + sender, "msg.sender was not set during prank", origin, "tx.origin was not set during prank" + ); + + // Ensure we cleaned up correctly + victim.assertCallerAndOrigin( + address(this), "msg.sender was not cleaned up", oldOrigin, "tx.origin was not cleaned up" + ); + } + + function testPrank1AfterPrank0(address sender, address origin) public { + vm.assume(isNotReserved(sender)); + vm.assume(isNotReserved(origin)); + // Perform the prank + address oldOrigin = tx.origin; + vm.pvm(true); + + 
Victim victim = new Victim(); + vm.prank(sender); + victim.assertCallerAndOrigin( + sender, "msg.sender was not set during prank", oldOrigin, "tx.origin was not set during prank" + ); + + // Ensure we cleaned up correctly + victim.assertCallerAndOrigin( + address(this), "msg.sender was not cleaned up", oldOrigin, "tx.origin invariant failed" + ); + + // Overwrite the prank + vm.prank(sender, origin); + victim.assertCallerAndOrigin( + sender, "msg.sender was not set during prank", origin, "tx.origin invariant failed" + ); + + // Ensure we cleaned up correctly + victim.assertCallerAndOrigin( + address(this), "msg.sender was not cleaned up", oldOrigin, "tx.origin invariant failed" + ); + } + + function isNotReserved(address addr) internal returns (bool) { + // Check for zero address and common precompiles (addresses 1-9) + if ( + addr == address(0) || addr == address(1) || addr == address(2) || addr == address(3) || addr == address(4) + || addr == address(5) || addr == address(6) || addr == address(7) || addr == address(8) + || addr == address(9) || addr == address(10) || addr == address(11) || addr == address(12) + || addr == address(13) || addr == address(14) || addr == address(15) || addr == address(this) + ) { + return false; + } + return true; + } + + function testPrank0AfterPrank1(address sender, address origin) public { + vm.assume(isNotReserved(sender)); + vm.assume(isNotReserved(origin)); + // Perform the prank + address oldOrigin = tx.origin; + vm.pvm(true); + Victim victim = new Victim(); + console.log("Balance of sender before prank:", sender.balance); + console.log("Balance of origin before prank:", origin.balance); + vm.prank(sender, origin); + victim.assertCallerAndOrigin( + sender, "msg.sender was not set during prank", origin, "tx.origin was not set during prank" + ); + + console.log("After first prank - msg.sender:", address(this)); + // Ensure we cleaned up correctly + victim.assertCallerAndOrigin( + address(this), "msg.sender was not cleaned up", 
oldOrigin, "tx.origin invariant failed" + ); + + // Overwrite the prank + vm.prank(sender); + victim.assertCallerAndOrigin( + sender, "msg.sender was not set during prank", oldOrigin, "tx.origin invariant failed" + ); + + // Ensure we cleaned up correctly + victim.assertCallerAndOrigin( + address(this), "msg.sender was not cleaned up", oldOrigin, "tx.origin invariant failed" + ); + } + + function testStartPrank0AfterPrank1(address sender, address origin) public { + vm.assume(isNotReserved(sender)); + vm.assume(isNotReserved(origin)); + + // Perform the prank + vm.pvm(true); + address oldOrigin = tx.origin; + Victim victim = new Victim(); + vm.startPrank(sender, origin); + victim.assertCallerAndOrigin( + sender, "msg.sender was not set during prank", origin, "tx.origin was not set during prank" + ); + + // Overwrite the prank + vm.startPrank(sender); + victim.assertCallerAndOrigin( + sender, "msg.sender was not set during prank", oldOrigin, "tx.origin invariant failed" + ); + + vm.stopPrank(); + // Ensure we cleaned up correctly after stopping the prank + victim.assertCallerAndOrigin( + address(this), "msg.sender was not cleaned up", oldOrigin, "tx.origin invariant failed" + ); + } + + function testStartPrank1AfterStartPrank0(address sender, address origin) public { + vm.assume(isNotReserved(sender)); + vm.assume(isNotReserved(origin)); + // Perform the prank + vm.pvm(true); + // Perform the prank + address oldOrigin = tx.origin; + Victim victim = new Victim(); + vm.startPrank(sender); + victim.assertCallerAndOrigin( + sender, "msg.sender was not set during prank", oldOrigin, "tx.origin was set during prank incorrectly" + ); + + // Ensure prank is still up as startPrank covers multiple calls + victim.assertCallerAndOrigin( + sender, "msg.sender was cleaned up incorrectly", oldOrigin, "tx.origin invariant failed" + ); + + // Overwrite the prank + vm.startPrank(sender, origin); + victim.assertCallerAndOrigin(sender, "msg.sender was not set during prank", origin, 
"tx.origin was not set"); + + // Ensure prank is still up as startPrank covers multiple calls + victim.assertCallerAndOrigin( + sender, "msg.sender was cleaned up incorrectly", origin, "tx.origin invariant failed" + ); + + vm.stopPrank(); + // Ensure everything is back to normal after stopPrank + victim.assertCallerAndOrigin( + address(this), "msg.sender was not cleaned up", oldOrigin, "tx.origin invariant failed" + ); + } + + /// forge-config: default.allow_internal_expect_revert = true + function testRevertIfOverwriteUnusedPrank(address sender, address origin) public { + // Set the prank, but not use it + vm.assume(isNotReserved(sender)); + vm.assume(isNotReserved(origin)); + // Perform the prank + vm.pvm(true); + address oldOrigin = tx.origin; + Victim victim = new Victim(); + vm.startPrank(sender, origin); + // try to overwrite the prank. This should fail. + vm.expectRevert("vm.startPrank: cannot overwrite a prank until it is applied at least once"); + vm.startPrank(address(this), origin); + } + + /// forge-config: default.allow_internal_expect_revert = true + function testRevertIfOverwriteUnusedPrankAfterSuccessfulPrank(address sender, address origin) public { + // Set the prank, but not use it + vm.assume(isNotReserved(sender)); + vm.assume(isNotReserved(origin)); + // Set the prank, but not use it + address oldOrigin = tx.origin; + vm.pvm(true); + Victim victim = new Victim(); + vm.startPrank(sender, origin); + victim.assertCallerAndOrigin( + sender, "msg.sender was not set during prank", origin, "tx.origin was set during prank incorrectly" + ); + vm.startPrank(address(this), origin); + // try to overwrite the prank. This should fail. 
+ vm.expectRevert("vm.startPrank: cannot overwrite a prank until it is applied at least once"); + vm.startPrank(sender, origin); + } + + function testStartPrank0AfterStartPrank1(address sender, address origin) public { + // Perform the prank + // Set the prank, but not use it + vm.assume(isNotReserved(sender)); + vm.assume(isNotReserved(origin)); + // Perform the prank + vm.pvm(true); + address oldOrigin = tx.origin; + Victim victim = new Victim(); + vm.startPrank(sender, origin); + victim.assertCallerAndOrigin( + sender, "msg.sender was not set during prank", origin, "tx.origin was not set during prank" + ); + + // Ensure prank is still ongoing as we haven't called stopPrank + victim.assertCallerAndOrigin( + sender, "msg.sender was cleaned up incorrectly", origin, "tx.origin was cleaned up incorrectly" + ); + + // Overwrite the prank + vm.startPrank(sender); + victim.assertCallerAndOrigin( + sender, "msg.sender was not set during prank", oldOrigin, "tx.origin was not reset correctly" + ); + + vm.stopPrank(); + // Ensure we cleaned up correctly + victim.assertCallerAndOrigin( + address(this), "msg.sender was not cleaned up", oldOrigin, "tx.origin invariant failed" + ); + } + + function testPrankConstructorSender(address sender) public { + // Set the prank, but not use it + vm.assume(isNotReserved(sender)); + // Perform the prank + vm.pvm(true); + vm.prank(sender); + ConstructorVictim victim = new ConstructorVictim( + sender, "msg.sender was not set during prank", tx.origin, "tx.origin invariant failed" + ); + + // Ensure we cleaned up correctly + victim.assertCallerAndOrigin( + address(this), "msg.sender was not cleaned up", tx.origin, "tx.origin invariant failed" + ); + } + + function testPrankConstructorOrigin(address sender, address origin) public { + vm.assume(isNotReserved(sender)); + vm.assume(isNotReserved(origin)); + // Perform the prank + vm.pvm(true); + // Perform the prank + vm.prank(sender, origin); + ConstructorVictim victim = new ConstructorVictim( + 
sender, "msg.sender was not set during prank", origin, "tx.origin was not set during prank" + ); + + // Ensure we cleaned up correctly + victim.assertCallerAndOrigin( + address(this), "msg.sender was not cleaned up", tx.origin, "tx.origin was not cleaned up" + ); + } + + function testPrankStartStop(address sender, address origin) public { + vm.assume(isNotReserved(sender)); + vm.assume(isNotReserved(origin)); + // Perform the prank + vm.pvm(true); + address oldOrigin = tx.origin; + + // Perform the prank + Victim victim = new Victim(); + vm.startPrank(sender, origin); + victim.assertCallerAndOrigin( + sender, "msg.sender was not set during prank", origin, "tx.origin was not set during prank" + ); + victim.assertCallerAndOrigin( + sender, + "msg.sender was not set during prank (call 2)", + origin, + "tx.origin was not set during prank (call 2)" + ); + vm.stopPrank(); + + // Ensure we cleaned up correctly + victim.assertCallerAndOrigin( + address(this), "msg.sender was not cleaned up", oldOrigin, "tx.origin was not cleaned up" + ); + } + + function testPrankStartStopConstructor(address sender, address origin) public { + // Perform the prank + vm.assume(isNotReserved(sender)); + vm.assume(isNotReserved(origin)); + // Perform the prank + vm.pvm(true); + vm.startPrank(sender, origin); + ConstructorVictim victim = new ConstructorVictim( + sender, "msg.sender was not set during prank", origin, "tx.origin was not set during prank" + ); + new ConstructorVictim( + sender, + "msg.sender was not set during prank (call 2)", + origin, + "tx.origin was not set during prank (call 2)" + ); + vm.stopPrank(); + + // Ensure we cleaned up correctly + victim.assertCallerAndOrigin( + address(this), "msg.sender was not cleaned up", tx.origin, "tx.origin was not cleaned up" + ); + } + + /// This test checks that depth is working correctly with respect + /// to the `startPrank` and `stopPrank` cheatcodes. 
+ /// + /// The nested pranker calls `startPrank` but does not call + /// `stopPrank` at first. + /// + /// Then, we call our victim from the main test: this call + /// should NOT have altered `msg.sender` or `tx.origin`. + /// + /// Then, the nested pranker will complete their prank: this call + /// SHOULD have altered `msg.sender` and `tx.origin`. + /// + /// Each call to the victim calls yet another victim. The expected + /// behavior for this call is that `tx.origin` is altered when + /// the nested pranker calls, otherwise not. In both cases, + /// `msg.sender` should be the address of the first victim. + /// + /// Success case: + /// + /// ┌────┐ ┌───────┐ ┌──────┐ ┌──────┐ ┌────────────┐ + /// │Test│ │Pranker│ │Vm│ │Victim│ │Inner Victim│ + /// └─┬──┘ └───┬───┘ └──┬───┘ └──┬───┘ └─────┬──────┘ + /// │ │ │ │ │ + /// │incompletePrank()│ │ │ │ + /// │────────────────>│ │ │ │ + /// │ │ │ │ │ + /// │ │startPrank()│ │ │ + /// │ │───────────>│ │ │ + /// │ │ │ │ │ + /// │ should not be pranked│ │ │ + /// │──────────────────────────────────────>│ │ + /// │ │ │ │ │ + /// │ │ │ │ should not be pranked │ + /// │ │ │ │────────────────────────>│ + /// │ │ │ │ │ + /// │ completePrank() │ │ │ │ + /// │────────────────>│ │ │ │ + /// │ │ │ │ │ + /// │ │ should be pranked │ │ + /// │ │────────────────────>│ │ + /// │ │ │ │ │ + /// │ │ │ │only tx.origin is pranked│ + /// │ │ │ │────────────────────────>│ + /// │ │ │ │ │ + /// │ │stopPrank() │ │ │ + /// │ │───────────>│ │ │ + /// │ │ │ │ │ + /// │ │should not be pranked│ │ + /// │ │────────────────────>│ │ + /// │ │ │ │ │ + /// │ │ │ │ should not be pranked │ + /// │ │ │ │────────────────────────>│ + /// ┌─┴──┐ ┌───┴───┐ ┌──┴───┐ ┌──┴───┐ ┌─────┴──────┐ + /// │Test│ │Pranker│ │Vm│ │Victim│ │Inner Victim│ + /// └────┘ └───────┘ └──────┘ └──────┘ └────────────┘ + /// If this behavior is incorrectly implemented then the victim + /// will be pranked the first time it is called. + /// + /// !!!!! 
Currently failing until switch back to evm is added !!!! + // function testPrankComplex(address sender, address origin) public { + // vm.assume(isNotReserved(sender)); + // vm.assume(isNotReserved(origin)); + // // Perform the prank + // address oldOrigin = tx.origin; + + // NestedPranker pranker = new NestedPranker(sender, origin); + + // vm.pvm(true); + // Victim innerVictim = new Victim(); + // NestedVictim victim = new NestedVictim(innerVictim); + + // vm.pvm(false); + // pranker.incompletePrank(); + // vm.pvm(true); + + // victim.assertCallerAndOrigin( + // address(this), + // "msg.sender was altered at an incorrect depth", + // oldOrigin, + // "tx.origin was altered at an incorrect depth" + // ); + + // pranker.completePrank(victim); + // } + + /// Checks that `tx.origin` is set for all subcalls of a `prank`. + /// + /// Ref: issue #1210 + function testTxOriginInNestedPrank(address sender, address origin) public { + vm.assume(isNotReserved(sender)); + vm.assume(isNotReserved(origin)); + // Perform the prank + vm.pvm(true); + address oldSender = msg.sender; + address oldOrigin = tx.origin; + + Victim innerVictim = new Victim(); + NestedVictim victim = new NestedVictim(innerVictim); + + vm.prank(sender, origin); + victim.assertCallerAndOrigin( + sender, "msg.sender was not set correctly", origin, "tx.origin was not set correctly" + ); + } +} + +contract Issue9990 is DSTest { + Vm constant vm = Vm(address(bytes20(uint160(uint256(keccak256("hevm cheat code")))))); + + // TODO: Enable when Etch support is merged. 
+ // function testDelegatePrank() external { + // A a = new A(); + // vm.etch(address(0x11111), hex"11"); + // vm.startPrank(address(0x11111), true); + // (bool success,) = address(a).delegatecall(abi.encodeWithSelector(A.foo.selector)); + // require(success, "MyTest: error calling foo on A"); + // vm.stopPrank(); + // } +} + +// Contracts for DELEGATECALL test case: testDelegatePrank +contract A { + function foo() external { + require(address(0x11111) == msg.sender, "wrong msg.sender in A"); + require(address(0x11111) == address(this), "wrong address(this) in A"); + B b = new B(); + (bool success,) = address(b).call(abi.encodeWithSelector(B.bar.selector)); + require(success, "A: error calling B.bar"); + } +} + +contract B { + function bar() external { + require(address(0x11111) == msg.sender, "wrong msg.sender in B"); + require(0x769A6A5f81bD725e4302751162A7cb30482A222d == address(this), "wrong address(this) in B"); + C c = new C(); + (bool success,) = address(c).delegatecall(abi.encodeWithSelector(C.bar.selector)); + require(success, "B: error calling C.bar"); + } +} + +contract C { + function bar() external view { + require(address(0x11111) == msg.sender, "wrong msg.sender in C"); + require(0x769A6A5f81bD725e4302751162A7cb30482A222d == address(this), "wrong address(this) in C"); + } +} + +contract Counter { + uint256 number; + + function increment() external { + number++; + } +} diff --git a/testdata/default/revive/TxGasPrice.t.sol b/testdata/default/revive/TxGasPrice.t.sol new file mode 100644 index 0000000000000..aa107f4c8fb9a --- /dev/null +++ b/testdata/default/revive/TxGasPrice.t.sol @@ -0,0 +1,71 @@ +// SPDX-License-Identifier: MIT OR Apache-2.0 +pragma solidity ^0.8.18; + +import "ds-test/test.sol"; +import "cheats/Vm.sol"; + +contract GasPriceChecker { + function getGasPrice() public view returns (uint256) { + return tx.gasprice; + } +} + +contract TxGasPriceTest is DSTest { + Vm constant vm = Vm(HEVM_ADDRESS); + + function testTxGasPriceWorks() public { 
+ // Set a new gas price + uint256 newGasPrice = 100_000_000_000; // 100 gwei + vm.txGasPrice(newGasPrice); + + // Verify the gas price was updated + assertEq(tx.gasprice, newGasPrice, "gas price should be updated"); + } + + function testTxGasPriceWorksWithZero() public { + // Set gas price to zero + vm.txGasPrice(0); + + // Verify the gas price was updated to zero + assertEq(tx.gasprice, 0, "gas price should be zero"); + } + + function testTxGasPriceWorksWithLargeValue() public { + uint256 largeGasPrice = 1_000_000_000_000_000; // 1 million gwei + vm.txGasPrice(largeGasPrice); + + // Verify the gas price was updated + assertEq(tx.gasprice, largeGasPrice, "gas price should be updated to large value"); + } + + function testTxGasPriceWorksInBothModes() public { + // Test in EVM mode + vm.pvm(false); + uint256 evmGasPrice = 50_000_000_000; // 50 gwei + vm.txGasPrice(evmGasPrice); + assertEq(tx.gasprice, evmGasPrice, "gas price should work in EVM mode"); + + // Test in PVM mode + vm.pvm(true); + uint256 pvmGasPrice = 75_000_000_000; // 75 gwei + vm.txGasPrice(pvmGasPrice); + assertEq(tx.gasprice, pvmGasPrice, "gas price should work in PVM mode"); + } + + function testTxGasPricePreservedInPvmContract() public { + // Set gas price in EVM mode + vm.pvm(false); + uint256 evmGasPrice = 50_000_000_000; // 50 gwei + vm.txGasPrice(evmGasPrice); + + // Switch to PVM mode (gas price should be preserved) + vm.pvm(true); + + // Deploy a contract in PVM mode - it should see the preserved gas price + GasPriceChecker checker = new GasPriceChecker(); + + // Call the contract - it should see the same gas price + uint256 gasPriceFromContract = checker.getGasPrice(); + assertEq(gasPriceFromContract, evmGasPrice, "gas price should be preserved in PVM contract"); + } +}