Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 0 additions & 4 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 0 additions & 1 deletion crates/bin/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,6 @@ workspace = true
[dependencies]
anyhow.workspace = true
clap = { workspace = true, features = ["derive"] }
futures.workspace = true
serde = { workspace = true, features = ["derive"] }
serde_json.workspace = true
serde_path_to_error.workspace = true
Expand Down
31 changes: 17 additions & 14 deletions crates/bin/src/commands/run_file.rs
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,8 @@ pub struct RunFileCommand {
#[cfg(feature = "scroll")]
#[arg(short, long)]
chunk_mode: bool,
#[cfg(feature = "scroll")]
prev_msg_queue_hash: Option<sbv::primitives::B256>,
}

impl RunFileCommand {
Expand Down Expand Up @@ -42,13 +44,13 @@ impl RunFileCommand {
fn run_chunk(self) -> anyhow::Result<()> {
use anyhow::bail;
use sbv::{
core::{ChunkInfo, EvmDatabase, EvmExecutor},
core::{EvmDatabase, EvmExecutor},
kv::{nohash::NoHashMap, null::NullProvider},
primitives::{
BlockWitness as _,
chainspec::{Chain, get_chain_spec},
ext::{BlockWitnessChunkExt, BlockWitnessExt, TxBytesHashExt},
types::BlockWitness,
ext::{BlockWitnessChunkExt, BlockWitnessExt},
types::{BlockWitness, ChunkInfoBuilder},
},
trie::BlockWitnessTrieExt,
};
Expand All @@ -71,18 +73,23 @@ impl RunFileCommand {
.iter()
.map(|w| w.build_reth_block())
.collect::<Result<Vec<_>, _>>()?;
let chunk_info =
ChunkInfo::from_blocks(witnesses[0].chain_id, witnesses[0].pre_state_root, &blocks);

let chain_spec = get_chain_spec(Chain::from_id(chunk_info.chain_id())).unwrap();
let chain_id = witnesses[0].chain_id;
let chain_spec = get_chain_spec(Chain::from_id(chain_id)).unwrap();

let mut chunk_info_builder = ChunkInfoBuilder::new(&chain_spec, &blocks);
if let Some(prev_msg_queue_hash) = self.prev_msg_queue_hash {
chunk_info_builder.prev_msg_queue_hash(prev_msg_queue_hash);
}

let mut code_db = NoHashMap::default();
witnesses.import_codes(&mut code_db);
let mut nodes_provider = NoHashMap::default();
witnesses.import_nodes(&mut nodes_provider)?;

let mut db = EvmDatabase::new_from_root(
&code_db,
chunk_info.prev_state_root(),
chunk_info_builder.get_prev_state_root(),
&nodes_provider,
&NullProvider,
)?;
Expand All @@ -91,16 +98,12 @@ impl RunFileCommand {
db.update(&nodes_provider, output.state.state.iter())?;
}
let post_state_root = db.commit_changes();
if post_state_root != chunk_info.post_state_root() {
if post_state_root != chunk_info_builder.get_post_state_root() {
bail!("post state root mismatch");
}

let withdraw_root = db.withdraw_root()?;
let tx_bytes_hash = blocks
.iter()
.flat_map(|b| b.body().transactions.iter())
.tx_bytes_hash();
let _public_input_hash = chunk_info.public_input_hash(&withdraw_root, &tx_bytes_hash);
let chunk_info = chunk_info_builder.build(db.withdraw_root()?);
let _public_input_hash = chunk_info.pi_hash();
dev_info!("[chunk mode] public input hash: {_public_input_hash:?}");

Ok(())
Expand Down
6 changes: 1 addition & 5 deletions crates/core/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -18,10 +18,8 @@ reth-evm-ethereum.workspace = true
reth-execution-types.workspace = true
reth-scroll-evm = { workspace = true, optional = true }
reth-storage-errors.workspace = true
serde.workspace = true
serde_json.workspace = true

thiserror.workspace = true
tiny-keccak.workspace = true

sbv-primitives.workspace = true
sbv-helpers.workspace = true
Expand All @@ -30,8 +28,6 @@ sbv-trie.workspace = true

[dev-dependencies]
ctor.workspace = true
serde.workspace = true
serde_json.workspace = true
tracing.workspace = true
tracing-subscriber.workspace = true

Expand Down
119 changes: 0 additions & 119 deletions crates/core/src/chunk.rs
Original file line number Diff line number Diff line change
@@ -1,119 +0,0 @@
use sbv_primitives::{B256, BlockChunkExt, RecoveredBlock, types::reth::Block};
use tiny_keccak::{Hasher, Keccak};

/// A chunk is a set of continuous blocks.
///
/// `ChunkInfo` is the metadata committed to for a chunk. It stores:
/// - the chain id the blocks belong to
/// - the state root before this chunk
/// - the state root after this chunk
/// - the data hash of this chunk (keccak over the blocks' DA headers and L1 messages)
///
/// The withdraw root and the flattened L2 tx bytes hash are NOT stored here;
/// they are supplied by the caller when computing `public_input_hash`.
#[derive(Clone, Debug, serde::Serialize, serde::Deserialize)]
pub struct ChunkInfo {
chain_id: u64,
prev_state_root: B256,
post_state_root: B256,
data_hash: B256,
}

impl ChunkInfo {
    /// Build the chunk metadata from a contiguous run of recovered blocks.
    ///
    /// The chunk data hash is keccak256 over every block's DA header followed
    /// by every block's L1 message hashes; the post state root is taken from
    /// the last block.
    ///
    /// # Panics
    /// Panics if `blocks` is empty.
    #[must_use]
    pub fn from_blocks(
        chain_id: u64,
        prev_state_root: B256,
        blocks: &[RecoveredBlock<Block>],
    ) -> Self {
        let last_block = blocks.last().expect("at least one block");

        let data_hash = cycle_track!(
            {
                let mut hasher = Keccak::v256();
                // All DA headers first, then all L1 messages — the ordering is
                // part of the commitment and must not change.
                for block in blocks.iter() {
                    block.hash_da_header(&mut hasher);
                }
                for block in blocks.iter() {
                    block.hash_l1_msg(&mut hasher);
                }
                let mut digest = B256::ZERO;
                hasher.finalize(&mut digest.0);
                digest
            },
            "Keccak::v256"
        );

        ChunkInfo {
            chain_id,
            prev_state_root,
            post_state_root: last_block.state_root,
            data_hash,
        }
    }

    /// Public input hash for a given chunk is defined as
    /// keccak(
    ///     chain id ||
    ///     prev state root ||
    ///     post state root ||
    ///     withdraw root ||
    ///     chunk data hash ||
    ///     chunk txdata hash
    /// )
    pub fn public_input_hash(&self, withdraw_root: &B256, tx_bytes_hash: &B256) -> B256 {
        let mut pi_hasher = Keccak::v256();

        // Field order defines the commitment; keep it in sync with the doc above.
        pi_hasher.update(&self.chain_id.to_be_bytes());
        pi_hasher.update(self.prev_state_root.as_slice());
        pi_hasher.update(self.post_state_root.as_slice());
        pi_hasher.update(withdraw_root.as_slice());
        pi_hasher.update(self.data_hash.as_slice());
        pi_hasher.update(tx_bytes_hash.as_slice());

        let mut digest = B256::ZERO;
        pi_hasher.finalize(&mut digest.0);
        digest
    }

    /// Chain ID of this chunk
    pub fn chain_id(&self) -> u64 {
        self.chain_id
    }

    /// State root before this chunk
    pub fn prev_state_root(&self) -> B256 {
        self.prev_state_root
    }

    /// State root after this chunk
    pub fn post_state_root(&self) -> B256 {
        self.post_state_root
    }

    /// Data hash of this chunk
    pub fn data_hash(&self) -> B256 {
        self.data_hash
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use sbv_primitives::{BlockWitness as _, RecoveredBlock, types::BlockWitness};

    // Four consecutive Holesky block witnesses used as fixtures.
    const TRACES_STR: [&str; 4] = [
        include_str!("../../../testdata/holesky_witness/2971844.json"),
        include_str!("../../../testdata/holesky_witness/2971845.json"),
        include_str!("../../../testdata/holesky_witness/2971846.json"),
        include_str!("../../../testdata/holesky_witness/2971847.json"),
    ];

    // Smoke test: deserializes the witnesses, rebuilds the blocks, and runs
    // `ChunkInfo::from_blocks`. It asserts nothing about the resulting hash —
    // it only checks that construction does not panic.
    // NOTE(review): consider pinning the expected data hash / public input
    // hash here so regressions in the hashing order are caught.
    #[test]
    fn test_public_input_hash() {
        let witnesses: [BlockWitness; 4] = TRACES_STR.map(|s| serde_json::from_str(s).unwrap());
        let blocks: [RecoveredBlock<Block>; 4] =
            witnesses.clone().map(|s| s.build_reth_block().unwrap());

        let _ = ChunkInfo::from_blocks(1, witnesses[0].pre_state_root, &blocks);
    }
}
5 changes: 0 additions & 5 deletions crates/core/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,11 +4,6 @@
extern crate sbv_helpers;
extern crate core;

#[cfg(feature = "scroll")]
mod chunk;
#[cfg(feature = "scroll")]
pub use chunk::ChunkInfo;

mod database;
pub use database::{DatabaseError, DatabaseRef, EvmDatabase};

Expand Down
1 change: 1 addition & 0 deletions crates/primitives/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,7 @@ scroll = [
"dep:scroll-alloy-rpc-types",
"dep:scroll-alloy-network",
"reth-scroll-primitives/serde",
"reth-scroll-primitives/scroll",
"revm/scroll-default-handler",
"revm/optional_no_base_fee",
]
Expand Down
12 changes: 7 additions & 5 deletions crates/primitives/src/ext.rs
Original file line number Diff line number Diff line change
Expand Up @@ -95,31 +95,33 @@ impl<T: BlockWitness> BlockWitnessExt for [T] {
#[cfg(feature = "scroll")]
pub trait TxBytesHashExt {
    /// Hash the EIP-2718-encoded transaction bytes.
    ///
    /// Returns the total encoded byte length of all transactions together with
    /// the keccak256 digest of the concatenated encodings.
    fn tx_bytes_hash(self) -> (usize, B256);

    /// Same as [`Self::tx_bytes_hash`], but reuses `rlp_buffer` as encoding
    /// scratch space to avoid a fresh allocation.
    fn tx_bytes_hash_in(self, rlp_buffer: &mut Vec<u8>) -> (usize, B256);
}

#[cfg(feature = "scroll")]
impl<'a, I: IntoIterator<Item = &'a Tx>, Tx: alloy_eips::eip2718::Encodable2718 + 'a> TxBytesHashExt
    for I
{
    fn tx_bytes_hash(self) -> (usize, B256) {
        let mut rlp_buffer = Vec::new();
        self.tx_bytes_hash_in(&mut rlp_buffer)
    }

    fn tx_bytes_hash_in(self, rlp_buffer: &mut Vec<u8>) -> (usize, B256) {
        use tiny_keccak::{Hasher, Keccak};
        let mut tx_bytes_hasher = Keccak::v256();
        // Running total of encoded bytes across all transactions.
        // NOTE(review): assumes `rlp_buffer` is empty on entry — a non-empty
        // buffer would inflate `len` and the digest for the first tx; confirm
        // callers always pass a cleared buffer.
        let mut len = 0;
        for tx in self.into_iter() {
            tx.encode_2718(rlp_buffer);
            len += rlp_buffer.len();
            tx_bytes_hasher.update(rlp_buffer);
            // Reuse the buffer for the next transaction.
            rlp_buffer.clear();
        }
        let mut tx_bytes_hash = B256::ZERO;
        tx_bytes_hasher.finalize(&mut tx_bytes_hash.0);
        (len, tx_bytes_hash)
    }
}
65 changes: 57 additions & 8 deletions crates/primitives/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -200,33 +200,82 @@ pub trait Withdrawal: fmt::Debug {
}

/// Chunk related extension methods for Block
#[cfg(feature = "scroll")]
pub trait BlockChunkExt {
    /// Hash the DA header of the block (legacy chunk encoding).
    fn legacy_hash_da_header(&self, hasher: &mut impl tiny_keccak::Hasher);
    /// Hash the L1 message tx hashes of the block (legacy chunk encoding).
    fn legacy_hash_l1_msg(&self, hasher: &mut impl tiny_keccak::Hasher);
    /// Fold the block's L1 message tx hashes into a rolling message-queue
    /// hash, starting from `initial_queue_hash`.
    fn hash_msg_queue(&self, initial_queue_hash: &B256) -> B256;
    /// Number of L1 msg txs in the block
    fn num_l1_msgs(&self) -> usize;
}

#[cfg(feature = "scroll")]
impl BlockChunkExt for RecoveredBlock<types::reth::Block> {
    /// Hashes number, timestamp, base fee, gas limit, and tx count — the
    /// legacy DA header fields — into `hasher`.
    #[inline]
    fn legacy_hash_da_header(&self, hasher: &mut impl tiny_keccak::Hasher) {
        hasher.update(&self.number.to_be_bytes());
        hasher.update(&self.timestamp.to_be_bytes());
        // Base fee is committed as a big-endian U256 (zero when absent).
        hasher.update(
            &U256::from_limbs([self.base_fee_per_gas.unwrap_or_default(), 0, 0, 0])
                .to_be_bytes::<{ U256::BYTES }>(),
        );
        hasher.update(&self.gas_limit.to_be_bytes());
        // FIXME: l1 tx could be skipped, the actual tx count needs to be calculated
        hasher.update(&(self.body().transactions.len() as u16).to_be_bytes());
    }

    /// Hashes the tx hash of every L1 message in the block into `hasher`.
    #[inline]
    fn legacy_hash_l1_msg(&self, hasher: &mut impl tiny_keccak::Hasher) {
        use reth_primitives_traits::SignedTransaction;
        for tx in self
            .body()
            .transactions
            .iter()
            .filter(|tx| tx.is_l1_message())
        {
            hasher.update(tx.tx_hash().as_slice())
        }
    }

    /// Rolls `initial_queue_hash` forward over the block's L1 messages:
    /// for each message, `hash = keccak(hash || tx_hash)` with the last
    /// 4 bytes zeroed, matching the da-codec queue-hash definition.
    #[inline]
    fn hash_msg_queue(&self, initial_queue_hash: &B256) -> B256 {
        use reth_primitives_traits::SignedTransaction;
        use tiny_keccak::Hasher;

        let mut rolling_hash = *initial_queue_hash;
        for tx in self
            .body()
            .transactions
            .iter()
            .filter(|tx| tx.is_l1_message())
        {
            let mut hasher = tiny_keccak::Keccak::v256();
            hasher.update(rolling_hash.as_slice());
            hasher.update(tx.tx_hash().as_slice());

            hasher.finalize(rolling_hash.as_mut_slice());

            // clear last 32 bits, i.e. 4 bytes.
            // https://github.com/scroll-tech/da-codec/blob/26dc8d575244560611548fada6a3a2745c60fe83/encoding/da.go#L817-L825
            // see also https://github.com/scroll-tech/da-codec/pull/42
            rolling_hash.0[28] = 0;
            rolling_hash.0[29] = 0;
            rolling_hash.0[30] = 0;
            rolling_hash.0[31] = 0;
        }

        rolling_hash
    }

    /// Counts the L1 message transactions in the block.
    #[inline]
    fn num_l1_msgs(&self) -> usize {
        self.body()
            .transactions
            .iter()
            .filter(|tx| tx.is_l1_message())
            .count()
    }
}
Loading