feat: integrate batch changes #41

Merged
merged 11 commits on Apr 3, 2025
5 changes: 4 additions & 1 deletion Cargo.lock

Some generated files are not rendered by default.

34 changes: 5 additions & 29 deletions crates/codec/src/decoding/batch.rs
@@ -54,6 +54,7 @@ impl Batch {
let num_l1_messages = b.context.num_l1_messages as usize;
let block_messages = l1_messages_buf.get(..num_l1_messages).unwrap_or(&[]);
*l1_messages_buf = l1_messages_buf.get(num_l1_messages..).unwrap_or(&[]);

block_messages
})
.collect::<Vec<_>>();
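
The hunk above consumes `l1_messages_buf` front-to-back, handing each block the first `num_l1_messages` entries and leaving the remainder for the next iteration; the `get(..)`/`unwrap_or(&[])` calls make a too-short buffer yield an empty slice instead of panicking. A standalone sketch of that pattern (the helper name and test values are illustrative, not from the crate):

```rust
/// Take the first `n` items from the front of `buf`, advancing `buf` past
/// them. A too-short buffer yields an empty slice rather than panicking,
/// mirroring the `get(..).unwrap_or(&[])` calls above.
fn split_front<'a, T>(buf: &mut &'a [T], n: usize) -> &'a [T] {
    let front = buf.get(..n).unwrap_or(&[]);
    *buf = buf.get(n..).unwrap_or(&[]);
    front
}

fn main() {
    let messages = [1, 2, 3, 4, 5];
    let mut rest: &[i32] = &messages;
    assert_eq!(split_front(&mut rest, 2), &[1, 2]);
    assert_eq!(split_front(&mut rest, 2), &[3, 4]);
    // Asking for more than remains yields an empty slice instead of panicking.
    assert!(split_front(&mut rest, 4).is_empty());
}
```
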
@@ -113,7 +114,7 @@ fn hash_chunk(
mod tests {
use crate::decoding::{test_utils::read_to_bytes, v0::decode_v0, v1::decode_v1};

use alloy_primitives::{address, b256, bytes, U256};
use alloy_primitives::b256;
use scroll_alloy_consensus::TxL1Message;

#[test]
@@ -134,35 +135,10 @@ mod tests {
// <https://etherscan.io/tx/0xdc0a315b25b46f4c1085e3884c63f8ede61e984e47655f7667e5f14e3df55f82>
let raw_calldata = read_to_bytes("./testdata/calldata_v0_with_l1_messages.bin")?;
let batch = decode_v0(&raw_calldata)?;
let l1_messages: Vec<TxL1Message> =
serde_json::from_str(&std::fs::read_to_string("./testdata/l1_messages_v0.json")?)?;

let hash = batch
.try_compute_data_hash(&[
TxL1Message {
queue_index: 39,
gas_limit: 180000,
to: address!("781e90f1c8Fc4611c9b7497C3B47F99Ef6969CbC"),
value: U256::ZERO,
sender: address!("7885BcBd5CeCEf1336b5300fb5186A12DDD8c478"),
input: bytes!("8ef1332e000000000000000000000000f1af3b23de0a5ca3cab7261cb0061c0d779a5c7b00000000000000000000000033b60d5dd260d453cac3782b0bdc01ce846721420000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002700000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000000e48431f5c1000000000000000000000000a0b86991c6218b36c1d19d4a2e9eb0ce3606eb4800000000000000000000000006efdbff2a14a7c8e15944d1f4a48f9f95f663a4000000000000000000000000c451b0191351ce308fdfd779d73814c910fc5ecb000000000000000000000000c451b0191351ce308fdfd779d73814c910fc5ecb00000000000000000000000000000000000000000000000000000005d21dba0000000000000000000000000000000000000000000000000000000000000000c0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"),
},
TxL1Message {
queue_index: 40,
gas_limit: 168000,
to: address!("781e90f1c8Fc4611c9b7497C3B47F99Ef6969CbC"),
value: U256::ZERO,
sender: address!("7885BcBd5CeCEf1336b5300fb5186A12DDD8c478"),
input: bytes!("8ef1332e0000000000000000000000007f2b8c31f88b6006c382775eea88297ec1e3e9050000000000000000000000006ea73e05adc79974b931123675ea8f78ffdacdf00000000000000000000000000000000000000000000000000011c37937e08000000000000000000000000000000000000000000000000000000000000000002800000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000000a4232e8748000000000000000000000000b89db2813541287a4dd1fc6801eec30595ecdc6c000000000000000000000000b89db2813541287a4dd1fc6801eec30595ecdc6c0000000000000000000000000000000000000000000000000011c37937e080000000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"),
},
TxL1Message {
queue_index: 41,
gas_limit: 168000,
to: address!("781e90f1c8Fc4611c9b7497C3B47F99Ef6969CbC"),
value: U256::ZERO,
sender: address!("7885BcBd5CeCEf1336b5300fb5186A12DDD8c478"),
input: bytes!("8ef1332e0000000000000000000000007f2b8c31f88b6006c382775eea88297ec1e3e9050000000000000000000000006ea73e05adc79974b931123675ea8f78ffdacdf0000000000000000000000000000000000000000000000000002386f26fc10000000000000000000000000000000000000000000000000000000000000000002900000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000000a4232e87480000000000000000000000003219c394111d45757ccb68a4fd353b4f7f9660960000000000000000000000003219c394111d45757ccb68a4fd353b4f7f966096000000000000000000000000000000000000000000000000002386f26fc100000000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"),
},
])
.unwrap();
let hash = batch.try_compute_data_hash(&l1_messages).unwrap();

assert_eq!(hash, b256!("55fd647c58461d910b5bfb4539f2177ba575c9c8d578a344558976a4375cc287"));

4 changes: 4 additions & 0 deletions crates/codec/src/error.rs
@@ -1,3 +1,5 @@
use alloy_primitives::U256;

/// An error occurring during the codec process.
#[derive(Debug, thiserror::Error)]
pub enum CodecError {
@@ -13,6 +15,8 @@ pub enum DecodingError {
MissingCodecVersion,
#[error("unsupported codec version {0}")]
UnsupportedCodecVersion(u8),
#[error("malformed codec version: {0}")]
MalformedCodecVersion(U256),
#[error("missing blob from data source")]
MissingBlob,
#[error("missing chunk data")]
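
For illustration, a hedged sketch of how a consumer of the codec might treat the new variant differently from the existing ones; the import path and the retry policy are assumptions, and only the variants named above are matched explicitly:

```rust
// Assumed import path; the crate's public re-exports are not shown in this diff.
use crate::error::DecodingError;

/// Classify decoding failures for a caller's retry logic: a malformed or
/// unsupported version word is permanent, while a missing blob may succeed
/// once the blob sidecar becomes available.
fn is_retryable(err: &DecodingError) -> bool {
    match err {
        DecodingError::MalformedCodecVersion(_) |
        DecodingError::UnsupportedCodecVersion(_) => false,
        DecodingError::MissingBlob => true,
        // Remaining variants (missing version, missing chunk data, ...) are
        // treated as permanent here purely for illustration.
        _ => false,
    }
}
```
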
26 changes: 23 additions & 3 deletions crates/codec/src/lib.rs
@@ -13,7 +13,7 @@ use crate::decoding::{
};

use alloy_eips::eip4844::Blob;
use alloy_primitives::Bytes;
use alloy_primitives::{ruint::UintTryTo, Bytes, U256};

/// The Codec.
#[derive(Debug)]
@@ -43,7 +43,7 @@ impl Codec {
/// Decodes the input data and returns the decoded [`Batch`].
pub fn decode<T: CommitDataSource>(input: &T) -> Result<Batch, CodecError> {
let calldata = input.calldata();
let version = calldata.first().ok_or(DecodingError::MissingCodecVersion)?;
let version = get_codec_version(calldata)?;

let payload = match version {
0 => decode_v0(calldata)?,
@@ -63,7 +63,7 @@
let blob = input.blob().ok_or(DecodingError::MissingBlob)?;
decode_v7(blob.as_ref())?
}
v => return Err(DecodingError::UnsupportedCodecVersion(*v).into()),
v => return Err(DecodingError::UnsupportedCodecVersion(v).into()),
};

Ok(payload)
@@ -77,3 +77,23 @@ pub trait CommitDataSource {
/// Returns the blob for decoding.
fn blob(&self) -> Option<&Blob>;
}

/// Returns the codec version from the calldata.
fn get_codec_version(calldata: &[u8]) -> Result<u8, DecodingError> {
const CODEC_VERSION_OFFSET_START: usize = 4;
const CODEC_VERSION_LEN: usize = 32;
const CODEC_VERSION_OFFSET_END: usize = CODEC_VERSION_OFFSET_START + CODEC_VERSION_LEN;
const HIGH_BYTES_MASK: U256 =
U256::from_limbs([u64::MAX, u64::MAX, u64::MAX, 0xffffffffffffff00]);

let version = calldata
.get(CODEC_VERSION_OFFSET_START..CODEC_VERSION_OFFSET_END)
.ok_or(DecodingError::Eof)?;
let version = U256::from_be_slice(version);

if (version & HIGH_BYTES_MASK) != U256::ZERO {
return Err(DecodingError::MalformedCodecVersion(version))
}

Ok(version.uint_try_to().expect("fits in single byte"))
}
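
For context, a hedged sketch of a caller implementing `CommitDataSource` and driving the updated `Codec::decode`. The struct, its fields, and the `calldata()` signature (assumed to be `&Bytes`) are illustrative, since only `blob()` appears in the lines above; the crate's `Codec`, `Batch`, `CodecError`, and `CommitDataSource` are assumed to be in scope:

```rust
use alloy_eips::eip4844::Blob;
use alloy_primitives::Bytes;
// `Codec`, `CommitDataSource`, `Batch`, and `CodecError` are the types from
// this crate; their exact paths are not shown in the diff.

/// Hypothetical caller-side view of a single commit transaction.
struct CommitCall {
    calldata: Bytes,
    // Boxed because EIP-4844 blobs are large (128 KiB).
    blob: Option<Box<Blob>>,
}

impl CommitDataSource for CommitCall {
    fn calldata(&self) -> &Bytes {
        &self.calldata
    }

    fn blob(&self) -> Option<&Blob> {
        self.blob.as_deref()
    }
}

/// `Codec::decode` now sniffs the version word from the calldata itself, so
/// the caller no longer pre-parses it before dispatching.
fn decode_commit(call: &CommitCall) -> Result<Batch, CodecError> {
    Codec::decode(call)
}
```
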
26 changes: 26 additions & 0 deletions crates/codec/testdata/l1_messages_v0.json
@@ -0,0 +1,26 @@
[
{
"queueIndex": "39",
"gas": "180000",
"to": "0x781e90f1c8fc4611c9b7497c3b47f99ef6969cbc",
"value": "0x0",
"sender": "0x7885bcbd5cecef1336b5300fb5186a12ddd8c478",
"input": "8ef1332e000000000000000000000000f1af3b23de0a5ca3cab7261cb0061c0d779a5c7b00000000000000000000000033b60d5dd260d453cac3782b0bdc01ce846721420000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002700000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000000e48431f5c1000000000000000000000000a0b86991c6218b36c1d19d4a2e9eb0ce3606eb4800000000000000000000000006efdbff2a14a7c8e15944d1f4a48f9f95f663a4000000000000000000000000c451b0191351ce308fdfd779d73814c910fc5ecb000000000000000000000000c451b0191351ce308fdfd779d73814c910fc5ecb00000000000000000000000000000000000000000000000000000005d21dba0000000000000000000000000000000000000000000000000000000000000000c0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
},
{
"queueIndex": "40",
"gas": "168000",
"to": "0x781e90f1c8fc4611c9b7497c3b47f99ef6969cbc",
"value": "0x0",
"sender": "0x7885bcbd5cecef1336b5300fb5186a12ddd8c478",
"input": "8ef1332e0000000000000000000000007f2b8c31f88b6006c382775eea88297ec1e3e9050000000000000000000000006ea73e05adc79974b931123675ea8f78ffdacdf00000000000000000000000000000000000000000000000000011c37937e08000000000000000000000000000000000000000000000000000000000000000002800000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000000a4232e8748000000000000000000000000b89db2813541287a4dd1fc6801eec30595ecdc6c000000000000000000000000b89db2813541287a4dd1fc6801eec30595ecdc6c0000000000000000000000000000000000000000000000000011c37937e080000000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
},
{
"queueIndex": "41",
"gas": "168000",
"to": "0x781e90f1c8fc4611c9b7497c3b47f99ef6969cbc",
"value": "0x0",
"sender": "0x7885bcbd5cecef1336b5300fb5186a12ddd8c478",
"input": "8ef1332e0000000000000000000000007f2b8c31f88b6006c382775eea88297ec1e3e9050000000000000000000000006ea73e05adc79974b931123675ea8f78ffdacdf0000000000000000000000000000000000000000000000000002386f26fc10000000000000000000000000000000000000000000000000000000000000000002900000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000000a4232e87480000000000000000000000003219c394111d45757ccb68a4fd353b4f7f9660960000000000000000000000003219c394111d45757ccb68a4fd353b4f7f966096000000000000000000000000000000000000000000000000002386f26fc100000000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
}
]
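
This fixture replaces the inline `TxL1Message` literals the batch.rs test previously built by hand; the updated test deserializes it directly. A minimal sketch of that load path (helper name and error type are illustrative):

```rust
use scroll_alloy_consensus::TxL1Message;

/// Load the fixture the way the updated test does, relying on the type's
/// serde impl to accept the camelCase fields above.
fn load_l1_messages(path: &str) -> Result<Vec<TxL1Message>, Box<dyn std::error::Error>> {
    Ok(serde_json::from_str(&std::fs::read_to_string(path)?)?)
}
```
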
28 changes: 8 additions & 20 deletions crates/database/db/src/db.rs
@@ -46,15 +46,14 @@ impl From<DatabaseConnection> for Database {
#[cfg(test)]
mod test {
use crate::{operations::DatabaseOperations, test_utils::setup_test_db};

use arbitrary::{Arbitrary, Unstructured};
use futures::StreamExt;
use rand::Rng;
use rollup_node_primitives::{
BatchInput, BatchInputV1, BatchInputV2, L1MessageWithBlockNumber,
};
use rollup_node_primitives::{BatchCommitData, L1MessageWithBlockNumber};

#[tokio::test]
async fn test_database_round_trip_batch_input() {
async fn test_database_round_trip_batch_commit() {
// Set up the test database.
let db = setup_test_db().await;

@@ -64,24 +63,13 @@
let mut u = Unstructured::new(&bytes);

// Generate a random BatchInputV1.
let batch_input_v1 = BatchInputV1::arbitrary(&mut u).unwrap();
let batch_input = BatchInput::BatchInputDataV1(batch_input_v1);

// Round trip the BatchInput through the database.
db.insert_batch_input(batch_input.clone()).await.unwrap();
let batch_input_from_db =
db.get_batch_input_by_batch_index(batch_input.batch_index()).await.unwrap().unwrap();
assert_eq!(batch_input, batch_input_from_db);

// Generate a random BatchInputV2.
let batch_input_v2 = BatchInputV2::arbitrary(&mut u).unwrap();
let batch_input = BatchInput::BatchInputDataV2(batch_input_v2);
let batch_commit = BatchCommitData::arbitrary(&mut u).unwrap();

// Round trip the BatchInput through the database.
db.insert_batch_input(batch_input.clone()).await.unwrap();
let batch_input_from_db =
db.get_batch_input_by_batch_index(batch_input.batch_index()).await.unwrap().unwrap();
assert_eq!(batch_input, batch_input_from_db);
db.insert_batch(batch_commit.clone()).await.unwrap();
let batch_commit_from_db =
db.get_batch_by_index(batch_commit.index).await.unwrap().unwrap();
assert_eq!(batch_commit, batch_commit_from_db);
}

#[tokio::test]
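
A hedged sketch of the renamed read path outside the test; it assumes `DatabaseOperations` is a trait exposing the `get_batch_by_index` method used above and that the method returns `Result<Option<BatchCommitData>, _>`, as the double `unwrap` in the test suggests:

```rust
// Same path the test above imports it from.
use crate::operations::DatabaseOperations;

/// Returns whether a commit for `index` is already persisted, treating a
/// database error as "not seen" purely for illustration.
async fn batch_seen<DB: DatabaseOperations>(db: &DB, index: u64) -> bool {
    db.get_batch_by_index(index).await.ok().flatten().is_some()
}
```
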
55 changes: 55 additions & 0 deletions crates/database/db/src/models/batch_commit.rs
@@ -0,0 +1,55 @@
use std::sync::Arc;

use rollup_node_primitives::BatchCommitData;
use sea_orm::{entity::prelude::*, ActiveValue};

/// A database model that represents a batch input.
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "batch_commit")]
pub struct Model {
#[sea_orm(primary_key)]
index: i64,
hash: Vec<u8>,
block_number: i64,
calldata: Vec<u8>,
blob_hash: Option<Vec<u8>>,
finalized_block_number: Option<i64>,
}

/// The relation for the batch input model.
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

/// The active model behavior for the batch input model.
impl ActiveModelBehavior for ActiveModel {}

impl From<BatchCommitData> for ActiveModel {
fn from(batch_commit: BatchCommitData) -> Self {
Self {
index: ActiveValue::Set(
batch_commit.index.try_into().expect("index should fit in i64"),
),
hash: ActiveValue::Set(batch_commit.hash.to_vec()),
block_number: ActiveValue::Set(
batch_commit.block_number.try_into().expect("block number should fit in i64"),
),
calldata: ActiveValue::Set(batch_commit.calldata.0.to_vec()),
blob_hash: ActiveValue::Set(batch_commit.blob_versioned_hash.map(|b| b.to_vec())),
finalized_block_number: ActiveValue::Unchanged(None),
}
}
}

impl From<Model> for BatchCommitData {
fn from(value: Model) -> Self {
Self {
hash: value.hash.as_slice().try_into().expect("data persisted in database is valid"),
index: value.index as u64,
block_number: value.block_number as u64,
calldata: Arc::new(value.calldata.into()),
blob_versioned_hash: value
.blob_hash
.map(|b| b.as_slice().try_into().expect("data persisted in database is valid")),
}
}
}
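
For context, a sketch of how these conversions would be used when persisting a new commit; it builds the input the same way the db.rs test does (via `Arbitrary`) and assumes it sits alongside the model above so `ActiveModel` is in scope:

```rust
use arbitrary::{Arbitrary, Unstructured};
use rollup_node_primitives::BatchCommitData;

/// Convert a (randomly generated) commit into an insertable row via the
/// `From` impl above: every column is `Set`, except `finalized_block_number`,
/// which starts as `Unchanged(None)` and is presumably written later, once
/// the batch is finalized on L1.
fn example_row() -> ActiveModel {
    let bytes = [0u8; 1024];
    let mut u = Unstructured::new(&bytes);
    let commit = BatchCommitData::arbitrary(&mut u).expect("1 KiB of entropy is plenty");
    commit.into()
}
```
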
105 changes: 0 additions & 105 deletions crates/database/db/src/models/batch_input.rs

This file was deleted.

4 changes: 2 additions & 2 deletions crates/database/db/src/models/mod.rs
@@ -1,5 +1,5 @@
/// This module contains the batch input database model.
pub mod batch_input;
/// This module contains the batch commit database model.
pub mod batch_commit;

/// This module contains the L1 message database model.
pub mod l1_message;