Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: Lazy wasm pt4 #11491

Open
wants to merge 38 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from 34 commits
Commits
Show all changes
38 commits
Select commit Hold shift + click to select a range
14af224
grumpkin math async
Thunkar Jan 23, 2025
4992d11
fmt
Thunkar Jan 23, 2025
b3f696c
Merge branch 'master' into gj/lazy_wasm_pt3
Thunkar Jan 23, 2025
36c38dd
wip
Thunkar Jan 23, 2025
ecaf467
fmt
Thunkar Jan 23, 2025
0001174
Merge branch 'gj/lazy_wasm_pt3' of github.com:AztecProtocol/aztec-pac…
Thunkar Jan 23, 2025
db661a6
wip
Thunkar Jan 23, 2025
a9754a1
Merge branch 'master' of github.com:AztecProtocol/aztec-packages into…
Thunkar Jan 23, 2025
72299dc
wip
Thunkar Jan 23, 2025
a5df41d
don't mess with the trees
Thunkar Jan 23, 2025
8e9655a
kill me
Thunkar Jan 23, 2025
54a7035
maybe
Thunkar Jan 23, 2025
727d4db
fmt and fixes
Thunkar Jan 24, 2025
8a01427
fixing tests
Thunkar Jan 24, 2025
17b5a9e
fixes
Thunkar Jan 24, 2025
5a15198
Merge branch 'master' of github.com:AztecProtocol/aztec-packages into…
Thunkar Jan 24, 2025
f441fce
more test fixes
Thunkar Jan 24, 2025
8531c7f
Merge branch 'master' of github.com:AztecProtocol/aztec-packages into…
Thunkar Jan 24, 2025
50633b5
fixes
Thunkar Jan 24, 2025
f5a0d2d
fixes
Thunkar Jan 24, 2025
337c0c9
more fixes
Thunkar Jan 24, 2025
92f1400
more fixes
Thunkar Jan 24, 2025
12e1f7a
wip
Thunkar Jan 24, 2025
bb55e12
fix
Thunkar Jan 24, 2025
5dbd1fd
fixed stats
Thunkar Jan 24, 2025
1ad7cfa
more fixes
Thunkar Jan 26, 2025
0dc360d
Merge branch 'master' of github.com:AztecProtocol/aztec-packages into…
Thunkar Jan 26, 2025
bbaf649
fix
Thunkar Jan 26, 2025
dd53866
fixes
Thunkar Jan 26, 2025
a2c56ec
fix e2e
Thunkar Jan 26, 2025
156bf34
fixed broadcasting functions
Thunkar Jan 26, 2025
dfb4167
cleanup
Thunkar Jan 26, 2025
5d44545
more e2e
Thunkar Jan 26, 2025
9da825c
restored completeaddress validation
Thunkar Jan 26, 2025
f7994be
lazy everything
Thunkar Jan 27, 2025
8e4fcf3
better api
Thunkar Jan 27, 2025
b9d9655
simplified API
Thunkar Jan 27, 2025
b0d2b80
minor fixes
Thunkar Jan 27, 2025
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
The table of contents is too big for display.
Diff view
Diff view
  •  
  •  
  •  
8 changes: 4 additions & 4 deletions barretenberg/ts/src/barretenberg/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -123,7 +123,7 @@ export class BarretenbergSync extends BarretenbergApiSync {
}
}

let barrentenbergLazySingleton: BarretenbergLazy;
let barrentenbergLazySingletonPromise: Promise<BarretenbergLazy>;

export class BarretenbergLazy extends BarretenbergApi {
private constructor(wasm: BarretenbergWasmMain) {
Expand All @@ -138,10 +138,10 @@ export class BarretenbergLazy extends BarretenbergApi {
}

static async getSingleton() {
if (!barrentenbergLazySingleton) {
barrentenbergLazySingleton = await BarretenbergLazy.new();
if (!barrentenbergLazySingletonPromise) {
barrentenbergLazySingletonPromise = BarretenbergLazy.new();
}
return barrentenbergLazySingleton;
return await barrentenbergLazySingletonPromise;
}

getWasm() {
Expand Down
12 changes: 12 additions & 0 deletions barretenberg/ts/src/barretenberg_api/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -76,6 +76,18 @@ export class BarretenbergApi {
return out[0];
}

/**
 * Computes a poseidon2 hash of the given field elements via the accumulating WASM export.
 * @param inputsBuffer - Field elements to be hashed together.
 * @returns The resulting hash as a single field element.
 */
async poseidon2HashAccumulate(inputsBuffer: Fr[]): Promise<Fr> {
  // Serialize the single array argument into the wire format expected by the WASM boundary.
  const serializedArgs = [inputsBuffer].map(serializeBufferable);
  const outputTypes: OutputType[] = [Fr];
  // Invoke the export, telling it how many bytes each output slot occupies.
  const rawOutputs = await this.wasm.callWasmExport(
    'poseidon2_hash_accumulate',
    serializedArgs,
    outputTypes.map(outputType => outputType.SIZE_IN_BYTES),
  );
  // Deserialize each returned buffer with its corresponding output type; only one result is produced.
  const [hash] = rawOutputs.map((buffer, index) => outputTypes[index].fromBuffer(buffer));
  return hash;
}

async poseidon2Hashes(inputsBuffer: Fr[]): Promise<Fr> {
const inArgs = [inputsBuffer].map(serializeBufferable);
const outTypes: OutputType[] = [Fr];
Expand Down
18 changes: 9 additions & 9 deletions yarn-project/archiver/src/archiver/archiver.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -180,7 +180,7 @@ describe('Archiver', () => {
(b, i) =>
(b.header.globalVariables.timestamp = new Fr(now + DefaultL1ContractsConfig.ethereumSlotDuration * (i + 1))),
);
const rollupTxs = blocks.map(makeRollupTx);
const rollupTxs = await Promise.all(blocks.map(makeRollupTx));

publicClient.getBlockNumber.mockResolvedValueOnce(2500n).mockResolvedValueOnce(2600n).mockResolvedValueOnce(2700n);

Expand Down Expand Up @@ -274,7 +274,7 @@ describe('Archiver', () => {

const numL2BlocksInTest = 2;

const rollupTxs = blocks.map(makeRollupTx);
const rollupTxs = await Promise.all(blocks.map(makeRollupTx));

// Here we set the current L1 block number to 102. L1 to L2 messages after this should not be read.
publicClient.getBlockNumber.mockResolvedValue(102n);
Expand Down Expand Up @@ -315,7 +315,7 @@ describe('Archiver', () => {

const numL2BlocksInTest = 2;

const rollupTxs = blocks.map(makeRollupTx);
const rollupTxs = await Promise.all(blocks.map(makeRollupTx));

publicClient.getBlockNumber.mockResolvedValueOnce(50n).mockResolvedValueOnce(100n);
mockRollup.read.status
Expand Down Expand Up @@ -350,7 +350,7 @@ describe('Archiver', () => {

const numL2BlocksInTest = 2;

const rollupTxs = blocks.map(makeRollupTx);
const rollupTxs = await Promise.all(blocks.map(makeRollupTx));

publicClient.getBlockNumber.mockResolvedValueOnce(50n).mockResolvedValueOnce(100n).mockResolvedValueOnce(150n);

Expand Down Expand Up @@ -419,7 +419,7 @@ describe('Archiver', () => {
l2Block.header.globalVariables.slotNumber = new Fr(notLastL2SlotInEpoch);
blocks = [l2Block];

const rollupTxs = blocks.map(makeRollupTx);
const rollupTxs = await Promise.all(blocks.map(makeRollupTx));
publicClient.getBlockNumber.mockResolvedValueOnce(l1BlockForL2Block);
mockRollup.read.status.mockResolvedValueOnce([0n, GENESIS_ROOT, 1n, l2Block.archive.root.toString(), GENESIS_ROOT]);
makeL2BlockProposedEvent(l1BlockForL2Block, 1n, l2Block.archive.root.toString());
Expand Down Expand Up @@ -450,7 +450,7 @@ describe('Archiver', () => {
l2Block.header.globalVariables.slotNumber = new Fr(lastL2SlotInEpoch);
blocks = [l2Block];

const rollupTxs = blocks.map(makeRollupTx);
const rollupTxs = await Promise.all(blocks.map(makeRollupTx));
publicClient.getBlockNumber.mockResolvedValueOnce(l1BlockForL2Block);
mockRollup.read.status.mockResolvedValueOnce([0n, GENESIS_ROOT, 1n, l2Block.archive.root.toString(), GENESIS_ROOT]);
makeL2BlockProposedEvent(l1BlockForL2Block, 1n, l2Block.archive.root.toString());
Expand Down Expand Up @@ -539,12 +539,12 @@ describe('Archiver', () => {
* @param block - The L2Block.
* @returns A fake tx with calldata that corresponds to calling process in the Rollup contract.
*/
function makeRollupTx(l2Block: L2Block) {
async function makeRollupTx(l2Block: L2Block) {
const header = toHex(l2Block.header.toBuffer());
const body = toHex(l2Block.body.toBuffer());
const blobInput = Blob.getEthBlobEvaluationInputs(Blob.getBlobs(l2Block.body.toBlobFields()));
const blobInput = Blob.getEthBlobEvaluationInputs(await Blob.getBlobs(l2Block.body.toBlobFields()));
const archive = toHex(l2Block.archive.root.toBuffer());
const blockHash = toHex(l2Block.header.hash().toBuffer());
const blockHash = toHex((await l2Block.header.hash()).toBuffer());
const input = encodeFunctionData({
abi: RollupAbi,
functionName: 'propose',
Expand Down
39 changes: 25 additions & 14 deletions yarn-project/archiver/src/archiver/archiver.ts
Original file line number Diff line number Diff line change
Expand Up @@ -794,7 +794,7 @@ export class Archiver implements ArchiveSource, Traceable {
async addContractClass(contractClass: ContractClassPublic): Promise<void> {
await this.store.addContractClasses(
[contractClass],
[computePublicBytecodeCommitment(contractClass.packedBytecode)],
[await computePublicBytecodeCommitment(contractClass.packedBytecode)],
0,
);
return;
Expand Down Expand Up @@ -829,10 +829,13 @@ export class Archiver implements ArchiveSource, Traceable {
);
}

const latestBlockHeaderHash = await latestBlockHeader?.hash();
const provenBlockHeaderHash = await provenBlockHeader?.hash();
const finalizedBlockHeaderHash = await provenBlockHeader?.hash();
return {
latest: { number: latestBlockNumber, hash: latestBlockHeader?.hash().toString() } as L2BlockId,
proven: { number: provenBlockNumber, hash: provenBlockHeader?.hash().toString() } as L2BlockId,
finalized: { number: provenBlockNumber, hash: provenBlockHeader?.hash().toString() } as L2BlockId,
latest: { number: latestBlockNumber, hash: latestBlockHeaderHash?.toString() } as L2BlockId,
proven: { number: provenBlockNumber, hash: provenBlockHeaderHash?.toString() } as L2BlockId,
finalized: { number: provenBlockNumber, hash: finalizedBlockHeaderHash?.toString() } as L2BlockId,
};
}
}
Expand Down Expand Up @@ -881,19 +884,19 @@ class ArchiverStoreHelper
* @param allLogs - All logs emitted in a bunch of blocks.
*/
async #updateRegisteredContractClasses(allLogs: UnencryptedL2Log[], blockNum: number, operation: Operation) {
const contractClasses = allLogs
const contractClassRegisteredEvents = allLogs
.filter(log => ContractClassRegisteredEvent.isContractClassRegisteredEvent(log.data))
.map(log => ContractClassRegisteredEvent.fromLog(log.data))
.map(e => e.toContractClassPublic());
.map(log => ContractClassRegisteredEvent.fromLog(log.data));

const contractClasses = await Promise.all(contractClassRegisteredEvents.map(e => e.toContractClassPublic()));
if (contractClasses.length > 0) {
contractClasses.forEach(c => this.#log.verbose(`${Operation[operation]} contract class ${c.id.toString()}`));
if (operation == Operation.Store) {
// TODO: Will probably want to create some worker threads to compute these bytecode commitments as they are expensive
return await this.store.addContractClasses(
contractClasses,
contractClasses.map(x => computePublicBytecodeCommitment(x.packedBytecode)),
blockNum,
const commitments = await Promise.all(
contractClasses.map(c => computePublicBytecodeCommitment(c.packedBytecode)),
);
return await this.store.addContractClasses(contractClasses, commitments, blockNum);
} else if (operation == Operation.Delete) {
return await this.store.deleteContractClasses(contractClasses, blockNum);
}
Expand Down Expand Up @@ -961,10 +964,18 @@ class ArchiverStoreHelper
const unconstrainedFns = allFns.filter(
(fn): fn is UnconstrainedFunctionWithMembershipProof => 'privateFunctionsArtifactTreeRoot' in fn,
);
const validPrivateFns = privateFns.filter(fn => isValidPrivateFunctionMembershipProof(fn, contractClass));
const validUnconstrainedFns = unconstrainedFns.filter(fn =>
isValidUnconstrainedFunctionMembershipProof(fn, contractClass),

const privateFunctionsWithValidity = await Promise.all(
privateFns.map(async fn => ({ fn, valid: await isValidPrivateFunctionMembershipProof(fn, contractClass) })),
);
const validPrivateFns = privateFunctionsWithValidity.filter(({ valid }) => valid).map(({ fn }) => fn);
const unconstrainedFunctionsWithValidity = await Promise.all(
unconstrainedFns.map(async fn => ({
fn,
valid: await isValidUnconstrainedFunctionMembershipProof(fn, contractClass),
})),
);
const validUnconstrainedFns = unconstrainedFunctionsWithValidity.filter(({ valid }) => valid).map(({ fn }) => fn);
const validFnCount = validPrivateFns.length + validUnconstrainedFns.length;
if (validFnCount !== allFns.length) {
this.#log.warn(`Skipping ${allFns.length - validFnCount} invalid functions`);
Expand Down
10 changes: 5 additions & 5 deletions yarn-project/archiver/src/archiver/archiver_store_test_suite.ts
Original file line number Diff line number Diff line change
Expand Up @@ -209,7 +209,7 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
() => wrapInBlock(blocks[5].data.body.txEffects[2], blocks[5].data),
() => wrapInBlock(blocks[1].data.body.txEffects[0], blocks[1].data),
])('retrieves a previously stored transaction', async getExpectedTx => {
const expectedTx = getExpectedTx();
const expectedTx = await getExpectedTx();
const actualTx = await store.getTxEffect(expectedTx.data.txHash);
expect(actualTx).toEqual(expectedTx);
});
Expand All @@ -227,7 +227,7 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
])('tries to retrieves a previously stored transaction after deleted', async getExpectedTx => {
await store.unwindBlocks(blocks.length, blocks.length);

const expectedTx = getExpectedTx();
const expectedTx = await getExpectedTx();
const actualTx = await store.getTxEffect(expectedTx.data.txHash);
expect(actualTx).toEqual(undefined);
});
Expand Down Expand Up @@ -300,10 +300,10 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
const blockNum = 10;

beforeEach(async () => {
contractClass = makeContractClassPublic();
contractClass = await makeContractClassPublic();
await store.addContractClasses(
[contractClass],
[computePublicBytecodeCommitment(contractClass.packedBytecode)],
[await computePublicBytecodeCommitment(contractClass.packedBytecode)],
blockNum,
);
});
Expand All @@ -320,7 +320,7 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
it('returns contract class if later "deployment" class was deleted', async () => {
await store.addContractClasses(
[contractClass],
[computePublicBytecodeCommitment(contractClass.packedBytecode)],
[await computePublicBytecodeCommitment(contractClass.packedBytecode)],
blockNum + 1,
);
await store.deleteContractClasses([contractClass], blockNum + 1);
Expand Down
2 changes: 1 addition & 1 deletion yarn-project/archiver/src/archiver/data_retrieval.ts
Original file line number Diff line number Diff line change
Expand Up @@ -173,7 +173,7 @@ async function getBlockFromRollupTx(
}

// TODO(#9101): Once we stop publishing calldata, we will still need the blobCheck below to ensure that the block we are building does correspond to the blob fields
const blobCheck = Blob.getBlobs(blockFields);
const blobCheck = await Blob.getBlobs(blockFields);
if (Blob.getEthBlobEvaluationInputs(blobCheck) !== blobInputs) {
// NB: We can just check the blobhash here, which is the first 32 bytes of blobInputs
// A mismatch means that the fields published in the blob in propose() do NOT match those in the extracted block.
Expand Down
41 changes: 21 additions & 20 deletions yarn-project/archiver/src/archiver/kv_archiver_store/block_store.ts
Original file line number Diff line number Diff line change
Expand Up @@ -55,12 +55,12 @@ export class BlockStore {
* @param blocks - The L2 blocks to be added to the store.
* @returns True if the operation is successful.
*/
addBlocks(blocks: L1Published<L2Block>[]): Promise<boolean> {
async addBlocks(blocks: L1Published<L2Block>[]): Promise<boolean> {
if (blocks.length === 0) {
return Promise.resolve(true);
return true;
}

return this.db.transaction(() => {
return await this.db.transaction(async () => {
for (const block of blocks) {
void this.#blocks.set(block.data.number, {
header: block.data.header.toBuffer(),
Expand All @@ -72,7 +72,7 @@ export class BlockStore {
void this.#txIndex.set(tx.txHash.toString(), [block.data.number, i]);
});

void this.#blockBodies.set(block.data.hash().toString(), block.data.body.toBuffer());
void this.#blockBodies.set((await block.data.hash()).toString(), block.data.body.toBuffer());
}

void this.#lastSynchedL1Block.set(blocks[blocks.length - 1].l1.blockNumber);
Expand All @@ -88,16 +88,16 @@ export class BlockStore {
* @param blocksToUnwind - The number of blocks we are to unwind
* @returns True if the operation is successful
*/
unwindBlocks(from: number, blocksToUnwind: number) {
return this.db.transaction(() => {
async unwindBlocks(from: number, blocksToUnwind: number) {
return await this.db.transaction(async () => {
const last = this.getSynchedL2BlockNumber();
if (from != last) {
throw new Error(`Can only unwind blocks from the tip (requested ${from} but current tip is ${last})`);
}

for (let i = 0; i < blocksToUnwind; i++) {
const blockNumber = from - i;
const block = this.getBlock(blockNumber);
const block = await this.getBlock(blockNumber);

if (block === undefined) {
throw new Error(`Cannot remove block ${blockNumber} from the store, we don't have it`);
Expand All @@ -106,7 +106,7 @@ export class BlockStore {
block.data.body.txEffects.forEach(tx => {
void this.#txIndex.delete(tx.txHash.toString());
});
const blockHash = block.data.hash().toString();
const blockHash = (await block.data.hash()).toString();
void this.#blockBodies.delete(blockHash);
this.#log.debug(`Unwound block ${blockNumber} ${blockHash}`);
}
Expand All @@ -121,9 +121,10 @@ export class BlockStore {
* @param limit - The number of blocks to return.
* @returns The requested L2 blocks
*/
*getBlocks(start: number, limit: number): IterableIterator<L1Published<L2Block>> {
async *getBlocks(start: number, limit: number): AsyncIterableIterator<L1Published<L2Block>> {
for (const blockStorage of this.#blocks.values(this.#computeBlockRange(start, limit))) {
yield this.getBlockFromBlockStorage(blockStorage);
const block = await this.getBlockFromBlockStorage(blockStorage);
yield block;
}
}

Expand All @@ -132,10 +133,10 @@ export class BlockStore {
* @param blockNumber - The number of the block to return.
* @returns The requested L2 block.
*/
getBlock(blockNumber: number): L1Published<L2Block> | undefined {
getBlock(blockNumber: number): Promise<L1Published<L2Block> | undefined> {
const blockStorage = this.#blocks.get(blockNumber);
if (!blockStorage || !blockStorage.header) {
return undefined;
return Promise.resolve(undefined);
}

return this.getBlockFromBlockStorage(blockStorage);
Expand All @@ -153,10 +154,10 @@ export class BlockStore {
}
}

private getBlockFromBlockStorage(blockStorage: BlockStorage) {
private async getBlockFromBlockStorage(blockStorage: BlockStorage) {
const header = BlockHeader.fromBuffer(blockStorage.header);
const archive = AppendOnlyTreeSnapshot.fromBuffer(blockStorage.archive);
const blockHash = header.hash().toString();
const blockHash = (await header.hash()).toString();
const blockBodyBuffer = this.#blockBodies.get(blockHash);
if (blockBodyBuffer === undefined) {
throw new Error(
Expand All @@ -174,21 +175,21 @@ export class BlockStore {
* @param txHash - The txHash of the tx corresponding to the tx effect.
* @returns The requested tx effect (or undefined if not found).
*/
getTxEffect(txHash: TxHash): InBlock<TxEffect> | undefined {
async getTxEffect(txHash: TxHash): Promise<InBlock<TxEffect> | undefined> {
const [blockNumber, txIndex] = this.getTxLocation(txHash) ?? [];
if (typeof blockNumber !== 'number' || typeof txIndex !== 'number') {
return undefined;
}

const block = this.getBlock(blockNumber);
const block = await this.getBlock(blockNumber);
if (!block) {
return undefined;
}

return {
data: block.data.body.txEffects[txIndex],
l2BlockNumber: block.data.number,
l2BlockHash: block.data.hash().toString(),
l2BlockHash: (await block.data.hash()).toString(),
};
}

Expand All @@ -197,21 +198,21 @@ export class BlockStore {
* @param txHash - The hash of a tx we try to get the receipt for.
* @returns The requested tx receipt (or undefined if not found).
*/
getSettledTxReceipt(txHash: TxHash): TxReceipt | undefined {
async getSettledTxReceipt(txHash: TxHash): Promise<TxReceipt | undefined> {
const [blockNumber, txIndex] = this.getTxLocation(txHash) ?? [];
if (typeof blockNumber !== 'number' || typeof txIndex !== 'number') {
return undefined;
}

const block = this.getBlock(blockNumber)!;
const block = (await this.getBlock(blockNumber))!;
const tx = block.data.body.txEffects[txIndex];

return new TxReceipt(
txHash,
TxReceipt.statusFromRevertCode(tx.revertCode),
'',
tx.transactionFee.toBigInt(),
L2BlockHash.fromField(block.data.hash()),
L2BlockHash.fromField(await block.data.hash()),
block.data.number,
);
}
Expand Down
Loading
Loading