From 2a8e01cc9f9ca77e75991bd584d6752e120c9db6 Mon Sep 17 00:00:00 2001
From: Alex Gherghisan
Date: Fri, 3 Jan 2025 09:56:21 +0000
Subject: [PATCH 01/20] fix: use explicit read transactions (#10911)

Open a read txn and hold it open until the entire iterator is consumed.

---------

Co-authored-by: ludamad
Co-authored-by: ludamad
---
 .../src/e2e_fees/private_payments.test.ts | 3 +-
 yarn-project/kv-store/src/lmdb/array.ts | 22 +++--
 yarn-project/kv-store/src/lmdb/map.ts | 80 +++++++++++--------
 3 files changed, 62 insertions(+), 43 deletions(-)

diff --git a/yarn-project/end-to-end/src/e2e_fees/private_payments.test.ts b/yarn-project/end-to-end/src/e2e_fees/private_payments.test.ts
index c1fa2d4fbec..39420fd44b8 100644
--- a/yarn-project/end-to-end/src/e2e_fees/private_payments.test.ts
+++ b/yarn-project/end-to-end/src/e2e_fees/private_payments.test.ts
@@ -325,8 +325,7 @@ describe('e2e_fees private_payment', () => {
   });
 
   // TODO(#7694): Remove this test once the lacking feature in TXE is implemented.
-  // TODO(#10775): Reenable, hit e.g. https://github.com/AztecProtocol/aztec-packages/actions/runs/12419409370/job/34675397831
-  it.skip('insufficient funded amount is correctly handled', async () => {
+  it('insufficient funded amount is correctly handled', async () => {
     // We call arbitrary `private_get_name(...)` function just to check the correct error is triggered.
     await expect(
       bananaCoin.methods.private_get_name().prove({
diff --git a/yarn-project/kv-store/src/lmdb/array.ts b/yarn-project/kv-store/src/lmdb/array.ts
index 19537dd7598..08aa0decda5 100644
--- a/yarn-project/kv-store/src/lmdb/array.ts
+++ b/yarn-project/kv-store/src/lmdb/array.ts
@@ -90,14 +90,20 @@ export class LmdbAztecArray<T> implements AztecArray<T>, AztecAsyncArray<T> {
   }
 
   *entries(): IterableIterator<[number, T]> {
-    const values = this.#db.getRange({
-      start: this.#slot(0),
-      limit: this.length,
-    });
-
-    for (const { key, value } of values) {
-      const index = key[3];
-      yield [index, value];
+    const transaction = this.#db.useReadTransaction();
+    try {
+      const values = this.#db.getRange({
+        start: this.#slot(0),
+        limit: this.length,
+        transaction,
+      });
+
+      for (const { key, value } of values) {
+        const index = key[3];
+        yield [index, value];
+      }
+    } finally {
+      transaction.done();
     }
   }
 
diff --git a/yarn-project/kv-store/src/lmdb/map.ts b/yarn-project/kv-store/src/lmdb/map.ts
index 4458c3c3539..dd0d9aef064 100644
--- a/yarn-project/kv-store/src/lmdb/map.ts
+++ b/yarn-project/kv-store/src/lmdb/map.ts
@@ -40,9 +40,16 @@ export class LmdbAztecMap implements AztecMultiMap, Azte
 
   *getValues(key: K): IterableIterator<V> {
-    const values = this.db.getValues(this.slot(key));
-    for (const value of values) {
-      yield value?.[1];
+    const transaction = this.db.useReadTransaction();
+    try {
+      const values = this.db.getValues(this.slot(key), {
+        transaction,
+      });
+      for (const value of values) {
+        yield value?.[1];
+      }
+    } finally {
+      transaction.done();
     }
   }
 
@@ -88,38 +95,45 @@ export class LmdbAztecMap implements AztecMultiMap, Azte
   *entries(range: Range<K> = {}): IterableIterator<[K, V]> {
-    const { reverse = false, limit } = range;
-    // LMDB has a quirk where it expects start > end when reverse=true
-    // in that case, we need to swap the start and end sentinels
-    const start = reverse
-      ? range.end
-        ? this.slot(range.end)
-        : this.endSentinel
-      : range.start
-        ? this.slot(range.start)
-        : this.startSentinel;
-
-    const end = reverse
-      ? range.start
-        ? this.slot(range.start)
-        : this.startSentinel
-      : range.end
-        ? this.slot(range.end)
-        : this.endSentinel;
-
-    const lmdbRange: RangeOptions = {
-      start,
-      end,
-      reverse,
-      limit,
-    };
-
-    const iterator = this.db.getRange(lmdbRange);
-
-    for (const {
-      value: [key, value],
-    } of iterator) {
-      yield [key, value];
+    const transaction = this.db.useReadTransaction();
+
+    try {
+      const { reverse = false, limit } = range;
+      // LMDB has a quirk where it expects start > end when reverse=true
+      // in that case, we need to swap the start and end sentinels
+      const start = reverse
+        ? range.end
+          ? this.slot(range.end)
+          : this.endSentinel
+        : range.start
+          ? this.slot(range.start)
+          : this.startSentinel;
+
+      const end = reverse
+        ? 
range.start + const transaction = this.db.useReadTransaction(); + + try { + const { reverse = false, limit } = range; + // LMDB has a quirk where it expects start > end when reverse=true + // in that case, we need to swap the start and end sentinels + const start = reverse + ? range.end + ? this.slot(range.end) + : this.endSentinel + : range.start ? this.slot(range.start) - : this.startSentinel - : range.end - ? this.slot(range.end) - : this.endSentinel; + : this.startSentinel; - const lmdbRange: RangeOptions = { - start, - end, - reverse, - limit, - }; - - const iterator = this.db.getRange(lmdbRange); - - for (const { - value: [key, value], - } of iterator) { - yield [key, value]; + const end = reverse + ? range.start + ? this.slot(range.start) + : this.startSentinel + : range.end + ? this.slot(range.end) + : this.endSentinel; + + const lmdbRange: RangeOptions = { + start, + end, + reverse, + limit, + transaction, + }; + + const iterator = this.db.getRange(lmdbRange); + + for (const { + value: [key, value], + } of iterator) { + yield [key, value]; + } + } finally { + transaction.done(); } } From dc12c2b678e0c450c05cbd4748296e17ae73860b Mon Sep 17 00:00:00 2001 From: Aztec Bot <49558828+AztecBot@users.noreply.github.com> Date: Fri, 3 Jan 2025 05:04:10 -0500 Subject: [PATCH 02/20] feat: Sync from noir (#11022) Automated pull of development from the [noir](https://github.com/noir-lang/noir) programming language, a dependency of Aztec. BEGIN_COMMIT_OVERRIDE feat(ssa): Hoist add and mul binary ops using known induction variables (https://github.com/noir-lang/noir/pull/6910) chore: fix warning (https://github.com/noir-lang/noir/pull/6927) chore(ci): Memory reports for execution (https://github.com/noir-lang/noir/pull/6907) chore: use ssa parser in flattening pass tests (https://github.com/noir-lang/noir/pull/6868) feat(LSP): suggest trait methods from where clauses (https://github.com/noir-lang/noir/pull/6915) feat: warn on trait method visibility (https://github.com/noir-lang/noir/pull/6923) feat!: Switch to using `jsonrpsee` for foreign calls; refactor `run_test`; foreign call layering (https://github.com/noir-lang/noir/pull/6849) chore: add rollup circuits to memory reports (https://github.com/noir-lang/noir/pull/6897) chore: remove unused dependency (https://github.com/noir-lang/noir/pull/6922) chore: add if/loop tip (separate from no-predicate #5657) (https://github.com/noir-lang/noir/pull/6806) chore: move implementation of print foreign call into `nargo` (https://github.com/noir-lang/noir/pull/6865) chore: document format strings (https://github.com/noir-lang/noir/pull/6920) END_COMMIT_OVERRIDE --------- Co-authored-by: Tom French <15848336+TomAFrench@users.noreply.github.com> --- .noir-sync-commit | 2 +- noir/noir-repo/.github/workflows/reports.yml | 173 ++- noir/noir-repo/Cargo.lock | 1042 +++++++++-------- noir/noir-repo/Cargo.toml | 4 +- .../noirc_evaluator/src/ssa/ir/instruction.rs | 2 +- .../src/ssa/ir/instruction/binary.rs | 2 +- .../src/ssa/opt/flatten_cfg.rs | 159 +-- .../src/ssa/opt/loop_invariant.rs | 28 +- .../src/hir/def_collector/dc_mod.rs | 9 +- .../noirc_frontend/src/parser/errors.rs | 12 +- .../noirc_frontend/src/parser/parser/impls.rs | 4 +- .../src/parser/parser/traits.rs | 24 +- .../compiler/noirc_printable_type/Cargo.toml | 4 - .../compiler/noirc_printable_type/src/lib.rs | 212 +--- noir/noir-repo/compiler/wasm/Cargo.toml | 6 +- noir/noir-repo/cspell.json | 2 + noir/noir-repo/deny.toml | 7 +- .../docs/explainers/explainer-writing-noir.md | 12 + 
.../docs/noir/concepts/data_types/strings.md | 35 + .../noir/concepts/data_types/strings.md | 35 + noir/noir-repo/test_programs/memory_report.sh | 11 +- .../tooling/acvm_cli/src/cli/execute_cmd.rs | 5 +- .../tooling/debugger/src/foreign_calls.rs | 55 +- .../tooling/lsp/src/requests/completion.rs | 57 +- .../lsp/src/requests/completion/tests.rs | 18 + .../noir-repo/tooling/lsp/src/requests/mod.rs | 4 +- .../tooling/lsp/src/requests/test_run.rs | 13 +- noir/noir-repo/tooling/nargo/Cargo.toml | 25 +- noir/noir-repo/tooling/nargo/src/errors.rs | 3 +- .../nargo/src/foreign_calls/default.rs | 115 ++ .../tooling/nargo/src/foreign_calls/layers.rs | 140 +++ .../tooling/nargo/src/foreign_calls/mocker.rs | 41 +- .../tooling/nargo/src/foreign_calls/mod.rs | 103 +- .../tooling/nargo/src/foreign_calls/print.rs | 93 +- .../tooling/nargo/src/foreign_calls/rpc.rs | 216 ++-- noir/noir-repo/tooling/nargo/src/lib.rs | 6 +- noir/noir-repo/tooling/nargo/src/ops/mod.rs | 2 +- noir/noir-repo/tooling/nargo/src/ops/test.rs | 129 +- noir/noir-repo/tooling/nargo_cli/Cargo.toml | 10 +- .../tooling/nargo_cli/benches/criterion.rs | 3 +- .../tooling/nargo_cli/src/cli/info_cmd.rs | 5 +- .../tooling/nargo_cli/src/cli/test_cmd.rs | 17 +- .../tooling/nargo_cli/tests/execute.rs | 2 - .../tooling/nargo_cli/tests/stdlib-props.rs | 7 +- .../tooling/nargo_cli/tests/stdlib-tests.rs | 7 +- .../nargo_fmt/src/formatter/function.rs | 21 +- .../tooling/nargo_fmt/src/formatter/impls.rs | 4 +- .../tooling/nargo_fmt/src/formatter/item.rs | 5 +- .../nargo_fmt/src/formatter/trait_impl.rs | 31 +- .../tooling/nargo_fmt/src/formatter/traits.rs | 31 +- noir/noir-repo/tooling/nargo_fmt/src/lib.rs | 1 + noir/noir-repo/tooling/noirc_abi/src/lib.rs | 16 +- .../tooling/noirc_abi/src/printable_type.rs | 78 ++ .../tooling/noirc_artifacts/src/debug_vars.rs | 9 +- .../src/cli/execution_flamegraph_cmd.rs | 4 +- 55 files changed, 1858 insertions(+), 1203 deletions(-) create mode 100644 noir/noir-repo/tooling/nargo/src/foreign_calls/default.rs create mode 100644 noir/noir-repo/tooling/nargo/src/foreign_calls/layers.rs create mode 100644 noir/noir-repo/tooling/noirc_abi/src/printable_type.rs diff --git a/.noir-sync-commit b/.noir-sync-commit index 619e948d03f..ba95bd5998e 100644 --- a/.noir-sync-commit +++ b/.noir-sync-commit @@ -1 +1 @@ -c8a25b52a484115663cccf614bbebe3ca11778f3 +ebc4d2cf2b91658a10393733407f33d50a0faaf1 diff --git a/noir/noir-repo/.github/workflows/reports.yml b/noir/noir-repo/.github/workflows/reports.yml index 625d3771277..85a54147423 100644 --- a/noir/noir-repo/.github/workflows/reports.yml +++ b/noir/noir-repo/.github/workflows/reports.yml @@ -217,10 +217,24 @@ jobs: ./memory_report.sh mv memory_report.json ../memory_report.json - - name: Upload memory report + - name: Upload compilation memory report uses: actions/upload-artifact@v4 with: - name: in_progress_memory_report + name: in_progress_compilation_mem_report + path: memory_report.json + retention-days: 3 + overwrite: true + + - name: Generate execution memory report + working-directory: ./test_programs + run: | + ./memory_report.sh 0 1 + mv memory_report.json ../memory_report.json + + - name: Upload execution memory report + uses: actions/upload-artifact@v4 + with: + name: in_progress_execution_mem_report path: memory_report.json retention-days: 3 overwrite: true @@ -414,6 +428,50 @@ jobs: mv /home/runner/work/noir/noir/scripts/test_programs/execution_report.sh ./execution_report.sh ./execution_report.sh 1 1 + - name: Generate compilation report with averages + working-directory: 
./test-repo/${{ matrix.project.path }} + if: ${{ matrix.project.take_average }} + run: | + mv /home/runner/work/noir/noir/scripts/test_programs/compilation_report.sh ./compilation_report.sh + chmod +x ./compilation_report.sh + ./compilation_report.sh 1 1 + + - name: Generate execution report without averages + working-directory: ./test-repo/${{ matrix.project.path }} + if: ${{ !matrix.project.is_library && !matrix.project.take_average }} + run: | + mv /home/runner/work/noir/noir/scripts/test_programs/execution_report.sh ./execution_report.sh + ./execution_report.sh 1 + + - name: Generate execution report with averages + working-directory: ./test-repo/${{ matrix.project.path }} + if: ${{ !matrix.project.is_library && matrix.project.take_average }} + run: | + mv /home/runner/work/noir/noir/scripts/test_programs/execution_report.sh ./execution_report.sh + ./execution_report.sh 1 1 + + - name: Generate compilation report with averages + working-directory: ./test-repo/${{ matrix.project.path }} + if: ${{ matrix.project.take_average }} + run: | + mv /home/runner/work/noir/noir/scripts/test_programs/compilation_report.sh ./compilation_report.sh + chmod +x ./compilation_report.sh + ./compilation_report.sh 1 1 + + - name: Generate execution report without averages + working-directory: ./test-repo/${{ matrix.project.path }} + if: ${{ !matrix.project.is_library && !matrix.project.take_average }} + run: | + mv /home/runner/work/noir/noir/scripts/test_programs/execution_report.sh ./execution_report.sh + ./execution_report.sh 1 + + - name: Generate execution report with averages + working-directory: ./test-repo/${{ matrix.project.path }} + if: ${{ !matrix.project.is_library && matrix.project.take_average }} + run: | + mv /home/runner/work/noir/noir/scripts/test_programs/execution_report.sh ./execution_report.sh + ./execution_report.sh 1 1 + - name: Move compilation report id: compilation_report shell: bash @@ -479,11 +537,11 @@ jobs: - name: Parse compilation report id: compilation_report - uses: noir-lang/noir-bench-report@e408e131e96c3615b4f820d7d642360fb4d6e2f4 + uses: noir-lang/noir-bench-report@6ba151d7795042c4ff51864fbeb13c0a6a79246c with: report: compilation_report.json header: | - # Compilation Report + Compilation Report memory_report: false - name: Add memory report to sticky comment @@ -507,6 +565,8 @@ jobs: - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/noir-protocol-circuits/crates/private-kernel-inner } - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/noir-protocol-circuits/crates/private-kernel-reset } - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/noir-protocol-circuits/crates/private-kernel-tail } + - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/noir-protocol-circuits/crates/rollup-base-private } + - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/noir-protocol-circuits/crates/rollup-base-public } name: External repo memory report - ${{ matrix.project.repo }}/${{ matrix.project.path }} steps: @@ -538,15 +598,38 @@ jobs: path: test-repo ref: ${{ matrix.project.ref }} - - name: Generate compilation report + - name: Generate compilation memory report working-directory: ./test-repo/${{ matrix.project.path }} run: | mv /home/runner/work/noir/noir/scripts/test_programs/memory_report.sh ./memory_report.sh - chmod +x ./memory_report.sh ./memory_report.sh 1 + # Rename the memory report as the execution report is about to write to the same file + cp memory_report.json 
compilation_memory_report.json + + - name: Generate execution memory report + working-directory: ./test-repo/${{ matrix.project.path }} + run: | + ./memory_report.sh 1 1 - name: Move compilation report - id: report + id: compilation_mem_report + shell: bash + run: | + PACKAGE_NAME=${{ matrix.project.path }} + PACKAGE_NAME=$(basename $PACKAGE_NAME) + mv ./test-repo/${{ matrix.project.path }}/compilation_memory_report.json ./memory_report_$PACKAGE_NAME.json + echo "memory_report_name=$PACKAGE_NAME" >> $GITHUB_OUTPUT + + - name: Upload compilation memory report + uses: actions/upload-artifact@v4 + with: + name: compilation_mem_report_${{ steps.compilation_mem_report.outputs.memory_report_name }} + path: memory_report_${{ steps.compilation_mem_report.outputs.memory_report_name }}.json + retention-days: 3 + overwrite: true + + - name: Move execution report + id: execution_mem_report shell: bash run: | PACKAGE_NAME=${{ matrix.project.path }} @@ -554,16 +637,16 @@ jobs: mv ./test-repo/${{ matrix.project.path }}/memory_report.json ./memory_report_$PACKAGE_NAME.json echo "memory_report_name=$PACKAGE_NAME" >> $GITHUB_OUTPUT - - name: Upload memory report + - name: Upload execution memory report uses: actions/upload-artifact@v4 with: - name: memory_report_${{ steps.report.outputs.memory_report_name }} - path: memory_report_${{ steps.report.outputs.memory_report_name }}.json + name: execution_mem_report_${{ steps.execution_mem_report.outputs.memory_report_name }} + path: memory_report_${{ steps.execution_mem_report.outputs.memory_report_name }}.json retention-days: 3 overwrite: true - upload_memory_report: - name: Upload memory report + upload_compilation_memory_report: + name: Upload compilation memory report needs: [generate_memory_report, external_repo_memory_report] # We want this job to run even if one variation of the matrix in `external_repo_memory_report` fails if: always() @@ -577,12 +660,12 @@ jobs: - name: Download initial memory report uses: actions/download-artifact@v4 with: - name: in_progress_memory_report + name: in_progress_compilation_mem_report - name: Download matrix memory reports uses: actions/download-artifact@v4 with: - pattern: memory_report_* + pattern: compilation_mem_report_* path: ./reports - name: Merge memory reports using jq @@ -590,21 +673,67 @@ jobs: mv ./.github/scripts/merge-bench-reports.sh merge-bench-reports.sh ./merge-bench-reports.sh memory_report - - name: Parse memory report - id: memory_report - uses: noir-lang/noir-bench-report@e408e131e96c3615b4f820d7d642360fb4d6e2f4 + - name: Parse compilation memory report + id: compilation_mem_report + uses: noir-lang/noir-bench-report@6ba151d7795042c4ff51864fbeb13c0a6a79246c with: report: memory_report.json header: | - # Memory Report + Compilation Memory Report memory_report: true - name: Add memory report to sticky comment if: github.event_name == 'pull_request' || github.event_name == 'pull_request_target' uses: marocchino/sticky-pull-request-comment@v2 with: - header: memory - message: ${{ steps.memory_report.outputs.markdown }} + header: compilation_memory + message: ${{ steps.compilation_mem_report.outputs.markdown }} + + upload_execution_memory_report: + name: Upload execution memory report + needs: [generate_memory_report, external_repo_memory_report] + # We want this job to run even if one variation of the matrix in `external_repo_memory_report` fails + if: always() + runs-on: ubuntu-latest + permissions: + pull-requests: write + + steps: + - uses: actions/checkout@v4 + + - name: Download initial memory report 
+ uses: actions/download-artifact@v4 + with: + name: in_progress_execution_mem_report + + - name: Download matrix memory reports + uses: actions/download-artifact@v4 + with: + pattern: execution_mem_report_* + path: ./reports + + - name: Merge memory reports using jq + run: | + mv ./.github/scripts/merge-bench-reports.sh merge-bench-reports.sh + ./merge-bench-reports.sh memory_report + # Rename the memory report as to not clash with the compilation memory report file name + cp memory_report.json execution_memory_report.json + + - name: Parse execution memory report + id: execution_mem_report + uses: noir-lang/noir-bench-report@6ba151d7795042c4ff51864fbeb13c0a6a79246c + with: + report: execution_memory_report.json + header: | + Execution Memory Report + memory_report: true + + - name: Add execution memory report to sticky comment + if: github.event_name == 'pull_request' || github.event_name == 'pull_request_target' + uses: marocchino/sticky-pull-request-comment@v2 + with: + header: execution_memory + message: ${{ steps.execution_mem_report.outputs.markdown }} upload_execution_report: name: Upload execution report @@ -636,11 +765,11 @@ jobs: - name: Parse execution report id: execution_report - uses: noir-lang/noir-bench-report@e408e131e96c3615b4f820d7d642360fb4d6e2f4 + uses: noir-lang/noir-bench-report@6ba151d7795042c4ff51864fbeb13c0a6a79246c with: report: execution_report.json header: | - # Execution Report + Execution Report execution_report: true - name: Add memory report to sticky comment diff --git a/noir/noir-repo/Cargo.lock b/noir/noir-repo/Cargo.lock index a29cb44acd4..f8d09d8d39c 100644 --- a/noir/noir-repo/Cargo.lock +++ b/noir/noir-repo/Cargo.lock @@ -31,7 +31,7 @@ dependencies = [ "ark-bls12-381", "ark-bn254", "ark-ff", - "cfg-if 1.0.0", + "cfg-if", "hex", "num-bigint", "proptest", @@ -88,9 +88,9 @@ dependencies = [ "nargo", "paste", "proptest", - "rand 0.8.5", + "rand", "thiserror", - "toml 0.7.8", + "toml", "tracing-appender", "tracing-subscriber", ] @@ -104,7 +104,7 @@ dependencies = [ "build-data", "console_error_panic_hook", "const-str", - "getrandom 0.2.15", + "getrandom", "gloo-utils", "js-sys", "pkg-config", @@ -143,8 +143,8 @@ version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" dependencies = [ - "cfg-if 1.0.0", - "getrandom 0.2.15", + "cfg-if", + "getrandom", "once_cell", "version_check", "zerocopy", @@ -370,7 +370,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94893f1e0c6eeab764ade8dc4c0db24caf4fe7cbbaafc0eba0a9030f447b5185" dependencies = [ "num-traits", - "rand 0.8.5", + "rand", ] [[package]] @@ -422,7 +422,7 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "138985dd8aefbefeaa66b01b7f5b2b6b4c333fcef1cc5f32c63a2aabe37d6de3" dependencies = [ - "futures 0.3.31", + "futures", "lsp-types 0.94.1", "pin-project-lite", "rustix", @@ -436,6 +436,23 @@ dependencies = [ "waitpid-any", ] +[[package]] +name = "async-trait" +version = "0.1.83" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "721cae7de5c34fbb2acd27e21e6d2cf7b886dce0c27388d46c4e6c47ea4318dd" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.87", +] + +[[package]] +name = "atomic-waker" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" + [[package]] name = "autocfg" version = 
"1.4.0" @@ -450,7 +467,7 @@ checksum = "26b05800d2e817c8b3b4b54abd461726265fa9789ae34330622f2db9ee696f9d" dependencies = [ "addr2line", "cc", - "cfg-if 1.0.0", + "cfg-if", "libc", "miniz_oxide 0.7.4", "object", @@ -463,12 +480,6 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "349a06037c7bf932dd7e7d1f653678b2038b9ad46a74102f1fc7bd7872678cce" -[[package]] -name = "base64" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" - [[package]] name = "base64" version = "0.21.7" @@ -573,7 +584,7 @@ dependencies = [ "arrayref", "arrayvec", "cc", - "cfg-if 1.0.0", + "cfg-if", "constant_time_eq", ] @@ -595,7 +606,7 @@ dependencies = [ "ff 0.12.1", "group 0.12.1", "pairing", - "rand_core 0.6.4", + "rand_core", "subtle", ] @@ -682,38 +693,6 @@ version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ac0150caa2ae65ca5bd83f25c7de183dea78d4d366469f148435e2acfbad0da" -[[package]] -name = "camino" -version = "1.1.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b96ec4966b5813e2c0507c1f86115c8c5abaadc3980879c3424042a02fd1ad3" -dependencies = [ - "serde", -] - -[[package]] -name = "cargo-platform" -version = "0.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24b1f0365a6c6bb4020cd05806fd0d33c44d38046b8bd7f0e40814b9763cabfc" -dependencies = [ - "serde", -] - -[[package]] -name = "cargo_metadata" -version = "0.15.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eee4243f1f26fc7a42710e7439c149e2b10b05472f88090acce52632f231a73a" -dependencies = [ - "camino", - "cargo-platform", - "semver", - "serde", - "serde_json", - "thiserror", -] - [[package]] name = "cast" version = "0.3.0" @@ -730,10 +709,10 @@ dependencies = [ ] [[package]] -name = "cfg-if" -version = "0.1.10" +name = "cesu8" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" +checksum = "6d43a04d8753f35258c91f8ec639f792891f748a1edbd759cf1dcea3382ad83c" [[package]] name = "cfg-if" @@ -869,7 +848,7 @@ checksum = "fc4159b76af02757139baf42c0c971c6dc155330999fbfd8eddb29b97fb2db68" dependencies = [ "codespan-reporting", "lsp-types 0.88.0", - "url 2.5.4", + "url", ] [[package]] @@ -916,6 +895,16 @@ version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990" +[[package]] +name = "combine" +version = "4.6.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba5a308b75df32fe02788e748662718f03fde005016435c444eea572398219fd" +dependencies = [ + "bytes", + "memchr", +] + [[package]] name = "comma" version = "1.0.0" @@ -940,7 +929,7 @@ version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a06aeb73f470f66dcdbf7223caeebb85984942f22f1adb2a088cf9668146bbbc" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "wasm-bindgen", ] @@ -982,12 +971,6 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7c74b8349d32d297c9134b8c88677813a227df8f779daa29bfc29c183fe3dca6" -[[package]] -name = "convert_case" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" - 
[[package]] name = "convert_case" version = "0.6.0" @@ -997,6 +980,16 @@ dependencies = [ "unicode-segmentation", ] +[[package]] +name = "core-foundation" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" +dependencies = [ + "core-foundation-sys", + "libc", +] + [[package]] name = "core-foundation-sys" version = "0.8.7" @@ -1009,7 +1002,7 @@ version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "96e58d342ad113c2b878f16d5d034c03be492ae460cdbc02b7f0f2284d310c7d" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", ] [[package]] @@ -1027,7 +1020,7 @@ version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a97769d94ddab943e4510d138150169a2758b5ef3eb191a9ee688de3e23ef7b3" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", ] [[package]] @@ -1113,7 +1106,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ef2b4b23cddf68b89b8f8069890e8c270d54e2d5fe1b143820234805e4cb17ef" dependencies = [ "generic-array", - "rand_core 0.6.4", + "rand_core", "subtle", "zeroize", ] @@ -1201,12 +1194,12 @@ version = "6.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5041cc499144891f3790297212f32a74fb938e5136a14943f338ef9e0ae276cf" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "crossbeam-utils", "hashbrown 0.14.5", "lock_api", "once_cell", - "parking_lot_core 0.9.10", + "parking_lot_core", ] [[package]] @@ -1249,19 +1242,6 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "derive_more" -version = "0.99.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f33878137e4dafd7fa914ad4e259e18a4e8e532b9617a2d0150262bf53abfce" -dependencies = [ - "convert_case 0.4.0", - "proc-macro2", - "quote", - "rustc_version", - "syn 2.0.87", -] - [[package]] name = "difflib" version = "0.4.0" @@ -1294,7 +1274,7 @@ version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b98cf8ebf19c3d1b223e151f99a4f9f0690dca41414773390fc824184ac833e1" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "dirs-sys-next", ] @@ -1384,7 +1364,7 @@ dependencies = [ "generic-array", "group 0.12.1", "pkcs8", - "rand_core 0.6.4", + "rand_core", "sec1", "subtle", "zeroize", @@ -1495,7 +1475,7 @@ version = "3.0.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ef033ed5e9bad94e55838ca0ca906db0e043f517adda0c8b79c7a8c66c93c1b5" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "rustix", "windows-sys 0.48.0", ] @@ -1507,7 +1487,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d013fc25338cc558c5c2cfbad646908fb23591e2404481826742b651c9af7160" dependencies = [ "bitvec", - "rand_core 0.6.4", + "rand_core", "subtle", ] @@ -1518,7 +1498,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ded41244b729663b1e574f1b4fb731469f69f79c17667b5d776b16cda0479449" dependencies = [ "bitvec", - "rand_core 0.6.4", + "rand_core", "subtle", ] @@ -1556,7 +1536,7 @@ version = "0.2.25" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "35c0522e981e68cbfa8c3f978441a5f34b30b96e146b33cd3359176b50fe8586" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "libc", "libredox 0.1.3", "windows-sys 0.59.0", @@ -1620,7 +1600,7 @@ version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" dependencies = [ - "percent-encoding 2.3.1", + "percent-encoding", ] [[package]] @@ -1638,12 +1618,6 @@ version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" -[[package]] -name = "futures" -version = "0.1.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a471a38ef8ed83cd6e40aa59c1ffe17db6855c18e3604d9c4ed8c08ebc28678" - [[package]] name = "futures" version = "0.3.31" @@ -1652,7 +1626,6 @@ checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" dependencies = [ "futures-channel", "futures-core", - "futures-executor", "futures-io", "futures-sink", "futures-task", @@ -1675,18 +1648,6 @@ version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" -[[package]] -name = "futures-executor" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f" -dependencies = [ - "futures-core", - "futures-task", - "futures-util", - "num_cpus", -] - [[package]] name = "futures-io" version = "0.3.31" @@ -1722,7 +1683,6 @@ version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" dependencies = [ - "futures 0.1.31", "futures-channel", "futures-core", "futures-io", @@ -1754,27 +1714,16 @@ dependencies = [ "version_check", ] -[[package]] -name = "getrandom" -version = "0.1.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce" -dependencies = [ - "cfg-if 1.0.0", - "libc", - "wasi 0.9.0+wasi-snapshot-preview1", -] - [[package]] name = "getrandom" version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "js-sys", "libc", - "wasi 0.11.0+wasi-snapshot-preview1", + "wasi", "wasm-bindgen", ] @@ -1835,7 +1784,7 @@ checksum = "5dfbfb3a6cfbd390d5c9564ab283a0349b9b9fcd46a706c1eb10e0db70bfbac7" dependencies = [ "ff 0.12.1", "memuse", - "rand_core 0.6.4", + "rand_core", "subtle", ] @@ -1846,17 +1795,36 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f0f9ef7462f7c099f518d754361858f86d8a07af53ba9af0fe635bbccb151a63" dependencies = [ "ff 0.13.0", - "rand_core 0.6.4", + "rand_core", "subtle", ] +[[package]] +name = "h2" +version = "0.4.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccae279728d634d083c00f6099cb58f01cc99c145b84b8be2f6c74618d79922e" +dependencies = [ + "atomic-waker", + "bytes", + "fnv", + "futures-core", + "futures-sink", + "http", + "indexmap 2.6.0", + "slab", + "tokio", + "tokio-util", + "tracing", +] + [[package]] name = "half" version = "2.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6dd08c532ae367adf81c312a4580bc67f1d0fe8bc9c460520283f4c0ff277888" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "crunchy", ] @@ -1879,7 +1847,7 @@ dependencies = [ "ff 0.12.1", "group 0.12.1", "pasta_curves 0.4.1", - "rand_core 0.6.4", + "rand_core", "rayon", ] @@ -1960,9 +1928,9 @@ dependencies = [ [[package]] name = "http" -version = "0.2.12" +version = 
"1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" +checksum = "f16ca2af56261c99fba8bac40a10251ce8188205a4c448fbb745a2e4daa76fea" dependencies = [ "bytes", "fnv", @@ -1971,12 +1939,24 @@ dependencies = [ [[package]] name = "http-body" -version = "0.4.6" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" +dependencies = [ + "bytes", + "http", +] + +[[package]] +name = "http-body-util" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" +checksum = "793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f" dependencies = [ "bytes", + "futures-util", "http", + "http-body", "pin-project-lite", ] @@ -1994,25 +1974,60 @@ checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" [[package]] name = "hyper" -version = "0.14.31" +version = "1.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c08302e8fa335b151b788c775ff56e7a03ae64ff85c548ee820fecb70356e85" +checksum = "256fb8d4bd6413123cc9d91832d78325c48ff41677595be797d90f42969beae0" dependencies = [ "bytes", "futures-channel", - "futures-core", "futures-util", + "h2", "http", "http-body", "httparse", "httpdate", "itoa", "pin-project-lite", + "smallvec", + "tokio", + "want", +] + +[[package]] +name = "hyper-rustls" +version = "0.27.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08afdbb5c31130e3034af566421053ab03787c640246a446327f550d11bcb333" +dependencies = [ + "futures-util", + "http", + "hyper", + "hyper-util", + "log", + "rustls", + "rustls-pki-types", + "tokio", + "tokio-rustls", + "tower-service", +] + +[[package]] +name = "hyper-util" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df2dcfbe0677734ab2f3ffa7fa7bfd4706bfdc1ef393f2ee30184aed67e631b4" +dependencies = [ + "bytes", + "futures-channel", + "futures-util", + "http", + "http-body", + "hyper", + "pin-project-lite", "socket2", "tokio", "tower-service", "tracing", - "want", ] [[package]] @@ -2168,17 +2183,6 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" -[[package]] -name = "idna" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38f09e0f0b1fb55fdee1f17470ad800da77af5186a1a76c026b679358b7e844e" -dependencies = [ - "matches", - "unicode-bidi", - "unicode-normalization", -] - [[package]] name = "idna" version = "1.0.3" @@ -2223,7 +2227,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d0acd33ff0285af998aaf9b57342af478078f53492322fafc47450e09397e0e9" dependencies = [ "bitmaps", - "rand_core 0.6.4", + "rand_core", "rand_xoshiro", "serde", "sized-chunks", @@ -2326,15 +2330,6 @@ dependencies = [ "libc", ] -[[package]] -name = "instant" -version = "0.1.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0242819d153cba4b4b05a5a8f2a7e9bbf97b6055b2a002b395c96b5ff3c0222" -dependencies = [ - "cfg-if 1.0.0", -] - [[package]] name = "is-terminal" version = "0.4.13" @@ -2372,126 +2367,147 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" [[package]] -name = "js-sys" -version = "0.3.63" +name = "jni" +version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f37a4a5928311ac501dee68b3c7613a1037d0edb30c8e5427bd832d55d1b790" +checksum = "c6df18c2e3db7e453d3c6ac5b3e9d5182664d28788126d39b91f2d1e22b017ec" dependencies = [ - "wasm-bindgen", + "cesu8", + "combine", + "jni-sys", + "log", + "thiserror", + "walkdir", ] [[package]] -name = "jsonrpc" -version = "0.16.0" +name = "jni-sys" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8eaf4bc02d17cbdd7ff4c7438cafcdf7fb9a4613313ad11b4f8fefe7d3fa0130" + +[[package]] +name = "js-sys" +version = "0.3.63" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34efde8d2422fb79ed56db1d3aea8fa5b583351d15a26770cdee2f88813dd702" +checksum = "2f37a4a5928311ac501dee68b3c7613a1037d0edb30c8e5427bd832d55d1b790" dependencies = [ - "base64 0.13.1", - "minreq", - "serde", - "serde_json", + "wasm-bindgen", ] [[package]] -name = "jsonrpc-client-transports" -version = "18.0.0" +name = "jsonrpsee" +version = "0.24.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2b99d4207e2a04fb4581746903c2bb7eb376f88de9c699d0f3e10feeac0cd3a" +checksum = "c5c71d8c1a731cc4227c2f698d377e7848ca12c8a48866fc5e6951c43a4db843" dependencies = [ - "derive_more", - "futures 0.3.31", - "jsonrpc-core", - "jsonrpc-pubsub", - "log", - "serde", - "serde_json", - "url 1.7.2", + "jsonrpsee-core", + "jsonrpsee-http-client", + "jsonrpsee-proc-macros", + "jsonrpsee-server", + "jsonrpsee-types", + "tokio", + "tracing", ] [[package]] -name = "jsonrpc-core" -version = "18.0.0" +name = "jsonrpsee-core" +version = "0.24.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14f7f76aef2d054868398427f6c54943cf3d1caa9a7ec7d0c38d69df97a965eb" +checksum = "f2882f6f8acb9fdaec7cefc4fd607119a9bd709831df7d7672a1d3b644628280" dependencies = [ - "futures 0.3.31", - "futures-executor", + "async-trait", + "bytes", "futures-util", - "log", + "http", + "http-body", + "http-body-util", + "jsonrpsee-types", + "parking_lot", + "rand", + "rustc-hash 2.1.0", "serde", - "serde_derive", "serde_json", + "thiserror", + "tokio", + "tracing", ] [[package]] -name = "jsonrpc-core-client" -version = "18.0.0" +name = "jsonrpsee-http-client" +version = "0.24.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b51da17abecbdab3e3d4f26b01c5ec075e88d3abe3ab3b05dc9aa69392764ec0" +checksum = "b3638bc4617f96675973253b3a45006933bde93c2fd8a6170b33c777cc389e5b" dependencies = [ - "futures 0.3.31", - "jsonrpc-client-transports", + "async-trait", + "base64 0.22.1", + "http-body", + "hyper", + "hyper-rustls", + "hyper-util", + "jsonrpsee-core", + "jsonrpsee-types", + "rustls", + "rustls-platform-verifier", + "serde", + "serde_json", + "thiserror", + "tokio", + "tower", + "tracing", + "url", ] [[package]] -name = "jsonrpc-derive" -version = "18.0.0" +name = "jsonrpsee-proc-macros" +version = "0.24.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b939a78fa820cdfcb7ee7484466746a7377760970f6f9c6fe19f9edcc8a38d2" +checksum = "c06c01ae0007548e73412c08e2285ffe5d723195bf268bce67b1b77c3bb2a14d" dependencies = [ + "heck 0.5.0", "proc-macro-crate", "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.87", ] [[package]] -name = "jsonrpc-http-server" -version = "18.0.0" +name = "jsonrpsee-server" +version = 
"0.24.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1dea6e07251d9ce6a552abfb5d7ad6bc290a4596c8dcc3d795fae2bbdc1f3ff" +checksum = "82ad8ddc14be1d4290cd68046e7d1d37acd408efed6d3ca08aefcc3ad6da069c" dependencies = [ - "futures 0.3.31", + "futures-util", + "http", + "http-body", + "http-body-util", "hyper", - "jsonrpc-core", - "jsonrpc-server-utils", - "log", - "net2", - "parking_lot 0.11.2", - "unicase", -] - -[[package]] -name = "jsonrpc-pubsub" -version = "18.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "240f87695e6c6f62fb37f05c02c04953cf68d6408b8c1c89de85c7a0125b1011" -dependencies = [ - "futures 0.3.31", - "jsonrpc-core", - "lazy_static", - "log", - "parking_lot 0.11.2", - "rand 0.7.3", + "hyper-util", + "jsonrpsee-core", + "jsonrpsee-types", + "pin-project", + "route-recognizer", "serde", + "serde_json", + "soketto", + "thiserror", + "tokio", + "tokio-stream", + "tokio-util", + "tower", + "tracing", ] [[package]] -name = "jsonrpc-server-utils" -version = "18.0.0" +name = "jsonrpsee-types" +version = "0.24.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa4fdea130485b572c39a460d50888beb00afb3e35de23ccd7fad8ff19f0e0d4" +checksum = "a178c60086f24cc35bb82f57c651d0d25d99c4742b4d335de04e97fa1f08a8a1" dependencies = [ - "bytes", - "futures 0.3.31", - "globset", - "jsonrpc-core", - "lazy_static", - "log", - "tokio", - "tokio-stream", - "tokio-util 0.6.10", - "unicase", + "http", + "serde", + "serde_json", + "thiserror", ] [[package]] @@ -2504,7 +2520,7 @@ dependencies = [ "bls12_381", "ff 0.12.1", "group 0.12.1", - "rand_core 0.6.4", + "rand_core", "subtle", ] @@ -2514,7 +2530,7 @@ version = "0.11.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72c1e0b51e7ec0a97369623508396067a486bd0cbed95a2659a4b863d28cfc8b" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "ecdsa", "elliptic-curve", "sha2", @@ -2657,7 +2673,7 @@ dependencies = [ "serde", "serde_json", "serde_repr", - "url 2.5.4", + "url", ] [[package]] @@ -2670,7 +2686,7 @@ dependencies = [ "serde", "serde_json", "serde_repr", - "url 2.5.4", + "url", ] [[package]] @@ -2682,12 +2698,6 @@ dependencies = [ "regex-automata 0.1.10", ] -[[package]] -name = "matches" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2532096657941c2fea9c289d370a250971c689d4f143798ff67113ec042024a5" - [[package]] name = "memchr" version = "2.7.4" @@ -2736,17 +2746,6 @@ dependencies = [ "adler2", ] -[[package]] -name = "minreq" -version = "2.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "763d142cdff44aaadd9268bebddb156ef6c65a0e13486bb81673cf2d8739f9b0" -dependencies = [ - "log", - "serde", - "serde_json", -] - [[package]] name = "mio" version = "0.8.11" @@ -2755,7 +2754,7 @@ checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c" dependencies = [ "libc", "log", - "wasi 0.11.0+wasi-snapshot-preview1", + "wasi", "windows-sys 0.48.0", ] @@ -2767,7 +2766,7 @@ checksum = "80e04d1dcff3aae0704555fe5fee3bcfaf3d1fdf8a7e521d5b9d2b42acb52cec" dependencies = [ "hermit-abi 0.3.9", "libc", - "wasi 0.11.0+wasi-snapshot-preview1", + "wasi", "windows-sys 0.52.0", ] @@ -2778,11 +2777,7 @@ dependencies = [ "acvm", "fm", "iter-extended", - "jsonrpc", - "jsonrpc-core", - "jsonrpc-core-client", - "jsonrpc-derive", - "jsonrpc-http-server", + "jsonrpsee", "noir_fuzzer", "noirc_abi", "noirc_driver", @@ -2790,10 +2785,12 @@ dependencies = [ 
"noirc_frontend", "noirc_printable_type", "proptest", - "rand 0.8.5", + "rand", "rayon", "serde", + "serde_json", "thiserror", + "tokio", "tracing", "walkdir", ] @@ -2850,12 +2847,11 @@ dependencies = [ "tempfile", "termcolor", "termion", - "test-binary", "test-case", "thiserror", "tokio", - "tokio-util 0.7.12", - "toml 0.7.8", + "tokio-util", + "toml", "tower", "tracing-appender", "tracing-subscriber", @@ -2869,7 +2865,7 @@ dependencies = [ "serde", "similar-asserts", "thiserror", - "toml 0.7.8", + "toml", ] [[package]] @@ -2886,19 +2882,8 @@ dependencies = [ "tempfile", "test-case", "thiserror", - "toml 0.7.8", - "url 2.5.4", -] - -[[package]] -name = "net2" -version = "0.2.39" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b13b648036a2339d06de780866fbdfda0dde886de7b3af2ddeba8b14f4ee34ac" -dependencies = [ - "cfg-if 0.1.10", - "libc", - "winapi", + "toml", + "url", ] [[package]] @@ -2918,7 +2903,7 @@ checksum = "8f3790c00a0150112de0f4cd161e3d7fc4b2d8a5542ffc35f099a2562aecb35c" dependencies = [ "bitflags 1.3.2", "cc", - "cfg-if 1.0.0", + "cfg-if", "libc", "memoffset", ] @@ -2931,7 +2916,7 @@ checksum = "f346ff70e7dbfd675fe90590b92d59ef2de15a8779ae305ebcbfd3f0caf59be4" dependencies = [ "autocfg", "bitflags 1.3.2", - "cfg-if 1.0.0", + "cfg-if", "libc", "memoffset", "pin-utils", @@ -2944,7 +2929,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "598beaf3cc6fdd9a5dfb1630c2800c7acd31df7aaf0f565796fba2b53ca1af1b" dependencies = [ "bitflags 1.3.2", - "cfg-if 1.0.0", + "cfg-if", "libc", ] @@ -2980,7 +2965,7 @@ dependencies = [ "noirc_abi", "noirc_artifacts", "proptest", - "rand 0.8.5", + "rand", ] [[package]] @@ -3002,7 +2987,7 @@ dependencies = [ "acvm", "async-lsp", "codespan-lsp", - "convert_case 0.6.0", + "convert_case", "fm", "fxhash", "lsp-types 0.94.1", @@ -3056,7 +3041,7 @@ dependencies = [ "build-data", "console_error_panic_hook", "fm", - "getrandom 0.2.15", + "getrandom", "gloo-utils", "js-sys", "nargo", @@ -3088,7 +3073,7 @@ dependencies = [ "strum", "strum_macros", "thiserror", - "toml 0.7.8", + "toml", ] [[package]] @@ -3098,7 +3083,7 @@ dependencies = [ "acvm", "build-data", "console_error_panic_hook", - "getrandom 0.2.15", + "getrandom", "gloo-utils", "iter-extended", "js-sys", @@ -3169,7 +3154,7 @@ version = "1.0.0-beta.1" dependencies = [ "acvm", "bn254_blackbox_solver", - "cfg-if 1.0.0", + "cfg-if", "chrono", "fxhash", "im", @@ -3198,7 +3183,7 @@ dependencies = [ "acvm", "base64 0.21.7", "bn254_blackbox_solver", - "cfg-if 1.0.0", + "cfg-if", "fm", "im", "iter-extended", @@ -3211,7 +3196,7 @@ dependencies = [ "proptest", "proptest-derive 0.5.0", "rangemap", - "rustc-hash", + "rustc-hash 1.1.0", "serde", "serde_json", "small-ord-set", @@ -3227,11 +3212,7 @@ name = "noirc_printable_type" version = "1.0.0-beta.1" dependencies = [ "acvm", - "iter-extended", - "jsonrpc", "serde", - "serde_json", - "thiserror", ] [[package]] @@ -3269,7 +3250,7 @@ dependencies = [ "file-id", "log", "notify", - "parking_lot 0.12.3", + "parking_lot", "walkdir", ] @@ -3328,16 +3309,6 @@ dependencies = [ "libm", ] -[[package]] -name = "num_cpus" -version = "1.16.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" -dependencies = [ - "hermit-abi 0.3.9", - "libc", -] - [[package]] name = "numtoa" version = "0.1.0" @@ -3365,6 +3336,12 @@ version = "11.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"b410bbe7e14ab526a0e86877eb47c6996a2bd7746f027ba551028c925390e4e9" +[[package]] +name = "openssl-probe" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" + [[package]] name = "overload" version = "0.1.1" @@ -3397,17 +3374,6 @@ dependencies = [ "group 0.12.1", ] -[[package]] -name = "parking_lot" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" -dependencies = [ - "instant", - "lock_api", - "parking_lot_core 0.8.6", -] - [[package]] name = "parking_lot" version = "0.12.3" @@ -3415,21 +3381,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" dependencies = [ "lock_api", - "parking_lot_core 0.9.10", -] - -[[package]] -name = "parking_lot_core" -version = "0.8.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60a2cfe6f0ad2bfc16aefa463b497d5c7a5ecd44a23efa72aa342d90177356dc" -dependencies = [ - "cfg-if 1.0.0", - "instant", - "libc", - "redox_syscall 0.2.16", - "smallvec", - "winapi", + "parking_lot_core", ] [[package]] @@ -3438,7 +3390,7 @@ version = "0.9.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "libc", "redox_syscall 0.5.7", "smallvec", @@ -3455,7 +3407,7 @@ dependencies = [ "ff 0.12.1", "group 0.12.1", "lazy_static", - "rand 0.8.5", + "rand", "static_assertions", "subtle", ] @@ -3470,7 +3422,7 @@ dependencies = [ "ff 0.13.0", "group 0.13.0", "lazy_static", - "rand 0.8.5", + "rand", "static_assertions", "subtle", ] @@ -3481,12 +3433,6 @@ version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" -[[package]] -name = "percent-encoding" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31010dd2e1ac33d5b46a5b413495239882813e0369f8ed8a5e266f173602f831" - [[package]] name = "percent-encoding" version = "2.3.1" @@ -3521,7 +3467,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5d5285893bb5eb82e6aaf5d59ee909a06a16737a8970984dd7746ba9283498d6" dependencies = [ "phf_shared", - "rand 0.8.5", + "rand", ] [[package]] @@ -3547,6 +3493,26 @@ dependencies = [ "siphasher", ] +[[package]] +name = "pin-project" +version = "1.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be57f64e946e500c8ee36ef6331845d40a93055567ec57e8fae13efd33759b95" +dependencies = [ + "pin-project-internal", +] + +[[package]] +name = "pin-project-internal" +version = "1.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c0f5fad0874fc7abcd4d750e76917eaebbecaa2c20bde22e1dbeeba8beb758c" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.87", +] + [[package]] name = "pin-project-lite" version = "0.2.15" @@ -3617,7 +3583,7 @@ checksum = "ebbe2f8898beba44815fdc9e5a4ae9c929e21c5dc29b0c774a15555f7f58d6d0" dependencies = [ "aligned-vec", "backtrace", - "cfg-if 1.0.0", + "cfg-if", "criterion", "findshlibs", "inferno", @@ -3625,7 +3591,7 @@ dependencies = [ "log", "nix 0.26.4", "once_cell", - "parking_lot 0.12.3", + "parking_lot", "smallvec", "symbolic-demangle", "tempfile", @@ -3698,11 +3664,11 
@@ dependencies = [ [[package]] name = "proc-macro-crate" -version = "0.1.5" +version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d6ea3c4595b96363c13943497db34af4460fb474a95c43f4446ad341b8c9785" +checksum = "8ecf48c7ca261d60b74ab1a7b20da18bede46776b2e55535cb958eb595c5fa7b" dependencies = [ - "toml 0.5.11", + "toml_edit 0.22.22", ] [[package]] @@ -3731,8 +3697,8 @@ dependencies = [ "bitflags 2.6.0", "lazy_static", "num-traits", - "rand 0.8.5", - "rand_chacha 0.3.1", + "rand", + "rand_chacha", "rand_xorshift", "regex-syntax 0.8.5", "rusty-fork", @@ -3802,19 +3768,6 @@ dependencies = [ "nibble_vec", ] -[[package]] -name = "rand" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" -dependencies = [ - "getrandom 0.1.16", - "libc", - "rand_chacha 0.2.2", - "rand_core 0.5.1", - "rand_hc", -] - [[package]] name = "rand" version = "0.8.5" @@ -3822,18 +3775,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" dependencies = [ "libc", - "rand_chacha 0.3.1", - "rand_core 0.6.4", -] - -[[package]] -name = "rand_chacha" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402" -dependencies = [ - "ppv-lite86", - "rand_core 0.5.1", + "rand_chacha", + "rand_core", ] [[package]] @@ -3843,16 +3786,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" dependencies = [ "ppv-lite86", - "rand_core 0.6.4", -] - -[[package]] -name = "rand_core" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" -dependencies = [ - "getrandom 0.1.16", + "rand_core", ] [[package]] @@ -3861,16 +3795,7 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom 0.2.15", -] - -[[package]] -name = "rand_hc" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" -dependencies = [ - "rand_core 0.5.1", + "getrandom", ] [[package]] @@ -3879,7 +3804,7 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d25bf25ec5ae4a3f1b92f929810509a2f53d7dca2f50b794ff57e3face536c8f" dependencies = [ - "rand_core 0.6.4", + "rand_core", ] [[package]] @@ -3888,7 +3813,7 @@ version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6f97cdb2a36ed4183de61b2f824cc45c9f1037f28afe0a322e9fff4c108b5aaa" dependencies = [ - "rand_core 0.6.4", + "rand_core", ] [[package]] @@ -3917,15 +3842,6 @@ dependencies = [ "crossbeam-utils", ] -[[package]] -name = "redox_syscall" -version = "0.2.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" -dependencies = [ - "bitflags 1.3.2", -] - [[package]] name = "redox_syscall" version = "0.4.1" @@ -3956,7 +3872,7 @@ version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"ba009ff324d1fc1b900bd1fdb31564febe58a8ccc8a6fdbb93b543d33b13ca43" dependencies = [ - "getrandom 0.2.15", + "getrandom", "libredox 0.1.3", "thiserror", ] @@ -4038,6 +3954,27 @@ dependencies = [ "bytemuck", ] +[[package]] +name = "ring" +version = "0.17.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" +dependencies = [ + "cc", + "cfg-if", + "getrandom", + "libc", + "spin", + "untrusted", + "windows-sys 0.52.0", +] + +[[package]] +name = "route-recognizer" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "afab94fb28594581f62d981211a9a4d53cc8130bbcbbb89a0440d9b8e81a7746" + [[package]] name = "rust-embed" version = "6.8.1" @@ -4084,6 +4021,12 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" +[[package]] +name = "rustc-hash" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7fb8039b3032c191086b10f11f319a6e99e1e82889c5cc6046f515c9db1d497" + [[package]] name = "rustc_version" version = "0.4.1" @@ -4106,6 +4049,87 @@ dependencies = [ "windows-sys 0.52.0", ] +[[package]] +name = "rustls" +version = "0.23.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5065c3f250cbd332cd894be57c40fa52387247659b14a2d6041d121547903b1b" +dependencies = [ + "log", + "once_cell", + "ring", + "rustls-pki-types", + "rustls-webpki", + "subtle", + "zeroize", +] + +[[package]] +name = "rustls-native-certs" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5bfb394eeed242e909609f56089eecfe5fda225042e8b171791b9c95f5931e5" +dependencies = [ + "openssl-probe", + "rustls-pemfile", + "rustls-pki-types", + "schannel", + "security-framework", +] + +[[package]] +name = "rustls-pemfile" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dce314e5fee3f39953d46bb63bb8a46d40c2f8fb7cc5a3b6cab2bde9721d6e50" +dependencies = [ + "rustls-pki-types", +] + +[[package]] +name = "rustls-pki-types" +version = "1.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2bf47e6ff922db3825eb750c4e2ff784c6ff8fb9e13046ef6a1d1c5401b0b37" + +[[package]] +name = "rustls-platform-verifier" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "afbb878bdfdf63a336a5e63561b1835e7a8c91524f51621db870169eac84b490" +dependencies = [ + "core-foundation", + "core-foundation-sys", + "jni", + "log", + "once_cell", + "rustls", + "rustls-native-certs", + "rustls-platform-verifier-android", + "rustls-webpki", + "security-framework", + "security-framework-sys", + "webpki-roots", + "winapi", +] + +[[package]] +name = "rustls-platform-verifier-android" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f87165f0995f63a9fbeea62b64d10b4d9d8e78ec6d7d51fb2125fda7bb36788f" + +[[package]] +name = "rustls-webpki" +version = "0.102.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64ca1bc8749bd4cf37b5ce386cc146580777b4e8572c7b97baf22c83f444bee9" +dependencies = [ + "ring", + "rustls-pki-types", + "untrusted", +] + [[package]] name = "rustversion" version = "1.0.18" @@ -4131,7 +4155,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"db7826789c0e25614b03e5a54a0717a86f9ff6e6e5247f92b369472869320039" dependencies = [ "bitflags 1.3.2", - "cfg-if 1.0.0", + "cfg-if", "clipboard-win", "dirs-next", "fd-lock", @@ -4226,6 +4250,15 @@ dependencies = [ "winapi-util", ] +[[package]] +name = "schannel" +version = "0.1.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f29ebaa345f945cec9fbbc532eb307f0fdad8161f281b6369539c8d84876b3d" +dependencies = [ + "windows-sys 0.59.0", +] + [[package]] name = "scoped-tls" version = "1.0.1" @@ -4252,14 +4285,35 @@ dependencies = [ "zeroize", ] +[[package]] +name = "security-framework" +version = "2.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" +dependencies = [ + "bitflags 2.6.0", + "core-foundation", + "core-foundation-sys", + "libc", + "num-bigint", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa39c7303dc58b5543c94d22c1766b0d31f2ee58306363ea622b10bbc075eaa2" +dependencies = [ + "core-foundation-sys", + "libc", +] + [[package]] name = "semver" version = "1.0.23" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b" -dependencies = [ - "serde", -] [[package]] name = "serde" @@ -4377,13 +4431,24 @@ dependencies = [ "syn 2.0.87", ] +[[package]] +name = "sha1" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + [[package]] name = "sha2" version = "0.10.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "cpufeatures", "digest", ] @@ -4426,7 +4491,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "74233d3b3b2f6d4b006dc19dee745e73e2a6bfb6f93607cd3b02bd5b00797d7c" dependencies = [ "digest", - "rand_core 0.6.4", + "rand_core", ] [[package]] @@ -4517,6 +4582,22 @@ dependencies = [ "windows-sys 0.52.0", ] +[[package]] +name = "soketto" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e859df029d160cb88608f5d7df7fb4753fd20fdfb4de5644f3d8b8440841721" +dependencies = [ + "base64 0.22.1", + "bytes", + "futures", + "http", + "httparse", + "log", + "rand", + "sha1", +] + [[package]] name = "spin" version = "0.9.8" @@ -4656,7 +4737,7 @@ version = "3.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "28cce251fcbc87fac86a866eeb0d6c2d536fc16d06f184bb61aeae11aa4cee0c" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "fastrand", "once_cell", "rustix", @@ -4701,19 +4782,6 @@ version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3369f5ac52d5eb6ab48c6b4ffdc8efbcad6b89c765749064ba298f2c68a16a76" -[[package]] -name = "test-binary" -version = "3.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c7cb854285c40b61c0fade358bf63a2bb1226688a1ea11432ea65349209e6e3" -dependencies = [ - "camino", - "cargo_metadata", - "once_cell", - "paste", - "thiserror", -] - [[package]] name = "test-case" version = "3.3.1" @@ -4729,7 +4797,7 @@ version = "3.3.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "adcb7fd841cd518e279be3d5a3eb0636409487998a4aff22f3de87b81e88384f" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "proc-macro2", "quote", "syn 2.0.87", @@ -4794,7 +4862,7 @@ version = "1.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "once_cell", ] @@ -4849,26 +4917,11 @@ dependencies = [ "serde_json", ] -[[package]] -name = "tinyvec" -version = "1.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "445e881f4f6d382d5f27c034e25eb92edd7c784ceab92a0937db7f2e9471b938" -dependencies = [ - "tinyvec_macros", -] - -[[package]] -name = "tinyvec_macros" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" - [[package]] name = "tokio" -version = "1.41.1" +version = "1.42.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22cfb5bee7a6a52939ca9224d6ac897bb669134078daa8735560897f69de4d33" +checksum = "5cec9b21b0450273377fc97bd4c33a8acffc8c996c987a7c5b319a0083707551" dependencies = [ "backtrace", "bytes", @@ -4892,28 +4945,25 @@ dependencies = [ ] [[package]] -name = "tokio-stream" -version = "0.1.16" +name = "tokio-rustls" +version = "0.26.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f4e6ce100d0eb49a2734f8c0812bcd324cf357d21810932c5df6b96ef2b86f1" +checksum = "5f6d0975eaace0cf0fcadee4e4aaa5da15b5c079146f2cffb67c113be122bf37" dependencies = [ - "futures-core", - "pin-project-lite", + "rustls", "tokio", ] [[package]] -name = "tokio-util" -version = "0.6.10" +name = "tokio-stream" +version = "0.1.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36943ee01a6d67977dd3f84a5a1d2efeb4ada3a1ae771cadfaa535d9d9fc6507" +checksum = "eca58d7bba4a75707817a2c44174253f9236b2d5fbd055602e9d5c07c139a047" dependencies = [ - "bytes", "futures-core", - "futures-sink", - "log", "pin-project-lite", "tokio", + "tokio-util", ] [[package]] @@ -4930,15 +4980,6 @@ dependencies = [ "tokio", ] -[[package]] -name = "toml" -version = "0.5.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4f7f0dd8d50a853a531c426359045b1998f04219d88799810762cd4ad314234" -dependencies = [ - "serde", -] - [[package]] name = "toml" version = "0.7.8" @@ -4948,7 +4989,7 @@ dependencies = [ "serde", "serde_spanned", "toml_datetime", - "toml_edit", + "toml_edit 0.19.15", ] [[package]] @@ -4970,7 +5011,18 @@ dependencies = [ "serde", "serde_spanned", "toml_datetime", - "winnow", + "winnow 0.5.40", +] + +[[package]] +name = "toml_edit" +version = "0.22.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ae48d6208a266e853d946088ed816055e556cc6028c5e8e2b84d9fa5dd7c7f5" +dependencies = [ + "indexmap 2.6.0", + "toml_datetime", + "winnow 0.6.20", ] [[package]] @@ -4979,6 +5031,10 @@ version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" dependencies = [ + "futures-core", + "futures-util", + "pin-project", + "pin-project-lite", "tower-layer", "tower-service", "tracing", @@ -5141,18 +5197,6 @@ version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94" 
-[[package]] -name = "unicase" -version = "2.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e51b68083f157f853b6379db119d1c1be0e6e4dec98101079dec41f6f5cf6df" - -[[package]] -name = "unicode-bidi" -version = "0.3.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ab17db44d7388991a428b2ee655ce0c212e862eff1768a455c58f9aad6e7893" - [[package]] name = "unicode-ident" version = "1.0.13" @@ -5165,15 +5209,6 @@ version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3b09c83c3c29d37506a3e260c08c03743a6bb66a9cd432c6934ab501a190571f" -[[package]] -name = "unicode-normalization" -version = "0.1.24" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5033c97c4262335cded6d6fc3e5c18ab755e1a3dc96376350f3d8e9f009ad956" -dependencies = [ - "tinyvec", -] - [[package]] name = "unicode-segmentation" version = "1.12.0" @@ -5193,15 +5228,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" [[package]] -name = "url" -version = "1.7.2" +name = "untrusted" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd4e7c0d531266369519a4aa4f399d748bd37043b00bde1e4ff1f60a120b355a" -dependencies = [ - "idna 0.1.5", - "matches", - "percent-encoding 1.0.1", -] +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" [[package]] name = "url" @@ -5210,8 +5240,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" dependencies = [ "form_urlencoded", - "idna 1.0.3", - "percent-encoding 2.3.1", + "idna", + "percent-encoding", "serde", ] @@ -5289,12 +5319,6 @@ dependencies = [ "try-lock", ] -[[package]] -name = "wasi" -version = "0.9.0+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" - [[package]] name = "wasi" version = "0.11.0+wasi-snapshot-preview1" @@ -5307,7 +5331,7 @@ version = "0.2.86" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5bba0e8cb82ba49ff4e229459ff22a191bbe9a1cb3a341610c9c33efc27ddf73" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "serde", "serde_json", "wasm-bindgen-macro", @@ -5334,7 +5358,7 @@ version = "0.4.36" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2d1985d03709c53167ce907ff394f5316aa22cb4e12761295c5dc57dacb6297e" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "js-sys", "wasm-bindgen", "web-sys", @@ -5403,6 +5427,15 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "webpki-roots" +version = "0.26.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d642ff16b7e79272ae451b7322067cdc17cadf68c23264be9d94a32319efe7e" +dependencies = [ + "rustls-pki-types", +] + [[package]] name = "winapi" version = "0.3.9" @@ -5600,6 +5633,15 @@ dependencies = [ "memchr", ] +[[package]] +name = "winnow" +version = "0.6.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "36c1fec1a2bb5866f07c25f68c26e565c4c200aebb96d7e55710c19d3e8ac49b" +dependencies = [ + "memchr", +] + [[package]] name = "write16" version = "1.0.0" @@ -5741,7 +5783,7 @@ dependencies = [ "blake2", "bls12_381", "byteorder", - "cfg-if 1.0.0", + "cfg-if", "group 0.12.1", "group 0.13.0", "halo2", @@ -5749,7 +5791,7 @@ dependencies = 
[ "jubjub", "lazy_static", "pasta_curves 0.5.1", - "rand 0.8.5", + "rand", "serde", "sha2", "sha3", diff --git a/noir/noir-repo/Cargo.toml b/noir/noir-repo/Cargo.toml index 5e31693b09b..53a28b8002d 100644 --- a/noir/noir-repo/Cargo.toml +++ b/noir/noir-repo/Cargo.toml @@ -148,7 +148,7 @@ num-traits = "0.2" similar-asserts = "1.5.0" tempfile = "3.6.0" test-case = "3.3.1" -jsonrpc = { version = "0.16.0", features = ["minreq_http"] } +jsonrpsee = { version = "0.24.7", features = ["client-core"] } flate2 = "1.0.24" color-eyre = "0.6.2" rand = "0.8.5" @@ -159,7 +159,7 @@ sha2 = { version = "0.10.6", features = ["compress"] } sha3 = "0.10.6" strum = "0.24" strum_macros = "0.24" - +tokio = "1.42" im = { version = "15.1", features = ["serde"] } tracing = "0.1.40" tracing-web = "0.1.3" diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction.rs index 23d8b425349..0c8d8affeb1 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction.rs @@ -22,7 +22,7 @@ use super::{ value::{Value, ValueId}, }; -mod binary; +pub(crate) mod binary; mod call; mod cast; mod constrain; diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/binary.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/binary.rs index 81f2f3b1e01..ce65343c7ef 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/binary.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/binary.rs @@ -294,7 +294,7 @@ impl Binary { } /// Evaluate a binary operation with constant arguments. -fn eval_constant_binary_op( +pub(crate) fn eval_constant_binary_op( lhs: FieldElement, rhs: FieldElement, operator: BinaryOp, diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs index aec172dddcd..cbaacf1d70e 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs @@ -818,13 +818,10 @@ mod test { use acvm::acir::AcirField; use crate::ssa::{ - function_builder::FunctionBuilder, ir::{ dfg::DataFlowGraph, function::Function, - instruction::{BinaryOp, Instruction, TerminatorInstruction}, - map::Id, - types::Type, + instruction::{Instruction, TerminatorInstruction}, value::{Value, ValueId}, }, opt::assert_normalized_ssa_equals, @@ -1337,104 +1334,50 @@ mod test { // Regression test for #1826. Ensures the `else` branch does not see the stores of the // `then` branch. 
// - // fn main f1 { - // b0(): - // v0 = allocate - // store Field 0 at v0 - // v2 = allocate - // store Field 2 at v2 - // v4 = load v2 - // v5 = lt v4, Field 2 - // jmpif v5 then: b1, else: b2 - // b1(): - // v24 = load v0 - // v25 = load v2 - // v26 = mul v25, Field 10 - // v27 = add v24, v26 - // store v27 at v0 - // v28 = load v2 - // v29 = add v28, Field 1 - // store v29 at v2 - // jmp b5() - // b5(): - // v14 = load v0 - // return v14 - // b2(): - // v6 = load v2 - // v8 = lt v6, Field 4 - // jmpif v8 then: b3, else: b4 - // b3(): - // v16 = load v0 - // v17 = load v2 - // v19 = mul v17, Field 100 - // v20 = add v16, v19 - // store v20 at v0 - // v21 = load v2 - // v23 = add v21, Field 1 - // store v23 at v2 - // jmp b4() - // b4(): - // jmp b5() - // } - let main_id = Id::test_new(0); - let mut builder = FunctionBuilder::new("main".into(), main_id); - - let b1 = builder.insert_block(); - let b2 = builder.insert_block(); - let b3 = builder.insert_block(); - let b4 = builder.insert_block(); - let b5 = builder.insert_block(); - - let zero = builder.field_constant(0u128); - let one = builder.field_constant(1u128); - let two = builder.field_constant(2u128); - let four = builder.field_constant(4u128); - let ten = builder.field_constant(10u128); - let one_hundred = builder.field_constant(100u128); - - let v0 = builder.insert_allocate(Type::field()); - builder.insert_store(v0, zero); - let v2 = builder.insert_allocate(Type::field()); - builder.insert_store(v2, two); - let v4 = builder.insert_load(v2, Type::field()); - let v5 = builder.insert_binary(v4, BinaryOp::Lt, two); - builder.terminate_with_jmpif(v5, b1, b2); - - builder.switch_to_block(b1); - let v24 = builder.insert_load(v0, Type::field()); - let v25 = builder.insert_load(v2, Type::field()); - let v26 = builder.insert_binary(v25, BinaryOp::Mul, ten); - let v27 = builder.insert_binary(v24, BinaryOp::Add, v26); - builder.insert_store(v0, v27); - let v28 = builder.insert_load(v2, Type::field()); - let v29 = builder.insert_binary(v28, BinaryOp::Add, one); - builder.insert_store(v2, v29); - builder.terminate_with_jmp(b5, vec![]); - - builder.switch_to_block(b5); - let v14 = builder.insert_load(v0, Type::field()); - builder.terminate_with_return(vec![v14]); - - builder.switch_to_block(b2); - let v6 = builder.insert_load(v2, Type::field()); - let v8 = builder.insert_binary(v6, BinaryOp::Lt, four); - builder.terminate_with_jmpif(v8, b3, b4); - - builder.switch_to_block(b3); - let v16 = builder.insert_load(v0, Type::field()); - let v17 = builder.insert_load(v2, Type::field()); - let v19 = builder.insert_binary(v17, BinaryOp::Mul, one_hundred); - let v20 = builder.insert_binary(v16, BinaryOp::Add, v19); - builder.insert_store(v0, v20); - let v21 = builder.insert_load(v2, Type::field()); - let v23 = builder.insert_binary(v21, BinaryOp::Add, one); - builder.insert_store(v2, v23); - builder.terminate_with_jmp(b4, vec![]); - - builder.switch_to_block(b4); - builder.terminate_with_jmp(b5, vec![]); - - let ssa = builder.finish().flatten_cfg().mem2reg().fold_constants(); + let src = " + acir(inline) fn main f0 { + b0(): + v0 = allocate -> &mut Field + store Field 0 at v0 + v2 = allocate -> &mut Field + store Field 2 at v2 + v4 = load v2 -> Field + v5 = lt v4, Field 2 + jmpif v5 then: b4, else: b1 + b1(): + v6 = load v2 -> Field + v8 = lt v6, Field 4 + jmpif v8 then: b2, else: b3 + b2(): + v9 = load v0 -> Field + v10 = load v2 -> Field + v12 = mul v10, Field 100 + v13 = add v9, v12 + store v13 at v0 + v14 = load v2 -> Field + v16 = add v14, Field 1 
+ store v16 at v2 + jmp b3() + b3(): + jmp b5() + b4(): + v17 = load v0 -> Field + v18 = load v2 -> Field + v20 = mul v18, Field 10 + v21 = add v17, v20 + store v21 at v0 + v22 = load v2 -> Field + v23 = add v22, Field 1 + store v23 at v2 + jmp b5() + b5(): + v24 = load v0 -> Field + return v24 + }"; + + let ssa = Ssa::from_str(src).unwrap(); + + let ssa = ssa.flatten_cfg().mem2reg().fold_constants(); let main = ssa.main(); @@ -1451,6 +1394,18 @@ mod test { } _ => unreachable!("Should have terminator instruction"), } + + let expected = " + acir(inline) fn main f0 { + b0(): + v0 = allocate -> &mut Field + v1 = allocate -> &mut Field + enable_side_effects u1 1 + return Field 200 + } + "; + + assert_normalized_ssa_equals(ssa, expected); } #[test] diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/loop_invariant.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/loop_invariant.rs index c188ed1f80f..0a3c18c1b1e 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/loop_invariant.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/loop_invariant.rs @@ -15,7 +15,7 @@ use crate::ssa::{ basic_block::BasicBlockId, function::Function, function_inserter::FunctionInserter, - instruction::{Instruction, InstructionId}, + instruction::{binary::eval_constant_binary_op, BinaryOp, Instruction, InstructionId}, types::Type, value::ValueId, }, @@ -207,6 +207,7 @@ impl<'f> LoopInvariantContext<'f> { let can_be_deduplicated = instruction.can_be_deduplicated(self.inserter.function, false) || matches!(instruction, Instruction::MakeArray { .. }) + || matches!(instruction, Instruction::Binary(_)) || self.can_be_deduplicated_from_upper_bound(&instruction); is_loop_invariant && can_be_deduplicated @@ -231,6 +232,31 @@ impl<'f> LoopInvariantContext<'f> { false } } + Instruction::Binary(binary) => { + if !matches!(binary.operator, BinaryOp::Add | BinaryOp::Mul) { + return false; + } + + let operand_type = + self.inserter.function.dfg.type_of_value(binary.lhs).unwrap_numeric(); + + let lhs_const = + self.inserter.function.dfg.get_numeric_constant_with_type(binary.lhs); + let rhs_const = + self.inserter.function.dfg.get_numeric_constant_with_type(binary.rhs); + let (lhs, rhs) = match ( + lhs_const, + rhs_const, + self.outer_induction_variables.get(&binary.lhs), + self.outer_induction_variables.get(&binary.rhs), + ) { + (Some((lhs, _)), None, None, Some(upper_bound)) => (lhs, *upper_bound), + (None, Some((rhs, _)), Some(upper_bound), None) => (*upper_bound, rhs), + _ => return false, + }; + + eval_constant_binary_op(lhs, rhs, binary.operator, operand_type).is_some() + } _ => false, } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs index e7953aab5a4..eff48ce22a6 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs @@ -500,6 +500,9 @@ impl<'a> ModCollector<'a> { .def_interner .push_function_definition(func_id, modifiers, trait_id.0, location); + let referenced = ReferenceId::Function(func_id); + context.def_interner.add_definition_location(referenced, Some(trait_id.0)); + if !trait_item.doc_comments.is_empty() { context.def_interner.set_doc_comments( ReferenceId::Function(func_id), @@ -1222,7 +1225,11 @@ pub(crate) fn collect_trait_impl_items( for item in std::mem::take(&mut trait_impl.items) { match item.item.kind { - TraitImplItemKind::Function(impl_method) => { + 
TraitImplItemKind::Function(mut impl_method) => { + // Regardless of what visibility was on the source code, treat it as public + // (a warning is produced during parsing for this) + impl_method.def.visibility = ItemVisibility::Public; + let func_id = interner.push_empty_fn(); let location = Location::new(impl_method.span(), file_id); interner.push_function(func_id, &impl_method.def, module, location); diff --git a/noir/noir-repo/compiler/noirc_frontend/src/parser/errors.rs b/noir/noir-repo/compiler/noirc_frontend/src/parser/errors.rs index 7cb8731593b..32cacf5e4ed 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/parser/errors.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/parser/errors.rs @@ -71,6 +71,8 @@ pub enum ParserErrorReason { PatternInTraitFunctionParameter, #[error("Patterns aren't allowed in a trait impl's associated constants")] PatternInAssociatedConstant, + #[error("Visibility is ignored on a trait method")] + TraitVisibilityIgnored, #[error("Visibility is ignored on a trait impl method")] TraitImplVisibilityIgnored, #[error("comptime keyword is deprecated")] @@ -183,11 +185,8 @@ impl ParserError { } pub fn is_warning(&self) -> bool { - matches!( - self.reason(), - Some(ParserErrorReason::ExperimentalFeature(_)) - | Some(ParserErrorReason::MissingSafetyComment) - ) + let diagnostic: Diagnostic = self.into(); + diagnostic.is_warning() } } @@ -264,6 +263,9 @@ impl<'a> From<&'a ParserError> for Diagnostic { ParserErrorReason::ExperimentalFeature(_) => { Diagnostic::simple_warning(reason.to_string(), "".into(), error.span) } + ParserErrorReason::TraitVisibilityIgnored => { + Diagnostic::simple_warning(reason.to_string(), "".into(), error.span) + } ParserErrorReason::TraitImplVisibilityIgnored => { Diagnostic::simple_warning(reason.to_string(), "".into(), error.span) } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/impls.rs b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/impls.rs index 8e6b3bae0e9..278c20e1e27 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/impls.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/impls.rs @@ -245,7 +245,7 @@ impl<'a> Parser<'a> { let noir_function = self.parse_function( attributes, - ItemVisibility::Public, + modifiers.visibility, modifiers.comptime.is_some(), modifiers.unconstrained.is_some(), true, // allow_self @@ -482,7 +482,7 @@ mod tests { panic!("Expected function"); }; assert_eq!(function.def.name.to_string(), "foo"); - assert_eq!(function.def.visibility, ItemVisibility::Public); + assert_eq!(function.def.visibility, ItemVisibility::Private); } #[test] diff --git a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/traits.rs b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/traits.rs index e03b629e9ea..6f6a9bab960 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/traits.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/traits.rs @@ -225,6 +225,10 @@ impl<'a> Parser<'a> { false, // allow mut ); + if modifiers.visibility != ItemVisibility::Private { + self.push_error(ParserErrorReason::TraitVisibilityIgnored, modifiers.visibility_span); + } + if !self.eat_keyword(Keyword::Fn) { self.modifiers_not_followed_by_an_item(modifiers); return None; @@ -285,7 +289,11 @@ mod tests { use crate::{ ast::{NoirTrait, NoirTraitImpl, TraitItem}, parser::{ - parser::{parse_program, tests::expect_no_errors, ParserErrorReason}, + parser::{ + parse_program, + tests::{expect_no_errors, get_single_error, 
get_source_with_error_span}, + ParserErrorReason, + }, ItemKind, }, }; @@ -513,7 +521,19 @@ mod tests { } #[test] - fn parse_trait_inheirtance() { + fn parse_trait_function_with_visibility() { + let src = " + trait Foo { pub fn foo(); } + ^^^ + "; + let (src, span) = get_source_with_error_span(src); + let (_module, errors) = parse_program(&src); + let error = get_single_error(&errors, span); + assert!(error.to_string().contains("Visibility is ignored on a trait method")); + } + + #[test] + fn parse_trait_inheritance() { let src = "trait Foo: Bar + Baz {}"; let noir_trait = parse_trait_no_errors(src); assert_eq!(noir_trait.bounds.len(), 2); diff --git a/noir/noir-repo/compiler/noirc_printable_type/Cargo.toml b/noir/noir-repo/compiler/noirc_printable_type/Cargo.toml index 8d0574aad64..a1eae750b1f 100644 --- a/noir/noir-repo/compiler/noirc_printable_type/Cargo.toml +++ b/noir/noir-repo/compiler/noirc_printable_type/Cargo.toml @@ -13,10 +13,6 @@ workspace = true [dependencies] acvm.workspace = true -iter-extended.workspace = true serde.workspace = true -serde_json.workspace = true -thiserror.workspace = true -jsonrpc.workspace = true [dev-dependencies] diff --git a/noir/noir-repo/compiler/noirc_printable_type/src/lib.rs b/noir/noir-repo/compiler/noirc_printable_type/src/lib.rs index 6ae187da27f..eb74d2470fb 100644 --- a/noir/noir-repo/compiler/noirc_printable_type/src/lib.rs +++ b/noir/noir-repo/compiler/noirc_printable_type/src/lib.rs @@ -1,10 +1,13 @@ +#![forbid(unsafe_code)] +#![warn(unused_crate_dependencies, unused_extern_crates)] +#![warn(unreachable_pub)] +#![warn(clippy::semicolon_if_nothing_returned)] + use std::{collections::BTreeMap, str}; -use acvm::{acir::AcirField, brillig_vm::brillig::ForeignCallParam}; -use iter_extended::vecmap; +use acvm::AcirField; use serde::{Deserialize, Serialize}; -use thiserror::Error; #[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)] #[serde(tag = "kind", rename_all = "lowercase")] @@ -66,96 +69,23 @@ pub enum PrintableValueDisplay { Plain(PrintableValue, PrintableType), FmtString(String, Vec<(PrintableValue, PrintableType)>), } - -#[derive(Debug, Error)] -pub enum ForeignCallError { - #[error("No handler could be found for foreign call `{0}`")] - NoHandler(String), - - #[error("Foreign call inputs needed for execution are missing")] - MissingForeignCallInputs, - - #[error("Could not parse PrintableType argument. {0}")] - ParsingError(#[from] serde_json::Error), - - #[error("Failed calling external resolver. {0}")] - ExternalResolverError(#[from] jsonrpc::Error), - - #[error("Assert message resolved after an unsatisified constrain. 
{0}")] - ResolvedAssertMessage(String), -} - -impl TryFrom<&[ForeignCallParam]> for PrintableValueDisplay { - type Error = ForeignCallError; - - fn try_from(foreign_call_inputs: &[ForeignCallParam]) -> Result { - let (is_fmt_str, foreign_call_inputs) = - foreign_call_inputs.split_last().ok_or(ForeignCallError::MissingForeignCallInputs)?; - - if is_fmt_str.unwrap_field().is_one() { - convert_fmt_string_inputs(foreign_call_inputs) - } else { - convert_string_inputs(foreign_call_inputs) +impl std::fmt::Display for PrintableValueDisplay { + fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::Plain(value, typ) => { + let output_string = to_string(value, typ).ok_or(std::fmt::Error)?; + write!(fmt, "{output_string}") + } + Self::FmtString(template, values) => { + let mut values_iter = values.iter(); + write_template_replacing_interpolations(template, fmt, || { + values_iter.next().and_then(|(value, typ)| to_string(value, typ)) + }) + } } } } -fn convert_string_inputs( - foreign_call_inputs: &[ForeignCallParam], -) -> Result, ForeignCallError> { - // Fetch the PrintableType from the foreign call input - // The remaining input values should hold what is to be printed - let (printable_type_as_values, input_values) = - foreign_call_inputs.split_last().ok_or(ForeignCallError::MissingForeignCallInputs)?; - let printable_type = fetch_printable_type(printable_type_as_values)?; - - // We must use a flat map here as each value in a struct will be in a separate input value - let mut input_values_as_fields = input_values.iter().flat_map(|param| param.fields()); - - let value = decode_value(&mut input_values_as_fields, &printable_type); - - Ok(PrintableValueDisplay::Plain(value, printable_type)) -} - -fn convert_fmt_string_inputs( - foreign_call_inputs: &[ForeignCallParam], -) -> Result, ForeignCallError> { - let (message, input_and_printable_types) = - foreign_call_inputs.split_first().ok_or(ForeignCallError::MissingForeignCallInputs)?; - - let message_as_fields = message.fields(); - let message_as_string = decode_string_value(&message_as_fields); - - let (num_values, input_and_printable_types) = input_and_printable_types - .split_first() - .ok_or(ForeignCallError::MissingForeignCallInputs)?; - - let mut output = Vec::new(); - let num_values = num_values.unwrap_field().to_u128() as usize; - - let types_start_at = input_and_printable_types.len() - num_values; - let mut input_iter = - input_and_printable_types[0..types_start_at].iter().flat_map(|param| param.fields()); - for printable_type in input_and_printable_types.iter().skip(types_start_at) { - let printable_type = fetch_printable_type(printable_type)?; - let value = decode_value(&mut input_iter, &printable_type); - - output.push((value, printable_type)); - } - - Ok(PrintableValueDisplay::FmtString(message_as_string, output)) -} - -fn fetch_printable_type( - printable_type: &ForeignCallParam, -) -> Result { - let printable_type_as_fields = printable_type.fields(); - let printable_type_as_string = decode_string_value(&printable_type_as_fields); - let printable_type: PrintableType = serde_json::from_str(&printable_type_as_string)?; - - Ok(printable_type) -} - fn to_string(value: &PrintableValue, typ: &PrintableType) -> Option { let mut output = String::new(); match (value, typ) { @@ -193,7 +123,7 @@ fn to_string(value: &PrintableValue, typ: &PrintableType) -> Op (PrintableValue::Vec { array_elements, is_slice }, PrintableType::Array { typ, .. 
}) | (PrintableValue::Vec { array_elements, is_slice }, PrintableType::Slice { typ }) => { if *is_slice { - output.push('&') + output.push('&'); } output.push('['); let mut values = array_elements.iter().peekable(); @@ -253,23 +183,6 @@ fn to_string(value: &PrintableValue, typ: &PrintableType) -> Op Some(output) } -impl std::fmt::Display for PrintableValueDisplay { - fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - Self::Plain(value, typ) => { - let output_string = to_string(value, typ).ok_or(std::fmt::Error)?; - write!(fmt, "{output_string}") - } - Self::FmtString(template, values) => { - let mut values_iter = values.iter(); - write_template_replacing_interpolations(template, fmt, || { - values_iter.next().and_then(|(value, typ)| to_string(value, typ)) - }) - } - } - } -} - fn write_template_replacing_interpolations( template: &str, fmt: &mut std::fmt::Formatter<'_>, @@ -346,94 +259,11 @@ fn format_field_string(field: F) -> String { "0x".to_owned() + &trimmed_field } -/// Assumes that `field_iterator` contains enough field elements in order to decode the [PrintableType] -pub fn decode_value( - field_iterator: &mut impl Iterator, - typ: &PrintableType, -) -> PrintableValue { - match typ { - PrintableType::Field - | PrintableType::SignedInteger { .. } - | PrintableType::UnsignedInteger { .. } - | PrintableType::Boolean => { - let field_element = field_iterator.next().unwrap(); - - PrintableValue::Field(field_element) - } - PrintableType::Array { length, typ } => { - let length = *length as usize; - let mut array_elements = Vec::with_capacity(length); - for _ in 0..length { - array_elements.push(decode_value(field_iterator, typ)); - } - - PrintableValue::Vec { array_elements, is_slice: false } - } - PrintableType::Slice { typ } => { - let length = field_iterator - .next() - .expect("not enough data to decode variable array length") - .to_u128() as usize; - let mut array_elements = Vec::with_capacity(length); - for _ in 0..length { - array_elements.push(decode_value(field_iterator, typ)); - } - - PrintableValue::Vec { array_elements, is_slice: true } - } - PrintableType::Tuple { types } => PrintableValue::Vec { - array_elements: vecmap(types, |typ| decode_value(field_iterator, typ)), - is_slice: false, - }, - PrintableType::String { length } => { - let field_elements: Vec = field_iterator.take(*length as usize).collect(); - - PrintableValue::String(decode_string_value(&field_elements)) - } - PrintableType::Struct { fields, .. } => { - let mut struct_map = BTreeMap::new(); - - for (field_key, param_type) in fields { - let field_value = decode_value(field_iterator, param_type); - - struct_map.insert(field_key.to_owned(), field_value); - } - - PrintableValue::Struct(struct_map) - } - PrintableType::Function { env, .. 
} => { - let field_element = field_iterator.next().unwrap(); - let func_ref = PrintableValue::Field(field_element); - // we want to consume the fields from the environment, but for now they are not actually printed - decode_value(field_iterator, env); - func_ref - } - PrintableType::MutableReference { typ } => { - // we decode the reference, but it's not really used for printing - decode_value(field_iterator, typ) - } - PrintableType::Unit => PrintableValue::Field(F::zero()), - } -} - -pub fn decode_string_value(field_elements: &[F]) -> String { - // TODO: Replace with `into` when Char is supported - let string_as_slice = vecmap(field_elements, |e| { - let mut field_as_bytes = e.to_be_bytes(); - let char_byte = field_as_bytes.pop().unwrap(); // A character in a string is represented by a u8, thus we just want the last byte of the element - assert!(field_as_bytes.into_iter().all(|b| b == 0)); // Assert that the rest of the field element's bytes are empty - char_byte - }); - - let final_string = str::from_utf8(&string_as_slice).unwrap(); - final_string.to_owned() -} - #[cfg(test)] mod tests { use acvm::FieldElement; - use crate::{PrintableType, PrintableValue, PrintableValueDisplay}; + use super::{PrintableType, PrintableValue, PrintableValueDisplay}; #[test] fn printable_value_display_to_string_without_interpolations() { diff --git a/noir/noir-repo/compiler/wasm/Cargo.toml b/noir/noir-repo/compiler/wasm/Cargo.toml index 9951b23f609..5cde0dfbbcf 100644 --- a/noir/noir-repo/compiler/wasm/Cargo.toml +++ b/noir/noir-repo/compiler/wasm/Cargo.toml @@ -17,9 +17,9 @@ workspace = true crate-type = ["cdylib"] [dependencies] + acvm = { workspace = true, features = ["bn254"] } fm.workspace = true -nargo.workspace = true noirc_driver.workspace = true noirc_frontend = { workspace = true, features = ["bn254"] } noirc_errors.workspace = true @@ -33,6 +33,10 @@ gloo-utils.workspace = true tracing-subscriber.workspace = true tracing-web.workspace = true +# Cannot use the `rpc` feature because the HTTP dependency wouldn't compile to Wasm. +# We could use `path` if `rpc` was a default feature, but we made it opt-in so we don't get any problems when publishing the workspace. +nargo.workspace = true + # This is an unused dependency, we are adding it # so that we can enable the js feature in getrandom. 
getrandom = { workspace = true, features = ["js"] }
diff --git a/noir/noir-repo/cspell.json b/noir/noir-repo/cspell.json
index 826e30fa86a..ed9f7427c6f 100644
--- a/noir/noir-repo/cspell.json
+++ b/noir/noir-repo/cspell.json
@@ -126,6 +126,7 @@
     "jmpifs",
     "jmps",
     "jsdoc",
+    "jsonrpsee",
     "Jubjub",
     "keccak",
     "keccakf",
@@ -166,6 +167,7 @@
     "nomicfoundation",
     "noncanonical",
     "nouner",
+    "oneshot",
     "overflowing",
     "pedersen",
     "peekable",
diff --git a/noir/noir-repo/deny.toml b/noir/noir-repo/deny.toml
index 661c8095281..48628fb0045 100644
--- a/noir/noir-repo/deny.toml
+++ b/noir/noir-repo/deny.toml
@@ -66,14 +66,19 @@ exceptions = [
     # so we prefer to not have dependencies using it
     # https://tldrlegal.com/license/creative-commons-cc0-1.0-universal
     { allow = ["CC0-1.0"], name = "more-asserts" },
-    { allow = ["CC0-1.0"], name = "jsonrpc" },
     { allow = ["CC0-1.0"], name = "notify" },
     { allow = ["CC0-1.0"], name = "tiny-keccak" },
     { allow = ["MPL-2.0"], name = "sized-chunks" },
     { allow = ["MPL-2.0"], name = "webpki-roots" },
     { allow = ["CDDL-1.0"], name = "inferno" },
+    { allow = ["OpenSSL"], name = "ring" },
 ]
 
+[[licenses.clarify]]
+crate = "ring"
+expression = "ISC"
+license-files = [{ path = "LICENSE", hash = 0xbd0eed23 }]
+
 # This section is considered when running `cargo deny check sources`.
 # More documentation about the 'sources' section can be found here:
 # https://embarkstudios.github.io/cargo-deny/checks/sources/cfg.html
diff --git a/noir/noir-repo/docs/docs/explainers/explainer-writing-noir.md b/noir/noir-repo/docs/docs/explainers/explainer-writing-noir.md
index 3ce4245dc45..202bf76a827 100644
--- a/noir/noir-repo/docs/docs/explainers/explainer-writing-noir.md
+++ b/noir/noir-repo/docs/docs/explainers/explainer-writing-noir.md
@@ -140,6 +140,18 @@ Use arrays and indices that are known at compile time where possible.
 Using `assert_constant(i);` before an index, `i`, is used in an array will give a compile error if `i` is NOT known at compile time.
 :::
 
+### Reduce what is inside loops and conditional logic
+
+Putting less logic inside `if` (and `else`) paths, or inside a loop, translates to fewer gates being required to represent the program. The compiler should mostly take care of this.
+
+A loop duplicates its gates for each iteration, or put another way, "unrolls" the loop. Any calculations or calls that are unchanged across iterations should be performed once, before the loop, and the result reused inside it.
+
+An `if` statement is "flattened", and gates are created for every path even though execution takes only one of them. Furthermore, each path requires additional operations, which can have a multiplying effect on the operations inside the `if`, `else`, etc.
+
+:::tip
+Keep only essential computation inside conditional logic and loops, and calculate everything else once, outside of them (before or after, as appropriate).
+:::
+
 ### Leverage unconstrained execution
 
 Constrained verification can leverage unconstrained execution; this is especially useful for operations that are represented by many gates.
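+
+As a purely illustrative sketch of this pattern (the helper names below are invented for this example, not library functions): division is expensive to constrain directly, so the quotient and remainder can be computed out-of-circuit and then checked with a few cheap constraints.
+
+```rust
+// An unconstrained helper computes the answer cheaply out-of-circuit.
+unconstrained fn div_rem_hint(n: u32, d: u32) -> (u32, u32) {
+    (n / d, n % d)
+}
+
+fn checked_div(n: u32, d: u32) -> u32 {
+    assert(d != 0);
+    // Safety: q and r are fully constrained by the two assertions below.
+    let (q, r) = unsafe { div_rem_hint(n, d) };
+    assert(r < d);
+    assert(q * d + r == n);
+    q
+}
+```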
diff --git a/noir/noir-repo/docs/docs/noir/concepts/data_types/strings.md b/noir/noir-repo/docs/docs/noir/concepts/data_types/strings.md index 1fdee42425e..b2257e8bdbb 100644 --- a/noir/noir-repo/docs/docs/noir/concepts/data_types/strings.md +++ b/noir/noir-repo/docs/docs/noir/concepts/data_types/strings.md @@ -77,3 +77,38 @@ let s = r#"Simon says "hello world""#; // Any number of hashes may be used (>= 1) as long as the string also terminates with the same number of hashes let s = r#####"One "#, Two "##, Three "###, Four "####, Five will end the string."#####; ``` + +## Format strings + +A format string begins with the letter `f` and allows inserting the value of local and global variables in it. + +Example: + +```rust +let four = 2 + 2; +let s = f"Two plus two is: {four}"; +println(s); +``` + +The output of the above program is: + +```text +Two plus two is: 4 +``` + +To insert the value of a local or global variable, put it inside `{...}` in the string. + +If you need to write the `{` or `}` characters, use `{{` and `}}` respectively: + +```rust +let four = 2 + 2; + +// Prints "This is not expanded: {four}" +println(f"This is not expanded: {{four}}"); +``` + +More complex expressions are not allowed inside `{...}`: + +```rust +let s = f"Two plus two is: {2 + 2}" // Error: invalid format string +``` \ No newline at end of file diff --git a/noir/noir-repo/docs/versioned_docs/version-v1.0.0-beta.1/noir/concepts/data_types/strings.md b/noir/noir-repo/docs/versioned_docs/version-v1.0.0-beta.1/noir/concepts/data_types/strings.md index 1fdee42425e..b2257e8bdbb 100644 --- a/noir/noir-repo/docs/versioned_docs/version-v1.0.0-beta.1/noir/concepts/data_types/strings.md +++ b/noir/noir-repo/docs/versioned_docs/version-v1.0.0-beta.1/noir/concepts/data_types/strings.md @@ -77,3 +77,38 @@ let s = r#"Simon says "hello world""#; // Any number of hashes may be used (>= 1) as long as the string also terminates with the same number of hashes let s = r#####"One "#, Two "##, Three "###, Four "####, Five will end the string."#####; ``` + +## Format strings + +A format string begins with the letter `f` and allows inserting the value of local and global variables in it. + +Example: + +```rust +let four = 2 + 2; +let s = f"Two plus two is: {four}"; +println(s); +``` + +The output of the above program is: + +```text +Two plus two is: 4 +``` + +To insert the value of a local or global variable, put it inside `{...}` in the string. 
+ +If you need to write the `{` or `}` characters, use `{{` and `}}` respectively: + +```rust +let four = 2 + 2; + +// Prints "This is not expanded: {four}" +println(f"This is not expanded: {{four}}"); +``` + +More complex expressions are not allowed inside `{...}`: + +```rust +let s = f"Two plus two is: {2 + 2}" // Error: invalid format string +``` \ No newline at end of file diff --git a/noir/noir-repo/test_programs/memory_report.sh b/noir/noir-repo/test_programs/memory_report.sh index 4d03726d374..e501464c198 100755 --- a/noir/noir-repo/test_programs/memory_report.sh +++ b/noir/noir-repo/test_programs/memory_report.sh @@ -12,7 +12,7 @@ current_dir=$(pwd) base_path="$current_dir/execution_success" # If there is an argument that means we want to generate a report for only the current directory -if [ "$#" -ne 0 ]; then +if [ "$1" == "1" ]; then base_path="$current_dir" tests_to_profile=(".") fi @@ -31,11 +31,16 @@ for test_name in ${tests_to_profile[@]}; do echo " ," >> $current_dir"/memory_report.json" fi - if [ "$#" -ne 0 ]; then + if [ "$1" == "1" ]; then test_name=$(basename $current_dir) fi - heaptrack --output $current_dir/$test_name"_heap" $NARGO compile --force + COMMAND="compile --force --silence-warnings" + if [ "$2" == "1" ]; then + COMMAND="execute --silence-warnings" + fi + + heaptrack --output $current_dir/$test_name"_heap" $NARGO $COMMAND if test -f $current_dir/$test_name"_heap.gz"; then heaptrack --analyze $current_dir/$test_name"_heap.gz" > $current_dir/$test_name"_heap_analysis.txt" diff --git a/noir/noir-repo/tooling/acvm_cli/src/cli/execute_cmd.rs b/noir/noir-repo/tooling/acvm_cli/src/cli/execute_cmd.rs index d4473eb3eef..c3c83a8f000 100644 --- a/noir/noir-repo/tooling/acvm_cli/src/cli/execute_cmd.rs +++ b/noir/noir-repo/tooling/acvm_cli/src/cli/execute_cmd.rs @@ -9,7 +9,7 @@ use nargo::PrintOutput; use crate::cli::fs::inputs::{read_bytecode_from_file, read_inputs_from_file}; use crate::errors::CliError; -use nargo::{foreign_calls::DefaultForeignCallExecutor, ops::execute_program}; +use nargo::{foreign_calls::DefaultForeignCallBuilder, ops::execute_program}; use super::fs::witness::{create_output_witness_string, save_witness_to_dir}; @@ -74,7 +74,8 @@ pub(crate) fn execute_program_from_witness( &program, inputs_map, &Bn254BlackBoxSolver, - &mut DefaultForeignCallExecutor::new(PrintOutput::Stdout, None, None, None), + &mut DefaultForeignCallBuilder { output: PrintOutput::Stdout, ..Default::default() } + .build(), ) .map_err(CliError::CircuitExecutionError) } diff --git a/noir/noir-repo/tooling/debugger/src/foreign_calls.rs b/noir/noir-repo/tooling/debugger/src/foreign_calls.rs index 899ba892d8f..b92e22844ea 100644 --- a/noir/noir-repo/tooling/debugger/src/foreign_calls.rs +++ b/noir/noir-repo/tooling/debugger/src/foreign_calls.rs @@ -4,12 +4,13 @@ use acvm::{ AcirField, FieldElement, }; use nargo::{ - foreign_calls::{DefaultForeignCallExecutor, ForeignCallExecutor}, + foreign_calls::{ + layers::Layer, DefaultForeignCallBuilder, ForeignCallError, ForeignCallExecutor, + }, PrintOutput, }; use noirc_artifacts::debug::{DebugArtifact, DebugVars, StackFrame}; use noirc_errors::debug_info::{DebugFnId, DebugVarId}; -use noirc_printable_type::ForeignCallError; pub(crate) enum DebugForeignCall { VarAssign, @@ -44,23 +45,31 @@ pub trait DebugForeignCallExecutor: ForeignCallExecutor { fn current_stack_frame(&self) -> Option>; } -pub struct DefaultDebugForeignCallExecutor<'a> { - executor: DefaultForeignCallExecutor<'a, FieldElement>, +#[derive(Default)] +pub struct 
DefaultDebugForeignCallExecutor { pub debug_vars: DebugVars, } -impl<'a> DefaultDebugForeignCallExecutor<'a> { - pub fn new(output: PrintOutput<'a>) -> Self { - Self { - executor: DefaultForeignCallExecutor::new(output, None, None, None), - debug_vars: DebugVars::default(), - } +impl DefaultDebugForeignCallExecutor { + fn make( + output: PrintOutput<'_>, + ex: DefaultDebugForeignCallExecutor, + ) -> impl DebugForeignCallExecutor + '_ { + DefaultForeignCallBuilder::default().with_output(output).build().add_layer(ex) + } + + #[allow(clippy::new_ret_no_self, dead_code)] + pub fn new(output: PrintOutput<'_>) -> impl DebugForeignCallExecutor + '_ { + Self::make(output, Self::default()) } - pub fn from_artifact(output: PrintOutput<'a>, artifact: &DebugArtifact) -> Self { - let mut ex = Self::new(output); + pub fn from_artifact<'a>( + output: PrintOutput<'a>, + artifact: &DebugArtifact, + ) -> impl DebugForeignCallExecutor + 'a { + let mut ex = Self::default(); ex.load_artifact(artifact); - ex + Self::make(output, ex) } pub fn load_artifact(&mut self, artifact: &DebugArtifact) { @@ -73,7 +82,7 @@ impl<'a> DefaultDebugForeignCallExecutor<'a> { } } -impl DebugForeignCallExecutor for DefaultDebugForeignCallExecutor<'_> { +impl DebugForeignCallExecutor for DefaultDebugForeignCallExecutor { fn get_variables(&self) -> Vec> { self.debug_vars.get_variables() } @@ -91,7 +100,7 @@ fn debug_fn_id(value: &FieldElement) -> DebugFnId { DebugFnId(value.to_u128() as u32) } -impl ForeignCallExecutor for DefaultDebugForeignCallExecutor<'_> { +impl ForeignCallExecutor for DefaultDebugForeignCallExecutor { fn execute( &mut self, foreign_call: &ForeignCallWaitInfo, @@ -166,7 +175,21 @@ impl ForeignCallExecutor for DefaultDebugForeignCallExecutor<'_> { self.debug_vars.pop_fn(); Ok(ForeignCallResult::default()) } - None => self.executor.execute(foreign_call), + None => Err(ForeignCallError::NoHandler(foreign_call_name.to_string())), } } } + +impl DebugForeignCallExecutor for Layer +where + H: DebugForeignCallExecutor, + I: ForeignCallExecutor, +{ + fn get_variables(&self) -> Vec> { + self.handler().get_variables() + } + + fn current_stack_frame(&self) -> Option> { + self.handler().current_stack_frame() + } +} diff --git a/noir/noir-repo/tooling/lsp/src/requests/completion.rs b/noir/noir-repo/tooling/lsp/src/requests/completion.rs index 3762ba9cf8d..fd6ef60af82 100644 --- a/noir/noir-repo/tooling/lsp/src/requests/completion.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/completion.rs @@ -31,7 +31,7 @@ use noirc_frontend::{ }, }, hir_def::traits::Trait, - node_interner::{NodeInterner, ReferenceId, StructId}, + node_interner::{FuncId, NodeInterner, ReferenceId, StructId}, parser::{Item, ItemKind, ParsedSubModule}, token::{MetaAttribute, Token, Tokens}, Kind, ParsedModule, StructType, Type, TypeBinding, @@ -120,6 +120,8 @@ struct NodeFinder<'a> { use_segment_positions: UseSegmentPositions, self_type: Option, in_comptime: bool, + /// The function we are in, if any + func_id: Option, } impl<'a> NodeFinder<'a> { @@ -165,6 +167,7 @@ impl<'a> NodeFinder<'a> { use_segment_positions: UseSegmentPositions::default(), self_type: None, in_comptime: false, + func_id: None, } } @@ -639,6 +642,13 @@ impl<'a> NodeFinder<'a> { function_completion_kind: FunctionCompletionKind, self_prefix: bool, ) { + self.complete_trait_constraints_methods( + typ, + prefix, + function_kind, + function_completion_kind, + ); + let Some(methods_by_name) = self.interner.get_type_methods(typ) else { return; }; @@ -697,6 +707,31 @@ impl<'a> NodeFinder<'a> { } 
} + fn complete_trait_constraints_methods( + &mut self, + typ: &Type, + prefix: &str, + function_kind: FunctionKind, + function_completion_kind: FunctionCompletionKind, + ) { + let Some(func_id) = self.func_id else { + return; + }; + + let func_meta = self.interner.function_meta(&func_id); + for constraint in &func_meta.trait_constraints { + if *typ == constraint.typ { + let trait_ = self.interner.get_trait(constraint.trait_bound.trait_id); + self.complete_trait_methods( + trait_, + prefix, + function_kind, + function_completion_kind, + ); + } + } + } + fn complete_trait_methods( &mut self, trait_: &Trait, @@ -1125,8 +1160,17 @@ impl<'a> Visitor for NodeFinder<'a> { let old_in_comptime = self.in_comptime; self.in_comptime = noir_function.def.is_comptime; + if let Some(ReferenceId::Function(func_id)) = self + .interner + .reference_at_location(Location::new(noir_function.name_ident().span(), self.file)) + { + self.func_id = Some(func_id); + } + noir_function.def.body.accept(Some(span), self); + self.func_id = None; + self.in_comptime = old_in_comptime; self.type_parameters = old_type_parameters; self.self_type = None; @@ -1207,7 +1251,7 @@ impl<'a> Visitor for NodeFinder<'a> { fn visit_trait_item_function( &mut self, - _name: &Ident, + name: &Ident, generics: &UnresolvedGenerics, parameters: &[(Ident, UnresolvedType)], return_type: &noirc_frontend::ast::FunctionReturnType, @@ -1232,7 +1276,16 @@ impl<'a> Visitor for NodeFinder<'a> { for (name, _) in parameters { self.local_variables.insert(name.to_string(), name.span()); } + + if let Some(ReferenceId::Function(func_id)) = + self.interner.reference_at_location(Location::new(name.span(), self.file)) + { + self.func_id = Some(func_id); + } + body.accept(None, self); + + self.func_id = None; }; self.type_parameters = old_type_parameters; diff --git a/noir/noir-repo/tooling/lsp/src/requests/completion/tests.rs b/noir/noir-repo/tooling/lsp/src/requests/completion/tests.rs index 97c7ad86d5a..65157090767 100644 --- a/noir/noir-repo/tooling/lsp/src/requests/completion/tests.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/completion/tests.rs @@ -2861,4 +2861,22 @@ fn main() { assert_eq!(items.len(), 1); assert!(items[0].label == "bar_baz()"); } + + #[test] + async fn test_suggests_trait_method_from_where_clause_in_function() { + let src = r#" + trait Foo { + fn foo(self) -> i32; + } + + fn something(x: T) -> i32 + where + T: Foo, + { + x.fo>|< + } + "#; + let items = get_completions(src).await; + assert_eq!(items.len(), 1); + } } diff --git a/noir/noir-repo/tooling/lsp/src/requests/mod.rs b/noir/noir-repo/tooling/lsp/src/requests/mod.rs index 22bdda8d7d7..f7b58d7d42f 100644 --- a/noir/noir-repo/tooling/lsp/src/requests/mod.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/mod.rs @@ -18,6 +18,7 @@ use nargo_fmt::Config; use noirc_frontend::graph::CrateId; use noirc_frontend::hir::def_map::CrateDefMap; +use noirc_frontend::parser::ParserError; use noirc_frontend::usage_tracker::UsageTracker; use noirc_frontend::{graph::Dependency, node_interner::NodeInterner}; use serde::{Deserialize, Serialize}; @@ -285,7 +286,8 @@ fn on_formatting_inner( if let Some(source) = state.input_files.get(&path) { let (module, errors) = noirc_frontend::parse_program(source); - if !errors.is_empty() { + let is_all_warnings = errors.iter().all(ParserError::is_warning); + if !is_all_warnings { return Ok(None); } diff --git a/noir/noir-repo/tooling/lsp/src/requests/test_run.rs b/noir/noir-repo/tooling/lsp/src/requests/test_run.rs index 72ae6695b82..1071866dfad 100644 --- 
a/noir/noir-repo/tooling/lsp/src/requests/test_run.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/test_run.rs @@ -3,6 +3,7 @@ use std::future::{self, Future}; use crate::insert_all_files_for_workspace_into_file_manager; use async_lsp::{ErrorCode, ResponseError}; use nargo::{ + foreign_calls::DefaultForeignCallBuilder, ops::{run_test, TestStatus}, PrintOutput, }; @@ -88,10 +89,16 @@ fn on_test_run_request_inner( &mut context, &test_function, PrintOutput::Stdout, - None, - Some(workspace.root_dir.clone()), - Some(package.name.to_string()), &CompileOptions::default(), + |output, base| { + DefaultForeignCallBuilder { + output, + resolver_url: None, // NB without this the root and package don't do anything. + root_path: Some(workspace.root_dir.clone()), + package_name: Some(package.name.to_string()), + } + .build_with_base(base) + }, ); let result = match test_result { TestStatus::Pass => NargoTestRunResult { diff --git a/noir/noir-repo/tooling/nargo/Cargo.toml b/noir/noir-repo/tooling/nargo/Cargo.toml index 1dbb9978b0b..9fb46b78bc9 100644 --- a/noir/noir-repo/tooling/nargo/Cargo.toml +++ b/noir/noir-repo/tooling/nargo/Cargo.toml @@ -21,20 +21,27 @@ noirc_errors.workspace = true noirc_frontend.workspace = true noirc_printable_type.workspace = true iter-extended.workspace = true +jsonrpsee.workspace = true +rayon.workspace = true thiserror.workspace = true tracing.workspace = true -rayon.workspace = true -jsonrpc.workspace = true -rand.workspace = true serde.workspace = true +serde_json.workspace = true walkdir = "2.5.0" +# Some dependencies are optional so we can compile to Wasm. +tokio = { workspace = true, optional = true } +rand = { workspace = true, optional = true } + [target.'cfg(not(target_arch = "wasm32"))'.dependencies] -noir_fuzzer.workspace = true -proptest.workspace = true +noir_fuzzer = { workspace = true } +proptest = { workspace = true } [dev-dependencies] -jsonrpc-http-server = "18.0" -jsonrpc-core-client = "18.0" -jsonrpc-derive = "18.0" -jsonrpc-core = "18.0" +jsonrpsee = { workspace = true, features = ["server"] } + +[features] +default = [] + +# Execution currently uses HTTP based Oracle resolvers; does not compile to Wasm. +rpc = ["jsonrpsee/http-client", "jsonrpsee/macros", "tokio/rt", "rand"] diff --git a/noir/noir-repo/tooling/nargo/src/errors.rs b/noir/noir-repo/tooling/nargo/src/errors.rs index 5256f28e36c..00c411bf7e4 100644 --- a/noir/noir-repo/tooling/nargo/src/errors.rs +++ b/noir/noir-repo/tooling/nargo/src/errors.rs @@ -16,9 +16,10 @@ use noirc_errors::{ pub use noirc_errors::Location; use noirc_driver::CrateName; -use noirc_printable_type::ForeignCallError; use thiserror::Error; +use crate::foreign_calls::ForeignCallError; + /// Errors covering situations where a package cannot be compiled. 
#[derive(Debug, Error)] pub enum CompileError { diff --git a/noir/noir-repo/tooling/nargo/src/foreign_calls/default.rs b/noir/noir-repo/tooling/nargo/src/foreign_calls/default.rs new file mode 100644 index 00000000000..ce4af3aa744 --- /dev/null +++ b/noir/noir-repo/tooling/nargo/src/foreign_calls/default.rs @@ -0,0 +1,115 @@ +use acvm::AcirField; +use serde::{Deserialize, Serialize}; + +use crate::PrintOutput; + +use super::{ + layers::{self, Layer, Layering}, + mocker::MockForeignCallExecutor, + print::PrintForeignCallExecutor, + ForeignCallExecutor, +}; + +#[cfg(feature = "rpc")] +use super::rpc::RPCForeignCallExecutor; + +/// A builder for [DefaultForeignCallLayers] where we can enable fields based on feature flags, +/// which is easier than providing different overrides for a `new` method. +#[derive(Default)] +pub struct DefaultForeignCallBuilder<'a> { + pub output: PrintOutput<'a>, + #[cfg(feature = "rpc")] + pub resolver_url: Option, + #[cfg(feature = "rpc")] + pub root_path: Option, + #[cfg(feature = "rpc")] + pub package_name: Option, +} + +impl<'a> DefaultForeignCallBuilder<'a> { + /// Override the output. + pub fn with_output(mut self, output: PrintOutput<'a>) -> Self { + self.output = output; + self + } + + /// Compose the executor layers with [layers::Empty] as the default handler. + pub fn build(self) -> DefaultForeignCallLayers<'a, layers::Empty, F> + where + F: AcirField + Serialize + for<'de> Deserialize<'de> + 'a, + { + self.build_with_base(layers::Empty) + } + + /// Compose the executor layers with `base` as the default handler. + pub fn build_with_base(self, base: B) -> DefaultForeignCallLayers<'a, B, F> + where + F: AcirField + Serialize + for<'de> Deserialize<'de> + 'a, + B: ForeignCallExecutor + 'a, + { + let executor = { + #[cfg(feature = "rpc")] + { + use rand::Rng; + + base.add_layer(self.resolver_url.map(|resolver_url| { + let id = rand::thread_rng().gen(); + RPCForeignCallExecutor::new( + &resolver_url, + id, + self.root_path, + self.package_name, + ) + })) + } + #[cfg(not(feature = "rpc"))] + { + base + } + }; + + executor + .add_layer(MockForeignCallExecutor::default()) + .add_layer(PrintForeignCallExecutor::new(self.output)) + } +} + +/// Facilitate static typing of layers on a base layer, so inner layers can be accessed. +#[cfg(feature = "rpc")] +pub type DefaultForeignCallLayers<'a, B, F> = Layer< + PrintForeignCallExecutor<'a>, + Layer, Layer, B>>, +>; +#[cfg(not(feature = "rpc"))] +pub type DefaultForeignCallLayers<'a, B, F> = + Layer, Layer, B>>; + +/// Convenience constructor for code that used to create the executor this way. +#[cfg(feature = "rpc")] +pub struct DefaultForeignCallExecutor; + +/// Convenience constructors for the RPC case. Non-RPC versions are not provided +/// because once a crate opts into this within the workspace, everyone gets it +/// even if they don't want to. For the non-RPC case we can nudge people to +/// use `DefaultForeignCallBuilder` which is easier to keep flexible. 
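+///
+/// For comparison, a hedged sketch of the builder route (this mirrors how the
+/// ACVM CLI constructs its executor elsewhere in this change):
+///
+/// ```ignore
+/// let mut executor =
+///     DefaultForeignCallBuilder { output: PrintOutput::Stdout, ..Default::default() }.build();
+/// ```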
+#[cfg(feature = "rpc")] +impl DefaultForeignCallExecutor { + #[allow(clippy::new_ret_no_self)] + pub fn new<'a, F>( + output: PrintOutput<'a>, + resolver_url: Option<&str>, + root_path: Option, + package_name: Option, + ) -> impl ForeignCallExecutor + 'a + where + F: AcirField + Serialize + for<'de> Deserialize<'de> + 'a, + { + DefaultForeignCallBuilder { + output, + resolver_url: resolver_url.map(|s| s.to_string()), + root_path, + package_name, + } + .build() + } +} diff --git a/noir/noir-repo/tooling/nargo/src/foreign_calls/layers.rs b/noir/noir-repo/tooling/nargo/src/foreign_calls/layers.rs new file mode 100644 index 00000000000..19f14c6f4a8 --- /dev/null +++ b/noir/noir-repo/tooling/nargo/src/foreign_calls/layers.rs @@ -0,0 +1,140 @@ +use acvm::{acir::brillig::ForeignCallResult, pwg::ForeignCallWaitInfo, AcirField}; + +use super::{ForeignCallError, ForeignCallExecutor}; + +/// Returns an empty result when called. +/// +/// If all executors have no handler for the given foreign call then we cannot +/// return a correct response to the ACVM. The best we can do is to return an empty response, +/// this allows us to ignore any foreign calls which exist solely to pass information from inside +/// the circuit to the environment (e.g. custom logging) as the execution will still be able to progress. +/// +/// We optimistically return an empty response for all oracle calls as the ACVM will error +/// should a response have been required. +pub struct Empty; + +impl ForeignCallExecutor for Empty { + fn execute( + &mut self, + _foreign_call: &ForeignCallWaitInfo, + ) -> Result, ForeignCallError> { + Ok(ForeignCallResult::default()) + } +} + +/// Returns `NoHandler` for every call. +pub struct Unhandled; + +impl ForeignCallExecutor for Unhandled { + fn execute( + &mut self, + foreign_call: &ForeignCallWaitInfo, + ) -> Result, ForeignCallError> { + Err(ForeignCallError::NoHandler(foreign_call.function.clone())) + } +} + +/// Forwards to the inner executor if its own handler doesn't handle the call. +pub struct Layer { + pub handler: H, + pub inner: I, +} + +impl ForeignCallExecutor for Layer +where + H: ForeignCallExecutor, + I: ForeignCallExecutor, +{ + fn execute( + &mut self, + foreign_call: &ForeignCallWaitInfo, + ) -> Result, ForeignCallError> { + match self.handler.execute(foreign_call) { + Err(ForeignCallError::NoHandler(_)) => self.inner.execute(foreign_call), + handled => handled, + } + } +} + +impl Layer { + /// Create a layer from two handlers + pub fn new(handler: H, inner: I) -> Self { + Self { handler, inner } + } +} + +impl Layer { + /// Create a layer from a handler. + /// If the handler doesn't handle a call, a default empty response is returned. + pub fn or_empty(handler: H) -> Self { + Self { handler, inner: Empty } + } +} + +impl Layer { + /// Create a layer from a handler. + /// If the handler doesn't handle a call, `NoHandler` error is returned. + pub fn or_unhandled(handler: H) -> Self { + Self { handler, inner: Unhandled } + } +} + +impl Layer { + /// A base layer that doesn't handle anything. + pub fn unhandled() -> Self { + Self { handler: Unhandled, inner: Unhandled } + } +} + +impl Layer { + /// Add another layer on top of this one. + pub fn add_layer(self, handler: J) -> Layer { + Layer::new(handler, self) + } + + pub fn handler(&self) -> &H { + &self.handler + } + + pub fn inner(&self) -> &I { + &self.inner + } +} + +/// Compose handlers. +pub trait Layering { + /// Layer an executor on top of this one. + /// The `other` executor will be called first. 
+ fn add_layer(self, other: L) -> Layer + where + Self: Sized + ForeignCallExecutor, + L: ForeignCallExecutor; +} + +impl Layering for T { + fn add_layer(self, other: L) -> Layer + where + T: Sized + ForeignCallExecutor, + L: ForeignCallExecutor, + { + Layer::new(other, self) + } +} + +/// Support disabling a layer by making it optional. +/// This way we can still have a known static type for a composition, +/// because layers are always added, potentially wrapped in an `Option`. +impl ForeignCallExecutor for Option +where + H: ForeignCallExecutor, +{ + fn execute( + &mut self, + foreign_call: &ForeignCallWaitInfo, + ) -> Result, ForeignCallError> { + match self { + Some(handler) => handler.execute(foreign_call), + None => Err(ForeignCallError::NoHandler(foreign_call.function.clone())), + } + } +} diff --git a/noir/noir-repo/tooling/nargo/src/foreign_calls/mocker.rs b/noir/noir-repo/tooling/nargo/src/foreign_calls/mocker.rs index c93d16bbaf6..b289e907cd7 100644 --- a/noir/noir-repo/tooling/nargo/src/foreign_calls/mocker.rs +++ b/noir/noir-repo/tooling/nargo/src/foreign_calls/mocker.rs @@ -1,12 +1,13 @@ +use std::marker::PhantomData; + use acvm::{ acir::brillig::{ForeignCallParam, ForeignCallResult}, pwg::ForeignCallWaitInfo, AcirField, }; -use noirc_printable_type::{decode_string_value, ForeignCallError}; -use serde::{Deserialize, Serialize}; +use noirc_abi::decode_string_value; -use super::{ForeignCall, ForeignCallExecutor}; +use super::{ForeignCall, ForeignCallError, ForeignCallExecutor}; /// This struct represents an oracle mock. It can be used for testing programs that use oracles. #[derive(Debug, PartialEq, Eq, Clone)] @@ -45,7 +46,7 @@ impl MockedCall { } #[derive(Debug, Default)] -pub(crate) struct MockForeignCallExecutor { +pub struct MockForeignCallExecutor { /// Mocks have unique ids used to identify them in Noir, allowing to update or remove them. last_mock_id: usize, /// The registered mocks @@ -78,8 +79,9 @@ impl MockForeignCallExecutor { } } -impl Deserialize<'a>> ForeignCallExecutor - for MockForeignCallExecutor +impl ForeignCallExecutor for MockForeignCallExecutor +where + F: AcirField, { fn execute( &mut self, @@ -174,3 +176,30 @@ impl Deserialize<'a>> ForeignCallExecutor } } } + +/// Handler that panics if any of the mock functions are called. 
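Since `layers.rs` is new, the fall-through contract is worth seeing from the outside: a handler signals "not mine" by returning `ForeignCallError::NoHandler`, and the enclosing `Layer` then consults its inner executor. A sketch with a made-up `Ping` handler, assuming the trait is generic over the field element `F` as its implementations suggest:

```rust
use acvm::{acir::brillig::ForeignCallResult, pwg::ForeignCallWaitInfo, AcirField};
use nargo::foreign_calls::{
    layers::{Empty, Layering},
    ForeignCallError, ForeignCallExecutor,
};

/// Hypothetical handler that only answers a foreign call named "ping".
struct Ping;

impl<F: AcirField> ForeignCallExecutor<F> for Ping {
    fn execute(
        &mut self,
        foreign_call: &ForeignCallWaitInfo<F>,
    ) -> Result<ForeignCallResult<F>, ForeignCallError> {
        if foreign_call.function == "ping" {
            Ok(ForeignCallResult::default())
        } else {
            // `NoHandler` tells the enclosing `Layer` to try its inner executor.
            Err(ForeignCallError::NoHandler(foreign_call.function.clone()))
        }
    }
}

// Layers are consulted outermost-first: `Ping` runs before the `Empty`
// fallback, which answers anything left over with an empty result.
fn stack<F: AcirField>() -> impl ForeignCallExecutor<F> {
    Empty.add_layer(Ping)
}
```

Building the stack with `add_layer` keeps the whole composition as one static type, which is what the `DefaultForeignCallLayers` alias in `default.rs` spells out.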
+#[allow(dead_code)] // TODO: Make the mocker optional +pub(crate) struct DisabledMockForeignCallExecutor { + _field: PhantomData, +} + +impl ForeignCallExecutor for DisabledMockForeignCallExecutor { + fn execute( + &mut self, + foreign_call: &ForeignCallWaitInfo, + ) -> Result, ForeignCallError> { + let foreign_call_name = foreign_call.function.as_str(); + if let Some( + ForeignCall::CreateMock + | ForeignCall::SetMockParams + | ForeignCall::GetMockLastParams + | ForeignCall::SetMockReturns + | ForeignCall::SetMockTimes + | ForeignCall::ClearMock, + ) = ForeignCall::lookup(foreign_call_name) + { + panic!("unexpected mock call: {}", foreign_call.function) + } + Err(ForeignCallError::NoHandler(foreign_call.function.clone())) + } +} diff --git a/noir/noir-repo/tooling/nargo/src/foreign_calls/mod.rs b/noir/noir-repo/tooling/nargo/src/foreign_calls/mod.rs index 65ff051bcbf..06fe42bfec9 100644 --- a/noir/noir-repo/tooling/nargo/src/foreign_calls/mod.rs +++ b/noir/noir-repo/tooling/nargo/src/foreign_calls/mod.rs @@ -1,16 +1,16 @@ -use std::path::PathBuf; +use acvm::{acir::brillig::ForeignCallResult, pwg::ForeignCallWaitInfo}; +use thiserror::Error; -use acvm::{acir::brillig::ForeignCallResult, pwg::ForeignCallWaitInfo, AcirField}; -use mocker::MockForeignCallExecutor; -use noirc_printable_type::ForeignCallError; -use print::{PrintForeignCallExecutor, PrintOutput}; -use rand::Rng; -use rpc::RPCForeignCallExecutor; -use serde::{Deserialize, Serialize}; +pub mod layers; +pub mod mocker; +pub mod print; -pub(crate) mod mocker; -pub(crate) mod print; -pub(crate) mod rpc; +pub mod default; +#[cfg(feature = "rpc")] +pub mod rpc; +pub use default::DefaultForeignCallBuilder; +#[cfg(feature = "rpc")] +pub use default::DefaultForeignCallExecutor; pub trait ForeignCallExecutor { fn execute( @@ -64,77 +64,20 @@ impl ForeignCall { } } -#[derive(Debug, Default)] -pub struct DefaultForeignCallExecutor<'a, F> { - /// The executor for any [`ForeignCall::Print`] calls. 
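With the submodules now public and RPC support behind a feature gate, downstream imports follow the `pub mod`/`pub use` declarations above, roughly (a sketch):

```rust
// Always available:
use nargo::foreign_calls::{
    layers::Layering, mocker::MockForeignCallExecutor, print::PrintForeignCallExecutor,
    DefaultForeignCallBuilder, ForeignCallError, ForeignCallExecutor,
};

// Present only when `nargo` is compiled with its `rpc` feature:
use nargo::foreign_calls::{rpc::RPCForeignCallExecutor, DefaultForeignCallExecutor};
```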
- printer: PrintForeignCallExecutor<'a>, - mocker: MockForeignCallExecutor, - external: Option, -} - -impl<'a, F: Default> DefaultForeignCallExecutor<'a, F> { - pub fn new( - output: PrintOutput<'a>, - resolver_url: Option<&str>, - root_path: Option, - package_name: Option, - ) -> Self { - let id = rand::thread_rng().gen(); - let printer = PrintForeignCallExecutor { output }; - let external_resolver = resolver_url.map(|resolver_url| { - RPCForeignCallExecutor::new(resolver_url, id, root_path, package_name) - }); - DefaultForeignCallExecutor { - printer, - mocker: MockForeignCallExecutor::default(), - external: external_resolver, - } - } -} +#[derive(Debug, Error)] +pub enum ForeignCallError { + #[error("No handler could be found for foreign call `{0}`")] + NoHandler(String), -impl<'a, F: AcirField + Serialize + for<'b> Deserialize<'b>> ForeignCallExecutor - for DefaultForeignCallExecutor<'a, F> -{ - fn execute( - &mut self, - foreign_call: &ForeignCallWaitInfo, - ) -> Result, ForeignCallError> { - let foreign_call_name = foreign_call.function.as_str(); - match ForeignCall::lookup(foreign_call_name) { - Some(ForeignCall::Print) => self.printer.execute(foreign_call), - Some( - ForeignCall::CreateMock - | ForeignCall::SetMockParams - | ForeignCall::GetMockLastParams - | ForeignCall::SetMockReturns - | ForeignCall::SetMockTimes - | ForeignCall::ClearMock, - ) => self.mocker.execute(foreign_call), + #[error("Foreign call inputs needed for execution are missing")] + MissingForeignCallInputs, - None => { - // First check if there's any defined mock responses for this foreign call. - match self.mocker.execute(foreign_call) { - Err(ForeignCallError::NoHandler(_)) => (), - response_or_error => return response_or_error, - }; + #[error("Could not parse PrintableType argument. {0}")] + ParsingError(#[from] serde_json::Error), - if let Some(external_resolver) = &mut self.external { - // If the user has registered an external resolver then we forward any remaining oracle calls there. - match external_resolver.execute(foreign_call) { - Err(ForeignCallError::NoHandler(_)) => (), - response_or_error => return response_or_error, - }; - } + #[error("Failed calling external resolver. {0}")] + ExternalResolverError(#[from] jsonrpsee::core::client::Error), - // If all executors have no handler for the given foreign call then we cannot - // return a correct response to the ACVM. The best we can do is to return an empty response, - // this allows us to ignore any foreign calls which exist solely to pass information from inside - // the circuit to the environment (e.g. custom logging) as the execution will still be able to progress. - // - // We optimistically return an empty response for all oracle calls as the ACVM will error - // should a response have been required. - Ok(ForeignCallResult::default()) - } - } - } + #[error("Assert message resolved after an unsatisfied constrain. 
{0}")] + ResolvedAssertMessage(String), } diff --git a/noir/noir-repo/tooling/nargo/src/foreign_calls/print.rs b/noir/noir-repo/tooling/nargo/src/foreign_calls/print.rs index 8b2b5efd8b6..fb5621da942 100644 --- a/noir/noir-repo/tooling/nargo/src/foreign_calls/print.rs +++ b/noir/noir-repo/tooling/nargo/src/foreign_calls/print.rs @@ -1,7 +1,12 @@ -use acvm::{acir::brillig::ForeignCallResult, pwg::ForeignCallWaitInfo, AcirField}; -use noirc_printable_type::{ForeignCallError, PrintableValueDisplay}; +use acvm::{ + acir::brillig::{ForeignCallParam, ForeignCallResult}, + pwg::ForeignCallWaitInfo, + AcirField, +}; +use noirc_abi::{decode_printable_value, decode_string_value}; +use noirc_printable_type::{PrintableType, PrintableValueDisplay}; -use super::{ForeignCall, ForeignCallExecutor}; +use super::{ForeignCall, ForeignCallError, ForeignCallExecutor}; #[derive(Debug, Default)] pub enum PrintOutput<'a> { @@ -12,8 +17,14 @@ pub enum PrintOutput<'a> { } #[derive(Debug, Default)] -pub(crate) struct PrintForeignCallExecutor<'a> { - pub(crate) output: PrintOutput<'a>, +pub struct PrintForeignCallExecutor<'a> { + output: PrintOutput<'a>, +} + +impl<'a> PrintForeignCallExecutor<'a> { + pub fn new(output: PrintOutput<'a>) -> Self { + Self { output } + } } impl ForeignCallExecutor for PrintForeignCallExecutor<'_> { @@ -32,7 +43,8 @@ impl ForeignCallExecutor for PrintForeignCallExecutor<'_> { .ok_or(ForeignCallError::MissingForeignCallInputs)? .1; - let display_values: PrintableValueDisplay = foreign_call_inputs.try_into()?; + let display_values: PrintableValueDisplay = + try_from_params(foreign_call_inputs)?; let display_string = format!("{display_values}{}", if skip_newline { "" } else { "\n" }); @@ -50,3 +62,72 @@ impl ForeignCallExecutor for PrintForeignCallExecutor<'_> { } } } + +fn try_from_params( + foreign_call_inputs: &[ForeignCallParam], +) -> Result, ForeignCallError> { + let (is_fmt_str, foreign_call_inputs) = + foreign_call_inputs.split_last().ok_or(ForeignCallError::MissingForeignCallInputs)?; + + if is_fmt_str.unwrap_field().is_one() { + convert_fmt_string_inputs(foreign_call_inputs) + } else { + convert_string_inputs(foreign_call_inputs) + } +} + +fn convert_string_inputs( + foreign_call_inputs: &[ForeignCallParam], +) -> Result, ForeignCallError> { + // Fetch the PrintableType from the foreign call input + // The remaining input values should hold what is to be printed + let (printable_type_as_values, input_values) = + foreign_call_inputs.split_last().ok_or(ForeignCallError::MissingForeignCallInputs)?; + let printable_type = fetch_printable_type(printable_type_as_values)?; + + // We must use a flat map here as each value in a struct will be in a separate input value + let mut input_values_as_fields = input_values.iter().flat_map(|param| param.fields()); + + let value = decode_printable_value(&mut input_values_as_fields, &printable_type); + + Ok(PrintableValueDisplay::Plain(value, printable_type)) +} + +fn convert_fmt_string_inputs( + foreign_call_inputs: &[ForeignCallParam], +) -> Result, ForeignCallError> { + let (message, input_and_printable_types) = + foreign_call_inputs.split_first().ok_or(ForeignCallError::MissingForeignCallInputs)?; + + let message_as_fields = message.fields(); + let message_as_string = decode_string_value(&message_as_fields); + + let (num_values, input_and_printable_types) = input_and_printable_types + .split_first() + .ok_or(ForeignCallError::MissingForeignCallInputs)?; + + let mut output = Vec::new(); + let num_values = num_values.unwrap_field().to_u128() as 
usize; + + let types_start_at = input_and_printable_types.len() - num_values; + let mut input_iter = + input_and_printable_types[0..types_start_at].iter().flat_map(|param| param.fields()); + for printable_type in input_and_printable_types.iter().skip(types_start_at) { + let printable_type = fetch_printable_type(printable_type)?; + let value = decode_printable_value(&mut input_iter, &printable_type); + + output.push((value, printable_type)); + } + + Ok(PrintableValueDisplay::FmtString(message_as_string, output)) +} + +fn fetch_printable_type( + printable_type: &ForeignCallParam, +) -> Result { + let printable_type_as_fields = printable_type.fields(); + let printable_type_as_string = decode_string_value(&printable_type_as_fields); + let printable_type: PrintableType = serde_json::from_str(&printable_type_as_string)?; + + Ok(printable_type) +} diff --git a/noir/noir-repo/tooling/nargo/src/foreign_calls/rpc.rs b/noir/noir-repo/tooling/nargo/src/foreign_calls/rpc.rs index 0653eb1c7e3..89a748b6c45 100644 --- a/noir/noir-repo/tooling/nargo/src/foreign_calls/rpc.rs +++ b/noir/noir-repo/tooling/nargo/src/foreign_calls/rpc.rs @@ -1,25 +1,31 @@ use std::path::PathBuf; use acvm::{acir::brillig::ForeignCallResult, pwg::ForeignCallWaitInfo, AcirField}; -use jsonrpc::{arg as build_json_rpc_arg, minreq_http::Builder, Client}; -use noirc_printable_type::ForeignCallError; +use jsonrpsee::{ + core::client::ClientT, + http_client::{HttpClient, HttpClientBuilder}, + rpc_params, +}; use serde::{Deserialize, Serialize}; -use super::ForeignCallExecutor; +use super::{ForeignCallError, ForeignCallExecutor}; #[derive(Debug)] -pub(crate) struct RPCForeignCallExecutor { +pub struct RPCForeignCallExecutor { /// A randomly generated id for this `DefaultForeignCallExecutor`. /// /// This is used so that a single `external_resolver` can distinguish between requests from multiple /// instantiations of `DefaultForeignCallExecutor`. id: u64, /// JSON RPC client to resolve foreign calls - external_resolver: Client, + external_resolver: HttpClient, /// Root path to the program or workspace in execution. root_path: Option, /// Name of the package in execution package_name: Option, + /// Runtime to execute asynchronous tasks on. + /// See [bridging](https://tokio.rs/tokio/topics/bridging). + runtime: tokio::runtime::Runtime, } #[derive(Debug, Serialize, Deserialize)] @@ -31,59 +37,76 @@ struct ResolveForeignCallRequest { /// performed in parallel. session_id: u64, - #[serde(flatten)] /// The foreign call which the external RPC server is to provide a response for. + #[serde(flatten)] function_call: ForeignCallWaitInfo, - #[serde(skip_serializing_if = "Option::is_none")] /// Root path to the program or workspace in execution. 
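To recap the decode helpers in `print.rs` above: the print oracle's inputs are parsed back-to-front, with a trailing flag selecting the plain or format-string path. A sketch of the layout; the JSON shape shown for `PrintableType` is an assumption, the helpers only require that it deserializes:

```rust
use acvm::{acir::brillig::ForeignCallParam, AcirField, FieldElement};

// Layout expected by `try_from_params`:
//   plain print:  [ value fields ..., PrintableType as JSON string, is_fmt_str = 0 ]
//   format print: [ message string, item count, value fields ...,
//                   one PrintableType as JSON per item, is_fmt_str = 1 ]
//
// A hypothetical plain print of the boolean `true`; the JSON string (e.g.
// `{"kind":"boolean"}`) is encoded one character per field, matching
// `decode_string_value`.
fn example_inputs(type_json_fields: Vec<FieldElement>) -> Vec<ForeignCallParam<FieldElement>> {
    vec![
        ForeignCallParam::Single(FieldElement::one()),  // the value being printed
        ForeignCallParam::Array(type_json_fields),      // serialized PrintableType
        ForeignCallParam::Single(FieldElement::zero()), // not a format string
    ]
}
```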
- root_path: Option, #[serde(skip_serializing_if = "Option::is_none")] + root_path: Option, + /// Name of the package in execution + #[serde(skip_serializing_if = "Option::is_none")] package_name: Option, } +type ResolveForeignCallResult = Result, ForeignCallError>; + impl RPCForeignCallExecutor { - pub(crate) fn new( + pub fn new( resolver_url: &str, id: u64, root_path: Option, package_name: Option, ) -> Self { - let mut transport_builder = - Builder::new().url(resolver_url).expect("Invalid oracle resolver URL"); + let mut client_builder = HttpClientBuilder::new(); if let Some(Ok(timeout)) = std::env::var("NARGO_FOREIGN_CALL_TIMEOUT").ok().map(|timeout| timeout.parse()) { let timeout_duration = std::time::Duration::from_millis(timeout); - transport_builder = transport_builder.timeout(timeout_duration); + client_builder = client_builder.request_timeout(timeout_duration); }; - let oracle_resolver = Client::with_transport(transport_builder.build()); - RPCForeignCallExecutor { external_resolver: oracle_resolver, id, root_path, package_name } + let oracle_resolver = + client_builder.build(resolver_url).expect("Invalid oracle resolver URL"); + + // Opcodes are executed in the `ProgramExecutor::execute_circuit` one by one in a loop, + // we don't need a concurrent thread pool. + let runtime = tokio::runtime::Builder::new_current_thread() + .enable_time() + .enable_io() + .build() + .expect("failed to build tokio runtime"); + + RPCForeignCallExecutor { + external_resolver: oracle_resolver, + id, + root_path, + package_name, + runtime, + } } } -impl Deserialize<'a>> ForeignCallExecutor - for RPCForeignCallExecutor +impl ForeignCallExecutor for RPCForeignCallExecutor +where + F: AcirField + Serialize + for<'a> Deserialize<'a>, { - fn execute( - &mut self, - foreign_call: &ForeignCallWaitInfo, - ) -> Result, ForeignCallError> { - let encoded_params = vec![build_json_rpc_arg(ResolveForeignCallRequest { + /// Execute an async call blocking the current thread. + /// This method cannot be called from inside a `tokio` runtime, for that to work + /// we need to offload the execution into a different thread; see the tests. 
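The sync-over-async bridge that `new` sets up here can be reduced to a freestanding pattern (a sketch assuming `tokio`, `serde_json`, and `jsonrpsee` with its HTTP client enabled; the method parameters are placeholders rather than a real `ResolveForeignCallRequest`):

```rust
use jsonrpsee::{core::client::ClientT, http_client::HttpClientBuilder, rpc_params};

// Build a single-threaded runtime once, then reuse it to block on each request.
// Calling `block_on` from inside an existing tokio runtime panics, which is why
// the tests below move the executor onto a dedicated blocking thread.
fn call_oracle(url: &str) -> Result<serde_json::Value, Box<dyn std::error::Error>> {
    let client = HttpClientBuilder::new().build(url)?;
    let runtime = tokio::runtime::Builder::new_current_thread()
        .enable_time()
        .enable_io()
        .build()?;
    let response = runtime.block_on(async {
        client.request("resolve_foreign_call", rpc_params!["placeholder"]).await
    })?;
    Ok(response)
}
```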
+ fn execute(&mut self, foreign_call: &ForeignCallWaitInfo) -> ResolveForeignCallResult { + let encoded_params = rpc_params!(ResolveForeignCallRequest { session_id: self.id, function_call: foreign_call.clone(), root_path: self.root_path.clone().map(|path| path.to_str().unwrap().to_string()), package_name: self.package_name.clone(), - })]; - - let req = self.external_resolver.build_request("resolve_foreign_call", &encoded_params); - - let response = self.external_resolver.send_request(req)?; + }); - let parsed_response: ForeignCallResult = response.result()?; + let parsed_response = self.runtime.block_on(async { + self.external_resolver.request("resolve_foreign_call", encoded_params).await + })?; Ok(parsed_response) } @@ -95,20 +118,23 @@ mod tests { acir::brillig::ForeignCallParam, brillig_vm::brillig::ForeignCallResult, pwg::ForeignCallWaitInfo, FieldElement, }; - use jsonrpc_core::Result as RpcResult; - use jsonrpc_derive::rpc; - use jsonrpc_http_server::{Server, ServerBuilder}; - - use super::{ForeignCallExecutor, RPCForeignCallExecutor, ResolveForeignCallRequest}; + use jsonrpsee::proc_macros::rpc; + use jsonrpsee::server::Server; + use jsonrpsee::types::ErrorObjectOwned; + use tokio::sync::{mpsc, oneshot}; + + use super::{ + ForeignCallExecutor, RPCForeignCallExecutor, ResolveForeignCallRequest, + ResolveForeignCallResult, + }; - #[allow(unreachable_pub)] - #[rpc] - pub trait OracleResolver { - #[rpc(name = "resolve_foreign_call")] + #[rpc(server)] + trait OracleResolver { + #[method(name = "resolve_foreign_call")] fn resolve_foreign_call( &self, req: ResolveForeignCallRequest, - ) -> RpcResult>; + ) -> Result, ErrorObjectOwned>; } struct OracleResolverImpl; @@ -129,99 +155,129 @@ mod tests { } } - impl OracleResolver for OracleResolverImpl { + impl OracleResolverServer for OracleResolverImpl { fn resolve_foreign_call( &self, req: ResolveForeignCallRequest, - ) -> RpcResult> { + ) -> Result, ErrorObjectOwned> { let response = match req.function_call.function.as_str() { "sum" => self.sum(req.function_call.inputs[0].clone()), "echo" => self.echo(req.function_call.inputs[0].clone()), "id" => FieldElement::from(req.session_id as u128).into(), - _ => panic!("unexpected foreign call"), }; Ok(response) } } - fn build_oracle_server() -> (Server, String) { - let mut io = jsonrpc_core::IoHandler::new(); - io.extend_with(OracleResolverImpl.to_delegate()); + /// The test client send its request and a response channel. + type RPCForeignCallClientRequest = ( + ForeignCallWaitInfo, + oneshot::Sender>, + ); - // Choosing port 0 results in a random port being assigned. - let server = ServerBuilder::new(io) - .start_http(&"127.0.0.1:0".parse().expect("Invalid address")) - .expect("Could not start server"); + /// Async client used in the tests. + #[derive(Clone)] + struct RPCForeignCallClient { + tx: mpsc::UnboundedSender, + } + + impl RPCForeignCallExecutor { + /// Spawn and run the executor in the background until all clients are closed. + fn run(mut self) -> RPCForeignCallClient { + let (tx, mut rx) = mpsc::unbounded_channel::(); + let handle = tokio::task::spawn_blocking(move || { + while let Some((req, tx)) = rx.blocking_recv() { + let res = self.execute(&req); + let _ = tx.send(res); + } + }); + // The task will finish when the client goes out of scope. + drop(handle); + RPCForeignCallClient { tx } + } + } + + impl RPCForeignCallClient { + /// Asynchronously execute a foreign call. 
+ async fn execute( + &self, + req: &ForeignCallWaitInfo, + ) -> ResolveForeignCallResult { + let (tx, rx) = oneshot::channel(); + self.tx.send((req.clone(), tx)).expect("failed to send to executor"); + rx.await.expect("failed to receive from executor") + } + } - let url = format!("http://{}", server.address()); - (server, url) + /// Start running the Oracle server or a random port, returning the listen URL. + async fn build_oracle_server() -> std::io::Result { + // Choosing port 0 results in a random port being assigned. + let server = Server::builder().build("127.0.0.1:0").await?; + let addr = server.local_addr()?; + let handle = server.start(OracleResolverImpl.into_rpc()); + let url = format!("http://{}", addr); + // In this test we don't care about doing shutdown so let's it run forever. + tokio::spawn(handle.stopped()); + Ok(url) } - #[test] - fn test_oracle_resolver_echo() { - let (server, url) = build_oracle_server(); + #[tokio::test] + async fn test_oracle_resolver_echo() { + let url = build_oracle_server().await.unwrap(); - let mut executor = RPCForeignCallExecutor::new(&url, 1, None, None); + let executor = RPCForeignCallExecutor::new(&url, 1, None, None).run(); let foreign_call: ForeignCallWaitInfo = ForeignCallWaitInfo { function: "echo".to_string(), inputs: vec![ForeignCallParam::Single(1_u128.into())], }; - let result = executor.execute(&foreign_call); + let result = executor.execute(&foreign_call).await; assert_eq!(result.unwrap(), ForeignCallResult { values: foreign_call.inputs }); - - server.close(); } - #[test] - fn test_oracle_resolver_sum() { - let (server, url) = build_oracle_server(); + #[tokio::test] + async fn test_oracle_resolver_sum() { + let url = build_oracle_server().await.unwrap(); - let mut executor = RPCForeignCallExecutor::new(&url, 2, None, None); + let executor = RPCForeignCallExecutor::new(&url, 2, None, None).run(); let foreign_call: ForeignCallWaitInfo = ForeignCallWaitInfo { function: "sum".to_string(), inputs: vec![ForeignCallParam::Array(vec![1_usize.into(), 2_usize.into()])], }; - let result = executor.execute(&foreign_call); + let result = executor.execute(&foreign_call).await; assert_eq!(result.unwrap(), FieldElement::from(3_usize).into()); - - server.close(); } - #[test] - fn foreign_call_executor_id_is_persistent() { - let (server, url) = build_oracle_server(); + #[tokio::test] + async fn foreign_call_executor_id_is_persistent() { + let url = build_oracle_server().await.unwrap(); - let mut executor = RPCForeignCallExecutor::new(&url, 3, None, None); + let executor = RPCForeignCallExecutor::new(&url, 3, None, None).run(); let foreign_call: ForeignCallWaitInfo = ForeignCallWaitInfo { function: "id".to_string(), inputs: Vec::new() }; - let result_1 = executor.execute(&foreign_call).unwrap(); - let result_2 = executor.execute(&foreign_call).unwrap(); + let result_1 = executor.execute(&foreign_call).await.unwrap(); + let result_2 = executor.execute(&foreign_call).await.unwrap(); assert_eq!(result_1, result_2); - - server.close(); } - #[test] - fn oracle_resolver_rpc_can_distinguish_executors() { - let (server, url) = build_oracle_server(); + #[tokio::test] + async fn oracle_resolver_rpc_can_distinguish_executors() { + let url = build_oracle_server().await.unwrap(); - let mut executor_1 = RPCForeignCallExecutor::new(&url, 4, None, None); - let mut executor_2 = RPCForeignCallExecutor::new(&url, 5, None, None); + let executor_1 = RPCForeignCallExecutor::new(&url, 4, None, None).run(); + let executor_2 = RPCForeignCallExecutor::new(&url, 5, None, 
None).run(); let foreign_call: ForeignCallWaitInfo = ForeignCallWaitInfo { function: "id".to_string(), inputs: Vec::new() }; - let result_1 = executor_1.execute(&foreign_call).unwrap(); - let result_2 = executor_2.execute(&foreign_call).unwrap(); + let result_1 = executor_1.execute(&foreign_call).await.unwrap(); + let result_2 = executor_2.execute(&foreign_call).await.unwrap(); assert_ne!(result_1, result_2); - - server.close(); } } diff --git a/noir/noir-repo/tooling/nargo/src/lib.rs b/noir/noir-repo/tooling/nargo/src/lib.rs index c126b6f526c..de032ca55ae 100644 --- a/noir/noir-repo/tooling/nargo/src/lib.rs +++ b/noir/noir-repo/tooling/nargo/src/lib.rs @@ -14,6 +14,9 @@ pub mod ops; pub mod package; pub mod workspace; +pub use self::errors::NargoError; +pub use self::foreign_calls::print::PrintOutput; + use std::{ collections::{BTreeMap, HashMap, HashSet}, path::PathBuf, @@ -29,9 +32,6 @@ use package::{Dependency, Package}; use rayon::prelude::*; use walkdir::WalkDir; -pub use self::errors::NargoError; -pub use self::foreign_calls::print::PrintOutput; - pub fn prepare_dependencies( context: &mut Context, parent_crate: CrateId, diff --git a/noir/noir-repo/tooling/nargo/src/ops/mod.rs b/noir/noir-repo/tooling/nargo/src/ops/mod.rs index 04efeb5a9ec..7a52a829be3 100644 --- a/noir/noir-repo/tooling/nargo/src/ops/mod.rs +++ b/noir/noir-repo/tooling/nargo/src/ops/mod.rs @@ -3,10 +3,10 @@ pub use self::compile::{ collect_errors, compile_contract, compile_program, compile_program_with_debug_instrumenter, compile_workspace, report_errors, }; -pub use self::execute::{execute_program, execute_program_with_profiling}; pub use self::optimize::{optimize_contract, optimize_program}; pub use self::transform::{transform_contract, transform_program}; +pub use self::execute::{execute_program, execute_program_with_profiling}; pub use self::test::{run_test, TestStatus}; mod check; diff --git a/noir/noir-repo/tooling/nargo/src/ops/test.rs b/noir/noir-repo/tooling/nargo/src/ops/test.rs index 1306150518d..bfd5cd3713f 100644 --- a/noir/noir-repo/tooling/nargo/src/ops/test.rs +++ b/noir/noir-repo/tooling/nargo/src/ops/test.rs @@ -1,5 +1,3 @@ -use std::path::PathBuf; - use acvm::{ acir::{ brillig::ForeignCallResult, @@ -12,18 +10,10 @@ use noirc_abi::Abi; use noirc_driver::{compile_no_check, CompileError, CompileOptions, DEFAULT_EXPRESSION_WIDTH}; use noirc_errors::{debug_info::DebugInfo, FileDiagnostic}; use noirc_frontend::hir::{def_map::TestFunction, Context}; -use noirc_printable_type::ForeignCallError; -use rand::Rng; -use serde::{Deserialize, Serialize}; use crate::{ errors::try_to_diagnose_runtime_error, - foreign_calls::{ - mocker::MockForeignCallExecutor, - print::{PrintForeignCallExecutor, PrintOutput}, - rpc::RPCForeignCallExecutor, - ForeignCall, ForeignCallExecutor, - }, + foreign_calls::{layers, print::PrintOutput, ForeignCallError, ForeignCallExecutor}, NargoError, }; @@ -43,17 +33,19 @@ impl TestStatus { } } -#[allow(clippy::too_many_arguments)] -pub fn run_test>( +pub fn run_test<'a, B, F, E>( blackbox_solver: &B, context: &mut Context, test_function: &TestFunction, - output: PrintOutput<'_>, - foreign_call_resolver_url: Option<&str>, - root_path: Option, - package_name: Option, + output: PrintOutput<'a>, config: &CompileOptions, -) -> TestStatus { + build_foreign_call_executor: F, +) -> TestStatus +where + B: BlackBoxFunctionSolver, + F: Fn(PrintOutput<'a>, layers::Unhandled) -> E + 'a, + E: ForeignCallExecutor, +{ let test_function_has_no_arguments = context .def_interner 
.function_meta(&test_function.get_id()) @@ -70,12 +62,9 @@ pub fn run_test>( if test_function_has_no_arguments { // Run the backend to ensure the PWG evaluates functions like std::hash::pedersen, // otherwise constraints involving these expressions will not error. - let mut foreign_call_executor = TestForeignCallExecutor::new( - output, - foreign_call_resolver_url, - root_path, - package_name, - ); + // Use a base layer that doesn't handle anything, which we handle in the `execute` below. + let inner_executor = build_foreign_call_executor(output, layers::Unhandled); + let mut foreign_call_executor = TestForeignCallExecutor::new(inner_executor); let circuit_execution = execute_program( &compiled_program.program, @@ -133,16 +122,17 @@ pub fn run_test>( |program: &Program, initial_witness: WitnessMap| -> Result, String> { + // Use a base layer that doesn't handle anything, which we handle in the `execute` below. + let inner_executor = + build_foreign_call_executor(PrintOutput::None, layers::Unhandled); + let mut foreign_call_executor = + TestForeignCallExecutor::new(inner_executor); + let circuit_execution = execute_program( program, initial_witness, blackbox_solver, - &mut TestForeignCallExecutor::::new( - PrintOutput::None, - foreign_call_resolver_url, - root_path.clone(), - package_name.clone(), - ), + &mut foreign_call_executor, ); let status = test_status_program_compile_pass( @@ -278,38 +268,21 @@ fn check_expected_failure_message( } /// A specialized foreign call executor which tracks whether it has encountered any unknown foreign calls -struct TestForeignCallExecutor<'a, F> { - /// The executor for any [`ForeignCall::Print`] calls. - printer: PrintForeignCallExecutor<'a>, - mocker: MockForeignCallExecutor, - external: Option, - +struct TestForeignCallExecutor { + executor: E, encountered_unknown_foreign_call: bool, } -impl<'a, F: Default> TestForeignCallExecutor<'a, F> { - fn new( - output: PrintOutput<'a>, - resolver_url: Option<&str>, - root_path: Option, - package_name: Option, - ) -> Self { - let id = rand::thread_rng().gen(); - let printer = PrintForeignCallExecutor { output }; - let external_resolver = resolver_url.map(|resolver_url| { - RPCForeignCallExecutor::new(resolver_url, id, root_path, package_name) - }); - TestForeignCallExecutor { - printer, - mocker: MockForeignCallExecutor::default(), - external: external_resolver, - encountered_unknown_foreign_call: false, - } +impl TestForeignCallExecutor { + fn new(executor: E) -> Self { + Self { executor, encountered_unknown_foreign_call: false } } } -impl<'a, F: AcirField + Serialize + for<'b> Deserialize<'b>> ForeignCallExecutor - for TestForeignCallExecutor<'a, F> +impl ForeignCallExecutor for TestForeignCallExecutor +where + F: AcirField, + E: ForeignCallExecutor, { fn execute( &mut self, @@ -317,46 +290,14 @@ impl<'a, F: AcirField + Serialize + for<'b> Deserialize<'b>> ForeignCallExecutor ) -> Result, ForeignCallError> { // If the circuit has reached a new foreign call opcode then it can't have failed from any previous unknown foreign calls. 
self.encountered_unknown_foreign_call = false; - - let foreign_call_name = foreign_call.function.as_str(); - match ForeignCall::lookup(foreign_call_name) { - Some(ForeignCall::Print) => self.printer.execute(foreign_call), - - Some( - ForeignCall::CreateMock - | ForeignCall::SetMockParams - | ForeignCall::GetMockLastParams - | ForeignCall::SetMockReturns - | ForeignCall::SetMockTimes - | ForeignCall::ClearMock, - ) => self.mocker.execute(foreign_call), - - None => { - // First check if there's any defined mock responses for this foreign call. - match self.mocker.execute(foreign_call) { - Err(ForeignCallError::NoHandler(_)) => (), - response_or_error => return response_or_error, - }; - - if let Some(external_resolver) = &mut self.external { - // If the user has registered an external resolver then we forward any remaining oracle calls there. - match external_resolver.execute(foreign_call) { - Err(ForeignCallError::NoHandler(_)) => (), - response_or_error => return response_or_error, - }; - } - + match self.executor.execute(foreign_call) { + Err(ForeignCallError::NoHandler(_)) => { self.encountered_unknown_foreign_call = true; - - // If all executors have no handler for the given foreign call then we cannot - // return a correct response to the ACVM. The best we can do is to return an empty response, - // this allows us to ignore any foreign calls which exist solely to pass information from inside - // the circuit to the environment (e.g. custom logging) as the execution will still be able to progress. - // - // We optimistically return an empty response for all oracle calls as the ACVM will error - // should a response have been required. - Ok(ForeignCallResult::default()) + // If the inner executor cannot handle this foreign call, then it's very likely that this is a custom + // foreign call. We then return an empty response in case the foreign call doesn't need return values. + layers::Empty.execute(foreign_call) } + other => other, } } } diff --git a/noir/noir-repo/tooling/nargo_cli/Cargo.toml b/noir/noir-repo/tooling/nargo_cli/Cargo.toml index 5603b7f4fca..f7a98b4c278 100644 --- a/noir/noir-repo/tooling/nargo_cli/Cargo.toml +++ b/noir/noir-repo/tooling/nargo_cli/Cargo.toml @@ -27,7 +27,13 @@ clap.workspace = true fm.workspace = true fxhash.workspace = true iter-extended.workspace = true -nargo.workspace = true +# This is the only crate that really needs the RPC feature, +# but enabling it here implicitly enables it for the whole +# workspace. A crate could opt out using `path` dependency, +# but it's only `noir_wasm` which couldn't compile with it, +# and that is a different target, and for that the feature +# aren't unified with this one. 
+nargo = { workspace = true, features = ["rpc"] } nargo_fmt.workspace = true nargo_toml.workspace = true noir_lsp.workspace = true @@ -88,12 +94,10 @@ proptest.workspace = true sha2.workspace = true sha3.workspace = true iai = "0.1.1" -test-binary = "3.0.2" test-case.workspace = true lazy_static.workspace = true light-poseidon = "0.2.0" - [[bench]] name = "criterion" harness = false diff --git a/noir/noir-repo/tooling/nargo_cli/benches/criterion.rs b/noir/noir-repo/tooling/nargo_cli/benches/criterion.rs index 9bc50f87d8e..3d81ade65bc 100644 --- a/noir/noir-repo/tooling/nargo_cli/benches/criterion.rs +++ b/noir/noir-repo/tooling/nargo_cli/benches/criterion.rs @@ -3,7 +3,6 @@ use acvm::{acir::native_types::WitnessMap, FieldElement}; use assert_cmd::prelude::{CommandCargoExt, OutputAssertExt}; use criterion::{criterion_group, criterion_main, Criterion}; -use nargo::PrintOutput; use noirc_abi::{ input_parser::{Format, InputValue}, Abi, InputMap, @@ -116,7 +115,7 @@ fn criterion_test_execution(c: &mut Criterion, test_program_dir: &Path, force_br let artifacts = RefCell::new(None); let mut foreign_call_executor = - nargo::foreign_calls::DefaultForeignCallExecutor::new(PrintOutput::None, None, None, None); + nargo::foreign_calls::DefaultForeignCallBuilder::default().build(); c.bench_function(&benchmark_name, |b| { b.iter_batched( diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/info_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/info_cmd.rs index ee8ff32922e..6320cbbf350 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/info_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/info_cmd.rs @@ -3,8 +3,7 @@ use bn254_blackbox_solver::Bn254BlackBoxSolver; use clap::Args; use iter_extended::vecmap; use nargo::{ - constants::PROVER_INPUT_FILE, foreign_calls::DefaultForeignCallExecutor, package::Package, - PrintOutput, + constants::PROVER_INPUT_FILE, foreign_calls::DefaultForeignCallBuilder, package::Package, }; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml}; use noirc_abi::input_parser::Format; @@ -255,7 +254,7 @@ fn profile_brillig_execution( &program_artifact.bytecode, initial_witness, &Bn254BlackBoxSolver, - &mut DefaultForeignCallExecutor::new(PrintOutput::None, None, None, None), + &mut DefaultForeignCallBuilder::default().build(), )?; let expression_width = get_target_width(package.expression_width, expression_width); diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd.rs index f9736d0b79e..0aa31f36686 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd.rs @@ -14,8 +14,9 @@ use clap::Args; use fm::FileManager; use formatters::{Formatter, JsonFormatter, PrettyFormatter, TerseFormatter}; use nargo::{ - insert_all_files_for_workspace_into_file_manager, ops::TestStatus, package::Package, parse_all, - prepare_package, workspace::Workspace, PrintOutput, + foreign_calls::DefaultForeignCallBuilder, insert_all_files_for_workspace_into_file_manager, + ops::TestStatus, package::Package, parse_all, prepare_package, workspace::Workspace, + PrintOutput, }; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml}; use noirc_driver::{check_crate, CompileOptions, NOIR_ARTIFACT_VERSION_STRING}; @@ -507,10 +508,16 @@ impl<'a> TestRunner<'a> { &mut context, test_function, PrintOutput::String(&mut output_string), - foreign_call_resolver_url, - root_path, - Some(package_name), &self.args.compile_options, + |output, base| { + 
DefaultForeignCallBuilder { + output, + resolver_url: foreign_call_resolver_url.map(|s| s.to_string()), + root_path: root_path.clone(), + package_name: Some(package_name.clone()), + } + .build_with_base(base) + }, ); (test_status, output_string) } diff --git a/noir/noir-repo/tooling/nargo_cli/tests/execute.rs b/noir/noir-repo/tooling/nargo_cli/tests/execute.rs index 561520c57a9..77d77cfd902 100644 --- a/noir/noir-repo/tooling/nargo_cli/tests/execute.rs +++ b/noir/noir-repo/tooling/nargo_cli/tests/execute.rs @@ -12,8 +12,6 @@ mod tests { use super::*; - test_binary::build_test_binary_once!(mock_backend, "../backend_interface/test-binaries"); - // Utilities to keep the test matrix labels more intuitive. #[derive(Debug, Clone, Copy)] struct ForceBrillig(pub bool); diff --git a/noir/noir-repo/tooling/nargo_cli/tests/stdlib-props.rs b/noir/noir-repo/tooling/nargo_cli/tests/stdlib-props.rs index 9750eb823a6..8f08703ab04 100644 --- a/noir/noir-repo/tooling/nargo_cli/tests/stdlib-props.rs +++ b/noir/noir-repo/tooling/nargo_cli/tests/stdlib-props.rs @@ -2,9 +2,7 @@ use std::{cell::RefCell, collections::BTreeMap, path::Path}; use acvm::{acir::native_types::WitnessStack, AcirField, FieldElement}; use iter_extended::vecmap; -use nargo::{ - foreign_calls::DefaultForeignCallExecutor, ops::execute_program, parse_all, PrintOutput, -}; +use nargo::{foreign_calls::DefaultForeignCallBuilder, ops::execute_program, parse_all}; use noirc_abi::input_parser::InputValue; use noirc_driver::{ compile_main, file_manager_with_stdlib, prepare_crate, CompilationResult, CompileOptions, @@ -81,8 +79,7 @@ fn run_snippet_proptest( }; let blackbox_solver = bn254_blackbox_solver::Bn254BlackBoxSolver; - let foreign_call_executor = - RefCell::new(DefaultForeignCallExecutor::new(PrintOutput::None, None, None, None)); + let foreign_call_executor = RefCell::new(DefaultForeignCallBuilder::default().build()); // Generate multiple input/output proptest!(ProptestConfig::with_cases(100), |(io in strategy)| { diff --git a/noir/noir-repo/tooling/nargo_cli/tests/stdlib-tests.rs b/noir/noir-repo/tooling/nargo_cli/tests/stdlib-tests.rs index 29b871814b8..6aae94f6645 100644 --- a/noir/noir-repo/tooling/nargo_cli/tests/stdlib-tests.rs +++ b/noir/noir-repo/tooling/nargo_cli/tests/stdlib-tests.rs @@ -2,6 +2,7 @@ #![allow(clippy::items_after_test_module)] use clap::Parser; use fm::FileManager; +use nargo::foreign_calls::DefaultForeignCallBuilder; use nargo::PrintOutput; use noirc_driver::{check_crate, file_manager_with_stdlib, CompileOptions}; use noirc_frontend::hir::FunctionNameMatch; @@ -88,10 +89,10 @@ fn run_stdlib_tests(force_brillig: bool, inliner_aggressiveness: i64) { &mut context, &test_function, PrintOutput::Stdout, - None, - Some(dummy_package.root_dir.clone()), - Some(dummy_package.name.to_string()), &CompileOptions { force_brillig, inliner_aggressiveness, ..Default::default() }, + |output, base| { + DefaultForeignCallBuilder { output, ..Default::default() }.build_with_base(base) + }, ); (test_name, status) }) diff --git a/noir/noir-repo/tooling/nargo_fmt/src/formatter/function.rs b/noir/noir-repo/tooling/nargo_fmt/src/formatter/function.rs index 8207db5e486..ca905f3dcf8 100644 --- a/noir/noir-repo/tooling/nargo_fmt/src/formatter/function.rs +++ b/noir/noir-repo/tooling/nargo_fmt/src/formatter/function.rs @@ -19,10 +19,11 @@ pub(super) struct FunctionToFormat { pub(super) return_visibility: Visibility, pub(super) where_clause: Vec, pub(super) body: Option, + pub(super) skip_visibility: bool, } impl<'a> Formatter<'a> { - pub(super) 
fn format_function(&mut self, func: NoirFunction) { + pub(super) fn format_function(&mut self, func: NoirFunction, skip_visibility: bool) { self.format_function_impl(FunctionToFormat { attributes: func.def.attributes, visibility: func.def.visibility, @@ -33,6 +34,7 @@ impl<'a> Formatter<'a> { return_visibility: func.def.return_visibility, where_clause: func.def.where_clause, body: Some(func.def.body), + skip_visibility, }); } @@ -41,7 +43,7 @@ impl<'a> Formatter<'a> { self.format_attributes(func.attributes); self.write_indentation(); - self.format_function_modifiers(func.visibility); + self.format_function_modifiers(func.visibility, func.skip_visibility); self.write_keyword(Keyword::Fn); self.write_space(); self.write_identifier(func.name); @@ -94,7 +96,11 @@ impl<'a> Formatter<'a> { } } - pub(super) fn format_function_modifiers(&mut self, visibility: ItemVisibility) { + pub(super) fn format_function_modifiers( + &mut self, + visibility: ItemVisibility, + skip_visibility: bool, + ) { // For backwards compatibility, unconstrained might come before visibility. // We'll remember this but put it after the visibility. let unconstrained = if self.is_at_keyword(Keyword::Unconstrained) { @@ -105,7 +111,14 @@ impl<'a> Formatter<'a> { false }; - self.format_item_visibility(visibility); + if skip_visibility { + // The intention here is to format the visibility into a temporary buffer that is discarded + self.chunk_formatter().chunk(|formatter| { + formatter.format_item_visibility(visibility); + }); + } else { + self.format_item_visibility(visibility); + } if unconstrained { self.write("unconstrained "); diff --git a/noir/noir-repo/tooling/nargo_fmt/src/formatter/impls.rs b/noir/noir-repo/tooling/nargo_fmt/src/formatter/impls.rs index 1c2c25c9200..71548dd5efa 100644 --- a/noir/noir-repo/tooling/nargo_fmt/src/formatter/impls.rs +++ b/noir/noir-repo/tooling/nargo_fmt/src/formatter/impls.rs @@ -38,7 +38,9 @@ impl<'a> Formatter<'a> { if !doc_comments.is_empty() { self.format_outer_doc_comments(); } - self.format_function(method); + self.format_function( + method, false, // skip visibility + ); } self.skip_comments_and_whitespace(); diff --git a/noir/noir-repo/tooling/nargo_fmt/src/formatter/item.rs b/noir/noir-repo/tooling/nargo_fmt/src/formatter/item.rs index 521e476fe71..3365e52ec29 100644 --- a/noir/noir-repo/tooling/nargo_fmt/src/formatter/item.rs +++ b/noir/noir-repo/tooling/nargo_fmt/src/formatter/item.rs @@ -58,7 +58,10 @@ impl<'a> Formatter<'a> { ItemKind::Import(use_tree, item_visibility) => { self.format_import(use_tree, item_visibility); } - ItemKind::Function(noir_function) => self.format_function(noir_function), + ItemKind::Function(noir_function) => self.format_function( + noir_function, + false, // skip visibility + ), ItemKind::Struct(noir_struct) => self.format_struct(noir_struct), ItemKind::Trait(noir_trait) => self.format_trait(noir_trait), ItemKind::TraitImpl(noir_trait_impl) => self.format_trait_impl(noir_trait_impl), diff --git a/noir/noir-repo/tooling/nargo_fmt/src/formatter/trait_impl.rs b/noir/noir-repo/tooling/nargo_fmt/src/formatter/trait_impl.rs index b31da8a4101..5bb9a0d0025 100644 --- a/noir/noir-repo/tooling/nargo_fmt/src/formatter/trait_impl.rs +++ b/noir/noir-repo/tooling/nargo_fmt/src/formatter/trait_impl.rs @@ -1,8 +1,5 @@ use noirc_frontend::{ - ast::{ - FunctionDefinition, ItemVisibility, NoirFunction, NoirTraitImpl, Pattern, TraitImplItem, - TraitImplItemKind, - }, + ast::{NoirTraitImpl, Pattern, TraitImplItem, TraitImplItemKind}, token::{Keyword, Token}, }; @@ -69,12 
+66,10 @@ impl<'a> Formatter<'a> { fn format_trait_impl_item(&mut self, item: TraitImplItem) { match item.kind { TraitImplItemKind::Function(noir_function) => { - // Trait impl functions are public, but there's no `pub` keyword in the source code, - // so to format it we pass a private one. - let def = - FunctionDefinition { visibility: ItemVisibility::Private, ..noir_function.def }; - let noir_function = NoirFunction { def, ..noir_function }; - self.format_function(noir_function); + self.format_function( + noir_function, + true, // skip visibility + ); } TraitImplItemKind::Constant(name, typ, value) => { let pattern = Pattern::Identifier(name); @@ -179,6 +174,22 @@ fn foo ( ) { } assert_format(src, expected); } + #[test] + fn format_trait_impl_function_with_visibility() { + let src = " mod moo { impl Foo for Bar { + /// Some doc comment +pub fn foo ( ) { } + } }"; + let expected = "mod moo { + impl Foo for Bar { + /// Some doc comment + fn foo() {} + } +} +"; + assert_format(src, expected); + } + #[test] fn format_trait_impl_constant_without_type() { let src = " mod moo { impl Foo for Bar { diff --git a/noir/noir-repo/tooling/nargo_fmt/src/formatter/traits.rs b/noir/noir-repo/tooling/nargo_fmt/src/formatter/traits.rs index 1f192be471e..175dcad6170 100644 --- a/noir/noir-repo/tooling/nargo_fmt/src/formatter/traits.rs +++ b/noir/noir-repo/tooling/nargo_fmt/src/formatter/traits.rs @@ -113,6 +113,7 @@ impl<'a> Formatter<'a> { return_visibility: Visibility::Private, where_clause, body, + skip_visibility: true, }; self.format_function_impl(func); } @@ -236,12 +237,12 @@ mod tests { fn format_trait_with_function_without_body() { let src = " mod moo { trait Foo { /// hello - pub fn foo ( ); + fn foo ( ); } }"; let expected = "mod moo { trait Foo { /// hello - pub fn foo(); + fn foo(); } } "; @@ -252,12 +253,12 @@ mod tests { fn format_trait_with_function_with_body() { let src = " mod moo { trait Foo { /// hello - pub fn foo ( ) { 1 } + fn foo ( ) { 1 } } }"; let expected = "mod moo { trait Foo { /// hello - pub fn foo() { + fn foo() { 1 } } @@ -270,12 +271,12 @@ mod tests { fn format_trait_with_function_with_params() { let src = " mod moo { trait Foo { /// hello - pub fn foo ( x : i32 , y : Field ); + fn foo ( x : i32 , y : Field ); } }"; let expected = "mod moo { trait Foo { /// hello - pub fn foo(x: i32, y: Field); + fn foo(x: i32, y: Field); } } "; @@ -298,6 +299,24 @@ mod tests { assert_format(src, expected); } + #[test] + fn format_trait_with_function_with_visibility() { + let src = " mod moo { trait Foo { + /// hello + pub fn foo ( ) { 1 } + } }"; + let expected = "mod moo { + trait Foo { + /// hello + fn foo() { + 1 + } + } +} +"; + assert_format(src, expected); + } + #[test] fn format_multiple_traits() { let src = " trait Foo {} diff --git a/noir/noir-repo/tooling/nargo_fmt/src/lib.rs b/noir/noir-repo/tooling/nargo_fmt/src/lib.rs index eda77e78c7c..2b55b86e975 100644 --- a/noir/noir-repo/tooling/nargo_fmt/src/lib.rs +++ b/noir/noir-repo/tooling/nargo_fmt/src/lib.rs @@ -66,6 +66,7 @@ pub(crate) fn assert_format_with_config(src: &str, expected: &str, config: Confi use noirc_frontend::parser; let (parsed_module, errors) = parser::parse_program(src); + let errors: Vec<_> = errors.into_iter().filter(|error| !error.is_warning()).collect(); if !errors.is_empty() { panic!("Expected no errors, got: {:?}", errors); } diff --git a/noir/noir-repo/tooling/noirc_abi/src/lib.rs b/noir/noir-repo/tooling/noirc_abi/src/lib.rs index bd5674d64f1..5f5f3748bc4 100644 --- 
a/noir/noir-repo/tooling/noirc_abi/src/lib.rs +++ b/noir/noir-repo/tooling/noirc_abi/src/lib.rs @@ -13,10 +13,7 @@ use acvm::{ use errors::AbiError; use input_parser::InputValue; use iter_extended::{try_btree_map, try_vecmap, vecmap}; -use noirc_printable_type::{ - decode_value as printable_type_decode_value, PrintableType, PrintableValue, - PrintableValueDisplay, -}; +use noirc_printable_type::{PrintableType, PrintableValue, PrintableValueDisplay}; use serde::{Deserialize, Serialize}; use std::borrow::Borrow; use std::{collections::BTreeMap, str}; @@ -30,8 +27,11 @@ mod arbitrary; pub mod errors; pub mod input_parser; +mod printable_type; mod serialization; +pub use printable_type::decode_value as decode_printable_value; + /// A map from the fields in an TOML/JSON file which correspond to some ABI to their values pub type InputMap = BTreeMap; @@ -417,7 +417,7 @@ pub fn decode_value( Ok(value) } -fn decode_string_value(field_elements: &[FieldElement]) -> String { +pub fn decode_string_value(field_elements: &[F]) -> String { let string_as_slice = vecmap(field_elements, |e| { let mut field_as_bytes = e.to_be_bytes(); let char_byte = field_as_bytes.pop().unwrap(); // A character in a string is represented by a u8, thus we just want the last byte of the element @@ -476,21 +476,21 @@ pub fn display_abi_error( AbiErrorType::FmtString { length, item_types } => { let mut fields_iter = fields.iter().copied(); let PrintableValue::String(string) = - printable_type_decode_value(&mut fields_iter, &PrintableType::String { length }) + decode_printable_value(&mut fields_iter, &PrintableType::String { length }) else { unreachable!("Got non-string from string decoding"); }; let _length_of_items = fields_iter.next(); let items = item_types.into_iter().map(|abi_type| { let printable_typ = (&abi_type).into(); - let decoded = printable_type_decode_value(&mut fields_iter, &printable_typ); + let decoded = decode_printable_value(&mut fields_iter, &printable_typ); (decoded, printable_typ) }); PrintableValueDisplay::FmtString(string, items.collect()) } AbiErrorType::Custom(abi_typ) => { let printable_type = (&abi_typ).into(); - let decoded = printable_type_decode_value(&mut fields.iter().copied(), &printable_type); + let decoded = decode_printable_value(&mut fields.iter().copied(), &printable_type); PrintableValueDisplay::Plain(decoded, printable_type) } AbiErrorType::String { string } => { diff --git a/noir/noir-repo/tooling/noirc_abi/src/printable_type.rs b/noir/noir-repo/tooling/noirc_abi/src/printable_type.rs new file mode 100644 index 00000000000..a81eb0ce8f6 --- /dev/null +++ b/noir/noir-repo/tooling/noirc_abi/src/printable_type.rs @@ -0,0 +1,78 @@ +use std::collections::BTreeMap; + +use acvm::acir::AcirField; +use iter_extended::vecmap; + +use noirc_printable_type::{PrintableType, PrintableValue}; + +use crate::decode_string_value; + +/// Assumes that `field_iterator` contains enough field elements in order to decode the [PrintableType] +pub fn decode_value( + field_iterator: &mut impl Iterator, + typ: &PrintableType, +) -> PrintableValue { + match typ { + PrintableType::Field + | PrintableType::SignedInteger { .. } + | PrintableType::UnsignedInteger { .. 
} + | PrintableType::Boolean => { + let field_element = field_iterator.next().unwrap(); + + PrintableValue::Field(field_element) + } + PrintableType::Array { length, typ } => { + let length = *length as usize; + let mut array_elements = Vec::with_capacity(length); + for _ in 0..length { + array_elements.push(decode_value(field_iterator, typ)); + } + + PrintableValue::Vec { array_elements, is_slice: false } + } + PrintableType::Slice { typ } => { + let length = field_iterator + .next() + .expect("not enough data to decode variable array length") + .to_u128() as usize; + let mut array_elements = Vec::with_capacity(length); + for _ in 0..length { + array_elements.push(decode_value(field_iterator, typ)); + } + + PrintableValue::Vec { array_elements, is_slice: true } + } + PrintableType::Tuple { types } => PrintableValue::Vec { + array_elements: vecmap(types, |typ| decode_value(field_iterator, typ)), + is_slice: false, + }, + PrintableType::String { length } => { + let field_elements: Vec = field_iterator.take(*length as usize).collect(); + + PrintableValue::String(decode_string_value(&field_elements)) + } + PrintableType::Struct { fields, .. } => { + let mut struct_map = BTreeMap::new(); + + for (field_key, param_type) in fields { + let field_value = decode_value(field_iterator, param_type); + + struct_map.insert(field_key.to_owned(), field_value); + } + + PrintableValue::Struct(struct_map) + } + PrintableType::Function { env, .. } => { + let field_element = field_iterator.next().unwrap(); + let func_ref = PrintableValue::Field(field_element); + // we want to consume the fields from the environment, but for now they are not actually printed + decode_value(field_iterator, env); + func_ref + } + PrintableType::MutableReference { typ } => { + // we decode the reference, but it's not really used for printing + decode_value(field_iterator, typ) + } + PrintableType::Unit => PrintableValue::Field(F::zero()), + } +} diff --git a/noir/noir-repo/tooling/noirc_artifacts/src/debug_vars.rs b/noir/noir-repo/tooling/noirc_artifacts/src/debug_vars.rs index aa9328432b8..8efeeebb3aa 100644 --- a/noir/noir-repo/tooling/noirc_artifacts/src/debug_vars.rs +++ b/noir/noir-repo/tooling/noirc_artifacts/src/debug_vars.rs @@ -1,8 +1,9 @@ use acvm::AcirField; +use noirc_abi::decode_printable_value; use noirc_errors::debug_info::{ DebugFnId, DebugFunction, DebugInfo, DebugTypeId, DebugVarId, DebugVariable, }; -use noirc_printable_type::{decode_value, PrintableType, PrintableValue}; +use noirc_printable_type::{PrintableType, PrintableValue}; use std::collections::HashMap; #[derive(Debug, Default, Clone)] @@ -45,7 +46,7 @@ impl DebugVars { &'a self, fn_id: &DebugFnId, frame: &'a HashMap>, - ) -> StackFrame { + ) -> StackFrame<'a, F> { let debug_fn = &self.functions.get(fn_id).expect("failed to find function metadata"); let params: Vec<&str> = @@ -72,7 +73,7 @@ impl DebugVars { .last_mut() .expect("unexpected empty stack frames") .1 - .insert(var_id, decode_value(&mut values.iter().copied(), ptype)); + .insert(var_id, decode_printable_value(&mut values.iter().copied(), ptype)); } pub fn assign_field(&mut self, var_id: DebugVarId, indexes: Vec, values: &[F]) { @@ -143,7 +144,7 @@ impl DebugVars { } }; } - *cursor = decode_value(&mut values.iter().copied(), cursor_type); + *cursor = decode_printable_value(&mut values.iter().copied(), cursor_type); } pub fn assign_deref(&mut self, _var_id: DebugVarId, _values: &[F]) { diff --git a/noir/noir-repo/tooling/profiler/src/cli/execution_flamegraph_cmd.rs 
b/noir/noir-repo/tooling/profiler/src/cli/execution_flamegraph_cmd.rs index 76b23ebf739..bab15529744 100644 --- a/noir/noir-repo/tooling/profiler/src/cli/execution_flamegraph_cmd.rs +++ b/noir/noir-repo/tooling/profiler/src/cli/execution_flamegraph_cmd.rs @@ -3,13 +3,13 @@ use std::path::{Path, PathBuf}; use acir::circuit::OpcodeLocation; use clap::Args; use color_eyre::eyre::{self, Context}; +use nargo::foreign_calls::DefaultForeignCallBuilder; use nargo::PrintOutput; use crate::flamegraph::{BrilligExecutionSample, FlamegraphGenerator, InfernoFlamegraphGenerator}; use crate::fs::{read_inputs_from_file, read_program_from_file}; use crate::opcode_formatter::format_brillig_opcode; use bn254_blackbox_solver::Bn254BlackBoxSolver; -use nargo::foreign_calls::DefaultForeignCallExecutor; use noirc_abi::input_parser::Format; use noirc_artifacts::debug::DebugArtifact; @@ -55,7 +55,7 @@ fn run_with_generator( &program.bytecode, initial_witness, &Bn254BlackBoxSolver, - &mut DefaultForeignCallExecutor::new(PrintOutput::Stdout, None, None, None), + &mut DefaultForeignCallBuilder::default().with_output(PrintOutput::Stdout).build(), )?; println!("Executed"); From 8a6de5b7bece0b7b18d3b1909d1d51d6bd2765b7 Mon Sep 17 00:00:00 2001 From: Aztec Bot <49558828+AztecBot@users.noreply.github.com> Date: Fri, 3 Jan 2025 05:21:37 -0500 Subject: [PATCH 03/20] chore: redo typo PR by panditdhamdhere (#11026) Thanks panditdhamdhere for https://github.com/AztecProtocol/aztec-packages/pull/11025. Our policy is to redo typo changes to dissuade metric farming. This is an automated script. --- .../aztec/smart_contracts/functions/function_transforms.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/docs/aztec/smart_contracts/functions/function_transforms.md b/docs/docs/aztec/smart_contracts/functions/function_transforms.md index a66761f872a..2254bb44a31 100644 --- a/docs/docs/aztec/smart_contracts/functions/function_transforms.md +++ b/docs/docs/aztec/smart_contracts/functions/function_transforms.md @@ -192,7 +192,7 @@ fn compute_fn_signature_hash(fn_name: &str, parameters: &[Type]) -> u32 { - A string representation of the function is created, including the function name and parameter types - This signature string is then hashed using Keccak-256 -- The first 4 bytes of the resulting hash are coverted to a u32 integer +- The first 4 bytes of the resulting hash are converted to a u32 integer ### Integration into contract interface @@ -274,4 +274,4 @@ Contract artifacts are important because: - They help decode function return values in the simulator ## Further reading -- [Function attributes and macros](./attributes.md) \ No newline at end of file +- [Function attributes and macros](./attributes.md) From ad70e9af5023e1e9819b2e7e0dcba32745fb0e99 Mon Sep 17 00:00:00 2001 From: Aztec Bot <49558828+AztecBot@users.noreply.github.com> Date: Fri, 3 Jan 2025 06:16:10 -0500 Subject: [PATCH 04/20] chore(master): Release 0.69.0 (#10956) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit :robot: I have created a release *beep* *boop* ---
aztec-package: 0.69.0

## [0.69.0](https://github.com/AztecProtocol/aztec-packages/compare/aztec-package-v0.68.2...aztec-package-v0.69.0) (2025-01-03)

### Miscellaneous

* Add version number when starting sandbox ([#10935](https://github.com/AztecProtocol/aztec-packages/issues/10935)) ([c8dcd8f](https://github.com/AztecProtocol/aztec-packages/commit/c8dcd8f3e50e6447e1e7a09d768b3aff5f17044b))
* Cl/ci3.2 ([#10919](https://github.com/AztecProtocol/aztec-packages/issues/10919)) ([49dacc3](https://github.com/AztecProtocol/aztec-packages/commit/49dacc3378a339f8cc36971b630c52952249f60c))
barretenberg.js: 0.69.0

## [0.69.0](https://github.com/AztecProtocol/aztec-packages/compare/barretenberg.js-v0.68.2...barretenberg.js-v0.69.0) (2025-01-03)

### Miscellaneous

* Cl/ci3.2 ([#10919](https://github.com/AztecProtocol/aztec-packages/issues/10919)) ([49dacc3](https://github.com/AztecProtocol/aztec-packages/commit/49dacc3378a339f8cc36971b630c52952249f60c))
aztec-packages: 0.69.0 ## [0.69.0](https://github.com/AztecProtocol/aztec-packages/compare/aztec-packages-v0.68.2...aztec-packages-v0.69.0) (2025-01-03) ### âš  BREAKING CHANGES * Switch to using `jsonrpsee` for foreign calls; refactor `run_test`; foreign call layering (https://github.com/noir-lang/noir/pull/6849) ### Features * **docs:** Algolia -> typesense ([#9698](https://github.com/AztecProtocol/aztec-packages/issues/9698)) ([e082063](https://github.com/AztecProtocol/aztec-packages/commit/e0820636bce47d9e8ec3f8c20358d9d4cae0041e)) * Encapsulated UltraHonk Vanilla IVC ([#10900](https://github.com/AztecProtocol/aztec-packages/issues/10900)) ([fd5f611](https://github.com/AztecProtocol/aztec-packages/commit/fd5f611aca60c9c906a6440fdb5683794a183d53)) * **LSP:** Suggest trait methods from where clauses (https://github.com/noir-lang/noir/pull/6915) ([dc12c2b](https://github.com/AztecProtocol/aztec-packages/commit/dc12c2b678e0c450c05cbd4748296e17ae73860b)) * **ssa:** Hoist add and mul binary ops using known induction variables (https://github.com/noir-lang/noir/pull/6910) ([dc12c2b](https://github.com/AztecProtocol/aztec-packages/commit/dc12c2b678e0c450c05cbd4748296e17ae73860b)) * Switch to using `jsonrpsee` for foreign calls; refactor `run_test`; foreign call layering (https://github.com/noir-lang/noir/pull/6849) ([dc12c2b](https://github.com/AztecProtocol/aztec-packages/commit/dc12c2b678e0c450c05cbd4748296e17ae73860b)) * Use full IPA recursive verifier in root rollup ([#10962](https://github.com/AztecProtocol/aztec-packages/issues/10962)) ([37095ce](https://github.com/AztecProtocol/aztec-packages/commit/37095ceba560ad66516467387d186b5afd19a6e0)) * Warn on trait method visibility (https://github.com/noir-lang/noir/pull/6923) ([dc12c2b](https://github.com/AztecProtocol/aztec-packages/commit/dc12c2b678e0c450c05cbd4748296e17ae73860b)) ### Bug Fixes * Bigint builtins are foreigns (https://github.com/noir-lang/noir/pull/6892) ([2d3805a](https://github.com/AztecProtocol/aztec-packages/commit/2d3805a3b682b27bf6275c547b4b3d68d214eebe)) * **ci:** Acir bench ([#11021](https://github.com/AztecProtocol/aztec-packages/issues/11021)) ([9eaa109](https://github.com/AztecProtocol/aztec-packages/commit/9eaa10983b26616876099896accb0e3093ae8d20)) * Consistent file_id across installation paths (https://github.com/noir-lang/noir/pull/6912) ([2d3805a](https://github.com/AztecProtocol/aztec-packages/commit/2d3805a3b682b27bf6275c547b4b3d68d214eebe)) * Remove unnecessary cast in bit-shift (https://github.com/noir-lang/noir/pull/6890) ([2d3805a](https://github.com/AztecProtocol/aztec-packages/commit/2d3805a3b682b27bf6275c547b4b3d68d214eebe)) * Revert "feat(docs): algolia -> typesense" also fix boxes-test ([#11016](https://github.com/AztecProtocol/aztec-packages/issues/11016)) ([a1a4d76](https://github.com/AztecProtocol/aztec-packages/commit/a1a4d760b950ab563fd51b0c73dc0e06dcfe7fed)) * Small fixes for sepolia deployments ([#10915](https://github.com/AztecProtocol/aztec-packages/issues/10915)) ([37d69bf](https://github.com/AztecProtocol/aztec-packages/commit/37d69bf5601b8f2c7e54f5b69b0376c4eddf10c1)) * Update prompted foundry install command ([#10995](https://github.com/AztecProtocol/aztec-packages/issues/10995)) ([cd59f2e](https://github.com/AztecProtocol/aztec-packages/commit/cd59f2ef651b38da642009aa2a00c3fe710aa8e6)) * Use explicit read transactions ([#10911](https://github.com/AztecProtocol/aztec-packages/issues/10911)) 
([2a8e01c](https://github.com/AztecProtocol/aztec-packages/commit/2a8e01cc9f9ca77e75991bd584d6752e120c9db6)) ### Miscellaneous * Add `Instruction::Noop` (https://github.com/noir-lang/noir/pull/6899) ([2d3805a](https://github.com/AztecProtocol/aztec-packages/commit/2d3805a3b682b27bf6275c547b4b3d68d214eebe)) * Add `rollup_root` and `rollup_block_merge` to tracked protocol circuits (https://github.com/noir-lang/noir/pull/6903) ([2d3805a](https://github.com/AztecProtocol/aztec-packages/commit/2d3805a3b682b27bf6275c547b4b3d68d214eebe)) * Add if/loop tip (separate from no-predicate [#5657](https://github.com/AztecProtocol/aztec-packages/issues/5657)) (https://github.com/noir-lang/noir/pull/6806) ([dc12c2b](https://github.com/AztecProtocol/aztec-packages/commit/dc12c2b678e0c450c05cbd4748296e17ae73860b)) * Add rollup circuits to memory reports (https://github.com/noir-lang/noir/pull/6897) ([dc12c2b](https://github.com/AztecProtocol/aztec-packages/commit/dc12c2b678e0c450c05cbd4748296e17ae73860b)) * Add spypsy to nightly canary ([#10961](https://github.com/AztecProtocol/aztec-packages/issues/10961)) ([4dca8f5](https://github.com/AztecProtocol/aztec-packages/commit/4dca8f53b36206bcc34e9e502f7cd3674fb8e9c4)) * Add version number when starting sandbox ([#10935](https://github.com/AztecProtocol/aztec-packages/issues/10935)) ([c8dcd8f](https://github.com/AztecProtocol/aztec-packages/commit/c8dcd8f3e50e6447e1e7a09d768b3aff5f17044b)) * Bump rc1 tps ([#11012](https://github.com/AztecProtocol/aztec-packages/issues/11012)) ([52176f7](https://github.com/AztecProtocol/aztec-packages/commit/52176f7386f100c894dac40abd3c3ffcf4f6b3b1)) * **ci:** Memory reports for execution (https://github.com/noir-lang/noir/pull/6907) ([dc12c2b](https://github.com/AztecProtocol/aztec-packages/commit/dc12c2b678e0c450c05cbd4748296e17ae73860b)) * Cl/ci3.2 ([#10919](https://github.com/AztecProtocol/aztec-packages/issues/10919)) ([49dacc3](https://github.com/AztecProtocol/aztec-packages/commit/49dacc3378a339f8cc36971b630c52952249f60c)) * Clean up translator circuit builder function definitions ([#10944](https://github.com/AztecProtocol/aztec-packages/issues/10944)) ([f6fef05](https://github.com/AztecProtocol/aztec-packages/commit/f6fef05119af7714d60f00c52455e52bdfa98288)) * Disable broken honk test ([#11010](https://github.com/AztecProtocol/aztec-packages/issues/11010)) ([8ad239a](https://github.com/AztecProtocol/aztec-packages/commit/8ad239a7cddcde8df610e9c0287681fc12cca306)) * Disable tt test ([#10999](https://github.com/AztecProtocol/aztec-packages/issues/10999)) ([d9d64c3](https://github.com/AztecProtocol/aztec-packages/commit/d9d64c39a09774110ef6419831c8e5ca0e322ed1)) * Document format strings (https://github.com/noir-lang/noir/pull/6920) ([dc12c2b](https://github.com/AztecProtocol/aztec-packages/commit/dc12c2b678e0c450c05cbd4748296e17ae73860b)) * Fix flake in e2e-block-build ([#11002](https://github.com/AztecProtocol/aztec-packages/issues/11002)) ([2a2932e](https://github.com/AztecProtocol/aztec-packages/commit/2a2932e1b59462560f0be728469a7977fbb41c6f)) * Fix mac build ([#10963](https://github.com/AztecProtocol/aztec-packages/issues/10963)) ([158afc4](https://github.com/AztecProtocol/aztec-packages/commit/158afc4cd34a9fc9cb41bcb083b5197eae1ce442)) * Fix warning (https://github.com/noir-lang/noir/pull/6927) ([dc12c2b](https://github.com/AztecProtocol/aztec-packages/commit/dc12c2b678e0c450c05cbd4748296e17ae73860b)) * Fix warnings in avm transpiler ([#11001](https://github.com/AztecProtocol/aztec-packages/issues/11001)) 
([07c5b7f](https://github.com/AztecProtocol/aztec-packages/commit/07c5b7fb336a6df88c23693118bb8e6fa07423dd)) * Move implementation of print foreign call into `nargo` (https://github.com/noir-lang/noir/pull/6865) ([dc12c2b](https://github.com/AztecProtocol/aztec-packages/commit/dc12c2b678e0c450c05cbd4748296e17ae73860b)) * Prover.tomls fix ([#11011](https://github.com/AztecProtocol/aztec-packages/issues/11011)) ([565a67b](https://github.com/AztecProtocol/aztec-packages/commit/565a67b94ca88e1a632c616ea3f7079dd081c627)) * Redo typo PR by Anon-im ([#11009](https://github.com/AztecProtocol/aztec-packages/issues/11009)) ([2044c58](https://github.com/AztecProtocol/aztec-packages/commit/2044c58387b5687658f190cf1b4a078a036eabc0)) * Redo typo PR by Hack666r ([#10992](https://github.com/AztecProtocol/aztec-packages/issues/10992)) ([018f11e](https://github.com/AztecProtocol/aztec-packages/commit/018f11e39266423376b3a56afbc8aaf54b4de31d)) * Redo typo PR by MonkeyKing44 ([#10996](https://github.com/AztecProtocol/aztec-packages/issues/10996)) ([faca458](https://github.com/AztecProtocol/aztec-packages/commit/faca458adda3139e92dcb2709f2c087c85842dd8)) * Redo typo PR by panditdhamdhere ([#11026](https://github.com/AztecProtocol/aztec-packages/issues/11026)) ([8a6de5b](https://github.com/AztecProtocol/aztec-packages/commit/8a6de5b7bece0b7b18d3b1909d1d51d6bd2765b7)) * Redo typo PR by petryshkaCODE ([#10993](https://github.com/AztecProtocol/aztec-packages/issues/10993)) ([0c6a4be](https://github.com/AztecProtocol/aztec-packages/commit/0c6a4bee82c62a522f69756f0d233ec637cd1a7a)) * Redo typo PR by VitalikBerashvili ([#10994](https://github.com/AztecProtocol/aztec-packages/issues/10994)) ([da36da4](https://github.com/AztecProtocol/aztec-packages/commit/da36da48560d3610b2d9abf1a56c47d1b28cf9a1)) * Redo typo PR by whitetechna ([#10997](https://github.com/AztecProtocol/aztec-packages/issues/10997)) ([89a2bd7](https://github.com/AztecProtocol/aztec-packages/commit/89a2bd7fa403ed0ba5472144b7c13d3ab7ab930b)) * Release Noir(1.0.0-beta.1) (https://github.com/noir-lang/noir/pull/6622) ([2d3805a](https://github.com/AztecProtocol/aztec-packages/commit/2d3805a3b682b27bf6275c547b4b3d68d214eebe)) * Remove unused dependency (https://github.com/noir-lang/noir/pull/6922) ([dc12c2b](https://github.com/AztecProtocol/aztec-packages/commit/dc12c2b678e0c450c05cbd4748296e17ae73860b)) * Replace relative paths to noir-protocol-circuits ([6b34449](https://github.com/AztecProtocol/aztec-packages/commit/6b344493a8e0c2e4d9ac67f037f9202dfe38c83c)) * Replace relative paths to noir-protocol-circuits ([11f8a42](https://github.com/AztecProtocol/aztec-packages/commit/11f8a42c1503386c2323c5305c1058853ac05711)) * Use ssa parser in flattening pass tests (https://github.com/noir-lang/noir/pull/6868) ([dc12c2b](https://github.com/AztecProtocol/aztec-packages/commit/dc12c2b678e0c450c05cbd4748296e17ae73860b)) ### Documentation * Remove links to (outdated) protocol specs ([#10831](https://github.com/AztecProtocol/aztec-packages/issues/10831)) ([4874d95](https://github.com/AztecProtocol/aztec-packages/commit/4874d95a7fd7103178820724a637479bea39fe0a))
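Context for the `jsonrpsee` breaking change listed above: foreign (oracle) calls are resolved over JSON-RPC 2.0, so an external resolver is just an HTTP endpoint that answers requests carrying the standard envelope. A minimal sketch follows; the port, method name, and params shape are illustrative assumptions, not the actual Noir interface.

```bash
# Hypothetical JSON-RPC 2.0 round trip against a local foreign-call resolver.
# Only the {jsonrpc, id, method, params} envelope is standard; the URL, method
# name, and params shape here are placeholders.
curl -s -X POST http://127.0.0.1:5555 \
  -H 'Content-Type: application/json' \
  -d '{"jsonrpc":"2.0","id":1,"method":"resolve_foreign_call","params":[{"function":"print","inputs":[]}]}'
```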
barretenberg: 0.69.0 ## [0.69.0](https://github.com/AztecProtocol/aztec-packages/compare/barretenberg-v0.68.2...barretenberg-v0.69.0) (2025-01-03) ### Features * Encapsulated UltraHonk Vanilla IVC ([#10900](https://github.com/AztecProtocol/aztec-packages/issues/10900)) ([fd5f611](https://github.com/AztecProtocol/aztec-packages/commit/fd5f611aca60c9c906a6440fdb5683794a183d53)) * Use full IPA recursive verifier in root rollup ([#10962](https://github.com/AztecProtocol/aztec-packages/issues/10962)) ([37095ce](https://github.com/AztecProtocol/aztec-packages/commit/37095ceba560ad66516467387d186b5afd19a6e0)) ### Bug Fixes * Bigint builtins are foreigns (https://github.com/noir-lang/noir/pull/6892) ([2d3805a](https://github.com/AztecProtocol/aztec-packages/commit/2d3805a3b682b27bf6275c547b4b3d68d214eebe)) * **ci:** Acir bench ([#11021](https://github.com/AztecProtocol/aztec-packages/issues/11021)) ([9eaa109](https://github.com/AztecProtocol/aztec-packages/commit/9eaa10983b26616876099896accb0e3093ae8d20)) * Consistent file_id across installation paths (https://github.com/noir-lang/noir/pull/6912) ([2d3805a](https://github.com/AztecProtocol/aztec-packages/commit/2d3805a3b682b27bf6275c547b4b3d68d214eebe)) * Remove unnecessary cast in bit-shift (https://github.com/noir-lang/noir/pull/6890) ([2d3805a](https://github.com/AztecProtocol/aztec-packages/commit/2d3805a3b682b27bf6275c547b4b3d68d214eebe)) ### Miscellaneous * Add `Instruction::Noop` (https://github.com/noir-lang/noir/pull/6899) ([2d3805a](https://github.com/AztecProtocol/aztec-packages/commit/2d3805a3b682b27bf6275c547b4b3d68d214eebe)) * Add `rollup_root` and `rollup_block_merge` to tracked protocol circuits (https://github.com/noir-lang/noir/pull/6903) ([2d3805a](https://github.com/AztecProtocol/aztec-packages/commit/2d3805a3b682b27bf6275c547b4b3d68d214eebe)) * Cl/ci3.2 ([#10919](https://github.com/AztecProtocol/aztec-packages/issues/10919)) ([49dacc3](https://github.com/AztecProtocol/aztec-packages/commit/49dacc3378a339f8cc36971b630c52952249f60c)) * Clean up translator circuit builder function definitions ([#10944](https://github.com/AztecProtocol/aztec-packages/issues/10944)) ([f6fef05](https://github.com/AztecProtocol/aztec-packages/commit/f6fef05119af7714d60f00c52455e52bdfa98288)) * Disable broken honk test ([#11010](https://github.com/AztecProtocol/aztec-packages/issues/11010)) ([8ad239a](https://github.com/AztecProtocol/aztec-packages/commit/8ad239a7cddcde8df610e9c0287681fc12cca306)) * Fix mac build ([#10963](https://github.com/AztecProtocol/aztec-packages/issues/10963)) ([158afc4](https://github.com/AztecProtocol/aztec-packages/commit/158afc4cd34a9fc9cb41bcb083b5197eae1ce442)) * Redo typo PR by Anon-im ([#11009](https://github.com/AztecProtocol/aztec-packages/issues/11009)) ([2044c58](https://github.com/AztecProtocol/aztec-packages/commit/2044c58387b5687658f190cf1b4a078a036eabc0)) * Redo typo PR by Hack666r ([#10992](https://github.com/AztecProtocol/aztec-packages/issues/10992)) ([018f11e](https://github.com/AztecProtocol/aztec-packages/commit/018f11e39266423376b3a56afbc8aaf54b4de31d)) * Redo typo PR by MonkeyKing44 ([#10996](https://github.com/AztecProtocol/aztec-packages/issues/10996)) ([faca458](https://github.com/AztecProtocol/aztec-packages/commit/faca458adda3139e92dcb2709f2c087c85842dd8)) * Redo typo PR by petryshkaCODE ([#10993](https://github.com/AztecProtocol/aztec-packages/issues/10993)) ([0c6a4be](https://github.com/AztecProtocol/aztec-packages/commit/0c6a4bee82c62a522f69756f0d233ec637cd1a7a)) * Redo typo PR by 
VitalikBerashvili ([#10994](https://github.com/AztecProtocol/aztec-packages/issues/10994)) ([da36da4](https://github.com/AztecProtocol/aztec-packages/commit/da36da48560d3610b2d9abf1a56c47d1b28cf9a1)) * Release Noir(1.0.0-beta.1) (https://github.com/noir-lang/noir/pull/6622) ([2d3805a](https://github.com/AztecProtocol/aztec-packages/commit/2d3805a3b682b27bf6275c547b4b3d68d214eebe))
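The per-package versions collected above are tracked in `.release-please-manifest.json`, which the diff below bumps in lockstep for every released package. A quick way to inspect the tracked versions locally (assuming `jq` is installed):

```bash
# Print the per-package versions release-please tracks for this repo.
jq . .release-please-manifest.json
```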
--- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). --- .release-please-manifest.json | 8 ++-- CHANGELOG.md | 69 +++++++++++++++++++++++++++++++++ barretenberg/CHANGELOG.md | 32 +++++++++++++++ barretenberg/cpp/CMakeLists.txt | 2 +- barretenberg/ts/CHANGELOG.md | 7 ++++ barretenberg/ts/package.json | 2 +- yarn-project/aztec/CHANGELOG.md | 8 ++++ yarn-project/aztec/package.json | 2 +- 8 files changed, 123 insertions(+), 7 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index bc60251a774..2bf333f137e 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,7 +1,7 @@ { - ".": "0.68.2", + ".": "0.69.0", "yarn-project/cli": "0.35.1", - "yarn-project/aztec": "0.68.2", - "barretenberg": "0.68.2", - "barretenberg/ts": "0.68.2" + "yarn-project/aztec": "0.69.0", + "barretenberg": "0.69.0", + "barretenberg/ts": "0.69.0" } diff --git a/CHANGELOG.md b/CHANGELOG.md index d30ffde113c..34f403ba568 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,74 @@ # Changelog +## [0.69.0](https://github.com/AztecProtocol/aztec-packages/compare/aztec-packages-v0.68.2...aztec-packages-v0.69.0) (2025-01-03) + + +### ⚠ BREAKING CHANGES + +* Switch to using `jsonrpsee` for foreign calls; refactor `run_test`; foreign call layering (https://github.com/noir-lang/noir/pull/6849) + +### Features + +* **docs:** Algolia -> typesense ([#9698](https://github.com/AztecProtocol/aztec-packages/issues/9698)) ([e082063](https://github.com/AztecProtocol/aztec-packages/commit/e0820636bce47d9e8ec3f8c20358d9d4cae0041e)) +* Encapsulated UltraHonk Vanilla IVC ([#10900](https://github.com/AztecProtocol/aztec-packages/issues/10900)) ([fd5f611](https://github.com/AztecProtocol/aztec-packages/commit/fd5f611aca60c9c906a6440fdb5683794a183d53)) +* **LSP:** Suggest trait methods from where clauses (https://github.com/noir-lang/noir/pull/6915) ([dc12c2b](https://github.com/AztecProtocol/aztec-packages/commit/dc12c2b678e0c450c05cbd4748296e17ae73860b)) +* **ssa:** Hoist add and mul binary ops using known induction variables (https://github.com/noir-lang/noir/pull/6910) ([dc12c2b](https://github.com/AztecProtocol/aztec-packages/commit/dc12c2b678e0c450c05cbd4748296e17ae73860b)) +* Switch to using `jsonrpsee` for foreign calls; refactor `run_test`; foreign call layering (https://github.com/noir-lang/noir/pull/6849) ([dc12c2b](https://github.com/AztecProtocol/aztec-packages/commit/dc12c2b678e0c450c05cbd4748296e17ae73860b)) +* Use full IPA recursive verifier in root rollup ([#10962](https://github.com/AztecProtocol/aztec-packages/issues/10962)) ([37095ce](https://github.com/AztecProtocol/aztec-packages/commit/37095ceba560ad66516467387d186b5afd19a6e0)) +* Warn on trait method visibility (https://github.com/noir-lang/noir/pull/6923) ([dc12c2b](https://github.com/AztecProtocol/aztec-packages/commit/dc12c2b678e0c450c05cbd4748296e17ae73860b)) + + +### Bug Fixes + +* Bigint builtins are foreigns (https://github.com/noir-lang/noir/pull/6892) ([2d3805a](https://github.com/AztecProtocol/aztec-packages/commit/2d3805a3b682b27bf6275c547b4b3d68d214eebe)) +* **ci:** Acir bench ([#11021](https://github.com/AztecProtocol/aztec-packages/issues/11021)) ([9eaa109](https://github.com/AztecProtocol/aztec-packages/commit/9eaa10983b26616876099896accb0e3093ae8d20)) +* Consistent file_id across installation paths (https://github.com/noir-lang/noir/pull/6912) 
([2d3805a](https://github.com/AztecProtocol/aztec-packages/commit/2d3805a3b682b27bf6275c547b4b3d68d214eebe)) +* Remove unnecessary cast in bit-shift (https://github.com/noir-lang/noir/pull/6890) ([2d3805a](https://github.com/AztecProtocol/aztec-packages/commit/2d3805a3b682b27bf6275c547b4b3d68d214eebe)) +* Revert "feat(docs): algolia -> typesense" also fix boxes-test ([#11016](https://github.com/AztecProtocol/aztec-packages/issues/11016)) ([a1a4d76](https://github.com/AztecProtocol/aztec-packages/commit/a1a4d760b950ab563fd51b0c73dc0e06dcfe7fed)) +* Small fixes for sepolia deployments ([#10915](https://github.com/AztecProtocol/aztec-packages/issues/10915)) ([37d69bf](https://github.com/AztecProtocol/aztec-packages/commit/37d69bf5601b8f2c7e54f5b69b0376c4eddf10c1)) +* Update prompted foundry install command ([#10995](https://github.com/AztecProtocol/aztec-packages/issues/10995)) ([cd59f2e](https://github.com/AztecProtocol/aztec-packages/commit/cd59f2ef651b38da642009aa2a00c3fe710aa8e6)) +* Use explicit read transactions ([#10911](https://github.com/AztecProtocol/aztec-packages/issues/10911)) ([2a8e01c](https://github.com/AztecProtocol/aztec-packages/commit/2a8e01cc9f9ca77e75991bd584d6752e120c9db6)) + + +### Miscellaneous + +* Add `Instruction::Noop` (https://github.com/noir-lang/noir/pull/6899) ([2d3805a](https://github.com/AztecProtocol/aztec-packages/commit/2d3805a3b682b27bf6275c547b4b3d68d214eebe)) +* Add `rollup_root` and `rollup_block_merge` to tracked protocol circuits (https://github.com/noir-lang/noir/pull/6903) ([2d3805a](https://github.com/AztecProtocol/aztec-packages/commit/2d3805a3b682b27bf6275c547b4b3d68d214eebe)) +* Add if/loop tip (separate from no-predicate [#5657](https://github.com/AztecProtocol/aztec-packages/issues/5657)) (https://github.com/noir-lang/noir/pull/6806) ([dc12c2b](https://github.com/AztecProtocol/aztec-packages/commit/dc12c2b678e0c450c05cbd4748296e17ae73860b)) +* Add rollup circuits to memory reports (https://github.com/noir-lang/noir/pull/6897) ([dc12c2b](https://github.com/AztecProtocol/aztec-packages/commit/dc12c2b678e0c450c05cbd4748296e17ae73860b)) +* Add spypsy to nightly canary ([#10961](https://github.com/AztecProtocol/aztec-packages/issues/10961)) ([4dca8f5](https://github.com/AztecProtocol/aztec-packages/commit/4dca8f53b36206bcc34e9e502f7cd3674fb8e9c4)) +* Add version number when starting sandbox ([#10935](https://github.com/AztecProtocol/aztec-packages/issues/10935)) ([c8dcd8f](https://github.com/AztecProtocol/aztec-packages/commit/c8dcd8f3e50e6447e1e7a09d768b3aff5f17044b)) +* Bump rc1 tps ([#11012](https://github.com/AztecProtocol/aztec-packages/issues/11012)) ([52176f7](https://github.com/AztecProtocol/aztec-packages/commit/52176f7386f100c894dac40abd3c3ffcf4f6b3b1)) +* **ci:** Memory reports for execution (https://github.com/noir-lang/noir/pull/6907) ([dc12c2b](https://github.com/AztecProtocol/aztec-packages/commit/dc12c2b678e0c450c05cbd4748296e17ae73860b)) +* Cl/ci3.2 ([#10919](https://github.com/AztecProtocol/aztec-packages/issues/10919)) ([49dacc3](https://github.com/AztecProtocol/aztec-packages/commit/49dacc3378a339f8cc36971b630c52952249f60c)) +* Clean up translator circuit builder function definitions ([#10944](https://github.com/AztecProtocol/aztec-packages/issues/10944)) ([f6fef05](https://github.com/AztecProtocol/aztec-packages/commit/f6fef05119af7714d60f00c52455e52bdfa98288)) +* Disable broken honk test ([#11010](https://github.com/AztecProtocol/aztec-packages/issues/11010)) 
([8ad239a](https://github.com/AztecProtocol/aztec-packages/commit/8ad239a7cddcde8df610e9c0287681fc12cca306)) +* Disable tt test ([#10999](https://github.com/AztecProtocol/aztec-packages/issues/10999)) ([d9d64c3](https://github.com/AztecProtocol/aztec-packages/commit/d9d64c39a09774110ef6419831c8e5ca0e322ed1)) +* Document format strings (https://github.com/noir-lang/noir/pull/6920) ([dc12c2b](https://github.com/AztecProtocol/aztec-packages/commit/dc12c2b678e0c450c05cbd4748296e17ae73860b)) +* Fix flake in e2e-block-build ([#11002](https://github.com/AztecProtocol/aztec-packages/issues/11002)) ([2a2932e](https://github.com/AztecProtocol/aztec-packages/commit/2a2932e1b59462560f0be728469a7977fbb41c6f)) +* Fix mac build ([#10963](https://github.com/AztecProtocol/aztec-packages/issues/10963)) ([158afc4](https://github.com/AztecProtocol/aztec-packages/commit/158afc4cd34a9fc9cb41bcb083b5197eae1ce442)) +* Fix warning (https://github.com/noir-lang/noir/pull/6927) ([dc12c2b](https://github.com/AztecProtocol/aztec-packages/commit/dc12c2b678e0c450c05cbd4748296e17ae73860b)) +* Fix warnings in avm transpiler ([#11001](https://github.com/AztecProtocol/aztec-packages/issues/11001)) ([07c5b7f](https://github.com/AztecProtocol/aztec-packages/commit/07c5b7fb336a6df88c23693118bb8e6fa07423dd)) +* Move implementation of print foreign call into `nargo` (https://github.com/noir-lang/noir/pull/6865) ([dc12c2b](https://github.com/AztecProtocol/aztec-packages/commit/dc12c2b678e0c450c05cbd4748296e17ae73860b)) +* Prover.tomls fix ([#11011](https://github.com/AztecProtocol/aztec-packages/issues/11011)) ([565a67b](https://github.com/AztecProtocol/aztec-packages/commit/565a67b94ca88e1a632c616ea3f7079dd081c627)) +* Redo typo PR by Anon-im ([#11009](https://github.com/AztecProtocol/aztec-packages/issues/11009)) ([2044c58](https://github.com/AztecProtocol/aztec-packages/commit/2044c58387b5687658f190cf1b4a078a036eabc0)) +* Redo typo PR by Hack666r ([#10992](https://github.com/AztecProtocol/aztec-packages/issues/10992)) ([018f11e](https://github.com/AztecProtocol/aztec-packages/commit/018f11e39266423376b3a56afbc8aaf54b4de31d)) +* Redo typo PR by MonkeyKing44 ([#10996](https://github.com/AztecProtocol/aztec-packages/issues/10996)) ([faca458](https://github.com/AztecProtocol/aztec-packages/commit/faca458adda3139e92dcb2709f2c087c85842dd8)) +* Redo typo PR by panditdhamdhere ([#11026](https://github.com/AztecProtocol/aztec-packages/issues/11026)) ([8a6de5b](https://github.com/AztecProtocol/aztec-packages/commit/8a6de5b7bece0b7b18d3b1909d1d51d6bd2765b7)) +* Redo typo PR by petryshkaCODE ([#10993](https://github.com/AztecProtocol/aztec-packages/issues/10993)) ([0c6a4be](https://github.com/AztecProtocol/aztec-packages/commit/0c6a4bee82c62a522f69756f0d233ec637cd1a7a)) +* Redo typo PR by VitalikBerashvili ([#10994](https://github.com/AztecProtocol/aztec-packages/issues/10994)) ([da36da4](https://github.com/AztecProtocol/aztec-packages/commit/da36da48560d3610b2d9abf1a56c47d1b28cf9a1)) +* Redo typo PR by whitetechna ([#10997](https://github.com/AztecProtocol/aztec-packages/issues/10997)) ([89a2bd7](https://github.com/AztecProtocol/aztec-packages/commit/89a2bd7fa403ed0ba5472144b7c13d3ab7ab930b)) +* Release Noir(1.0.0-beta.1) (https://github.com/noir-lang/noir/pull/6622) ([2d3805a](https://github.com/AztecProtocol/aztec-packages/commit/2d3805a3b682b27bf6275c547b4b3d68d214eebe)) +* Remove unused dependency (https://github.com/noir-lang/noir/pull/6922) 
([dc12c2b](https://github.com/AztecProtocol/aztec-packages/commit/dc12c2b678e0c450c05cbd4748296e17ae73860b)) +* Replace relative paths to noir-protocol-circuits ([6b34449](https://github.com/AztecProtocol/aztec-packages/commit/6b344493a8e0c2e4d9ac67f037f9202dfe38c83c)) +* Replace relative paths to noir-protocol-circuits ([11f8a42](https://github.com/AztecProtocol/aztec-packages/commit/11f8a42c1503386c2323c5305c1058853ac05711)) +* Use ssa parser in flattening pass tests (https://github.com/noir-lang/noir/pull/6868) ([dc12c2b](https://github.com/AztecProtocol/aztec-packages/commit/dc12c2b678e0c450c05cbd4748296e17ae73860b)) + + +### Documentation + +* Remove links to (outdated) protocol specs ([#10831](https://github.com/AztecProtocol/aztec-packages/issues/10831)) ([4874d95](https://github.com/AztecProtocol/aztec-packages/commit/4874d95a7fd7103178820724a637479bea39fe0a)) + ## [0.68.2](https://github.com/AztecProtocol/aztec-packages/compare/aztec-packages-v0.68.1...aztec-packages-v0.68.2) (2024-12-24) diff --git a/barretenberg/CHANGELOG.md b/barretenberg/CHANGELOG.md index 683e61b3d29..935ec694eab 100644 --- a/barretenberg/CHANGELOG.md +++ b/barretenberg/CHANGELOG.md @@ -1,5 +1,37 @@ # Changelog +## [0.69.0](https://github.com/AztecProtocol/aztec-packages/compare/barretenberg-v0.68.2...barretenberg-v0.69.0) (2025-01-03) + + +### Features + +* Encapsulated UltraHonk Vanilla IVC ([#10900](https://github.com/AztecProtocol/aztec-packages/issues/10900)) ([fd5f611](https://github.com/AztecProtocol/aztec-packages/commit/fd5f611aca60c9c906a6440fdb5683794a183d53)) +* Use full IPA recursive verifier in root rollup ([#10962](https://github.com/AztecProtocol/aztec-packages/issues/10962)) ([37095ce](https://github.com/AztecProtocol/aztec-packages/commit/37095ceba560ad66516467387d186b5afd19a6e0)) + + +### Bug Fixes + +* Bigint builtins are foreigns (https://github.com/noir-lang/noir/pull/6892) ([2d3805a](https://github.com/AztecProtocol/aztec-packages/commit/2d3805a3b682b27bf6275c547b4b3d68d214eebe)) +* **ci:** Acir bench ([#11021](https://github.com/AztecProtocol/aztec-packages/issues/11021)) ([9eaa109](https://github.com/AztecProtocol/aztec-packages/commit/9eaa10983b26616876099896accb0e3093ae8d20)) +* Consistent file_id across installation paths (https://github.com/noir-lang/noir/pull/6912) ([2d3805a](https://github.com/AztecProtocol/aztec-packages/commit/2d3805a3b682b27bf6275c547b4b3d68d214eebe)) +* Remove unnecessary cast in bit-shift (https://github.com/noir-lang/noir/pull/6890) ([2d3805a](https://github.com/AztecProtocol/aztec-packages/commit/2d3805a3b682b27bf6275c547b4b3d68d214eebe)) + + +### Miscellaneous + +* Add `Instruction::Noop` (https://github.com/noir-lang/noir/pull/6899) ([2d3805a](https://github.com/AztecProtocol/aztec-packages/commit/2d3805a3b682b27bf6275c547b4b3d68d214eebe)) +* Add `rollup_root` and `rollup_block_merge` to tracked protocol circuits (https://github.com/noir-lang/noir/pull/6903) ([2d3805a](https://github.com/AztecProtocol/aztec-packages/commit/2d3805a3b682b27bf6275c547b4b3d68d214eebe)) +* Cl/ci3.2 ([#10919](https://github.com/AztecProtocol/aztec-packages/issues/10919)) ([49dacc3](https://github.com/AztecProtocol/aztec-packages/commit/49dacc3378a339f8cc36971b630c52952249f60c)) +* Clean up translator circuit builder function definitions ([#10944](https://github.com/AztecProtocol/aztec-packages/issues/10944)) ([f6fef05](https://github.com/AztecProtocol/aztec-packages/commit/f6fef05119af7714d60f00c52455e52bdfa98288)) +* Disable broken honk test 
([#11010](https://github.com/AztecProtocol/aztec-packages/issues/11010)) ([8ad239a](https://github.com/AztecProtocol/aztec-packages/commit/8ad239a7cddcde8df610e9c0287681fc12cca306)) +* Fix mac build ([#10963](https://github.com/AztecProtocol/aztec-packages/issues/10963)) ([158afc4](https://github.com/AztecProtocol/aztec-packages/commit/158afc4cd34a9fc9cb41bcb083b5197eae1ce442)) +* Redo typo PR by Anon-im ([#11009](https://github.com/AztecProtocol/aztec-packages/issues/11009)) ([2044c58](https://github.com/AztecProtocol/aztec-packages/commit/2044c58387b5687658f190cf1b4a078a036eabc0)) +* Redo typo PR by Hack666r ([#10992](https://github.com/AztecProtocol/aztec-packages/issues/10992)) ([018f11e](https://github.com/AztecProtocol/aztec-packages/commit/018f11e39266423376b3a56afbc8aaf54b4de31d)) +* Redo typo PR by MonkeyKing44 ([#10996](https://github.com/AztecProtocol/aztec-packages/issues/10996)) ([faca458](https://github.com/AztecProtocol/aztec-packages/commit/faca458adda3139e92dcb2709f2c087c85842dd8)) +* Redo typo PR by petryshkaCODE ([#10993](https://github.com/AztecProtocol/aztec-packages/issues/10993)) ([0c6a4be](https://github.com/AztecProtocol/aztec-packages/commit/0c6a4bee82c62a522f69756f0d233ec637cd1a7a)) +* Redo typo PR by VitalikBerashvili ([#10994](https://github.com/AztecProtocol/aztec-packages/issues/10994)) ([da36da4](https://github.com/AztecProtocol/aztec-packages/commit/da36da48560d3610b2d9abf1a56c47d1b28cf9a1)) +* Release Noir(1.0.0-beta.1) (https://github.com/noir-lang/noir/pull/6622) ([2d3805a](https://github.com/AztecProtocol/aztec-packages/commit/2d3805a3b682b27bf6275c547b4b3d68d214eebe)) + ## [0.68.2](https://github.com/AztecProtocol/aztec-packages/compare/barretenberg-v0.68.1...barretenberg-v0.68.2) (2024-12-24) diff --git a/barretenberg/cpp/CMakeLists.txt b/barretenberg/cpp/CMakeLists.txt index 0eb5e214bf0..0956c08a44b 100644 --- a/barretenberg/cpp/CMakeLists.txt +++ b/barretenberg/cpp/CMakeLists.txt @@ -6,7 +6,7 @@ cmake_minimum_required(VERSION 3.24 FATAL_ERROR) project( Barretenberg DESCRIPTION "BN254 elliptic curve library, and PLONK SNARK prover" - VERSION 0.68.2 # x-release-please-version + VERSION 0.69.0 # x-release-please-version LANGUAGES CXX C ) # Insert version into `bb` config file diff --git a/barretenberg/ts/CHANGELOG.md b/barretenberg/ts/CHANGELOG.md index bb740bf5c01..2e80b2f1e38 100644 --- a/barretenberg/ts/CHANGELOG.md +++ b/barretenberg/ts/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.69.0](https://github.com/AztecProtocol/aztec-packages/compare/barretenberg.js-v0.68.2...barretenberg.js-v0.69.0) (2025-01-03) + + +### Miscellaneous + +* Cl/ci3.2 ([#10919](https://github.com/AztecProtocol/aztec-packages/issues/10919)) ([49dacc3](https://github.com/AztecProtocol/aztec-packages/commit/49dacc3378a339f8cc36971b630c52952249f60c)) + ## [0.68.2](https://github.com/AztecProtocol/aztec-packages/compare/barretenberg.js-v0.68.1...barretenberg.js-v0.68.2) (2024-12-24) diff --git a/barretenberg/ts/package.json b/barretenberg/ts/package.json index be4d44b18da..0995f7b5f20 100644 --- a/barretenberg/ts/package.json +++ b/barretenberg/ts/package.json @@ -1,7 +1,7 @@ { "name": "@aztec/bb.js", "packageManager": "yarn@4.5.2", - "version": "0.68.2", + "version": "0.69.0", "homepage": "https://github.com/AztecProtocol/aztec-packages/tree/master/barretenberg/ts", "license": "MIT", "type": "module", diff --git a/yarn-project/aztec/CHANGELOG.md b/yarn-project/aztec/CHANGELOG.md index 540ac77ec33..bf36a15cd5d 100644 --- a/yarn-project/aztec/CHANGELOG.md +++ 
b/yarn-project/aztec/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [0.69.0](https://github.com/AztecProtocol/aztec-packages/compare/aztec-package-v0.68.2...aztec-package-v0.69.0) (2025-01-03) + + +### Miscellaneous + +* Add version number when starting sandbox ([#10935](https://github.com/AztecProtocol/aztec-packages/issues/10935)) ([c8dcd8f](https://github.com/AztecProtocol/aztec-packages/commit/c8dcd8f3e50e6447e1e7a09d768b3aff5f17044b)) +* Cl/ci3.2 ([#10919](https://github.com/AztecProtocol/aztec-packages/issues/10919)) ([49dacc3](https://github.com/AztecProtocol/aztec-packages/commit/49dacc3378a339f8cc36971b630c52952249f60c)) + ## [0.68.2](https://github.com/AztecProtocol/aztec-packages/compare/aztec-package-v0.68.1...aztec-package-v0.68.2) (2024-12-24) diff --git a/yarn-project/aztec/package.json b/yarn-project/aztec/package.json index 49293727193..714b0362802 100644 --- a/yarn-project/aztec/package.json +++ b/yarn-project/aztec/package.json @@ -1,6 +1,6 @@ { "name": "@aztec/aztec", - "version": "0.68.2", + "version": "0.69.0", "type": "module", "exports": { ".": "./dest/index.js" From d08f540eea0f4763b41e2a741a3ba65cfdf37f4e Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Fri, 3 Jan 2025 10:01:29 -0300 Subject: [PATCH 05/20] chore: Patch jest to not use JSON serialization in message passing ci3 (#10964) Restores the patch from #5883 which was [removed](https://github.com/AztecProtocol/aztec-packages/pull/10042/files#diff-ce07a7dc313882291a1eb38f5c47598d992270d769e487fd3970dcf61af71b01) in CI3. --- .../jest-runner-npm-29.7.0-3bc9f82b58.patch | 13 ++++++++ yarn-project/package.json | 3 +- yarn-project/yarn.lock | 31 ++++++++++++++++++- 3 files changed, 45 insertions(+), 2 deletions(-) create mode 100644 yarn-project/.yarn/patches/jest-runner-npm-29.7.0-3bc9f82b58.patch diff --git a/yarn-project/.yarn/patches/jest-runner-npm-29.7.0-3bc9f82b58.patch b/yarn-project/.yarn/patches/jest-runner-npm-29.7.0-3bc9f82b58.patch new file mode 100644 index 00000000000..36a2a45009b --- /dev/null +++ b/yarn-project/.yarn/patches/jest-runner-npm-29.7.0-3bc9f82b58.patch @@ -0,0 +1,13 @@ +diff --git a/build/index.js b/build/index.js +index 65c0ed180a1f44a5095f80d572aacb68be1db3da..3bb4938110a50a2eca1b2f01466b7be16c9c8145 100644 +--- a/build/index.js ++++ b/build/index.js +@@ -124,7 +124,7 @@ class TestRunner extends _types.EmittingTestRunner { + enableWorkerThreads: this._globalConfig.workerThreads, + exposedMethods: ['worker'], + forkOptions: { +- serialization: 'json', ++ serialization: 'advanced', + stdio: 'pipe' + }, + // The workerIdleMemoryLimit should've been converted to a number during diff --git a/yarn-project/package.json b/yarn-project/package.json index 4fae37f35e5..0a2e6fbc9eb 100644 --- a/yarn-project/package.json +++ b/yarn-project/package.json @@ -82,6 +82,7 @@ "@noir-lang/types": "portal:../noir/packages/types", "@noir-lang/noirc_abi": "portal:../noir/packages/noirc_abi", "@noir-lang/noir_codegen": "portal:../noir/packages/noir_codegen", - "@noir-lang/noir_js": "file:../noir/packages/noir_js" + "@noir-lang/noir_js": "file:../noir/packages/noir_js", + "jest-runner@npm:^29.7.0": "patch:jest-runner@npm%3A29.7.0#~/.yarn/patches/jest-runner-npm-29.7.0-3bc9f82b58.patch" } } diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock index 471c216bf75..e7d49f8e155 100644 --- a/yarn-project/yarn.lock +++ b/yarn-project/yarn.lock @@ -13290,7 +13290,7 @@ __metadata: languageName: node linkType: hard -"jest-runner@npm:^29.7.0": +"jest-runner@npm:29.7.0": version: 29.7.0 resolution: 
"jest-runner@npm:29.7.0" dependencies: @@ -13319,6 +13319,35 @@ __metadata: languageName: node linkType: hard +"jest-runner@patch:jest-runner@npm%3A29.7.0#~/.yarn/patches/jest-runner-npm-29.7.0-3bc9f82b58.patch": + version: 29.7.0 + resolution: "jest-runner@patch:jest-runner@npm%3A29.7.0#~/.yarn/patches/jest-runner-npm-29.7.0-3bc9f82b58.patch::version=29.7.0&hash=a79dea" + dependencies: + "@jest/console": "npm:^29.7.0" + "@jest/environment": "npm:^29.7.0" + "@jest/test-result": "npm:^29.7.0" + "@jest/transform": "npm:^29.7.0" + "@jest/types": "npm:^29.6.3" + "@types/node": "npm:*" + chalk: "npm:^4.0.0" + emittery: "npm:^0.13.1" + graceful-fs: "npm:^4.2.9" + jest-docblock: "npm:^29.7.0" + jest-environment-node: "npm:^29.7.0" + jest-haste-map: "npm:^29.7.0" + jest-leak-detector: "npm:^29.7.0" + jest-message-util: "npm:^29.7.0" + jest-resolve: "npm:^29.7.0" + jest-runtime: "npm:^29.7.0" + jest-util: "npm:^29.7.0" + jest-watcher: "npm:^29.7.0" + jest-worker: "npm:^29.7.0" + p-limit: "npm:^3.1.0" + source-map-support: "npm:0.5.13" + checksum: 10/d520c4f40179a22626d547b9fdf5f802a7e40d27e50f13a3ecca327581e78164dcdc8c650ed2974ef8f82caef935f7e356f7ce2d1f8dac65a9556101da79a27c + languageName: node + linkType: hard + "jest-runtime@npm:^29.7.0": version: 29.7.0 resolution: "jest-runtime@npm:29.7.0" From b9e71094969071e25533d91879c745776ca76351 Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Fri, 3 Jan 2025 11:18:17 -0300 Subject: [PATCH 06/20] feat: Prover node checks txs availability before sending quote (#10965) The prover node is now responsible for fetching txs for proving the epoch, instead of delegating to the prover job. It fetches the txs once an epoch is complete and before sending the quote, and reuses them when creating the job. Fixes #10803 --- .../prover-node/src/job/epoch-proving-job.ts | 21 ++----- .../prover-node/src/prover-node.test.ts | 41 +++++++++++-- yarn-project/prover-node/src/prover-node.ts | 60 +++++++++++++++---- 3 files changed, 89 insertions(+), 33 deletions(-) diff --git a/yarn-project/prover-node/src/job/epoch-proving-job.ts b/yarn-project/prover-node/src/job/epoch-proving-job.ts index 06e5d26fa82..3db7c5ca4d2 100644 --- a/yarn-project/prover-node/src/job/epoch-proving-job.ts +++ b/yarn-project/prover-node/src/job/epoch-proving-job.ts @@ -7,9 +7,7 @@ import { type L2Block, type L2BlockSource, type ProcessedTx, - type ProverCoordination, type Tx, - type TxHash, } from '@aztec/circuit-types'; import { asyncPool } from '@aztec/foundation/async-pool'; import { createLogger } from '@aztec/foundation/log'; @@ -41,12 +39,12 @@ export class EpochProvingJob implements Traceable { private dbProvider: ForkMerkleTreeOperations, private epochNumber: bigint, private blocks: L2Block[], + private txs: Tx[], private prover: EpochProver, private publicProcessorFactory: PublicProcessorFactory, private publisher: L1Publisher, private l2BlockSource: L2BlockSource, private l1ToL2MessageSource: L1ToL2MessageSource, - private coordination: ProverCoordination, private metrics: ProverNodeMetrics, private config: { parallelBlockLimit: number } = { parallelBlockLimit: 32 }, private cleanUp: (job: EpochProvingJob) => Promise = () => Promise.resolve(), @@ -92,10 +90,9 @@ export class EpochProvingJob implements Traceable { await asyncPool(this.config.parallelBlockLimit, this.blocks, async block => { const globalVariables = block.header.globalVariables; - const txHashes = block.body.txEffects.map(tx => tx.txHash); const txCount = block.body.numberOfTxsIncludingPadded; + const txs = 
this.getTxs(block); const l1ToL2Messages = await this.getL1ToL2Messages(block); - const txs = await this.getTxs(txHashes, block.number); const previousHeader = await this.getBlockHeader(block.number - 1); this.log.verbose(`Starting processing block ${block.number}`, { @@ -162,17 +159,9 @@ export class EpochProvingJob implements Traceable { return this.l2BlockSource.getBlockHeader(blockNumber); } - private async getTxs(txHashes: TxHash[], blockNumber: number): Promise { - const txs = await Promise.all( - txHashes.map(txHash => this.coordination.getTxByHash(txHash).then(tx => [txHash, tx] as const)), - ); - const notFound = txs.filter(([_, tx]) => !tx); - if (notFound.length) { - throw new Error( - `Txs not found for block ${blockNumber}: ${notFound.map(([txHash]) => txHash.toString()).join(', ')}`, - ); - } - return txs.map(([_, tx]) => tx!); + private getTxs(block: L2Block): Tx[] { + const txHashes = block.body.txEffects.map(tx => tx.txHash.toBigInt()); + return this.txs.filter(tx => txHashes.includes(tx.getTxHash().toBigInt())); } private getL1ToL2Messages(block: L2Block) { diff --git a/yarn-project/prover-node/src/prover-node.test.ts b/yarn-project/prover-node/src/prover-node.test.ts index bb73e84bfe1..56581e5e23d 100644 --- a/yarn-project/prover-node/src/prover-node.test.ts +++ b/yarn-project/prover-node/src/prover-node.test.ts @@ -1,4 +1,5 @@ import { + type Body, type EpochProofClaim, EpochProofQuote, EpochProofQuotePayload, @@ -9,6 +10,9 @@ import { type MerkleTreeWriteOperations, P2PClientType, type ProverCoordination, + type Tx, + type TxEffect, + TxHash, WorldStateRunningState, type WorldStateSynchronizer, } from '@aztec/circuit-types'; @@ -44,7 +48,8 @@ describe('prover-node', () => { let l1ToL2MessageSource: MockProxy; let contractDataSource: MockProxy; let worldState: MockProxy; - let coordination: MockProxy | ProverCoordination; + let coordination: ProverCoordination; + let mockCoordination: MockProxy; let quoteProvider: MockProxy; let quoteSigner: MockProxy; let bondManager: MockProxy; @@ -108,7 +113,8 @@ describe('prover-node', () => { l1ToL2MessageSource = mock(); contractDataSource = mock(); worldState = mock(); - coordination = mock(); + mockCoordination = mock(); + coordination = mockCoordination; quoteProvider = mock(); quoteSigner = mock(); bondManager = mock(); @@ -134,10 +140,23 @@ describe('prover-node', () => { // Signer returns an empty signature quoteSigner.sign.mockImplementation(payload => Promise.resolve(new EpochProofQuote(payload, Signature.empty()))); + // We create 3 fake blocks with 1 tx effect each + blocks = times(3, i => + mock({ + number: i + 20, + hash: () => new Fr(i), + body: mock({ txEffects: [mock({ txHash: TxHash.random() } as TxEffect)] }), + }), + ); + // Archiver returns a bunch of fake blocks - blocks = times(3, i => mock({ number: i + 20, hash: () => new Fr(i) })); l2BlockSource.getBlocksForEpoch.mockResolvedValue(blocks); + // Coordination plays along and returns a tx whenever requested + mockCoordination.getTxByHash.mockImplementation(hash => + Promise.resolve(mock({ getTxHash: () => hash, tryGetTxHash: () => hash })), + ); + // A sample claim claim = { epochToProve: 10n, bondProvider: address } as EpochProofClaim; @@ -175,6 +194,12 @@ describe('prover-node', () => { expect(coordination.addEpochProofQuote).not.toHaveBeenCalled(); }); + it('does not send a quote if there is a tx missing from coordinator', async () => { + mockCoordination.getTxByHash.mockResolvedValue(undefined); + await proverNode.handleEpochCompleted(10n); + 
expect(coordination.addEpochProofQuote).not.toHaveBeenCalled(); + }); + it('does not send a quote on a finished epoch if the provider does not return one', async () => { quoteProvider.getQuote.mockResolvedValue(undefined); await proverNode.handleEpochCompleted(10n); @@ -309,7 +334,7 @@ describe('prover-node', () => { // Things to test // - Another aztec node receives the proof quote via p2p // - The prover node can get the it is missing via p2p, or it has them in it's mempool - describe('Using a p2p coordination', () => { + describe('using a p2p coordination', () => { let bootnode: BootstrapNode; let epochCache: MockProxy; let p2pClient: P2PClient; @@ -346,6 +371,11 @@ describe('prover-node', () => { // Set the p2p client to be the coordination method coordination = p2pClient; + // But still mock getTxByHash + const mockGetTxByHash = (hash: TxHash) => Promise.resolve(mock({ getTxHash: () => hash })); + jest.spyOn(p2pClient, 'getTxByHash').mockImplementation(mockGetTxByHash); + jest.spyOn(otherP2PClient, 'getTxByHash').mockImplementation(mockGetTxByHash); + await Promise.all([p2pClient.start(), otherP2PClient.start()]); // Sleep to enable peer discovery @@ -373,7 +403,7 @@ describe('prover-node', () => { await proverNode.stop(); }); - it('Should send a proof quote via p2p to another node', async () => { + it('should send a proof quote via p2p to another node', async () => { const epochNumber = 10n; epochCache.getEpochAndSlotNow.mockReturnValue({ epoch: epochNumber, @@ -412,6 +442,7 @@ describe('prover-node', () => { protected override doCreateEpochProvingJob( epochNumber: bigint, _blocks: L2Block[], + _txs: Tx[], _publicProcessorFactory: PublicProcessorFactory, cleanUp: (job: EpochProvingJob) => Promise, ): EpochProvingJob { diff --git a/yarn-project/prover-node/src/prover-node.ts b/yarn-project/prover-node/src/prover-node.ts index 5bb23e97378..10a65b3594f 100644 --- a/yarn-project/prover-node/src/prover-node.ts +++ b/yarn-project/prover-node/src/prover-node.ts @@ -10,6 +10,7 @@ import { type ProverCoordination, type ProverNodeApi, type Service, + type Tx, type WorldStateSynchronizer, tryStop, } from '@aztec/circuit-types'; @@ -49,6 +50,7 @@ export class ProverNode implements ClaimsMonitorHandler, EpochMonitorHandler, Pr private latestEpochWeAreProving: bigint | undefined; private jobs: Map = new Map(); + private cachedEpochData: { epochNumber: bigint; blocks: L2Block[]; txs: Tx[] } | undefined = undefined; private options: ProverNodeOptions; private metrics: ProverNodeMetrics; @@ -139,13 +141,12 @@ export class ProverNode implements ClaimsMonitorHandler, EpochMonitorHandler, Pr */ async handleEpochCompleted(epochNumber: bigint): Promise { try { - // Construct a quote for the epoch - const blocks = await this.l2BlockSource.getBlocksForEpoch(epochNumber); - if (blocks.length === 0) { - this.log.info(`No blocks found for epoch ${epochNumber}`); - return; - } + // Gather data for the epoch + const epochData = await this.gatherEpochData(epochNumber); + const { blocks } = epochData; + this.cachedEpochData = { epochNumber, ...epochData }; + // Construct a quote for the epoch const partialQuote = await this.quoteProvider.getQuote(Number(epochNumber), blocks); if (!partialQuote) { this.log.info(`No quote produced for epoch ${epochNumber}`); @@ -256,10 +257,9 @@ export class ProverNode implements ClaimsMonitorHandler, EpochMonitorHandler, Pr } // Gather blocks for this epoch - const blocks = await this.l2BlockSource.getBlocksForEpoch(epochNumber); - if (blocks.length === 0) { - throw new Error(`No 
blocks found for epoch ${epochNumber}`); - } + const cachedEpochData = this.cachedEpochData?.epochNumber === epochNumber ? this.cachedEpochData : undefined; + const { blocks, txs } = cachedEpochData ?? (await this.gatherEpochData(epochNumber)); + const fromBlock = blocks[0].number; const toBlock = blocks.at(-1)!.number; @@ -279,15 +279,51 @@ export class ProverNode implements ClaimsMonitorHandler, EpochMonitorHandler, Pr return Promise.resolve(); }; - const job = this.doCreateEpochProvingJob(epochNumber, blocks, publicProcessorFactory, cleanUp); + const job = this.doCreateEpochProvingJob(epochNumber, blocks, txs, publicProcessorFactory, cleanUp); this.jobs.set(job.getId(), job); return job; } + @trackSpan('ProverNode.gatherEpochData', epochNumber => ({ [Attributes.EPOCH_NUMBER]: Number(epochNumber) })) + private async gatherEpochData(epochNumber: bigint) { + // Gather blocks for this epoch and their txs + const blocks = await this.gatherBlocks(epochNumber); + const txs = await this.gatherTxs(epochNumber, blocks); + + return { blocks, txs }; + } + + private async gatherBlocks(epochNumber: bigint) { + const blocks = await this.l2BlockSource.getBlocksForEpoch(epochNumber); + if (blocks.length === 0) { + throw new Error(`No blocks found for epoch ${epochNumber}`); + } + return blocks; + } + + private async gatherTxs(epochNumber: bigint, blocks: L2Block[]) { + const txs = await Promise.all( + blocks.flatMap(block => + block.body.txEffects + .map(tx => tx.txHash) + .map(txHash => this.coordination.getTxByHash(txHash).then(tx => [block.number, txHash, tx] as const)), + ), + ); + + const notFound = txs.filter(([_blockNum, _txHash, tx]) => !tx); + if (notFound.length) { + const notFoundList = notFound.map(([blockNum, txHash]) => `${txHash.toString()} (block ${blockNum})`).join(', '); + throw new Error(`Txs not found for epoch ${epochNumber}: ${notFoundList}`); + } + + return txs.map(([_blockNumber, _txHash, tx]) => tx!); + } + /** Extracted for testing purposes. */ protected doCreateEpochProvingJob( epochNumber: bigint, blocks: L2Block[], + txs: Tx[], publicProcessorFactory: PublicProcessorFactory, cleanUp: () => Promise, ) { @@ -295,12 +331,12 @@ export class ProverNode implements ClaimsMonitorHandler, EpochMonitorHandler, Pr this.worldState, epochNumber, blocks, + txs, this.prover.createEpochProver(), publicProcessorFactory, this.publisher, this.l2BlockSource, this.l1ToL2MessageSource, - this.coordination, this.metrics, { parallelBlockLimit: this.options.maxParallelBlocksPerEpoch }, cleanUp, From 79e289d4c77c36e847851ec2a910ed0bc122d307 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Fri, 3 Jan 2025 14:34:33 +0000 Subject: [PATCH 07/20] chore: restore `prove_then_verify` test on `verify_rollup_honk_proof` (#11018) This PR reverts the changes to bootstrap.sh made in #11010. 
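For context, `test_cmds` in `barretenberg/acir_tests/bootstrap.sh` emits one shell command per test case, with `SYS` selecting the proving system and `FLOW` the flow script. Restoring the commented-out line below therefore schedules a command roughly equivalent to this sketch; the runner path stands in for the script's `$run_test` helper and is illustrative only.

```bash
# Sketch of the restored test case: prove, then verify, the
# verify_rollup_honk_proof program under the ultra_rollup_honk system.
SYS=ultra_rollup_honk FLOW=prove_then_verify ./run_test verify_rollup_honk_proof
```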
--- barretenberg/acir_tests/bootstrap.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/barretenberg/acir_tests/bootstrap.sh b/barretenberg/acir_tests/bootstrap.sh index 3c125b620c3..1b1a7771d9b 100755 --- a/barretenberg/acir_tests/bootstrap.sh +++ b/barretenberg/acir_tests/bootstrap.sh @@ -136,7 +136,7 @@ function test_cmds { echo SYS=ultra_honk FLOW=prove_then_verify RECURSIVE=true $run_test assert_statement echo SYS=ultra_honk FLOW=prove_then_verify RECURSIVE=true $run_test double_verify_honk_proof echo SYS=ultra_honk FLOW=prove_and_verify_program $run_test merkle_insert - # echo SYS=ultra_rollup_honk FLOW=prove_then_verify $run_test verify_rollup_honk_proof + echo SYS=ultra_rollup_honk FLOW=prove_then_verify $run_test verify_rollup_honk_proof # barretenberg-acir-tests-bb-client-ivc: echo FLOW=prove_then_verify_client_ivc $run_test 6_array From 9dad251999ad5e5362787c459360955a3004eb37 Mon Sep 17 00:00:00 2001 From: Lasse Herskind <16536249+LHerskind@users.noreply.github.com> Date: Fri, 3 Jan 2025 15:39:30 +0100 Subject: [PATCH 08/20] feat: slasher (#10693) --- l1-contracts/src/core/Leonidas.sol | 8 +- l1-contracts/src/core/Rollup.sol | 12 +- l1-contracts/src/core/interfaces/ISlasher.sol | 9 + l1-contracts/src/core/staking/Slasher.sol | 37 ++ .../src/core/staking/SlashingProposer.sol | 35 ++ l1-contracts/src/core/staking/Staking.sol | 16 +- l1-contracts/src/governance/CoinIssuer.sol | 2 +- .../interfaces/IGovernanceProposer.sol | 10 +- .../src/governance/libraries/Errors.sol | 18 +- .../EmpireBase.sol} | 44 +- .../proposer/GovernanceProposer.sol | 36 ++ l1-contracts/src/periphery/SlashFactory.sol | 72 ++++ l1-contracts/src/periphery/SlashPayload.sol | 37 ++ .../periphery/interfaces/ISlashFactory.sol | 18 + l1-contracts/terraform/main.tf | 11 +- l1-contracts/test/Rollup.t.sol | 3 +- l1-contracts/test/fees/FeeRollup.t.sol | 4 +- .../test/governance/coin-issuer/mint.t.sol | 2 +- .../governance/governance-proposer/Base.t.sol | 2 +- .../governance-proposer/constructor.t.sol | 16 +- ...shProposal.t.sol => executeProposal.t.sol} | 44 +- ...pushProposal.tree => executeProposal.tree} | 4 +- .../governance/governance-proposer/vote.t.sol | 7 +- .../test/governance/governance/base.t.sol | 2 +- .../scenario/NewGovernanceProposerPayload.sol | 2 +- .../UpgradeGovernanceProposerTest.t.sol | 4 +- .../scenario/slashing/Slashing.t.sol | 121 ++++++ l1-contracts/test/harnesses/Leonidas.sol | 5 +- l1-contracts/test/harnesses/Rollup.sol | 4 +- l1-contracts/test/harnesses/TestConstants.sol | 2 + l1-contracts/test/sparta/Sparta.t.sol | 68 ++- l1-contracts/test/staking/StakingCheater.sol | 9 +- l1-contracts/test/staking/base.t.sol | 7 +- .../files/config/config-prover-env.sh | 2 + .../files/config/config-validator-env.sh | 2 + .../files/config/deploy-l1-contracts.sh | 2 + .../aztec-node/src/aztec-node/server.ts | 7 +- .../aztec.js/src/contract/contract.test.ts | 1 + .../cli/src/cmds/infrastructure/sequencers.ts | 10 +- .../cli/src/cmds/l1/deploy_l1_contracts.ts | 1 + .../cli/src/cmds/l1/update_l1_validators.ts | 9 +- .../cli/src/cmds/pxe/get_node_info.ts | 2 + .../end-to-end/scripts/e2e_test_config.yml | 2 + .../native-network/deploy-l1-contracts.sh | 2 + .../end-to-end/src/e2e_p2p/p2p_network.ts | 74 +++- .../end-to-end/src/e2e_p2p/slashing.test.ts | 264 ++++++++++++ .../upgrade_governance_proposer.test.ts | 5 +- .../src/fixtures/snapshot_manager.ts | 2 +- yarn-project/ethereum/src/config.ts | 47 +- yarn-project/ethereum/src/constants.ts | 1 - .../ethereum/src/deploy_l1_contracts.ts | 31 +- 
.../ethereum/src/l1_contract_addresses.ts | 7 + yarn-project/foundation/src/config/env_var.ts | 6 + .../scripts/generate-artifacts.sh | 4 + .../src/pxe_service/test/pxe_service.test.ts | 1 + yarn-project/sequencer-client/package.json | 1 + .../src/client/sequencer-client.ts | 4 + yarn-project/sequencer-client/src/index.ts | 1 + .../src/publisher/l1-publisher.ts | 163 ++++--- .../src/sequencer/sequencer.test.ts | 4 + .../src/sequencer/sequencer.ts | 13 +- .../sequencer-client/src/slasher/factory.ts | 22 + .../sequencer-client/src/slasher/index.ts | 2 + .../src/slasher/slasher_client.test.ts | 120 ++++++ .../src/slasher/slasher_client.ts | 402 ++++++++++++++++++ yarn-project/sequencer-client/tsconfig.json | 3 + yarn-project/yarn.lock | 1 + 67 files changed, 1698 insertions(+), 191 deletions(-) create mode 100644 l1-contracts/src/core/interfaces/ISlasher.sol create mode 100644 l1-contracts/src/core/staking/Slasher.sol create mode 100644 l1-contracts/src/core/staking/SlashingProposer.sol rename l1-contracts/src/governance/{GovernanceProposer.sol => proposer/EmpireBase.sol} (81%) create mode 100644 l1-contracts/src/governance/proposer/GovernanceProposer.sol create mode 100644 l1-contracts/src/periphery/SlashFactory.sol create mode 100644 l1-contracts/src/periphery/SlashPayload.sol create mode 100644 l1-contracts/src/periphery/interfaces/ISlashFactory.sol rename l1-contracts/test/governance/governance-proposer/{pushProposal.t.sol => executeProposal.t.sol} (87%) rename l1-contracts/test/governance/governance-proposer/{pushProposal.tree => executeProposal.tree} (93%) create mode 100644 l1-contracts/test/governance/scenario/slashing/Slashing.t.sol create mode 100644 yarn-project/end-to-end/src/e2e_p2p/slashing.test.ts create mode 100644 yarn-project/sequencer-client/src/slasher/factory.ts create mode 100644 yarn-project/sequencer-client/src/slasher/index.ts create mode 100644 yarn-project/sequencer-client/src/slasher/slasher_client.test.ts create mode 100644 yarn-project/sequencer-client/src/slasher/slasher_client.ts diff --git a/l1-contracts/src/core/Leonidas.sol b/l1-contracts/src/core/Leonidas.sol index 77244bec628..213b6da3925 100644 --- a/l1-contracts/src/core/Leonidas.sol +++ b/l1-contracts/src/core/Leonidas.sol @@ -42,13 +42,17 @@ contract Leonidas is Staking, TimeFns, ILeonidas { LeonidasStorage private leonidasStore; constructor( - address _ares, IERC20 _stakingAsset, uint256 _minimumStake, + uint256 _slashingQuorum, + uint256 _roundSize, uint256 _slotDuration, uint256 _epochDuration, uint256 _targetCommitteeSize - ) Staking(_ares, _stakingAsset, _minimumStake) TimeFns(_slotDuration, _epochDuration) { + ) + Staking(_stakingAsset, _minimumStake, _slashingQuorum, _roundSize) + TimeFns(_slotDuration, _epochDuration) + { GENESIS_TIME = Timestamp.wrap(block.timestamp); SLOT_DURATION = _slotDuration; EPOCH_DURATION = _epochDuration; diff --git a/l1-contracts/src/core/Rollup.sol b/l1-contracts/src/core/Rollup.sol index 2e6dedc15b2..531f58186eb 100644 --- a/l1-contracts/src/core/Rollup.sol +++ b/l1-contracts/src/core/Rollup.sol @@ -51,6 +51,8 @@ struct Config { uint256 targetCommitteeSize; uint256 aztecEpochProofClaimWindowInL2Slots; uint256 minimumStake; + uint256 slashingQuorum; + uint256 slashingRoundSize; } /** @@ -110,15 +112,15 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Ownable, Leonidas, IRollup, ITes ) Ownable(_ares) Leonidas( - _ares, _stakingAsset, _config.minimumStake, + _config.slashingQuorum, + _config.slashingRoundSize, _config.aztecSlotDuration, _config.aztecEpochDuration, 
_config.targetCommitteeSize ) { - rollupStore.epochProofVerifier = new MockVerifier(); FEE_JUICE_PORTAL = _fpcJuicePortal; REWARD_DISTRIBUTOR = _rewardDistributor; ASSET = _fpcJuicePortal.UNDERLYING(); @@ -127,14 +129,16 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Ownable, Leonidas, IRollup, ITes ); INBOX = IInbox(address(new Inbox(address(this), Constants.L1_TO_L2_MSG_SUBTREE_HEIGHT))); OUTBOX = IOutbox(address(new Outbox(address(this)))); - rollupStore.vkTreeRoot = _vkTreeRoot; - rollupStore.protocolContractTreeRoot = _protocolContractTreeRoot; VERSION = 1; L1_BLOCK_AT_GENESIS = block.number; CLAIM_DURATION_IN_L2_SLOTS = _config.aztecEpochProofClaimWindowInL2Slots; IS_FOUNDRY_TEST = VM_ADDRESS.code.length > 0; + rollupStore.epochProofVerifier = new MockVerifier(); + rollupStore.vkTreeRoot = _vkTreeRoot; + rollupStore.protocolContractTreeRoot = _protocolContractTreeRoot; + // Genesis block rollupStore.blocks[0] = BlockLog({ feeHeader: FeeHeader({ diff --git a/l1-contracts/src/core/interfaces/ISlasher.sol b/l1-contracts/src/core/interfaces/ISlasher.sol new file mode 100644 index 00000000000..6ad8c695719 --- /dev/null +++ b/l1-contracts/src/core/interfaces/ISlasher.sol @@ -0,0 +1,9 @@ +// SPDX-License-Identifier: Apache-2.0 +// Copyright 2024 Aztec Labs. +pragma solidity >=0.8.27; + +import {IPayload} from "@aztec/governance/interfaces/IPayload.sol"; + +interface ISlasher { + function slash(IPayload _payload) external returns (bool); +} diff --git a/l1-contracts/src/core/staking/Slasher.sol b/l1-contracts/src/core/staking/Slasher.sol new file mode 100644 index 00000000000..39e44791ff2 --- /dev/null +++ b/l1-contracts/src/core/staking/Slasher.sol @@ -0,0 +1,37 @@ +// SPDX-License-Identifier: Apache-2.0 +// Copyright 2024 Aztec Labs. +pragma solidity >=0.8.27; + +import {ISlasher} from "@aztec/core/interfaces/ISlasher.sol"; +import {SlashingProposer} from "@aztec/core/staking/SlashingProposer.sol"; +import {IPayload} from "@aztec/governance/interfaces/IPayload.sol"; + +contract Slasher is ISlasher { + SlashingProposer public immutable PROPOSER; + + event SlashFailed(address target, bytes data, bytes returnData); + + error Slasher__CallerNotProposer(address caller, address proposer); // 0x44c1f74f + + constructor(uint256 _n, uint256 _m) { + PROPOSER = new SlashingProposer(msg.sender, this, _n, _m); + } + + function slash(IPayload _payload) external override(ISlasher) returns (bool) { + require( + msg.sender == address(PROPOSER), Slasher__CallerNotProposer(msg.sender, address(PROPOSER)) + ); + + IPayload.Action[] memory actions = _payload.getActions(); + + for (uint256 i = 0; i < actions.length; i++) { + // Allow failure of individual calls but emit the failure! + (bool success, bytes memory returnData) = actions[i].target.call(actions[i].data); + if (!success) { + emit SlashFailed(actions[i].target, actions[i].data, returnData); + } + } + + return true; + } +} diff --git a/l1-contracts/src/core/staking/SlashingProposer.sol b/l1-contracts/src/core/staking/SlashingProposer.sol new file mode 100644 index 00000000000..dfd445af937 --- /dev/null +++ b/l1-contracts/src/core/staking/SlashingProposer.sol @@ -0,0 +1,35 @@ +// SPDX-License-Identifier: Apache-2.0 +// Copyright 2024 Aztec Labs. 
+pragma solidity >=0.8.27; + +import {ISlasher} from "@aztec/core/interfaces/ISlasher.sol"; +import {IGovernanceProposer} from "@aztec/governance/interfaces/IGovernanceProposer.sol"; +import {IPayload} from "@aztec/governance/interfaces/IPayload.sol"; +import {EmpireBase} from "@aztec/governance/proposer/EmpireBase.sol"; + +/** + * @notice A SlashingProposer implementation following the empire model + */ +contract SlashingProposer is IGovernanceProposer, EmpireBase { + address public immutable INSTANCE; + ISlasher public immutable SLASHER; + + constructor(address _instance, ISlasher _slasher, uint256 _slashingQuorum, uint256 _roundSize) + EmpireBase(_slashingQuorum, _roundSize) + { + INSTANCE = _instance; + SLASHER = _slasher; + } + + function getExecutor() public view override(EmpireBase, IGovernanceProposer) returns (address) { + return address(SLASHER); + } + + function getInstance() public view override(EmpireBase, IGovernanceProposer) returns (address) { + return INSTANCE; + } + + function _execute(IPayload _proposal) internal override(EmpireBase) returns (bool) { + return SLASHER.slash(_proposal); + } +} diff --git a/l1-contracts/src/core/staking/Staking.sol b/l1-contracts/src/core/staking/Staking.sol index 0d75e74e1c1..5e928f64c56 100644 --- a/l1-contracts/src/core/staking/Staking.sol +++ b/l1-contracts/src/core/staking/Staking.sol @@ -12,6 +12,7 @@ import { } from "@aztec/core/interfaces/IStaking.sol"; import {Errors} from "@aztec/core/libraries/Errors.sol"; import {Timestamp} from "@aztec/core/libraries/TimeMath.sol"; +import {Slasher} from "@aztec/core/staking/Slasher.sol"; import {IERC20} from "@oz/token/ERC20/IERC20.sol"; import {SafeERC20} from "@oz/token/ERC20/utils/SafeERC20.sol"; import {EnumerableSet} from "@oz/utils/structs/EnumerableSet.sol"; @@ -23,14 +24,19 @@ contract Staking is IStaking { // Constant pulled out of the ass Timestamp public constant EXIT_DELAY = Timestamp.wrap(60 * 60 * 24); - address public immutable SLASHER; + Slasher public immutable SLASHER; IERC20 public immutable STAKING_ASSET; uint256 public immutable MINIMUM_STAKE; StakingStorage internal stakingStore; - constructor(address _slasher, IERC20 _stakingAsset, uint256 _minimumStake) { - SLASHER = _slasher; + constructor( + IERC20 _stakingAsset, + uint256 _minimumStake, + uint256 _slashingQuorum, + uint256 _roundSize + ) { + SLASHER = new Slasher(_slashingQuorum, _roundSize); STAKING_ASSET = _stakingAsset; MINIMUM_STAKE = _minimumStake; } @@ -57,7 +63,9 @@ contract Staking is IStaking { } function slash(address _attester, uint256 _amount) external override(IStaking) { - require(msg.sender == SLASHER, Errors.Staking__NotSlasher(SLASHER, msg.sender)); + require( + msg.sender == address(SLASHER), Errors.Staking__NotSlasher(address(SLASHER), msg.sender) + ); ValidatorInfo storage validator = stakingStore.info[_attester]; require(validator.status != Status.NONE, Errors.Staking__NoOneToSlash(_attester)); diff --git a/l1-contracts/src/governance/CoinIssuer.sol b/l1-contracts/src/governance/CoinIssuer.sol index 37ac8f18b4d..33a0c06df0e 100644 --- a/l1-contracts/src/governance/CoinIssuer.sol +++ b/l1-contracts/src/governance/CoinIssuer.sol @@ -33,7 +33,7 @@ contract CoinIssuer is ICoinIssuer, Ownable { */ function mint(address _to, uint256 _amount) external override(ICoinIssuer) onlyOwner { uint256 maxMint = mintAvailable(); - require(_amount <= maxMint, Errors.CoinIssuer__InssuficientMintAvailable(maxMint, _amount)); + require(_amount <= maxMint, Errors.CoinIssuer__InsufficientMintAvailable(maxMint, 
_amount)); timeOfLastMint = block.timestamp; ASSET.mint(_to, _amount); } diff --git a/l1-contracts/src/governance/interfaces/IGovernanceProposer.sol b/l1-contracts/src/governance/interfaces/IGovernanceProposer.sol index 7539446a1de..52ac72e8d6f 100644 --- a/l1-contracts/src/governance/interfaces/IGovernanceProposer.sol +++ b/l1-contracts/src/governance/interfaces/IGovernanceProposer.sol @@ -3,19 +3,19 @@ pragma solidity >=0.8.27; import {Slot} from "@aztec/core/libraries/TimeMath.sol"; -import {IGovernance} from "@aztec/governance/interfaces/IGovernance.sol"; import {IPayload} from "@aztec/governance/interfaces/IPayload.sol"; interface IGovernanceProposer { event VoteCast(IPayload indexed proposal, uint256 indexed round, address indexed voter); - event ProposalPushed(IPayload indexed proposal, uint256 indexed round); + event ProposalExecuted(IPayload indexed proposal, uint256 indexed round); - function vote(IPayload _proposa) external returns (bool); - function pushProposal(uint256 _roundNumber) external returns (bool); + function vote(IPayload _proposal) external returns (bool); + function executeProposal(uint256 _roundNumber) external returns (bool); function yeaCount(address _instance, uint256 _round, IPayload _proposal) external view returns (uint256); function computeRound(Slot _slot) external view returns (uint256); - function getGovernance() external view returns (IGovernance); + function getInstance() external view returns (address); + function getExecutor() external view returns (address); } diff --git a/l1-contracts/src/governance/libraries/Errors.sol b/l1-contracts/src/governance/libraries/Errors.sol index fb835660287..b6654440e55 100644 --- a/l1-contracts/src/governance/libraries/Errors.sol +++ b/l1-contracts/src/governance/libraries/Errors.sol @@ -45,20 +45,20 @@ library Errors { error Governance__ProposalLib__ZeroYeaVotesNeeded(); error Governance__ProposalLib__MoreYeaVoteThanExistNeeded(); - error GovernanceProposer__CanOnlyPushProposalInPast(); // 0x49fdf611" - error GovernanceProposer__FailedToPropose(IPayload proposal); // 0x6ca2a2ed - error GovernanceProposer__InstanceHaveNoCode(address instance); // 0x20a3b441 - error GovernanceProposer__InsufficientVotes(); // 0xba1e05ef + error GovernanceProposer__CanOnlyExecuteProposalInPast(); // 0x8bf1d3b8 + error GovernanceProposer__FailedToPropose(IPayload proposal); // 0x8d94fbfc + error GovernanceProposer__InstanceHaveNoCode(address instance); // 0x5fa92625 + error GovernanceProposer__InsufficientVotes(uint256 votesCast, uint256 votesNeeded); // 0xd4ad89c2 error GovernanceProposer__InvalidNAndMValues(uint256 n, uint256 m); // 0x520d9704 error GovernanceProposer__NCannotBeLargerTHanM(uint256 n, uint256 m); // 0x2fdfc063 error GovernanceProposer__OnlyProposerCanVote(address caller, address proposer); // 0xba27df38 error GovernanceProposer__ProposalAlreadyExecuted(uint256 roundNumber); // 0x7aeacb17 - error GovernanceProposer__ProposalCannotBeAddressZero(); // 0xdb3e4b6e - error GovernanceProposer__ProposalHaveNoCode(IPayload proposal); // 0xdce0615b - error GovernanceProposer__ProposalTooOld(uint256 roundNumber, uint256 currentRoundNumber); //0x02283b1a - error GovernanceProposer__VoteAlreadyCastForSlot(Slot slot); //0xc2201452 + error GovernanceProposer__ProposalCannotBeAddressZero(); // 0x16ac1942 + error GovernanceProposer__ProposalHaveNoCode(IPayload proposal); // 0xb69440a1 + error GovernanceProposer__ProposalTooOld(uint256 roundNumber, uint256 currentRoundNumber); // 0xc3d7aa4f + error 
GovernanceProposer__VoteAlreadyCastForSlot(Slot slot); // 0x3a6150ca

-  error CoinIssuer__InssuficientMintAvailable(uint256 available, uint256 needed); // 0xf268b931
+  error CoinIssuer__InsufficientMintAvailable(uint256 available, uint256 needed); // 0xa1cc8799

   error Registry__RollupAlreadyRegistered(address rollup); // 0x3c34eabf
   error Registry__RollupNotRegistered(address rollup); // 0xa1fee4cf
diff --git a/l1-contracts/src/governance/GovernanceProposer.sol b/l1-contracts/src/governance/proposer/EmpireBase.sol
similarity index 81%
rename from l1-contracts/src/governance/GovernanceProposer.sol
rename to l1-contracts/src/governance/proposer/EmpireBase.sol
index 7e665ee3aa8..349fa7b880c 100644
--- a/l1-contracts/src/governance/GovernanceProposer.sol
+++ b/l1-contracts/src/governance/proposer/EmpireBase.sol
@@ -4,10 +4,8 @@ pragma solidity >=0.8.27;

 import {ILeonidas} from "@aztec/core/interfaces/ILeonidas.sol";
 import {Slot, SlotLib} from "@aztec/core/libraries/TimeMath.sol";
-import {IGovernance} from "@aztec/governance/interfaces/IGovernance.sol";
 import {IGovernanceProposer} from "@aztec/governance/interfaces/IGovernanceProposer.sol";
 import {IPayload} from "@aztec/governance/interfaces/IPayload.sol";
-import {IRegistry} from "@aztec/governance/interfaces/IRegistry.sol";
 import {Errors} from "@aztec/governance/libraries/Errors.sol";

 /**
@@ -17,7 +15,7 @@ import {Errors} from "@aztec/governance/libraries/Errors.sol";
  * This also means that the implementation here will need to be "updated" if
  * the interfaces of the sequencer selection changes, for example going optimistic.
  */
-contract GovernanceProposer is IGovernanceProposer {
+abstract contract EmpireBase is IGovernanceProposer {
   using SlotLib for Slot;

   struct RoundAccounting {
@@ -29,14 +27,12 @@ contract GovernanceProposer is IGovernanceProposer {

   uint256 public constant LIFETIME_IN_ROUNDS = 5;

-  IRegistry public immutable REGISTRY;
   uint256 public immutable N;
   uint256 public immutable M;

   mapping(address instance => mapping(uint256 roundNumber => RoundAccounting)) public rounds;

-  constructor(IRegistry _registry, uint256 _n, uint256 _m) {
-    REGISTRY = _registry;
+  constructor(uint256 _n, uint256 _m) {
     N = _n;
     M = _m;

@@ -57,11 +53,12 @@ contract GovernanceProposer is IGovernanceProposer {
    * @return True if executed successfully, false otherwise
    */
   function vote(IPayload _proposal) external override(IGovernanceProposer) returns (bool) {
-    require(
+    // For now, skipping this as the check is not really needed but there was not full agreement
+    /*require(
       address(_proposal).code.length > 0, Errors.GovernanceProposer__ProposalHaveNoCode(_proposal)
-    );
+    );*/

-    address instance = REGISTRY.getRollup();
+    address instance = getInstance();
     require(instance.code.length > 0, Errors.GovernanceProposer__InstanceHaveNoCode(instance));

     ILeonidas selection = ILeonidas(instance);
@@ -94,22 +91,26 @@ contract GovernanceProposer is IGovernanceProposer {
   }

   /**
-   * @notice Push the proposal to the appela
+   * @notice Executes the proposal using the `_execute` function
    *
    * @param _roundNumber - The round number to execute
    *
    * @return True if executed successfully, false otherwise
    */
-  function pushProposal(uint256 _roundNumber) external override(IGovernanceProposer) returns (bool) {
+  function executeProposal(uint256 _roundNumber)
+    external
+    override(IGovernanceProposer)
+    returns (bool)
+  {
     // Need to ensure that the round is not active.
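+    // Illustrative round math, with hypothetical values not taken from any deployment:
+    // if M = 10 slots per round and N = 6 votes, computeRound(Slot.wrap(25)) = 25 / 10 = round 2,
+    // so a proposal leading round 1 becomes executable once the current slot reaches 20,
+    // provided it collected at least 6 yea votes while round 1 was live (and the round
+    // is at most LIFETIME_IN_ROUNDS behind the current one).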
-    address instance = REGISTRY.getRollup();
+    address instance = getInstance();
     require(instance.code.length > 0, Errors.GovernanceProposer__InstanceHaveNoCode(instance));

     ILeonidas selection = ILeonidas(instance);
     Slot currentSlot = selection.getCurrentSlot();
     uint256 currentRound = computeRound(currentSlot);

-    require(_roundNumber < currentRound, Errors.GovernanceProposer__CanOnlyPushProposalInPast());
+    require(_roundNumber < currentRound, Errors.GovernanceProposer__CanOnlyExecuteProposalInPast());
     require(
       _roundNumber + LIFETIME_IN_ROUNDS >= currentRound,
       Errors.GovernanceProposer__ProposalTooOld(_roundNumber, currentRound)
@@ -120,16 +121,14 @@
     require(
       round.leader != IPayload(address(0)), Errors.GovernanceProposer__ProposalCannotBeAddressZero()
     );
-    require(round.yeaCount[round.leader] >= N, Errors.GovernanceProposer__InsufficientVotes());
+    uint256 votesCast = round.yeaCount[round.leader];
+    require(votesCast >= N, Errors.GovernanceProposer__InsufficientVotes(votesCast, N));

     round.executed = true;

-    emit ProposalPushed(round.leader, _roundNumber);
+    emit ProposalExecuted(round.leader, _roundNumber);

-    require(
-      getGovernance().propose(round.leader),
-      Errors.GovernanceProposer__FailedToPropose(round.leader)
-    );
+    require(_execute(round.leader), Errors.GovernanceProposer__FailedToPropose(round.leader));
     return true;
   }

@@ -162,7 +161,8 @@
     return _slot.unwrap() / M;
   }

-  function getGovernance() public view override(IGovernanceProposer) returns (IGovernance) {
-    return IGovernance(REGISTRY.getGovernance());
-  }
+  // Virtual functions
+  function getInstance() public view virtual override(IGovernanceProposer) returns (address);
+  function getExecutor() public view virtual override(IGovernanceProposer) returns (address);
+  function _execute(IPayload _proposal) internal virtual returns (bool);
 }
diff --git a/l1-contracts/src/governance/proposer/GovernanceProposer.sol b/l1-contracts/src/governance/proposer/GovernanceProposer.sol
new file mode 100644
index 00000000000..734a42172e5
--- /dev/null
+++ b/l1-contracts/src/governance/proposer/GovernanceProposer.sol
@@ -0,0 +1,36 @@
+// SPDX-License-Identifier: Apache-2.0
+// Copyright 2024 Aztec Labs.
+pragma solidity >=0.8.27;
+
+import {IGovernance} from "@aztec/governance/interfaces/IGovernance.sol";
+import {IGovernanceProposer} from "@aztec/governance/interfaces/IGovernanceProposer.sol";
+import {IPayload} from "@aztec/governance/interfaces/IPayload.sol";
+import {IRegistry} from "@aztec/governance/interfaces/IRegistry.sol";
+import {EmpireBase} from "./EmpireBase.sol";
+
+/**
+ * @notice A GovernanceProposer implementation following the empire model.
+ * Beware that while governance generally does not care about the implementation,
+ * this implementation will, since it is dependent on the sequencer selection.
+ * This also means that the implementation here will need to be "updated" if
+ * the interfaces of the sequencer selection change, for example going optimistic.
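+ *
+ * In the Empire split, the shared voting and round logic lives in EmpireBase, and a
+ * concrete proposer only binds three hooks: getInstance() (where the current proposer
+ * is read from), getExecutor() (who receives the winning payload) and _execute() (what
+ * is done with it). Here these resolve to the registry's rollup, the governance, and
+ * Governance.propose; SlashingProposer instead binds them to a fixed rollup instance
+ * and Slasher.slash.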
+ */ +contract GovernanceProposer is IGovernanceProposer, EmpireBase { + IRegistry public immutable REGISTRY; + + constructor(IRegistry _registry, uint256 _n, uint256 _m) EmpireBase(_n, _m) { + REGISTRY = _registry; + } + + function getExecutor() public view override(EmpireBase, IGovernanceProposer) returns (address) { + return REGISTRY.getGovernance(); + } + + function getInstance() public view override(EmpireBase, IGovernanceProposer) returns (address) { + return REGISTRY.getRollup(); + } + + function _execute(IPayload _proposal) internal override(EmpireBase) returns (bool) { + return IGovernance(getExecutor()).propose(_proposal); + } +} diff --git a/l1-contracts/src/periphery/SlashFactory.sol b/l1-contracts/src/periphery/SlashFactory.sol new file mode 100644 index 00000000000..14904c1f62e --- /dev/null +++ b/l1-contracts/src/periphery/SlashFactory.sol @@ -0,0 +1,72 @@ +// SPDX-License-Identifier: Apache-2.0 +// Copyright 2024 Aztec Labs. +pragma solidity >=0.8.27; + +import {ILeonidas} from "@aztec/core/interfaces/ILeonidas.sol"; +import {Epoch} from "@aztec/core/libraries/TimeMath.sol"; +import {IPayload} from "@aztec/governance/interfaces/IPayload.sol"; +import {ISlashFactory} from "./interfaces/ISlashFactory.sol"; +import {SlashPayload} from "./SlashPayload.sol"; + +contract SlashFactory is ISlashFactory { + ILeonidas public immutable LEONIDAS; + + constructor(ILeonidas _leonidas) { + LEONIDAS = _leonidas; + } + + function createSlashPayload(Epoch _epoch, uint256 _amount) + external + override(ISlashFactory) + returns (IPayload) + { + (address predictedAddress, bool isDeployed) = getAddressAndIsDeployed(_epoch, _amount); + + if (isDeployed) { + return IPayload(predictedAddress); + } + + SlashPayload payload = + new SlashPayload{salt: bytes32(Epoch.unwrap(_epoch))}(_epoch, LEONIDAS, _amount); + + emit SlashPayloadCreated(address(payload), _epoch, _amount); + return IPayload(address(payload)); + } + + function getAddressAndIsDeployed(Epoch _epoch, uint256 _amount) + public + view + override(ISlashFactory) + returns (address, bool) + { + address predictedAddress = _computeSlashPayloadAddress(_epoch, _amount); + bool isDeployed = predictedAddress.code.length > 0; + return (predictedAddress, isDeployed); + } + + function _computeSlashPayloadAddress(Epoch _epoch, uint256 _amount) + internal + view + returns (address) + { + bytes32 salt = bytes32(Epoch.unwrap(_epoch)); + return address( + uint160( + uint256( + keccak256( + abi.encodePacked( + bytes1(0xff), + address(this), + salt, + keccak256( + abi.encodePacked( + type(SlashPayload).creationCode, abi.encode(_epoch, LEONIDAS, _amount) + ) + ) + ) + ) + ) + ) + ); + } +} diff --git a/l1-contracts/src/periphery/SlashPayload.sol b/l1-contracts/src/periphery/SlashPayload.sol new file mode 100644 index 00000000000..4410cfe0ae9 --- /dev/null +++ b/l1-contracts/src/periphery/SlashPayload.sol @@ -0,0 +1,37 @@ +// SPDX-License-Identifier: Apache-2.0 +// Copyright 2024 Aztec Labs. +pragma solidity >=0.8.27; + +import {ILeonidas} from "@aztec/core/interfaces/ILeonidas.sol"; +import {IStaking} from "@aztec/core/interfaces/IStaking.sol"; +import {Epoch} from "@aztec/core/libraries/TimeMath.sol"; +import {IPayload} from "@aztec/governance/interfaces/IPayload.sol"; + +/** + * @notice The simplest payload that you can find, slash all attesters for an epoch. 
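+ *
+ * Deployed by the SlashFactory via CREATE2 with the epoch as salt, so the address of
+ * the payload for a given (epoch, amount) pair can be predicted before it is deployed
+ * and voted on ahead of time. getActions() encodes one IStaking.slash call per
+ * attester in the epoch's committee.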
+ */ +contract SlashPayload is IPayload { + Epoch public immutable EPOCH; + ILeonidas public immutable LEONIDAS; + uint256 public immutable AMOUNT; + + constructor(Epoch _epoch, ILeonidas _leonidas, uint256 _amount) { + EPOCH = _epoch; + LEONIDAS = _leonidas; + AMOUNT = _amount; + } + + function getActions() external view override(IPayload) returns (IPayload.Action[] memory) { + address[] memory attesters = ILeonidas(LEONIDAS).getEpochCommittee(EPOCH); + IPayload.Action[] memory actions = new IPayload.Action[](attesters.length); + + for (uint256 i = 0; i < attesters.length; i++) { + actions[i] = IPayload.Action({ + target: address(LEONIDAS), + data: abi.encodeWithSelector(IStaking.slash.selector, attesters[i], AMOUNT) + }); + } + + return actions; + } +} diff --git a/l1-contracts/src/periphery/interfaces/ISlashFactory.sol b/l1-contracts/src/periphery/interfaces/ISlashFactory.sol new file mode 100644 index 00000000000..7300cfbba84 --- /dev/null +++ b/l1-contracts/src/periphery/interfaces/ISlashFactory.sol @@ -0,0 +1,18 @@ +// SPDX-License-Identifier: Apache-2.0 +// Copyright 2024 Aztec Labs. +pragma solidity >=0.8.27; + +import {Epoch} from "@aztec/core/libraries/TimeMath.sol"; +import {IPayload} from "@aztec/governance/interfaces/IPayload.sol"; + +interface ISlashFactory { + event SlashPayloadCreated( + address indexed payloadAddress, Epoch indexed epoch, uint256 indexed amount + ); + + function createSlashPayload(Epoch _epoch, uint256 _amount) external returns (IPayload); + function getAddressAndIsDeployed(Epoch _epoch, uint256 _amount) + external + view + returns (address, bool); +} diff --git a/l1-contracts/terraform/main.tf b/l1-contracts/terraform/main.tf index d619a827877..a9b9b4a3faa 100644 --- a/l1-contracts/terraform/main.tf +++ b/l1-contracts/terraform/main.tf @@ -109,4 +109,13 @@ variable "GOVERNANCE_CONTRACT_ADDRESS" { output "GOVERNANCE_CONTRACT_ADDRESS" { value = var.GOVERNANCE_CONTRACT_ADDRESS -} \ No newline at end of file +} + +variable "SLASH_FACTORY_CONTRACT_ADDRESS" { + type = string + default = "" +} + +output "SLASH_FACTORY_CONTRACT_ADDRESS" { + value = var.SLASH_FACTORY_CONTRACT_ADDRESS +} diff --git a/l1-contracts/test/Rollup.t.sol b/l1-contracts/test/Rollup.t.sol index 0164b72a770..f060e39bb5f 100644 --- a/l1-contracts/test/Rollup.t.sol +++ b/l1-contracts/test/Rollup.t.sol @@ -75,9 +75,10 @@ contract RollupTest is DecoderBase, TimeFns { testERC20 = new TestERC20("test", "TEST", address(this)); leo = new Leonidas( - address(1), testERC20, TestConstants.AZTEC_MINIMUM_STAKE, + TestConstants.AZTEC_SLASHING_QUORUM, + TestConstants.AZTEC_SLASHING_ROUND_SIZE, TestConstants.AZTEC_SLOT_DURATION, TestConstants.AZTEC_EPOCH_DURATION, TestConstants.AZTEC_TARGET_COMMITTEE_SIZE diff --git a/l1-contracts/test/fees/FeeRollup.t.sol b/l1-contracts/test/fees/FeeRollup.t.sol index 4463ea51cfb..b01f373bd9a 100644 --- a/l1-contracts/test/fees/FeeRollup.t.sol +++ b/l1-contracts/test/fees/FeeRollup.t.sol @@ -131,7 +131,9 @@ contract FeeRollupTest is FeeModelTestPoints, DecoderBase { aztecEpochDuration: EPOCH_DURATION, targetCommitteeSize: 48, aztecEpochProofClaimWindowInL2Slots: 16, - minimumStake: 100 ether + minimumStake: TestConstants.AZTEC_MINIMUM_STAKE, + slashingQuorum: TestConstants.AZTEC_SLASHING_QUORUM, + slashingRoundSize: TestConstants.AZTEC_SLASHING_ROUND_SIZE }) ); fakeCanonical.setCanonicalRollup(address(rollup)); diff --git a/l1-contracts/test/governance/coin-issuer/mint.t.sol b/l1-contracts/test/governance/coin-issuer/mint.t.sol index 29304fbb589..229d2f52d91 100644 --- 
a/l1-contracts/test/governance/coin-issuer/mint.t.sol +++ b/l1-contracts/test/governance/coin-issuer/mint.t.sol @@ -35,7 +35,7 @@ contract MintTest is CoinIssuerBase { // it reverts uint256 amount = bound(_amount, maxMint + 1, type(uint256).max); vm.expectRevert( - abi.encodeWithSelector(Errors.CoinIssuer__InssuficientMintAvailable.selector, maxMint, amount) + abi.encodeWithSelector(Errors.CoinIssuer__InsufficientMintAvailable.selector, maxMint, amount) ); nom.mint(address(0xdead), amount); } diff --git a/l1-contracts/test/governance/governance-proposer/Base.t.sol b/l1-contracts/test/governance/governance-proposer/Base.t.sol index 1f720911438..bbfd7b54828 100644 --- a/l1-contracts/test/governance/governance-proposer/Base.t.sol +++ b/l1-contracts/test/governance/governance-proposer/Base.t.sol @@ -4,7 +4,7 @@ pragma solidity >=0.8.27; import {Test} from "forge-std/Test.sol"; import {Registry} from "@aztec/governance/Registry.sol"; -import {GovernanceProposer} from "@aztec/governance/GovernanceProposer.sol"; +import {GovernanceProposer} from "@aztec/governance/proposer/GovernanceProposer.sol"; import {IPayload} from "@aztec/governance/interfaces/IPayload.sol"; diff --git a/l1-contracts/test/governance/governance-proposer/constructor.t.sol b/l1-contracts/test/governance/governance-proposer/constructor.t.sol index f32b8aefa59..327ef727701 100644 --- a/l1-contracts/test/governance/governance-proposer/constructor.t.sol +++ b/l1-contracts/test/governance/governance-proposer/constructor.t.sol @@ -2,12 +2,22 @@ pragma solidity >=0.8.27; import {Test} from "forge-std/Test.sol"; -import {GovernanceProposer} from "@aztec/governance/GovernanceProposer.sol"; +import {GovernanceProposer} from "@aztec/governance/proposer/GovernanceProposer.sol"; import {Errors} from "@aztec/governance/libraries/Errors.sol"; import {IRegistry} from "@aztec/governance/interfaces/IRegistry.sol"; +contract FakeRegistry { + function getGovernance() external pure returns (address) { + return address(0x01); + } + + function getRollup() external pure returns (address) { + return address(0x02); + } +} + contract ConstructorTest is Test { - IRegistry internal constant REGISTRY = IRegistry(address(0x02)); + IRegistry internal REGISTRY = IRegistry(address(new FakeRegistry())); function test_WhenNIsLessThanOrEqualHalfOfM(uint256 _n, uint256 _m) external { // it revert @@ -42,5 +52,7 @@ contract ConstructorTest is Test { assertEq(address(g.REGISTRY()), address(REGISTRY)); assertEq(g.N(), n); assertEq(g.M(), m); + assertEq(g.getExecutor(), address(REGISTRY.getGovernance()), "executor"); + assertEq(g.getInstance(), address(REGISTRY.getRollup()), "instance"); } } diff --git a/l1-contracts/test/governance/governance-proposer/pushProposal.t.sol b/l1-contracts/test/governance/governance-proposer/executeProposal.t.sol similarity index 87% rename from l1-contracts/test/governance/governance-proposer/pushProposal.t.sol rename to l1-contracts/test/governance/governance-proposer/executeProposal.t.sol index 26de9a2f343..db7bd38c352 100644 --- a/l1-contracts/test/governance/governance-proposer/pushProposal.t.sol +++ b/l1-contracts/test/governance/governance-proposer/executeProposal.t.sol @@ -11,7 +11,7 @@ import {Slot, SlotLib, Timestamp} from "@aztec/core/libraries/TimeMath.sol"; import {FaultyGovernance} from "./mocks/FaultyGovernance.sol"; import {FalsyGovernance} from "./mocks/FalsyGovernance.sol"; -contract PushProposalTest is GovernanceProposerBase { +contract ExecuteProposalTest is GovernanceProposerBase { using SlotLib for Slot; Leonidas 
internal leonidas; @@ -26,11 +26,11 @@ contract PushProposalTest is GovernanceProposerBase { Errors.GovernanceProposer__InstanceHaveNoCode.selector, address(0xdead) ) ); - governanceProposer.pushProposal(_roundNumber); + governanceProposer.executeProposal(_roundNumber); } modifier givenCanonicalInstanceHoldCode() { - leonidas = new Leonidas(address(this)); + leonidas = new Leonidas(); vm.prank(registry.getGovernance()); registry.upgrade(address(leonidas)); @@ -42,9 +42,9 @@ contract PushProposalTest is GovernanceProposerBase { function test_WhenRoundNotInPast() external givenCanonicalInstanceHoldCode { // it revert vm.expectRevert( - abi.encodeWithSelector(Errors.GovernanceProposer__CanOnlyPushProposalInPast.selector) + abi.encodeWithSelector(Errors.GovernanceProposer__CanOnlyExecuteProposalInPast.selector) ); - governanceProposer.pushProposal(0); + governanceProposer.executeProposal(0); } modifier whenRoundInPast() { @@ -74,7 +74,7 @@ contract PushProposalTest is GovernanceProposerBase { governanceProposer.computeRound(leonidas.getCurrentSlot()) ) ); - governanceProposer.pushProposal(0); + governanceProposer.executeProposal(0); } modifier whenRoundInRecentPast() { @@ -105,13 +105,13 @@ contract PushProposalTest is GovernanceProposerBase { ) ) ); - governanceProposer.pushProposal(1); + governanceProposer.executeProposal(1); } vm.expectRevert( abi.encodeWithSelector(Errors.GovernanceProposer__ProposalAlreadyExecuted.selector, 1) ); - governanceProposer.pushProposal(1); + governanceProposer.executeProposal(1); } modifier givenRoundNotExecutedBefore() { @@ -144,7 +144,7 @@ contract PushProposalTest is GovernanceProposerBase { vm.expectRevert( abi.encodeWithSelector(Errors.GovernanceProposer__ProposalCannotBeAddressZero.selector) ); - governanceProposer.pushProposal(0); + governanceProposer.executeProposal(0); } modifier givenLeaderIsNotAddress0() { @@ -164,13 +164,17 @@ contract PushProposalTest is GovernanceProposerBase { vm.prank(proposer); governanceProposer.vote(proposal); + uint256 votesNeeded = governanceProposer.N(); + vm.warp( Timestamp.unwrap( leonidas.getTimestampForSlot(leonidas.getCurrentSlot() + Slot.wrap(governanceProposer.M())) ) ); - vm.expectRevert(abi.encodeWithSelector(Errors.GovernanceProposer__InsufficientVotes.selector)); - governanceProposer.pushProposal(1); + vm.expectRevert( + abi.encodeWithSelector(Errors.GovernanceProposer__InsufficientVotes.selector, 1, votesNeeded) + ); + governanceProposer.executeProposal(1); } modifier givenSufficientYea(uint256 _yeas) { @@ -204,7 +208,7 @@ contract PushProposalTest is GovernanceProposerBase { // it revert // When using a new registry we change the governanceProposer's interpetation of time :O - Leonidas freshInstance = new Leonidas(address(this)); + Leonidas freshInstance = new Leonidas(); vm.prank(registry.getGovernance()); registry.upgrade(address(freshInstance)); @@ -215,9 +219,9 @@ contract PushProposalTest is GovernanceProposerBase { // As time is perceived differently, round 1 is currently in the future vm.expectRevert( - abi.encodeWithSelector(Errors.GovernanceProposer__CanOnlyPushProposalInPast.selector) + abi.encodeWithSelector(Errors.GovernanceProposer__CanOnlyExecuteProposalInPast.selector) ); - governanceProposer.pushProposal(1); + governanceProposer.executeProposal(1); // Jump 2 rounds, since we are currently in round 0 vm.warp( @@ -230,7 +234,7 @@ contract PushProposalTest is GovernanceProposerBase { vm.expectRevert( abi.encodeWithSelector(Errors.GovernanceProposer__ProposalCannotBeAddressZero.selector) ); - 
governanceProposer.pushProposal(1);
+    governanceProposer.executeProposal(1);
   }

   function test_GivenGovernanceCallReturnFalse(uint256 _yeas)
@@ -249,7 +253,7 @@
     vm.expectRevert(
       abi.encodeWithSelector(Errors.GovernanceProposer__FailedToPropose.selector, proposal)
     );
-    governanceProposer.pushProposal(1);
+    governanceProposer.executeProposal(1);
   }

   function test_GivenGovernanceCallFails(uint256 _yeas)
@@ -266,7 +270,7 @@
     vm.etch(address(governance), address(faulty).code);

     vm.expectRevert(abi.encodeWithSelector(FaultyGovernance.Faulty.selector));
-    governanceProposer.pushProposal(1);
+    governanceProposer.executeProposal(1);
   }

   function test_GivenGovernanceCallSucceeds(uint256 _yeas)
@@ -279,11 +283,11 @@
     givenSufficientYea(_yeas)
   {
     // it update executed to true
-    // it emits {ProposalPushed} event
+    // it emits {ProposalExecuted} event
     // it return true
     vm.expectEmit(true, true, true, true, address(governanceProposer));
-    emit IGovernanceProposer.ProposalPushed(proposal, 1);
-    assertTrue(governanceProposer.pushProposal(1));
+    emit IGovernanceProposer.ProposalExecuted(proposal, 1);
+    assertTrue(governanceProposer.executeProposal(1));
     (, IPayload leader, bool executed) = governanceProposer.rounds(address(leonidas), 1);
     assertTrue(executed);
     assertEq(address(leader), address(proposal));
diff --git a/l1-contracts/test/governance/governance-proposer/pushProposal.tree b/l1-contracts/test/governance/governance-proposer/executeProposal.tree
similarity index 93%
rename from l1-contracts/test/governance/governance-proposer/pushProposal.tree
rename to l1-contracts/test/governance/governance-proposer/executeProposal.tree
index 2188b588fde..9b2acb0f3fd 100644
--- a/l1-contracts/test/governance/governance-proposer/pushProposal.tree
+++ b/l1-contracts/test/governance/governance-proposer/executeProposal.tree
@@ -1,4 +1,4 @@
-PushProposalTest
+ExecuteProposalTest
 ├── given canonical instance hold no code
 │   └── it revert
 └── given canonical instance hold code
@@ -25,5 +25,5 @@ PushProposalTest
 │   └── it revert
 └── given governance call succeeds
     ├── it update executed to true
-    ├── it emits {ProposalPushed} event
+    ├── it emits {ProposalExecuted} event
     └── it return true
\ No newline at end of file
diff --git a/l1-contracts/test/governance/governance-proposer/vote.t.sol b/l1-contracts/test/governance/governance-proposer/vote.t.sol
index f78f9f009e0..91c28363912 100644
--- a/l1-contracts/test/governance/governance-proposer/vote.t.sol
+++ b/l1-contracts/test/governance/governance-proposer/vote.t.sol
@@ -15,7 +15,8 @@ contract VoteTest is GovernanceProposerBase {
   address internal proposer = address(0);
   Leonidas internal leonidas;

-  function test_WhenProposalHoldNoCode() external {
+  // Skipping this test since it matches the check in `EmpireBase::vote` that is skipped for now
+  function skip__test_WhenProposalHoldNoCode() external {
     // it revert
     vm.expectRevert(
       abi.encodeWithSelector(Errors.GovernanceProposer__ProposalHaveNoCode.selector, proposal)
@@ -39,7 +40,7 @@
   }

   modifier givenCanonicalRollupHoldCode() {
-    leonidas = new Leonidas(address(this));
+    leonidas = new Leonidas();
     vm.prank(registry.getGovernance());
     registry.upgrade(address(leonidas));

@@ -138,7 +139,7 @@
     uint256 leonidasRound = governanceProposer.computeRound(leonidasSlot);

     uint256 yeaBefore =
governanceProposer.yeaCount(address(leonidas), leonidasRound, proposal); - Leonidas freshInstance = new Leonidas(address(this)); + Leonidas freshInstance = new Leonidas(); vm.prank(registry.getGovernance()); registry.upgrade(address(freshInstance)); diff --git a/l1-contracts/test/governance/governance/base.t.sol b/l1-contracts/test/governance/governance/base.t.sol index cc5a9878a06..05a125f10ff 100644 --- a/l1-contracts/test/governance/governance/base.t.sol +++ b/l1-contracts/test/governance/governance/base.t.sol @@ -3,7 +3,7 @@ pragma solidity >=0.8.27; import {TestBase} from "@test/base/Base.sol"; import {Governance} from "@aztec/governance/Governance.sol"; -import {GovernanceProposer} from "@aztec/governance/GovernanceProposer.sol"; +import {GovernanceProposer} from "@aztec/governance/proposer/GovernanceProposer.sol"; import {Registry} from "@aztec/governance/Registry.sol"; import {DataStructures} from "@aztec/governance/libraries/DataStructures.sol"; import {IMintableERC20} from "@aztec/governance/interfaces/IMintableERC20.sol"; diff --git a/l1-contracts/test/governance/scenario/NewGovernanceProposerPayload.sol b/l1-contracts/test/governance/scenario/NewGovernanceProposerPayload.sol index 613dc7006b4..a4cb726dc2e 100644 --- a/l1-contracts/test/governance/scenario/NewGovernanceProposerPayload.sol +++ b/l1-contracts/test/governance/scenario/NewGovernanceProposerPayload.sol @@ -4,7 +4,7 @@ pragma solidity >=0.8.27; import {IPayload} from "@aztec/governance/interfaces/IPayload.sol"; import {IRegistry} from "@aztec/governance/interfaces/IRegistry.sol"; import {Governance} from "@aztec/governance/Governance.sol"; -import {GovernanceProposer} from "@aztec/governance/GovernanceProposer.sol"; +import {GovernanceProposer} from "@aztec/governance/proposer/GovernanceProposer.sol"; /** * @title NewGovernanceProposerPayload diff --git a/l1-contracts/test/governance/scenario/UpgradeGovernanceProposerTest.t.sol b/l1-contracts/test/governance/scenario/UpgradeGovernanceProposerTest.t.sol index 8504653da17..f5fd35a9a34 100644 --- a/l1-contracts/test/governance/scenario/UpgradeGovernanceProposerTest.t.sol +++ b/l1-contracts/test/governance/scenario/UpgradeGovernanceProposerTest.t.sol @@ -6,7 +6,7 @@ import {TestBase} from "@test/base/Base.sol"; import {IMintableERC20} from "@aztec/governance/interfaces/IMintableERC20.sol"; import {Rollup} from "../../harnesses/Rollup.sol"; import {Governance} from "@aztec/governance/Governance.sol"; -import {GovernanceProposer} from "@aztec/governance/GovernanceProposer.sol"; +import {GovernanceProposer} from "@aztec/governance/proposer/GovernanceProposer.sol"; import {Registry} from "@aztec/governance/Registry.sol"; import {DataStructures} from "@aztec/governance/libraries/DataStructures.sol"; import {IMintableERC20} from "@aztec/governance/interfaces/IMintableERC20.sol"; @@ -92,7 +92,7 @@ contract UpgradeGovernanceProposerTest is TestBase { vm.warp(Timestamp.unwrap(rollup.getTimestampForSlot(rollup.getCurrentSlot() + Slot.wrap(1)))); } - governanceProposer.pushProposal(0); + governanceProposer.executeProposal(0); proposal = governance.getProposal(0); assertEq(address(proposal.payload), address(payload)); diff --git a/l1-contracts/test/governance/scenario/slashing/Slashing.t.sol b/l1-contracts/test/governance/scenario/slashing/Slashing.t.sol new file mode 100644 index 00000000000..e2a40cec054 --- /dev/null +++ b/l1-contracts/test/governance/scenario/slashing/Slashing.t.sol @@ -0,0 +1,121 @@ +// SPDX-License-Identifier: UNLICENSED +pragma solidity >=0.8.27; + +import 
{TestBase} from "@test/base/Base.sol";
+
+import {Errors} from "@aztec/core/libraries/Errors.sol";
+import {Registry} from "@aztec/governance/Registry.sol";
+import {Rollup, Config} from "@aztec/core/Rollup.sol";
+import {TestERC20} from "@aztec/mock/TestERC20.sol";
+import {MockFeeJuicePortal} from "@aztec/mock/MockFeeJuicePortal.sol";
+import {TestConstants} from "../../../harnesses/TestConstants.sol";
+import {CheatDepositArgs} from "@aztec/core/interfaces/IRollup.sol";
+
+import {RewardDistributor} from "@aztec/governance/RewardDistributor.sol";
+
+import {SlashFactory} from "@aztec/periphery/SlashFactory.sol";
+import {Slasher, IPayload} from "@aztec/core/staking/Slasher.sol";
+import {ILeonidas} from "@aztec/core/interfaces/ILeonidas.sol";
+import {Status, ValidatorInfo} from "@aztec/core/interfaces/IStaking.sol";
+
+import {Errors} from "@aztec/core/libraries/Errors.sol";
+import {Timestamp} from "@aztec/core/libraries/TimeMath.sol";
+
+import {CheatDepositArgs} from "@aztec/core/interfaces/IRollup.sol";
+import {SlashingProposer} from "@aztec/core/staking/SlashingProposer.sol";
+
+import {Slot, SlotLib, Epoch} from "@aztec/core/libraries/TimeMath.sol";
+
+contract SlashingScenario is TestBase {
+  using SlotLib for Slot;
+
+  TestERC20 internal testERC20;
+  RewardDistributor internal rewardDistributor;
+  Rollup internal rollup;
+  Slasher internal slasher;
+  SlashFactory internal slashFactory;
+  SlashingProposer internal slashingProposer;
+
+  function test_Slashing() public {
+    uint256 validatorCount = 4;
+
+    CheatDepositArgs[] memory initialValidators = new CheatDepositArgs[](validatorCount);
+
+    for (uint256 i = 1; i < validatorCount + 1; i++) {
+      uint256 attesterPrivateKey = uint256(keccak256(abi.encode("attester", i)));
+      address attester = vm.addr(attesterPrivateKey);
+      uint256 proposerPrivateKey = uint256(keccak256(abi.encode("proposer", i)));
+      address proposer = vm.addr(proposerPrivateKey);
+
+      initialValidators[i - 1] = CheatDepositArgs({
+        attester: attester,
+        proposer: proposer,
+        withdrawer: address(this),
+        amount: TestConstants.AZTEC_MINIMUM_STAKE
+      });
+    }
+
+    testERC20 = new TestERC20("test", "TEST", address(this));
+    Registry registry = new Registry(address(this));
+    rewardDistributor = new RewardDistributor(testERC20, registry, address(this));
+    rollup = new Rollup({
+      _fpcJuicePortal: new MockFeeJuicePortal(),
+      _rewardDistributor: rewardDistributor,
+      _stakingAsset: testERC20,
+      _vkTreeRoot: bytes32(0),
+      _protocolContractTreeRoot: bytes32(0),
+      _ares: address(this),
+      _config: Config({
+        aztecSlotDuration: TestConstants.AZTEC_SLOT_DURATION,
+        aztecEpochDuration: TestConstants.AZTEC_EPOCH_DURATION,
+        targetCommitteeSize: TestConstants.AZTEC_TARGET_COMMITTEE_SIZE,
+        aztecEpochProofClaimWindowInL2Slots: TestConstants.AZTEC_EPOCH_PROOF_CLAIM_WINDOW_IN_L2_SLOTS,
+        minimumStake: TestConstants.AZTEC_MINIMUM_STAKE,
+        slashingQuorum: TestConstants.AZTEC_SLASHING_QUORUM,
+        slashingRoundSize: TestConstants.AZTEC_SLASHING_ROUND_SIZE
+      })
+    });
+    slasher = rollup.SLASHER();
+    slashingProposer = slasher.PROPOSER();
+    slashFactory = new SlashFactory(ILeonidas(address(rollup)));
+
+    testERC20.mint(address(this), TestConstants.AZTEC_MINIMUM_STAKE * validatorCount);
+    testERC20.approve(address(rollup), TestConstants.AZTEC_MINIMUM_STAKE * validatorCount);
+    rollup.cheat__InitialiseValidatorSet(initialValidators);
+
+    // Let's make a proposal to slash!
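+    // The flow below: (1) deploy a deterministic SlashPayload through the factory,
+    // (2) have the current proposer vote for it once per slot until the slashing
+    // quorum is reached, and (3) once the round is in the past, execute it so the
+    // Slasher calls Staking.slash on every attester in epoch 0.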
+ + uint256 slashAmount = 10e18; + IPayload payload = slashFactory.createSlashPayload(Epoch.wrap(0), slashAmount); + + // Cast a bunch of votes + vm.warp(Timestamp.unwrap(rollup.getTimestampForSlot(Slot.wrap(1)))); + + for (uint256 i = 0; i < 10; i++) { + address proposer = rollup.getCurrentProposer(); + vm.prank(proposer); + slashingProposer.vote(payload); + vm.warp(Timestamp.unwrap(rollup.getTimestampForSlot(rollup.getCurrentSlot() + Slot.wrap(1)))); + } + + address[] memory attesters = rollup.getAttesters(); + uint256[] memory stakes = new uint256[](attesters.length); + + for (uint256 i = 0; i < attesters.length; i++) { + ValidatorInfo memory info = rollup.getInfo(attesters[i]); + stakes[i] = info.stake; + assertTrue(info.status == Status.VALIDATING, "Invalid status"); + } + + slashingProposer.executeProposal(0); + + // Make sure that the slash was successful, + // Meaning that validators are now LIVING and have lost the slash amount + for (uint256 i = 0; i < attesters.length; i++) { + ValidatorInfo memory info = rollup.getInfo(attesters[i]); + uint256 stake = info.stake; + assertEq(stake, stakes[i] - slashAmount, "Invalid stake"); + assertTrue(info.status == Status.LIVING, "Invalid status"); + } + } +} diff --git a/l1-contracts/test/harnesses/Leonidas.sol b/l1-contracts/test/harnesses/Leonidas.sol index a7c78f304b1..c52eb301589 100644 --- a/l1-contracts/test/harnesses/Leonidas.sol +++ b/l1-contracts/test/harnesses/Leonidas.sol @@ -7,11 +7,12 @@ import {TestConstants} from "./TestConstants.sol"; import {TestERC20} from "@aztec/mock/TestERC20.sol"; contract Leonidas is RealLeonidas { - constructor(address _ares) + constructor() RealLeonidas( - _ares, new TestERC20("test", "TEST", address(this)), 100e18, + TestConstants.AZTEC_SLASHING_QUORUM, + TestConstants.AZTEC_SLASHING_ROUND_SIZE, TestConstants.AZTEC_SLOT_DURATION, TestConstants.AZTEC_EPOCH_DURATION, TestConstants.AZTEC_TARGET_COMMITTEE_SIZE diff --git a/l1-contracts/test/harnesses/Rollup.sol b/l1-contracts/test/harnesses/Rollup.sol index 41d72b20de9..27d55a9913e 100644 --- a/l1-contracts/test/harnesses/Rollup.sol +++ b/l1-contracts/test/harnesses/Rollup.sol @@ -29,7 +29,9 @@ contract Rollup is RealRollup { aztecEpochDuration: TestConstants.AZTEC_EPOCH_DURATION, targetCommitteeSize: TestConstants.AZTEC_TARGET_COMMITTEE_SIZE, aztecEpochProofClaimWindowInL2Slots: TestConstants.AZTEC_EPOCH_PROOF_CLAIM_WINDOW_IN_L2_SLOTS, - minimumStake: TestConstants.AZTEC_MINIMUM_STAKE + minimumStake: TestConstants.AZTEC_MINIMUM_STAKE, + slashingQuorum: TestConstants.AZTEC_SLASHING_QUORUM, + slashingRoundSize: TestConstants.AZTEC_SLASHING_ROUND_SIZE }) ) {} diff --git a/l1-contracts/test/harnesses/TestConstants.sol b/l1-contracts/test/harnesses/TestConstants.sol index 371a2d8f594..aad8edd6db0 100644 --- a/l1-contracts/test/harnesses/TestConstants.sol +++ b/l1-contracts/test/harnesses/TestConstants.sol @@ -10,4 +10,6 @@ library TestConstants { uint256 internal constant AZTEC_TARGET_COMMITTEE_SIZE = 48; uint256 internal constant AZTEC_EPOCH_PROOF_CLAIM_WINDOW_IN_L2_SLOTS = 13; uint256 internal constant AZTEC_MINIMUM_STAKE = 100e18; + uint256 internal constant AZTEC_SLASHING_QUORUM = 6; + uint256 internal constant AZTEC_SLASHING_ROUND_SIZE = 10; } diff --git a/l1-contracts/test/sparta/Sparta.t.sol b/l1-contracts/test/sparta/Sparta.t.sol index 165dd9f7b4f..eccf797ee16 100644 --- a/l1-contracts/test/sparta/Sparta.t.sol +++ b/l1-contracts/test/sparta/Sparta.t.sol @@ -12,7 +12,7 @@ import {Inbox} from "@aztec/core/messagebridge/Inbox.sol"; import {Outbox} from 
"@aztec/core/messagebridge/Outbox.sol"; import {Errors} from "@aztec/core/libraries/Errors.sol"; import {Registry} from "@aztec/governance/Registry.sol"; -import {Rollup} from "../harnesses/Rollup.sol"; +import {Rollup, Config} from "@aztec/core/Rollup.sol"; import {Leonidas} from "@aztec/core/Leonidas.sol"; import {NaiveMerkle} from "../merkle/Naive.sol"; import {MerkleTestUtil} from "../merkle/TestUtil.sol"; @@ -27,6 +27,11 @@ import {CheatDepositArgs} from "@aztec/core/interfaces/IRollup.sol"; import {Slot, Epoch, SlotLib, EpochLib} from "@aztec/core/libraries/TimeMath.sol"; import {RewardDistributor} from "@aztec/governance/RewardDistributor.sol"; + +import {SlashFactory} from "@aztec/periphery/SlashFactory.sol"; +import {Slasher, IPayload} from "@aztec/core/staking/Slasher.sol"; +import {ILeonidas} from "@aztec/core/interfaces/ILeonidas.sol"; +import {Status, ValidatorInfo} from "@aztec/core/interfaces/IStaking.sol"; // solhint-disable comprehensive-interface /** @@ -44,6 +49,8 @@ contract SpartaTest is DecoderBase { bool shouldRevert; } + SlashFactory internal slashFactory; + Slasher internal slasher; Inbox internal inbox; Outbox internal outbox; Rollup internal rollup; @@ -64,9 +71,10 @@ contract SpartaTest is DecoderBase { string memory _name = "mixed_block_1"; { Leonidas leonidas = new Leonidas( - address(1), testERC20, TestConstants.AZTEC_MINIMUM_STAKE, + TestConstants.AZTEC_SLASHING_QUORUM, + TestConstants.AZTEC_SLASHING_ROUND_SIZE, TestConstants.AZTEC_SLOT_DURATION, TestConstants.AZTEC_EPOCH_DURATION, TestConstants.AZTEC_TARGET_COMMITTEE_SIZE @@ -102,9 +110,25 @@ contract SpartaTest is DecoderBase { testERC20 = new TestERC20("test", "TEST", address(this)); Registry registry = new Registry(address(this)); rewardDistributor = new RewardDistributor(testERC20, registry, address(this)); - rollup = new Rollup( - new MockFeeJuicePortal(), rewardDistributor, testERC20, bytes32(0), bytes32(0), address(this) - ); + rollup = new Rollup({ + _fpcJuicePortal: new MockFeeJuicePortal(), + _rewardDistributor: rewardDistributor, + _stakingAsset: testERC20, + _vkTreeRoot: bytes32(0), + _protocolContractTreeRoot: bytes32(0), + _ares: address(this), + _config: Config({ + aztecSlotDuration: TestConstants.AZTEC_SLOT_DURATION, + aztecEpochDuration: TestConstants.AZTEC_EPOCH_DURATION, + targetCommitteeSize: TestConstants.AZTEC_TARGET_COMMITTEE_SIZE, + aztecEpochProofClaimWindowInL2Slots: TestConstants.AZTEC_EPOCH_PROOF_CLAIM_WINDOW_IN_L2_SLOTS, + minimumStake: TestConstants.AZTEC_MINIMUM_STAKE, + slashingQuorum: TestConstants.AZTEC_SLASHING_QUORUM, + slashingRoundSize: TestConstants.AZTEC_SLASHING_ROUND_SIZE + }) + }); + slasher = rollup.SLASHER(); + slashFactory = new SlashFactory(ILeonidas(address(rollup))); testERC20.mint(address(this), TestConstants.AZTEC_MINIMUM_STAKE * _validatorCount); testERC20.approve(address(rollup), TestConstants.AZTEC_MINIMUM_STAKE * _validatorCount); @@ -180,6 +204,40 @@ contract SpartaTest is DecoderBase { _testBlock("mixed_block_2", false, 3, false); } + function testNukeFromOrbit() public setup(4) { + // We propose some blocks, and have a bunch of validators attest to them. + // Then we slash EVERYONE that was in the committees because the epoch never + // got finalised. + // This is LIKELY, not the action you really want to take, you want to slash + // the people actually attesting, etc, but for simplicity we can do this as showcase. 
+ _testBlock("mixed_block_1", false, 3, false); + _testBlock("mixed_block_2", false, 3, false); + + address[] memory attesters = rollup.getAttesters(); + uint256[] memory stakes = new uint256[](attesters.length); + + for (uint256 i = 0; i < attesters.length; i++) { + ValidatorInfo memory info = rollup.getInfo(attesters[i]); + stakes[i] = info.stake; + assertTrue(info.status == Status.VALIDATING, "Invalid status"); + } + + // We say, these things are bad, call the baba yaga to take care of them! + uint256 slashAmount = 10e18; + IPayload slashPayload = slashFactory.createSlashPayload(rollup.getCurrentEpoch(), slashAmount); + vm.prank(address(slasher.PROPOSER())); + slasher.slash(slashPayload); + + // Make sure that the slash was successful, + // Meaning that validators are now LIVING and have lost the slash amount + for (uint256 i = 0; i < attesters.length; i++) { + ValidatorInfo memory info = rollup.getInfo(attesters[i]); + uint256 stake = info.stake; + assertEq(stake, stakes[i] - slashAmount, "Invalid stake"); + assertTrue(info.status == Status.LIVING, "Invalid status"); + } + } + function testInvalidProposer() public setup(4) { _testBlock("mixed_block_1", true, 3, true); } diff --git a/l1-contracts/test/staking/StakingCheater.sol b/l1-contracts/test/staking/StakingCheater.sol index ba89e1e07ab..a886a3d2f72 100644 --- a/l1-contracts/test/staking/StakingCheater.sol +++ b/l1-contracts/test/staking/StakingCheater.sol @@ -9,9 +9,12 @@ import {EnumerableSet} from "@oz/utils/structs/EnumerableSet.sol"; contract StakingCheater is Staking { using EnumerableSet for EnumerableSet.AddressSet; - constructor(address _slasher, IERC20 _stakingAsset, uint256 _minimumStake) - Staking(_slasher, _stakingAsset, _minimumStake) - {} + constructor( + IERC20 _stakingAsset, + uint256 _minimumStake, + uint256 _slashingQuorum, + uint256 _roundSize + ) Staking(_stakingAsset, _minimumStake, _slashingQuorum, _roundSize) {} function cheat__SetStatus(address _attester, Status _status) external { stakingStore.info[_attester].status = _status; diff --git a/l1-contracts/test/staking/base.t.sol b/l1-contracts/test/staking/base.t.sol index 6aa8eaa8ca4..441d418d244 100644 --- a/l1-contracts/test/staking/base.t.sol +++ b/l1-contracts/test/staking/base.t.sol @@ -16,10 +16,13 @@ contract StakingBase is TestBase { address internal constant ATTESTER = address(bytes20("ATTESTER")); address internal constant WITHDRAWER = address(bytes20("WITHDRAWER")); address internal constant RECIPIENT = address(bytes20("RECIPIENT")); - address internal constant SLASHER = address(bytes20("SLASHER")); + + address internal SLASHER; function setUp() public virtual { stakingAsset = new TestERC20("test", "TEST", address(this)); - staking = new StakingCheater(SLASHER, stakingAsset, MINIMUM_STAKE); + staking = new StakingCheater(stakingAsset, MINIMUM_STAKE, 1, 1); + + SLASHER = address(staking.SLASHER()); } } diff --git a/spartan/aztec-network/files/config/config-prover-env.sh b/spartan/aztec-network/files/config/config-prover-env.sh index 073547821d4..2d56ed1c897 100644 --- a/spartan/aztec-network/files/config/config-prover-env.sh +++ b/spartan/aztec-network/files/config/config-prover-env.sh @@ -19,6 +19,7 @@ coin_issuer_address=$(echo "$output" | grep -oP 'CoinIssuer Address: \K0x[a-fA-F reward_distributor_address=$(echo "$output" | grep -oP 'RewardDistributor Address: \K0x[a-fA-F0-9]{40}') governance_proposer_address=$(echo "$output" | grep -oP 'GovernanceProposer Address: \K0x[a-fA-F0-9]{40}') governance_address=$(echo "$output" | grep -oP 'Governance 
Address: \K0x[a-fA-F0-9]{40}') +slash_factory_address=$(echo "$output" | grep -oP 'SlashFactory Address: \K0x[a-fA-F0-9]{40}') # Write the addresses to a file in the shared volume cat </shared/contracts/contracts.env @@ -34,6 +35,7 @@ export COIN_ISSUER_CONTRACT_ADDRESS=$coin_issuer_address export REWARD_DISTRIBUTOR_CONTRACT_ADDRESS=$reward_distributor_address export GOVERNANCE_PROPOSER_CONTRACT_ADDRESS=$governance_proposer_address export GOVERNANCE_CONTRACT_ADDRESS=$governance_address +export SLASH_FACTORY_CONTRACT_ADDRESS=$slash_factory_address EOF cat /shared/contracts/contracts.env diff --git a/spartan/aztec-network/files/config/config-validator-env.sh b/spartan/aztec-network/files/config/config-validator-env.sh index 78b6b319f36..7576d424ee0 100644 --- a/spartan/aztec-network/files/config/config-validator-env.sh +++ b/spartan/aztec-network/files/config/config-validator-env.sh @@ -19,6 +19,7 @@ coin_issuer_address=$(echo "$output" | grep -oP 'CoinIssuer Address: \K0x[a-fA-F reward_distributor_address=$(echo "$output" | grep -oP 'RewardDistributor Address: \K0x[a-fA-F0-9]{40}') governance_proposer_address=$(echo "$output" | grep -oP 'GovernanceProposer Address: \K0x[a-fA-F0-9]{40}') governance_address=$(echo "$output" | grep -oP 'Governance Address: \K0x[a-fA-F0-9]{40}') +slash_factory_address=$(echo "$output" | grep -oP 'SlashFactory Address: \K0x[a-fA-F0-9]{40}') # We assume that there is an env var set for validator keys from the config map # We get the index in the config map from the pod name, which will have the validator index within it @@ -39,6 +40,7 @@ export COIN_ISSUER_CONTRACT_ADDRESS=$coin_issuer_address export REWARD_DISTRIBUTOR_CONTRACT_ADDRESS=$reward_distributor_address export GOVERNANCE_PROPOSER_CONTRACT_ADDRESS=$governance_proposer_address export GOVERNANCE_CONTRACT_ADDRESS=$governance_address +export SLASH_FACTORY_CONTRACT_ADDRESS=$slash_factory_address export VALIDATOR_PRIVATE_KEY=$private_key export L1_PRIVATE_KEY=$private_key export SEQ_PUBLISHER_PRIVATE_KEY=$private_key diff --git a/spartan/aztec-network/files/config/deploy-l1-contracts.sh b/spartan/aztec-network/files/config/deploy-l1-contracts.sh index 855372ca77c..d9352ed0b39 100755 --- a/spartan/aztec-network/files/config/deploy-l1-contracts.sh +++ b/spartan/aztec-network/files/config/deploy-l1-contracts.sh @@ -48,6 +48,7 @@ coin_issuer_address=$(echo "$output" | grep -oP 'CoinIssuer Address: \K0x[a-fA-F reward_distributor_address=$(echo "$output" | grep -oP 'RewardDistributor Address: \K0x[a-fA-F0-9]{40}') governance_proposer_address=$(echo "$output" | grep -oP 'GovernanceProposer Address: \K0x[a-fA-F0-9]{40}') governance_address=$(echo "$output" | grep -oP 'Governance Address: \K0x[a-fA-F0-9]{40}') +slash_factory_address=$(echo "$output" | grep -oP 'SlashFactory Address: \K0x[a-fA-F0-9]{40}') # Write the addresses to a file in the shared volume cat </shared/contracts/contracts.env @@ -62,6 +63,7 @@ export COIN_ISSUER_CONTRACT_ADDRESS=$coin_issuer_address export REWARD_DISTRIBUTOR_CONTRACT_ADDRESS=$reward_distributor_address export GOVERNANCE_PROPOSER_CONTRACT_ADDRESS=$governance_proposer_address export GOVERNANCE_CONTRACT_ADDRESS=$governance_address +export SLASH_FACTORY_CONTRACT_ADDRESS=$slash_factory_address EOF cat /shared/contracts/contracts.env diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index a801fbabf28..545a6d13d7f 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts 
@@ -73,7 +73,7 @@ import { createP2PClient, } from '@aztec/p2p'; import { ProtocolContractAddress } from '@aztec/protocol-contracts'; -import { GlobalVariableBuilder, type L1Publisher, SequencerClient } from '@aztec/sequencer-client'; +import { GlobalVariableBuilder, type L1Publisher, SequencerClient, createSlasherClient } from '@aztec/sequencer-client'; import { PublicProcessorFactory } from '@aztec/simulator'; import { Attributes, type TelemetryClient, type Traceable, type Tracer, trackSpan } from '@aztec/telemetry-client'; import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; @@ -180,8 +180,10 @@ export class AztecNodeService implements AztecNode, Traceable { telemetry, ); + const slasherClient = await createSlasherClient(config, archiver, telemetry); + // start both and wait for them to sync from the block source - await Promise.all([p2pClient.start(), worldStateSynchronizer.start()]); + await Promise.all([p2pClient.start(), worldStateSynchronizer.start(), slasherClient.start()]); const validatorClient = createValidatorClient(config, { p2pClient, telemetry, dateProvider, epochCache }); @@ -192,6 +194,7 @@ export class AztecNodeService implements AztecNode, Traceable { validatorClient, p2pClient, worldStateSynchronizer, + slasherClient, contractDataSource: archiver, l2BlockSource: archiver, l1ToL2MessageSource: archiver, diff --git a/yarn-project/aztec.js/src/contract/contract.test.ts b/yarn-project/aztec.js/src/contract/contract.test.ts index 66a54e8cfb5..f45eb0203d1 100644 --- a/yarn-project/aztec.js/src/contract/contract.test.ts +++ b/yarn-project/aztec.js/src/contract/contract.test.ts @@ -47,6 +47,7 @@ describe('Contract Class', () => { coinIssuerAddress: EthAddress.random(), rewardDistributorAddress: EthAddress.random(), governanceProposerAddress: EthAddress.random(), + slashFactoryAddress: EthAddress.random(), }; const mockNodeInfo: NodeInfo = { nodeVersion: 'vx.x.x', diff --git a/yarn-project/cli/src/cmds/infrastructure/sequencers.ts b/yarn-project/cli/src/cmds/infrastructure/sequencers.ts index a3e6c77d39d..cf5dbe7bdc1 100644 --- a/yarn-project/cli/src/cmds/infrastructure/sequencers.ts +++ b/yarn-project/cli/src/cmds/infrastructure/sequencers.ts @@ -1,5 +1,5 @@ import { createCompatibleClient } from '@aztec/aztec.js'; -import { MINIMUM_STAKE, createEthereumChain } from '@aztec/ethereum'; +import { createEthereumChain, getL1ContractsConfigEnvVars } from '@aztec/ethereum'; import { type LogFn, type Logger } from '@aztec/foundation/log'; import { RollupAbi, TestERC20Abi } from '@aztec/l1-artifacts'; @@ -71,14 +71,16 @@ export async function sequencers(opts: { client: walletClient, }); + const config = getL1ContractsConfigEnvVars(); + await Promise.all( [ - await stakingAsset.write.mint([walletClient.account.address, MINIMUM_STAKE], {} as any), - await stakingAsset.write.approve([rollup.address, MINIMUM_STAKE], {} as any), + await stakingAsset.write.mint([walletClient.account.address, config.minimumStake], {} as any), + await stakingAsset.write.approve([rollup.address, config.minimumStake], {} as any), ].map(txHash => publicClient.waitForTransactionReceipt({ hash: txHash })), ); - const hash = await writeableRollup.write.deposit([who, who, who, MINIMUM_STAKE]); + const hash = await writeableRollup.write.deposit([who, who, who, config.minimumStake]); await publicClient.waitForTransactionReceipt({ hash }); log(`Added in tx ${hash}`); } else if (command === 'remove') { diff --git a/yarn-project/cli/src/cmds/l1/deploy_l1_contracts.ts 
b/yarn-project/cli/src/cmds/l1/deploy_l1_contracts.ts index 21ac9d71ec6..39b4bfd4635 100644 --- a/yarn-project/cli/src/cmds/l1/deploy_l1_contracts.ts +++ b/yarn-project/cli/src/cmds/l1/deploy_l1_contracts.ts @@ -48,5 +48,6 @@ export async function deployL1Contracts( log(`RewardDistributor Address: ${l1ContractAddresses.rewardDistributorAddress.toString()}`); log(`GovernanceProposer Address: ${l1ContractAddresses.governanceProposerAddress.toString()}`); log(`Governance Address: ${l1ContractAddresses.governanceAddress.toString()}`); + log(`SlashFactory Address: ${l1ContractAddresses.slashFactoryAddress.toString()}`); } } diff --git a/yarn-project/cli/src/cmds/l1/update_l1_validators.ts b/yarn-project/cli/src/cmds/l1/update_l1_validators.ts index 7d5edca07ba..40d06e2fd6d 100644 --- a/yarn-project/cli/src/cmds/l1/update_l1_validators.ts +++ b/yarn-project/cli/src/cmds/l1/update_l1_validators.ts @@ -1,6 +1,6 @@ import { EthCheatCodes } from '@aztec/aztec.js'; import { type EthAddress } from '@aztec/circuits.js'; -import { MINIMUM_STAKE, createEthereumChain, getL1ContractsConfigEnvVars, isAnvilTestChain } from '@aztec/ethereum'; +import { createEthereumChain, getL1ContractsConfigEnvVars, isAnvilTestChain } from '@aztec/ethereum'; import { type LogFn, type Logger } from '@aztec/foundation/log'; import { RollupAbi, TestERC20Abi } from '@aztec/l1-artifacts'; @@ -40,6 +40,7 @@ export async function addL1Validator({ log, debugLogger, }: RollupCommandArgs & LoggerArgs & { validatorAddress: EthAddress }) { + const config = getL1ContractsConfigEnvVars(); const dualLog = makeDualLog(log, debugLogger); const publicClient = getPublicClient(rpcUrl, chainId); const walletClient = getWalletClient(rpcUrl, chainId, privateKey, mnemonic); @@ -57,8 +58,8 @@ export async function addL1Validator({ await Promise.all( [ - await stakingAsset.write.mint([walletClient.account.address, MINIMUM_STAKE], {} as any), - await stakingAsset.write.approve([rollupAddress.toString(), MINIMUM_STAKE], {} as any), + await stakingAsset.write.mint([walletClient.account.address, config.minimumStake], {} as any), + await stakingAsset.write.approve([rollupAddress.toString(), config.minimumStake], {} as any), ].map(txHash => publicClient.waitForTransactionReceipt({ hash: txHash })), ); @@ -67,7 +68,7 @@ export async function addL1Validator({ validatorAddress.toString(), validatorAddress.toString(), validatorAddress.toString(), - MINIMUM_STAKE, + config.minimumStake, ]); dualLog(`Transaction hash: ${txHash}`); await publicClient.waitForTransactionReceipt({ hash: txHash }); diff --git a/yarn-project/cli/src/cmds/pxe/get_node_info.ts b/yarn-project/cli/src/cmds/pxe/get_node_info.ts index 329b2428297..fdd44f8d1db 100644 --- a/yarn-project/cli/src/cmds/pxe/get_node_info.ts +++ b/yarn-project/cli/src/cmds/pxe/get_node_info.ts @@ -34,6 +34,7 @@ export async function getNodeInfo( rewardDistributor: info.l1ContractAddresses.rewardDistributorAddress.toString(), governanceProposer: info.l1ContractAddresses.governanceProposerAddress.toString(), governance: info.l1ContractAddresses.governanceAddress.toString(), + slashFactory: info.l1ContractAddresses.slashFactoryAddress.toString(), }, protocolContractAddresses: { classRegisterer: info.protocolContractAddresses.classRegisterer.toString(), @@ -59,6 +60,7 @@ export async function getNodeInfo( log(` RewardDistributor Address: ${info.l1ContractAddresses.rewardDistributorAddress.toString()}`); log(` GovernanceProposer Address: ${info.l1ContractAddresses.governanceProposerAddress.toString()}`); log(` 
Governance Address: ${info.l1ContractAddresses.governanceAddress.toString()}`); + log(` SlashFactory Address: ${info.l1ContractAddresses.slashFactoryAddress.toString()}`); log(`L2 Contract Addresses:`); log(` Class Registerer: ${info.protocolContractAddresses.classRegisterer.toString()}`); diff --git a/yarn-project/end-to-end/scripts/e2e_test_config.yml b/yarn-project/end-to-end/scripts/e2e_test_config.yml index 80c0d27b2e2..36ebb2e9ed3 100644 --- a/yarn-project/end-to-end/scripts/e2e_test_config.yml +++ b/yarn-project/end-to-end/scripts/e2e_test_config.yml @@ -90,6 +90,8 @@ tests: e2e_p2p_gossip: test_path: 'e2e_p2p/gossip_network.test.ts' with_alerts: true + e2e_p2p_slashing: + test_path: 'e2e_p2p/slashing.test.ts' e2e_p2p_upgrade_governance_proposer: test_path: 'e2e_p2p/upgrade_governance_proposer.test.ts' e2e_p2p_rediscovery: diff --git a/yarn-project/end-to-end/scripts/native-network/deploy-l1-contracts.sh b/yarn-project/end-to-end/scripts/native-network/deploy-l1-contracts.sh index 2f1d670620c..9c87ef3332c 100755 --- a/yarn-project/end-to-end/scripts/native-network/deploy-l1-contracts.sh +++ b/yarn-project/end-to-end/scripts/native-network/deploy-l1-contracts.sh @@ -63,6 +63,7 @@ COIN_ISSUER_CONTRACT_ADDRESS=$(echo "$output" | grep -oP 'CoinIssuer Address: \K REWARD_DISTRIBUTOR_CONTRACT_ADDRESS=$(echo "$output" | grep -oP 'RewardDistributor Address: \K0x[a-fA-F0-9]{40}') GOVERNANCE_PROPOSER_CONTRACT_ADDRESS=$(echo "$output" | grep -oP 'GovernanceProposer Address: \K0x[a-fA-F0-9]{40}') GOVERNANCE_CONTRACT_ADDRESS=$(echo "$output" | grep -oP 'Governance Address: \K0x[a-fA-F0-9]{40}') +SLASH_FACTORY_CONTRACT_ADDRESS=$(echo "$output" | grep -oP 'SlashFactory Address: \K0x[a-fA-F0-9]{40}') # Save contract addresses to state/l1-contracts.env cat <$(git rev-parse --show-toplevel)/yarn-project/end-to-end/scripts/native-network/state/l1-contracts.env @@ -77,6 +78,7 @@ export COIN_ISSUER_CONTRACT_ADDRESS=$COIN_ISSUER_CONTRACT_ADDRESS export REWARD_DISTRIBUTOR_CONTRACT_ADDRESS=$REWARD_DISTRIBUTOR_CONTRACT_ADDRESS export GOVERNANCE_PROPOSER_CONTRACT_ADDRESS=$GOVERNANCE_PROPOSER_CONTRACT_ADDRESS export GOVERNANCE_CONTRACT_ADDRESS=$GOVERNANCE_CONTRACT_ADDRESS +export SLASH_FACTORY_CONTRACT_ADDRESS=$SLASH_FACTORY_CONTRACT_ADDRESS EOCONFIG echo "Contract addresses saved to state/l1-contracts.env" diff --git a/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts b/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts index 4bfe4c45e99..e7a8f8fd56b 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts @@ -1,7 +1,7 @@ import { getSchnorrAccount } from '@aztec/accounts/schnorr'; import { type AztecNodeConfig, type AztecNodeService } from '@aztec/aztec-node'; import { type AccountWalletWithSecretKey } from '@aztec/aztec.js'; -import { MINIMUM_STAKE, getL1ContractsConfigEnvVars } from '@aztec/ethereum'; +import { L1TxUtils, getL1ContractsConfigEnvVars } from '@aztec/ethereum'; import { EthCheatCodesWithState } from '@aztec/ethereum/test'; import { type Logger, createLogger } from '@aztec/foundation/log'; import { RollupAbi, TestERC20Abi } from '@aztec/l1-artifacts'; @@ -53,6 +53,8 @@ export class P2PNetworkTest { private cleanupInterval: NodeJS.Timeout | undefined = undefined; + private gasUtils: L1TxUtils | undefined = undefined; + constructor( testName: string, public bootstrapNode: BootstrapNode, @@ -61,6 +63,7 @@ export class P2PNetworkTest { initialValidatorConfig: AztecNodeConfig, // If set enable metrics collection metricsPort?: number, 
+ assumeProvenThrough?: number, ) { this.logger = createLogger(`e2e:e2e_p2p:${testName}`); @@ -72,12 +75,24 @@ export class P2PNetworkTest { this.bootstrapNodeEnr = bootstrapNode.getENR().encodeTxt(); - this.snapshotManager = createSnapshotManager(`e2e_p2p_network/${testName}`, process.env.E2E_DATA_PATH, { - ...initialValidatorConfig, - ethereumSlotDuration: l1ContractsConfig.ethereumSlotDuration, - salt: 420, - metricsPort: metricsPort, - }); + this.snapshotManager = createSnapshotManager( + `e2e_p2p_network/${testName}`, + process.env.E2E_DATA_PATH, + { + ...initialValidatorConfig, + ethereumSlotDuration: l1ContractsConfig.ethereumSlotDuration, + salt: 420, + metricsPort: metricsPort, + }, + { + aztecEpochDuration: initialValidatorConfig.aztecEpochDuration ?? l1ContractsConfig.aztecEpochDuration, + aztecEpochProofClaimWindowInL2Slots: + initialValidatorConfig.aztecEpochProofClaimWindowInL2Slots ?? + l1ContractsConfig.aztecEpochProofClaimWindowInL2Slots, + assumeProvenThrough: assumeProvenThrough ?? Number.MAX_SAFE_INTEGER, + initialValidators: [], + }, + ); } static async create({ @@ -85,11 +100,15 @@ export class P2PNetworkTest { numberOfNodes, basePort, metricsPort, + initialConfig, + assumeProvenThrough, }: { testName: string; numberOfNodes: number; basePort?: number; metricsPort?: number; + initialConfig?: Partial; + assumeProvenThrough?: number; }) { const port = basePort || (await getPort()); @@ -97,9 +116,20 @@ export class P2PNetworkTest { const bootstrapNode = await createBootstrapNodeFromPrivateKey(BOOTSTRAP_NODE_PRIVATE_KEY, port, telemetry); const bootstrapNodeEnr = bootstrapNode.getENR().encodeTxt(); - const initialValidatorConfig = await createValidatorConfig({} as AztecNodeConfig, bootstrapNodeEnr); + const initialValidatorConfig = await createValidatorConfig( + (initialConfig ?? 
{}) as AztecNodeConfig, + bootstrapNodeEnr, + ); - return new P2PNetworkTest(testName, bootstrapNode, port, numberOfNodes, initialValidatorConfig); + return new P2PNetworkTest( + testName, + bootstrapNode, + port, + numberOfNodes, + initialValidatorConfig, + metricsPort, + assumeProvenThrough, + ); } /** @@ -118,15 +148,13 @@ export class P2PNetworkTest { this.logger.info('Syncing mock system time'); const { dateProvider, deployL1ContractsValues } = this.ctx!; // Send a tx and only update the time after the tx is mined, as eth time is not continuous - const tx = await deployL1ContractsValues.walletClient.sendTransaction({ + const receipt = await this.gasUtils!.sendAndMonitorTransaction({ to: this.baseAccount.address, + data: '0x', value: 1n, - account: this.baseAccount, - }); - const receipt = await deployL1ContractsValues.publicClient.waitForTransactionReceipt({ - hash: tx, }); const timestamp = await deployL1ContractsValues.publicClient.getBlock({ blockNumber: receipt.blockNumber }); + this.logger.info(`Timestamp: ${timestamp.timestamp}`); dateProvider.setTime(Number(timestamp.timestamp) * 1000); } @@ -148,7 +176,7 @@ export class P2PNetworkTest { client: deployL1ContractsValues.walletClient, }); - const stakeNeeded = MINIMUM_STAKE * BigInt(this.numberOfNodes); + const stakeNeeded = l1ContractsConfig.minimumStake * BigInt(this.numberOfNodes); await Promise.all( [ await stakingAsset.write.mint( @@ -171,7 +199,7 @@ export class P2PNetworkTest { attester: attester.address, proposer: proposer.address, withdrawer: attester.address, - amount: MINIMUM_STAKE, + amount: l1ContractsConfig.minimumStake, } as const); this.logger.verbose( @@ -266,6 +294,20 @@ export class P2PNetworkTest { async setup() { this.ctx = await this.snapshotManager.setup(); this.startSyncMockSystemTimeInterval(); + + this.gasUtils = new L1TxUtils( + this.ctx.deployL1ContractsValues.publicClient, + this.ctx.deployL1ContractsValues.walletClient, + this.logger, + { + gasLimitBufferPercentage: 20n, + maxGwei: 500n, + minGwei: 1n, + maxAttempts: 3, + checkIntervalMs: 100, + stallTimeMs: 1000, + }, + ); } async stopNodes(nodes: AztecNodeService[]) { diff --git a/yarn-project/end-to-end/src/e2e_p2p/slashing.test.ts b/yarn-project/end-to-end/src/e2e_p2p/slashing.test.ts new file mode 100644 index 00000000000..fcb6cca9c3f --- /dev/null +++ b/yarn-project/end-to-end/src/e2e_p2p/slashing.test.ts @@ -0,0 +1,264 @@ +import { type AztecNodeService } from '@aztec/aztec-node'; +import { sleep } from '@aztec/aztec.js'; +import { RollupAbi, SlashFactoryAbi, SlasherAbi, SlashingProposerAbi } from '@aztec/l1-artifacts'; + +import fs from 'fs'; +import { getAddress, getContract, parseEventLogs } from 'viem'; + +import { shouldCollectMetrics } from '../fixtures/fixtures.js'; +import { createNodes } from '../fixtures/setup_p2p_test.js'; +import { P2PNetworkTest } from './p2p_network.js'; +import { createPXEServiceAndSubmitTransactions } from './shared.js'; + +// Don't set this to a higher value than 9 because each node will use a different L1 publisher account and anvil seeds +const NUM_NODES = 4; +const BOOT_NODE_UDP_PORT = 40600; + +const DATA_DIR = './data/slashing'; + +// This test is showcasing that slashing can happen, abusing that our nodes are honest but stupid +// making them slash themselves. 
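The round arithmetic this test leans on (`computeRound`, `jumpToNextRound` and the vote threshold below) is plain integer division over L2 slots. A minimal standalone sketch of that math, using this test's `slashingRoundSize = 10` and `slashingQuorum = 6` (illustration only, not part of the patch):

    // Voting rounds are windows of M consecutive slots; bigint division truncates.
    const M = 10n; // slashingRoundSize
    const N = 6n; // slashingQuorum: votes a payload needs within a round
    const computeRound = (slot: bigint): bigint => slot / M;
    const nextRoundStartSlot = (slot: bigint): bigint => (slot / M) * M + M;
    console.log(computeRound(23n), nextRoundStartSlot(23n)); // 2n 30n

`jumpToNextRound` below warps L1 time to the timestamp of that next-round start slot, so subsequent votes land at the start of a fresh round.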
+describe('e2e_p2p_slashing', () => { + let t: P2PNetworkTest; + let nodes: AztecNodeService[]; + + const slashingQuorum = 6; + const slashingRoundSize = 10; + + beforeEach(async () => { + t = await P2PNetworkTest.create({ + testName: 'e2e_p2p_slashing', + numberOfNodes: NUM_NODES, + basePort: BOOT_NODE_UDP_PORT, + metricsPort: shouldCollectMetrics(), + initialConfig: { + aztecEpochDuration: 1, + aztecEpochProofClaimWindowInL2Slots: 1, + slashingQuorum, + slashingRoundSize, + }, + assumeProvenThrough: 1, + }); + + await t.applyBaseSnapshots(); + await t.setup(); + await t.removeInitialNode(); + }); + + afterEach(async () => { + await t.stopNodes(nodes); + await t.teardown(); + for (let i = 0; i < NUM_NODES; i++) { + fs.rmSync(`${DATA_DIR}-${i}`, { recursive: true, force: true }); + } + }); + + it('should slash the attesters', async () => { + // create the bootstrap node for the network + if (!t.bootstrapNodeEnr) { + throw new Error('Bootstrap node ENR is not available'); + } + + const rollup = getContract({ + address: t.ctx.deployL1ContractsValues!.l1ContractAddresses.rollupAddress.toString(), + abi: RollupAbi, + client: t.ctx.deployL1ContractsValues!.walletClient, + }); + + const slasherContract = getContract({ + address: getAddress(await rollup.read.SLASHER()), + abi: SlasherAbi, + client: t.ctx.deployL1ContractsValues.publicClient, + }); + + const slashingProposer = getContract({ + address: getAddress(await slasherContract.read.PROPOSER()), + abi: SlashingProposerAbi, + client: t.ctx.deployL1ContractsValues.publicClient, + }); + + const slashFactory = getContract({ + address: getAddress(t.ctx.deployL1ContractsValues.l1ContractAddresses.slashFactoryAddress.toString()), + abi: SlashFactoryAbi, + client: t.ctx.deployL1ContractsValues.publicClient, + }); + + const slashingInfo = async () => { + const bn = await t.ctx.cheatCodes.eth.blockNumber(); + const slotNumber = await rollup.read.getCurrentSlot(); + const roundNumber = await slashingProposer.read.computeRound([slotNumber]); + const instanceAddress = t.ctx.deployL1ContractsValues.l1ContractAddresses.rollupAddress.toString(); + const info = await slashingProposer.read.rounds([instanceAddress, roundNumber]); + const leaderVotes = await slashingProposer.read.yeaCount([instanceAddress, roundNumber, info[1]]); + return { bn, slotNumber, roundNumber, info, leaderVotes }; + }; + + const jumpToNextRound = async () => { + t.logger.info(`Jumping to next round`); + const roundSize = await slashingProposer.read.M(); + const nextRoundTimestamp = await rollup.read.getTimestampForSlot([ + ((await rollup.read.getCurrentSlot()) / roundSize) * roundSize + roundSize, + ]); + await t.ctx.cheatCodes.eth.warp(Number(nextRoundTimestamp)); + + await t.syncMockSystemTime(); + }; + + t.ctx.aztecNodeConfig.validatorReexecute = false; + + // create our network of nodes and submit txs into each of them + // the number of txs per node and the number of txs per rollup + // should be set so that the only way for rollups to be built + // is if the txs are successfully gossiped around the nodes. + t.logger.info('Creating nodes'); + nodes = await createNodes( + t.ctx.aztecNodeConfig, + t.ctx.dateProvider, + t.bootstrapNodeEnr, + NUM_NODES, + BOOT_NODE_UDP_PORT, + DATA_DIR, + // To collect metrics - run in aztec-packages `docker compose --profile metrics up` and set COLLECT_METRICS=true + shouldCollectMetrics(), + ); + + // We are overriding the slashing amount to 1, such that the slashing will "really" happen. 
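(The loop that follows reaches through two `as any` casts because neither the sequencer nor its slasher client is part of the node's public API. The override is needed because the client's default `slashingAmount` is `0n` — see `slasher_client.ts` later in this patch — which would emit the slash event without moving any stake.)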
+ for (const node of nodes) { + const seqClient = node.getSequencer(); + if (!seqClient) { + throw new Error('Sequencer not found'); + } + const sequencer = (seqClient as any).sequencer; + const slasher = (sequencer as any).slasherClient; + slasher.slashingAmount = 1n; + } + + // wait a bit for peers to discover each other + await sleep(4000); + + let sInfo = await slashingInfo(); + + const votesNeeded = await slashingProposer.read.N(); + + // We should push us to land exactly at the next round + await jumpToNextRound(); + + // Produce blocks until we hit an issue with pruning. + // Then we should jump in time to the next round so we are sure that we have the votes + // Then we just sit on our hands and wait. + + const seqClient = nodes[0].getSequencer(); + if (!seqClient) { + throw new Error('Sequencer not found'); + } + const sequencer = (seqClient as any).sequencer; + const slasher = (sequencer as any).slasherClient; + + t.logger.info(`Producing blocks until we hit a pruning event`); + + // Run for up to the slashing round size, or as long as needed to get a slash event + // Variable because sometimes hit race-condition issues with attestations. + for (let i = 0; i < slashingRoundSize; i++) { + t.logger.info('Submitting transactions'); + const bn = await nodes[0].getBlockNumber(); + await createPXEServiceAndSubmitTransactions(t.logger, nodes[0], 1); + + t.logger.info(`Waiting for block number to change`); + while (bn === (await nodes[0].getBlockNumber())) { + await sleep(1000); + } + + if (slasher.slashEvents.length > 0) { + t.logger.info(`We have a slash event ${i}`); + break; + } + } + + expect(slasher.slashEvents.length).toBeGreaterThan(0); + + // We should push us to land exactly at the next round + await jumpToNextRound(); + + // For the next round we will try to cast votes. + // Stop early if we have enough votes. 
+ t.logger.info(`Waiting for votes to be cast`); + for (let i = 0; i < slashingRoundSize; i++) { + t.logger.info('Waiting for slot number to change and votes to be cast'); + const slotNumber = await rollup.read.getCurrentSlot(); + t.logger.info(`Waiting for block number to change`); + while (slotNumber === (await rollup.read.getCurrentSlot())) { + await sleep(1000); + } + sInfo = await slashingInfo(); + t.logger.info(`We have ${sInfo.leaderVotes} votes in round ${sInfo.roundNumber} on ${sInfo.info[1]}`); + if (sInfo.leaderVotes > votesNeeded) { + t.logger.info(`We have sufficient votes`); + break; + } + } + + t.logger.info('Deploy the actual payload for slashing!'); + const slashEvent = slasher.slashEvents[0]; + await t.ctx.deployL1ContractsValues.publicClient.waitForTransactionReceipt({ + hash: await slashFactory.write.createSlashPayload([slashEvent.epoch, slashEvent.amount], { + account: t.ctx.deployL1ContractsValues.walletClient.account, + }), + }); + + t.logger.info(`We jump in time to the next round to execute`); + await jumpToNextRound(); + const attestersPre = await rollup.read.getAttesters(); + + for (const attester of attestersPre) { + const attesterInfo = await rollup.read.getInfo([attester]); + // Check that status isValidating + expect(attesterInfo.status).toEqual(1); + } + + t.logger.info(`Push the proposal, SLASHING!`); + const tx = await slashingProposer.write.executeProposal([sInfo.roundNumber], { + account: t.ctx.deployL1ContractsValues.walletClient.account, + }); + await t.ctx.deployL1ContractsValues.publicClient.waitForTransactionReceipt({ + hash: tx, + }); + + const receipt = await t.ctx.deployL1ContractsValues.publicClient.getTransactionReceipt({ + hash: tx, + }); + + const slashingEvents = parseEventLogs({ + abi: RollupAbi, + logs: receipt.logs, + }).filter(log => log.eventName === 'Slashed'); + + const attestersSlashed = slashingEvents.map(event => { + // Because TS is a little nagging bitch + return (event.args as any).attester; + }); + + // Convert attestersPre elements to lowercase for consistent comparison + const normalizedAttestersPre = attestersPre.map(addr => addr.toLowerCase()); + const normalizedAttestersSlashed = attestersSlashed.map(addr => addr.toLowerCase()); + expect(new Set(normalizedAttestersPre)).toEqual(new Set(normalizedAttestersSlashed)); + + const instanceAddress = t.ctx.deployL1ContractsValues.l1ContractAddresses.rollupAddress.toString(); + const infoPost = await slashingProposer.read.rounds([instanceAddress, sInfo.roundNumber]); + + expect(sInfo.info[0]).toEqual(infoPost[0]); + expect(sInfo.info[1]).toEqual(infoPost[1]); + expect(sInfo.info[2]).toEqual(false); + expect(infoPost[2]).toEqual(true); + + const attestersPost = await rollup.read.getAttesters(); + + for (const attester of attestersPre) { + const attesterInfo = await rollup.read.getInfo([attester]); + // Check that status is Living + expect(attesterInfo.status).toEqual(2); + } + const committee = await rollup.read.getEpochCommittee([slashEvent.epoch]); + expect(attestersPre.length).toBe(committee.length); + expect(attestersPost.length).toBe(0); + }, 1_000_000); +}); diff --git a/yarn-project/end-to-end/src/e2e_p2p/upgrade_governance_proposer.test.ts b/yarn-project/end-to-end/src/e2e_p2p/upgrade_governance_proposer.test.ts index 9499bce2eef..bad8f5a7c7b 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/upgrade_governance_proposer.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/upgrade_governance_proposer.test.ts @@ -162,7 +162,10 @@ describe('e2e_p2p_governance_proposer', () => { 
await waitL1Block(); - const txHash = await governanceProposer.write.pushProposal([govData.round], { account: emperor, gas: 1_000_000n }); + const txHash = await governanceProposer.write.executeProposal([govData.round], { + account: emperor, + gas: 1_000_000n, + }); await t.ctx.deployL1ContractsValues.publicClient.waitForTransactionReceipt({ hash: txHash }); const token = getContract({ diff --git a/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts b/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts index d031d18c5bd..929768285c7 100644 --- a/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts +++ b/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts @@ -317,9 +317,9 @@ async function setupFromFresh( } const deployL1ContractsValues = await setupL1Contracts(aztecNodeConfig.l1RpcUrl, hdAccount, logger, { + ...getL1ContractsConfigEnvVars(), salt: opts.salt, ...deployL1ContractsArgs, - ...getL1ContractsConfigEnvVars(), initialValidators: opts.initialValidators, }); aztecNodeConfig.l1Contracts = deployL1ContractsValues.l1ContractAddresses; diff --git a/yarn-project/ethereum/src/config.ts b/yarn-project/ethereum/src/config.ts index 9d1fee99530..2a2bff7e4fa 100644 --- a/yarn-project/ethereum/src/config.ts +++ b/yarn-project/ethereum/src/config.ts @@ -1,4 +1,9 @@ -import { type ConfigMappingsType, getConfigFromMappings, numberConfigHelper } from '@aztec/foundation/config'; +import { + type ConfigMappingsType, + bigintConfigHelper, + getConfigFromMappings, + numberConfigHelper, +} from '@aztec/foundation/config'; export type L1ContractsConfig = { /** How many seconds an L1 slot lasts. */ @@ -11,6 +16,16 @@ export type L1ContractsConfig = { aztecTargetCommitteeSize: number; /** The number of L2 slots that we can wait for a proof of an epoch to be produced. */ aztecEpochProofClaimWindowInL2Slots: number; + /** The minimum stake for a validator. 
*/ + minimumStake: bigint; + /** The slashing quorum */ + slashingQuorum: number; + /** The slashing round size */ + slashingRoundSize: number; + /** Governance proposing quorum */ + governanceProposerQuorum: number; + /** Governance proposing round size */ + governanceProposerRoundSize: number; }; export const DefaultL1ContractsConfig = { @@ -19,6 +34,11 @@ export const DefaultL1ContractsConfig = { aztecEpochDuration: 16, aztecTargetCommitteeSize: 48, aztecEpochProofClaimWindowInL2Slots: 13, + minimumStake: BigInt(100e18), + slashingQuorum: 6, + slashingRoundSize: 10, + governanceProposerQuorum: 6, + governanceProposerRoundSize: 10, } satisfies L1ContractsConfig; export const l1ContractsConfigMappings: ConfigMappingsType = { @@ -47,6 +67,31 @@ export const l1ContractsConfigMappings: ConfigMappingsType = description: 'The number of L2 slots that we can wait for a proof of an epoch to be produced.', ...numberConfigHelper(DefaultL1ContractsConfig.aztecEpochProofClaimWindowInL2Slots), }, + minimumStake: { + env: 'AZTEC_MINIMUM_STAKE', + description: 'The minimum stake for a validator.', + ...bigintConfigHelper(DefaultL1ContractsConfig.minimumStake), + }, + slashingQuorum: { + env: 'AZTEC_SLASHING_QUORUM', + description: 'The slashing quorum', + ...numberConfigHelper(DefaultL1ContractsConfig.slashingQuorum), + }, + slashingRoundSize: { + env: 'AZTEC_SLASHING_ROUND_SIZE', + description: 'The slashing round size', + ...numberConfigHelper(DefaultL1ContractsConfig.slashingRoundSize), + }, + governanceProposerQuorum: { + env: 'AZTEC_GOVERNANCE_PROPOSER_QUORUM', + description: 'The governance proposing quorum', + ...numberConfigHelper(DefaultL1ContractsConfig.governanceProposerQuorum), + }, + governanceProposerRoundSize: { + env: 'AZTEC_GOVERNANCE_PROPOSER_ROUND_SIZE', + description: 'The governance proposing round size', + ...numberConfigHelper(DefaultL1ContractsConfig.governanceProposerRoundSize), + }, }; export function getL1ContractsConfigEnvVars(): L1ContractsConfig { diff --git a/yarn-project/ethereum/src/constants.ts b/yarn-project/ethereum/src/constants.ts index 2fea0175aca..c1f4b34d732 100644 --- a/yarn-project/ethereum/src/constants.ts +++ b/yarn-project/ethereum/src/constants.ts @@ -2,4 +2,3 @@ import { type Hex } from 'viem'; export const NULL_KEY: Hex = `0x${'0000000000000000000000000000000000000000000000000000000000000000'}`; export const AZTEC_TEST_CHAIN_ID = 677692; -export const MINIMUM_STAKE = BigInt(100e18); diff --git a/yarn-project/ethereum/src/deploy_l1_contracts.ts b/yarn-project/ethereum/src/deploy_l1_contracts.ts index 31b2c1eeb50..657c9ff7145 100644 --- a/yarn-project/ethereum/src/deploy_l1_contracts.ts +++ b/yarn-project/ethereum/src/deploy_l1_contracts.ts @@ -26,6 +26,8 @@ import { RollupAbi, RollupBytecode, RollupLinkReferences, + SlashFactoryAbi, + SlashFactoryBytecode, TestERC20Abi, TestERC20Bytecode, } from '@aztec/l1-artifacts'; @@ -53,7 +55,6 @@ import { type HDAccount, type PrivateKeyAccount, mnemonicToAccount, privateKeyTo import { foundry } from 'viem/chains'; import { type L1ContractsConfig } from './config.js'; -import { MINIMUM_STAKE } from './constants.js'; import { isAnvilTestChain } from './ethereum_chain.js'; import { type L1ContractAddresses } from './l1_contract_addresses.js'; import { L1TxUtils } from './l1_tx_utils.js'; @@ -156,6 +157,10 @@ export interface L1ContractArtifactsForDeployment { * Governance contract artifacts. */ governance: ContractArtifacts; + /** + * SlashFactory contract artifacts. 
+ */ + slashFactory: ContractArtifacts; } export const l1Artifacts: L1ContractArtifactsForDeployment = { @@ -216,6 +221,10 @@ export const l1Artifacts: L1ContractArtifactsForDeployment = { contractAbi: GovernanceAbi, contractBytecode: GovernanceBytecode, }, + slashFactory: { + contractAbi: SlashFactoryAbi, + contractBytecode: SlashFactoryBytecode, + }, }; export interface DeployL1ContractsArgs extends L1ContractsConfig { @@ -331,14 +340,10 @@ export const deployL1Contracts = async ( ]); logger.verbose(`Deployed Staking Asset at ${stakingAssetAddress}`); - // @todo #8084 - // @note These numbers are just chosen to make testing simple. - const quorumSize = 6n; - const roundSize = 10n; const governanceProposerAddress = await govDeployer.deploy(l1Artifacts.governanceProposer, [ registryAddress.toString(), - quorumSize, - roundSize, + args.governanceProposerQuorum, + args.governanceProposerRoundSize, ]); logger.verbose(`Deployed GovernanceProposer at ${governanceProposerAddress}`); @@ -382,7 +387,9 @@ export const deployL1Contracts = async ( aztecEpochDuration: args.aztecEpochDuration, targetCommitteeSize: args.aztecTargetCommitteeSize, aztecEpochProofClaimWindowInL2Slots: args.aztecEpochProofClaimWindowInL2Slots, - minimumStake: MINIMUM_STAKE, + minimumStake: args.minimumStake, + slashingQuorum: args.slashingQuorum, + slashingRoundSize: args.slashingRoundSize, }; const rollupArgs = [ feeJuicePortalAddress.toString(), @@ -396,6 +403,9 @@ export const deployL1Contracts = async ( const rollupAddress = await deployer.deploy(l1Artifacts.rollup, rollupArgs); logger.verbose(`Deployed Rollup at ${rollupAddress}`, rollupConfigArgs); + const slashFactoryAddress = await deployer.deploy(l1Artifacts.slashFactory, [rollupAddress.toString()]); + logger.info(`Deployed SlashFactory at ${slashFactoryAddress}`); + await deployer.waitForDeployments(); logger.verbose(`All core contracts have been deployed`); @@ -434,7 +444,7 @@ export const deployL1Contracts = async ( if (args.initialValidators && args.initialValidators.length > 0) { // Mint tokens, approve them, use cheat code to initialise validator set without setting up the epoch. 
- const stakeNeeded = MINIMUM_STAKE * BigInt(args.initialValidators.length); + const stakeNeeded = args.minimumStake * BigInt(args.initialValidators.length); await Promise.all( [ await stakingAsset.write.mint([walletClient.account.address, stakeNeeded], {} as any), @@ -447,7 +457,7 @@ export const deployL1Contracts = async ( attester: v.toString(), proposer: v.toString(), withdrawer: v.toString(), - amount: MINIMUM_STAKE, + amount: args.minimumStake, })), ]); txHashes.push(initiateValidatorSetTxHash); @@ -560,6 +570,7 @@ export const deployL1Contracts = async ( rewardDistributorAddress, governanceProposerAddress, governanceAddress, + slashFactoryAddress, }; logger.info(`Aztec L1 contracts initialized`, l1Contracts); diff --git a/yarn-project/ethereum/src/l1_contract_addresses.ts b/yarn-project/ethereum/src/l1_contract_addresses.ts index eca35f4edea..aca32ba2dd2 100644 --- a/yarn-project/ethereum/src/l1_contract_addresses.ts +++ b/yarn-project/ethereum/src/l1_contract_addresses.ts @@ -21,6 +21,7 @@ export const L1ContractsNames = [ 'governanceProposerAddress', 'governanceAddress', 'stakingAssetAddress', + 'slashFactoryAddress', ] as const; /** Provides the directory of current L1 contract addresses */ @@ -40,6 +41,7 @@ export const L1ContractAddressesSchema = z.object({ rewardDistributorAddress: schemas.EthAddress, governanceProposerAddress: schemas.EthAddress, governanceAddress: schemas.EthAddress, + slashFactoryAddress: schemas.EthAddress, }) satisfies ZodFor; const parseEnv = (val: string) => EthAddress.fromString(val); @@ -100,4 +102,9 @@ export const l1ContractAddressesMapping: ConfigMappingsType description: 'The deployed L1 governance contract address', parseEnv, }, + slashFactoryAddress: { + env: 'SLASH_FACTORY_CONTRACT_ADDRESS', + description: 'The deployed L1 slashFactory contract address', + parseEnv, + }, }; diff --git a/yarn-project/foundation/src/config/env_var.ts b/yarn-project/foundation/src/config/env_var.ts index cf8d34bfd05..4ce23b9946e 100644 --- a/yarn-project/foundation/src/config/env_var.ts +++ b/yarn-project/foundation/src/config/env_var.ts @@ -145,6 +145,7 @@ export type EnvVar = | 'SEQ_TX_POLLING_INTERVAL_MS' | 'SEQ_ENFORCE_TIME_TABLE' | 'SEQ_MAX_L1_TX_INCLUSION_TIME_INTO_SLOT' + | 'SLASH_FACTORY_CONTRACT_ADDRESS' | 'STAKING_ASSET_CONTRACT_ADDRESS' | 'REWARD_DISTRIBUTOR_CONTRACT_ADDRESS' | 'TELEMETRY' @@ -172,6 +173,11 @@ export type EnvVar = | 'AZTEC_EPOCH_DURATION' | 'AZTEC_TARGET_COMMITTEE_SIZE' | 'AZTEC_EPOCH_PROOF_CLAIM_WINDOW_IN_L2_SLOTS' + | 'AZTEC_MINIMUM_STAKE' + | 'AZTEC_SLASHING_QUORUM' + | 'AZTEC_SLASHING_ROUND_SIZE' + | 'AZTEC_GOVERNANCE_PROPOSER_QUORUM' + | 'AZTEC_GOVERNANCE_PROPOSER_ROUND_SIZE' | 'L1_GAS_LIMIT_BUFFER_PERCENTAGE' | 'L1_GAS_LIMIT_BUFFER_FIXED' | 'L1_GAS_PRICE_MIN' diff --git a/yarn-project/l1-artifacts/scripts/generate-artifacts.sh b/yarn-project/l1-artifacts/scripts/generate-artifacts.sh index 04bb74e264d..fd3825893f6 100755 --- a/yarn-project/l1-artifacts/scripts/generate-artifacts.sh +++ b/yarn-project/l1-artifacts/scripts/generate-artifacts.sh @@ -30,6 +30,10 @@ CONTRACTS=( "l1-contracts:NewGovernanceProposerPayload" "l1-contracts:LeonidasLib" "l1-contracts:ExtRollupLib" + "l1-contracts:SlashingProposer" + "l1-contracts:Slasher" + "l1-contracts:EmpireBase" + "l1-contracts:SlashFactory" ) # Read the error ABI's once and store it in COMBINED_ERRORS variable diff --git a/yarn-project/pxe/src/pxe_service/test/pxe_service.test.ts b/yarn-project/pxe/src/pxe_service/test/pxe_service.test.ts index 5f333ecb0c4..f000655f45f 100644 --- 
a/yarn-project/pxe/src/pxe_service/test/pxe_service.test.ts +++ b/yarn-project/pxe/src/pxe_service/test/pxe_service.test.ts @@ -44,6 +44,7 @@ async function createPXEService(): Promise { coinIssuerAddress: EthAddress.random(), rewardDistributorAddress: EthAddress.random(), governanceProposerAddress: EthAddress.random(), + slashFactoryAddress: EthAddress.random(), }; node.getL1ContractAddresses.mockResolvedValue(mockedContracts); diff --git a/yarn-project/sequencer-client/package.json b/yarn-project/sequencer-client/package.json index 58a2116a91c..6721e3080b5 100644 --- a/yarn-project/sequencer-client/package.json +++ b/yarn-project/sequencer-client/package.json @@ -53,6 +53,7 @@ "viem": "^2.7.15" }, "devDependencies": { + "@aztec/archiver": "workspace:^", "@aztec/kv-store": "workspace:^", "@jest/globals": "^29.5.0", "@types/jest": "^29.5.0", diff --git a/yarn-project/sequencer-client/src/client/sequencer-client.ts b/yarn-project/sequencer-client/src/client/sequencer-client.ts index 81bf69ad10f..6947f4101ac 100644 --- a/yarn-project/sequencer-client/src/client/sequencer-client.ts +++ b/yarn-project/sequencer-client/src/client/sequencer-client.ts @@ -13,6 +13,7 @@ import { type SequencerClientConfig } from '../config.js'; import { GlobalVariableBuilder } from '../global_variable_builder/index.js'; import { L1Publisher } from '../publisher/index.js'; import { Sequencer, type SequencerConfig } from '../sequencer/index.js'; +import { type SlasherClient } from '../slasher/index.js'; import { TxValidatorFactory } from '../tx_validator/tx_validator_factory.js'; /** @@ -40,6 +41,7 @@ export class SequencerClient { validatorClient: ValidatorClient | undefined; // allowed to be undefined while we migrate p2pClient: P2P; worldStateSynchronizer: WorldStateSynchronizer; + slasherClient: SlasherClient; contractDataSource: ContractDataSource; l2BlockSource: L2BlockSource; l1ToL2MessageSource: L1ToL2MessageSource; @@ -52,6 +54,7 @@ export class SequencerClient { validatorClient, p2pClient, worldStateSynchronizer, + slasherClient, contractDataSource, l2BlockSource, l1ToL2MessageSource, @@ -91,6 +94,7 @@ export class SequencerClient { globalsBuilder, p2pClient, worldStateSynchronizer, + slasherClient, new LightweightBlockBuilderFactory(telemetryClient), l2BlockSource, l1ToL2MessageSource, diff --git a/yarn-project/sequencer-client/src/index.ts b/yarn-project/sequencer-client/src/index.ts index dcac430ac13..35129eed538 100644 --- a/yarn-project/sequencer-client/src/index.ts +++ b/yarn-project/sequencer-client/src/index.ts @@ -2,6 +2,7 @@ export * from './client/index.js'; export * from './config.js'; export * from './publisher/index.js'; export { Sequencer, SequencerState } from './sequencer/index.js'; +export * from './slasher/index.js'; // Used by the node to simulate public parts of transactions. Should these be moved to a shared library? 
// ISSUE(#9832)
diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts
index 3642f039874..c27d4710031 100644
--- a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts
+++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts
@@ -16,24 +16,18 @@ import {
   type Proof,
 } from '@aztec/circuits.js';
 import { type FeeRecipient, type RootRollupPublicInputs } from '@aztec/circuits.js/rollup';
-import {
-  type EthereumChain,
-  type L1ContractsConfig,
-  L1TxUtils,
-  type L1TxUtilsConfig,
-  createEthereumChain,
-} from '@aztec/ethereum';
+import { type EthereumChain, type L1ContractsConfig, L1TxUtils, createEthereumChain } from '@aztec/ethereum';
 import { makeTuple } from '@aztec/foundation/array';
 import { toHex } from '@aztec/foundation/bigint-buffer';
 import { Blob } from '@aztec/foundation/blob';
 import { areArraysEqual, compactArray, times } from '@aztec/foundation/collection';
 import { type Signature } from '@aztec/foundation/eth-signature';
 import { Fr } from '@aztec/foundation/fields';
-import { createLogger } from '@aztec/foundation/log';
+import { type Logger, createLogger } from '@aztec/foundation/log';
 import { type Tuple, serializeToBuffer } from '@aztec/foundation/serialize';
 import { InterruptibleSleep } from '@aztec/foundation/sleep';
 import { Timer } from '@aztec/foundation/timer';
-import { ExtRollupLibAbi, GovernanceProposerAbi, LeonidasLibAbi, RollupAbi } from '@aztec/l1-artifacts';
+import { EmpireBaseAbi, ExtRollupLibAbi, LeonidasLibAbi, RollupAbi, SlasherAbi } from '@aztec/l1-artifacts';
 import { type TelemetryClient } from '@aztec/telemetry-client';
 
 import pick from 'lodash.pick';
@@ -135,6 +129,13 @@ export type L1SubmitEpochProofArgs = {
   proof: Proof;
 };
 
+export enum VoteType {
+  GOVERNANCE,
+  SLASHING,
+}
+
+type GetSlashPayloadCallBack = (slotNumber: bigint) => Promise<EthAddress | undefined>;
+
 /**
  * Publishes L2 blocks to L1. This implementation does *not* retry a transaction in
  * the event of network congestion, but should work for local development.
@@ -149,20 +150,25 @@ export class L1Publisher {
   private interrupted = false;
   private metrics: L1PublisherMetrics;
 
-  private payload: EthAddress = EthAddress.ZERO;
-  private myLastVote: bigint = 0n;
+  protected governanceLog = createLogger('sequencer:publisher:governance');
+  protected governanceProposerAddress?: EthAddress;
+  private governancePayload: EthAddress = EthAddress.ZERO;
+
+  protected slashingLog = createLogger('sequencer:publisher:slashing');
+  protected slashingProposerAddress?: EthAddress;
+  private getSlashPayload?: GetSlashPayloadCallBack = undefined;
+
+  private myLastVotes: Record<VoteType, bigint> = {
+    [VoteType.GOVERNANCE]: 0n,
+    [VoteType.SLASHING]: 0n,
+  };
 
   protected log = createLogger('sequencer:publisher');
-  protected governanceLog = createLogger('sequencer:publisher:governance');
 
   protected rollupContract: GetContractReturnType<
     typeof RollupAbi,
     WalletClient<HttpTransport, Chain, Account>
   >;
-  protected governanceProposerContract?: GetContractReturnType<
-    typeof GovernanceProposerAbi,
-    WalletClient<HttpTransport, Chain, Account>
-  > = undefined;
 
   protected publicClient: PublicClient<HttpTransport, Chain>;
   protected walletClient: WalletClient<HttpTransport, Chain, Account>;
@@ -178,7 +184,7 @@ export class L1Publisher {
   private readonly l1TxUtils: L1TxUtils;
 
   constructor(
-    config: TxSenderConfig & PublisherConfig & Pick<L1ContractsConfig, 'ethereumSlotDuration'> & L1TxUtilsConfig,
+    config: TxSenderConfig & PublisherConfig & Pick<L1ContractsConfig, 'ethereumSlotDuration'>,
     client: TelemetryClient,
   ) {
    this.sleepTimeMs = config?.l1PublishRetryIntervalMS ?? 
60_000; @@ -205,16 +211,31 @@ export class L1Publisher { }); if (l1Contracts.governanceProposerAddress) { - this.governanceProposerContract = getContract({ - address: getAddress(l1Contracts.governanceProposerAddress.toString()), - abi: GovernanceProposerAbi, - client: this.walletClient, - }); + this.governanceProposerAddress = EthAddress.fromString(l1Contracts.governanceProposerAddress.toString()); } this.l1TxUtils = new L1TxUtils(this.publicClient, this.walletClient, this.log, config); } + public registerSlashPayloadGetter(callback: GetSlashPayloadCallBack) { + this.getSlashPayload = callback; + } + + private async getSlashingProposerAddress() { + if (this.slashingProposerAddress) { + return this.slashingProposerAddress; + } + + const slasherAddress = await this.rollupContract.read.SLASHER(); + const slasher = getContract({ + address: getAddress(slasherAddress.toString()), + abi: SlasherAbi, + client: this.walletClient, + }); + this.slashingProposerAddress = EthAddress.fromString(await slasher.read.PROPOSER()); + return this.slashingProposerAddress; + } + get publisherAddress() { return this.account.address; } @@ -230,12 +251,12 @@ export class L1Publisher { }); } - public getPayLoad() { - return this.payload; + public getGovernancePayload() { + return this.governancePayload; } - public setPayload(payload: EthAddress) { - this.payload = payload; + public setGovernancePayload(payload: EthAddress) { + this.governancePayload = payload; } public getSenderAddress(): EthAddress { @@ -450,68 +471,106 @@ export class L1Publisher { calldataGas: getCalldataGasUsage(calldata), }; } - - public async castVote(slotNumber: bigint, timestamp: bigint): Promise { - if (this.payload.equals(EthAddress.ZERO)) { + public async castVote(slotNumber: bigint, timestamp: bigint, voteType: VoteType) { + // @todo This function can be optimized by doing some of the computations locally instead of calling the L1 contracts + if (this.myLastVotes[voteType] >= slotNumber) { return false; } - if (!this.governanceProposerContract) { - return false; - } + const voteConfig = async (): Promise< + { payload: EthAddress; voteContractAddress: EthAddress; logger: Logger } | undefined + > => { + if (voteType === VoteType.GOVERNANCE) { + if (this.governancePayload.equals(EthAddress.ZERO)) { + return undefined; + } + if (!this.governanceProposerAddress) { + return undefined; + } + return { + payload: this.governancePayload, + voteContractAddress: this.governanceProposerAddress, + logger: this.governanceLog, + }; + } else if (voteType === VoteType.SLASHING) { + if (!this.getSlashPayload) { + return undefined; + } + const slashingProposerAddress = await this.getSlashingProposerAddress(); + if (!slashingProposerAddress) { + return undefined; + } + + const slashPayload = await this.getSlashPayload(slotNumber); + + if (!slashPayload) { + return undefined; + } + + return { + payload: slashPayload, + voteContractAddress: slashingProposerAddress, + logger: this.slashingLog, + }; + } else { + throw new Error('Invalid vote type'); + } + }; - if (this.myLastVote >= slotNumber) { + const vConfig = await voteConfig(); + + if (!vConfig) { return false; } - // @todo This can be optimized A LOT by doing the computation instead of making calls to L1, but it is very convenient - // for when we keep changing the values and don't want to have multiple versions of the same logic implemented. 
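As the removed note concedes, the two L1 reads below could be computed locally from the known round size and the rollup's slot math, at the cost of duplicating the on-chain logic. A rough sketch of the round half (assumption: it mirrors the `computeRound` of the EmpireBase-style proposer contracts, which the e2e tests observe as `round = slot / M`):

    // Hypothetical local mirror of EmpireBase.computeRound (M = round size in slots).
    const computeRoundLocally = (slotNumber: bigint, roundSize: bigint): bigint => slotNumber / roundSize;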
+ const { payload, voteContractAddress, logger } = vConfig; + + const voteContract = getContract({ + address: getAddress(voteContractAddress.toString()), + abi: EmpireBaseAbi, + client: this.walletClient, + }); const [proposer, roundNumber] = await Promise.all([ this.rollupContract.read.getProposerAt([timestamp]), - this.governanceProposerContract.read.computeRound([slotNumber]), + voteContract.read.computeRound([slotNumber]), ]); if (proposer.toLowerCase() !== this.account.address.toLowerCase()) { return false; } - const [slotForLastVote] = await this.governanceProposerContract.read.rounds([ - this.rollupContract.address, - roundNumber, - ]); + const [slotForLastVote] = await voteContract.read.rounds([this.rollupContract.address, roundNumber]); if (slotForLastVote >= slotNumber) { return false; } - // Storing these early such that a quick entry again would not send another tx, - // revert the state if there is a failure. - const cachedMyLastVote = this.myLastVote; - this.myLastVote = slotNumber; - - this.governanceLog.verbose(`Casting vote for ${this.payload}`); + const cachedMyLastVote = this.myLastVotes[voteType]; + this.myLastVotes[voteType] = slotNumber; let txHash; try { - txHash = await this.governanceProposerContract.write.vote([this.payload.toString()], { account: this.account }); + txHash = await voteContract.write.vote([payload.toString()], { + account: this.account, + }); } catch (err) { const msg = prettyLogViemErrorMsg(err); - this.governanceLog.error(`Failed to vote`, msg); - this.myLastVote = cachedMyLastVote; + logger.error(`Failed to vote`, msg); + this.myLastVotes[voteType] = cachedMyLastVote; return false; } if (txHash) { const receipt = await this.getTransactionReceipt(txHash); if (!receipt) { - this.governanceLog.warn(`Failed to get receipt for tx ${txHash}`); - this.myLastVote = cachedMyLastVote; + logger.warn(`Failed to get receipt for tx ${txHash}`); + this.myLastVotes[voteType] = cachedMyLastVote; return false; } } - this.governanceLog.info(`Cast vote for ${this.payload}`); + logger.info(`Cast vote for ${payload}`); return true; } diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts index f45e80ed260..60aa4208611 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts @@ -48,6 +48,7 @@ import { type MockProxy, mock, mockFn } from 'jest-mock-extended'; import { type GlobalVariableBuilder } from '../global_variable_builder/global_builder.js'; import { type L1Publisher } from '../publisher/l1-publisher.js'; +import { type SlasherClient } from '../slasher/index.js'; import { TxValidatorFactory } from '../tx_validator/tx_validator_factory.js'; import { Sequencer } from './sequencer.js'; import { SequencerState } from './utils.js'; @@ -195,6 +196,8 @@ describe('sequencer', () => { const l1GenesisTime = BigInt(Math.floor(Date.now() / 1000)); const l1Constants = { l1GenesisTime, slotDuration, ethereumSlotDuration }; + const slasherClient = mock(); + sequencer = new TestSubject( publisher, // TODO(md): add the relevant methods to the validator client that will prevent it stalling when waiting for attestations @@ -202,6 +205,7 @@ describe('sequencer', () => { globalVariableBuilder, p2p, worldState, + slasherClient, blockBuilderFactory, l2BlockSource, l1ToL2MessageSource, diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index 
cedd98abfcf..44acbec00d3 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -36,8 +36,9 @@ import { Attributes, type TelemetryClient, type Tracer, trackSpan } from '@aztec import { type ValidatorClient } from '@aztec/validator-client'; import { type GlobalVariableBuilder } from '../global_variable_builder/global_builder.js'; -import { type L1Publisher } from '../publisher/l1-publisher.js'; +import { type L1Publisher, VoteType } from '../publisher/l1-publisher.js'; import { prettyLogViemErrorMsg } from '../publisher/utils.js'; +import { type SlasherClient } from '../slasher/slasher_client.js'; import { type TxValidatorFactory } from '../tx_validator/tx_validator_factory.js'; import { getDefaultAllowedSetupFunctions } from './allowed.js'; import { type SequencerConfig } from './config.js'; @@ -106,6 +107,7 @@ export class Sequencer { private globalsBuilder: GlobalVariableBuilder, private p2pClient: P2P, private worldState: WorldStateSynchronizer, + private slasherClient: SlasherClient, private blockBuilderFactory: BlockBuilderFactory, private l2BlockSource: L2BlockSource, private l1ToL2MessageSource: L1ToL2MessageSource, @@ -122,6 +124,9 @@ export class Sequencer { // Register the block builder with the validator client for re-execution this.validatorClient?.registerBlockBuilder(this.buildBlock.bind(this)); + + // Register the slasher on the publisher to fetch slashing payloads + this.publisher.registerSlashPayloadGetter(this.slasherClient.getSlashPayload.bind(this.slasherClient)); } get tracer(): Tracer { @@ -157,7 +162,7 @@ export class Sequencer { this.maxBlockSizeInBytes = config.maxBlockSizeInBytes; } if (config.governanceProposerPayload) { - this.publisher.setPayload(config.governanceProposerPayload); + this.publisher.setGovernancePayload(config.governanceProposerPayload); } if (config.maxL1TxInclusionTimeIntoSlot !== undefined) { this.maxL1TxInclusionTimeIntoSlot = config.maxL1TxInclusionTimeIntoSlot; @@ -245,6 +250,7 @@ export class Sequencer { this.log.debug(`Stopping sequencer`); await this.validatorClient?.stop(); await this.runningPromise?.stop(); + await this.slasherClient?.stop(); this.publisher.interrupt(); this.setState(SequencerState.STOPPED, 0n, true /** force */); this.log.info('Stopped sequencer'); @@ -314,7 +320,8 @@ export class Sequencer { slot, ); - void this.publisher.castVote(slot, newGlobalVariables.timestamp.toBigInt()); + void this.publisher.castVote(slot, newGlobalVariables.timestamp.toBigInt(), VoteType.GOVERNANCE); + void this.publisher.castVote(slot, newGlobalVariables.timestamp.toBigInt(), VoteType.SLASHING); if (!this.shouldProposeBlock(historicalHeader, {})) { return; diff --git a/yarn-project/sequencer-client/src/slasher/factory.ts b/yarn-project/sequencer-client/src/slasher/factory.ts new file mode 100644 index 00000000000..85decb074ea --- /dev/null +++ b/yarn-project/sequencer-client/src/slasher/factory.ts @@ -0,0 +1,22 @@ +import type { L2BlockSource } from '@aztec/circuit-types'; +import { type L1ContractsConfig, type L1ReaderConfig } from '@aztec/ethereum'; +import { createLogger } from '@aztec/foundation/log'; +import { type AztecKVStore } from '@aztec/kv-store'; +import { type DataStoreConfig } from '@aztec/kv-store/config'; +import { createStore } from '@aztec/kv-store/lmdb'; +import { type TelemetryClient } from '@aztec/telemetry-client'; +import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; + +import { SlasherClient } from './slasher_client.js'; 
+import { type SlasherConfig } from './slasher_client.js'; + +export const createSlasherClient = async ( + _config: SlasherConfig & DataStoreConfig & L1ContractsConfig & L1ReaderConfig, + l2BlockSource: L2BlockSource, + telemetry: TelemetryClient = new NoopTelemetryClient(), + deps: { store?: AztecKVStore } = {}, +) => { + const config = { ..._config }; + const store = deps.store ?? (await createStore('slasher', config, createLogger('slasher:lmdb'))); + return new SlasherClient(config, store, l2BlockSource, telemetry); +}; diff --git a/yarn-project/sequencer-client/src/slasher/index.ts b/yarn-project/sequencer-client/src/slasher/index.ts new file mode 100644 index 00000000000..e33e274a76b --- /dev/null +++ b/yarn-project/sequencer-client/src/slasher/index.ts @@ -0,0 +1,2 @@ +export * from './slasher_client.js'; +export { createSlasherClient } from './factory.js'; diff --git a/yarn-project/sequencer-client/src/slasher/slasher_client.test.ts b/yarn-project/sequencer-client/src/slasher/slasher_client.test.ts new file mode 100644 index 00000000000..bb097b9da72 --- /dev/null +++ b/yarn-project/sequencer-client/src/slasher/slasher_client.test.ts @@ -0,0 +1,120 @@ +import { MockL2BlockSource } from '@aztec/archiver/test'; +import { L2Block } from '@aztec/circuit-types'; +import { + type L1ContractAddresses, + type L1ContractsConfig, + type L1ReaderConfig, + getL1ContractsConfigEnvVars, +} from '@aztec/ethereum'; +import { EthAddress } from '@aztec/foundation/eth-address'; +import { retryUntil } from '@aztec/foundation/retry'; +import { sleep } from '@aztec/foundation/sleep'; +import { type AztecKVStore } from '@aztec/kv-store'; +import { openTmpStore } from '@aztec/kv-store/lmdb'; + +import { expect } from '@jest/globals'; + +import { SlasherClient, type SlasherConfig } from './slasher_client.js'; + +// Most of this test are directly copied from the P2P client test. +describe('In-Memory Slasher Client', () => { + let blockSource: MockL2BlockSource; + let kvStore: AztecKVStore; + let client: SlasherClient; + let config: SlasherConfig & L1ContractsConfig & L1ReaderConfig; + + beforeEach(() => { + blockSource = new MockL2BlockSource(); + blockSource.createBlocks(100); + + const l1Config = getL1ContractsConfigEnvVars(); + + // Need some configuration here. Can be a basic bitch config really. 
+ config = { + ...l1Config, + blockCheckIntervalMS: 100, + blockRequestBatchSize: 20, + l1Contracts: { + slashFactoryAddress: EthAddress.ZERO, + } as unknown as L1ContractAddresses, + l1RpcUrl: 'http://127.0.0.1:8545', + l1ChainId: 1, + viemPollingIntervalMS: 1000, + }; + + kvStore = openTmpStore(); + client = new SlasherClient(config, kvStore, blockSource); + }); + + const advanceToProvenBlock = async (getProvenBlockNumber: number, provenEpochNumber = getProvenBlockNumber) => { + blockSource.setProvenBlockNumber(getProvenBlockNumber); + blockSource.setProvenEpochNumber(provenEpochNumber); + await retryUntil( + () => Promise.resolve(client.getSyncedProvenBlockNum() >= getProvenBlockNumber), + 'synced', + 10, + 0.1, + ); + }; + + afterEach(async () => { + if (client.isReady()) { + await client.stop(); + } + }); + + it('can start & stop', async () => { + expect(client.isReady()).toEqual(false); + + await client.start(); + expect(client.isReady()).toEqual(true); + + await client.stop(); + expect(client.isReady()).toEqual(false); + }); + + it('restores the previous block number it was at', async () => { + await client.start(); + await client.stop(); + + const client2 = new SlasherClient(config, kvStore, blockSource); + expect(client2.getSyncedLatestBlockNum()).toEqual(client.getSyncedLatestBlockNum()); + }); + + describe('Chain prunes', () => { + it('moves the tips on a chain reorg', async () => { + blockSource.setProvenBlockNumber(0); + await client.start(); + + await advanceToProvenBlock(90); + + await expect(client.getL2Tips()).resolves.toEqual({ + latest: { number: 100, hash: expect.any(String) }, + proven: { number: 90, hash: expect.any(String) }, + finalized: { number: 90, hash: expect.any(String) }, + }); + + blockSource.removeBlocks(10); + + // give the client a chance to react to the reorg + await sleep(100); + + await expect(client.getL2Tips()).resolves.toEqual({ + latest: { number: 90, hash: expect.any(String) }, + proven: { number: 90, hash: expect.any(String) }, + finalized: { number: 90, hash: expect.any(String) }, + }); + + blockSource.addBlocks([L2Block.random(91), L2Block.random(92)]); + + // give the client a chance to react to the new blocks + await sleep(100); + + await expect(client.getL2Tips()).resolves.toEqual({ + latest: { number: 92, hash: expect.any(String) }, + proven: { number: 90, hash: expect.any(String) }, + finalized: { number: 90, hash: expect.any(String) }, + }); + }); + }); +}); diff --git a/yarn-project/sequencer-client/src/slasher/slasher_client.ts b/yarn-project/sequencer-client/src/slasher/slasher_client.ts new file mode 100644 index 00000000000..cc5e60e25ea --- /dev/null +++ b/yarn-project/sequencer-client/src/slasher/slasher_client.ts @@ -0,0 +1,402 @@ +import { + type L2Block, + type L2BlockId, + type L2BlockSource, + L2BlockStream, + type L2BlockStreamEvent, + type L2Tips, +} from '@aztec/circuit-types'; +import { INITIAL_L2_BLOCK_NUM } from '@aztec/circuits.js/constants'; +import { type L1ContractsConfig, type L1ReaderConfig, createEthereumChain } from '@aztec/ethereum'; +import { EthAddress } from '@aztec/foundation/eth-address'; +import { createLogger } from '@aztec/foundation/log'; +import { type AztecKVStore, type AztecMap, type AztecSingleton } from '@aztec/kv-store'; +import { SlashFactoryAbi } from '@aztec/l1-artifacts'; +import { type TelemetryClient, WithTracer } from '@aztec/telemetry-client'; +import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; + +import { + type Chain, + type GetContractReturnType, + type HttpTransport, + 
type PublicClient,
+  createPublicClient,
+  getAddress,
+  getContract,
+  http,
+} from 'viem';
+
+/**
+ * Enum defining the possible states of the Slasher client.
+ */
+export enum SlasherClientState {
+  IDLE,
+  SYNCHING,
+  RUNNING,
+  STOPPED,
+}
+
+/**
+ * The synchronization status of the Slasher client.
+ */
+export interface SlasherSyncState {
+  /**
+   * The current state of the slasher client.
+   */
+  state: SlasherClientState;
+  /**
+   * The block number that the slasher client is synced to.
+   */
+  syncedToL2Block: L2BlockId;
+}
+
+export interface SlasherConfig {
+  blockCheckIntervalMS: number;
+  blockRequestBatchSize: number;
+}
+
+type SlashEvent = {
+  epoch: bigint;
+  amount: bigint;
+  lifetime: bigint;
+};
+
+/**
+ * @notice A Hypomeiones slasher client implementation
+ *
+ * Hypomeiones: a class of individuals in ancient Sparta who were considered inferior or lesser citizens compared
+ * to the full Spartan citizens.
+ *
+ * The implementation here is less than ideal. It exists, not to be the end all be all, but to show that
+ * slashing can be done with this mechanism.
+ *
+ * The implementation is VERY brute in the sense that it only looks for pruned blocks and then tries to slash
+ * the full committee of that epoch.
+ * If it sees a prune, it will mark the full epoch as "to be slashed".
+ *
+ * Also, it is not particularly smart about what it should do if there were to be multiple slashing events.
+ *
+ * A few improvements:
+ * - Only vote on the proposal if it is possible to reach, e.g., if 6 votes are needed and only 4 slots are left, don't vote.
+ * - Stop voting on a payload once it is processed.
+ * - Only vote on the proposal if it has not already been executed.
+ * - Caveat: we need to fully decide if it is acceptable to have the same payload address multiple times. In the current
+ *   slash factory that could mean slashing the same committee for the same error multiple times.
+ * - Decide how to deal with multiple slashing events in the same round.
+ * - This could be that multiple epochs are pruned in the same round, but with the current naive implementation we could end up
+ *   slashing only the first, because the "lifetime" of the second would have passed after that vote.
+ */
+export class SlasherClient extends WithTracer {
+  private currentState = SlasherClientState.IDLE;
+  private syncPromise = Promise.resolve();
+  private syncResolve?: () => void = undefined;
+  private latestBlockNumberAtStart = -1;
+  private provenBlockNumberAtStart = -1;
+
+  private synchedBlockHashes: AztecMap<number, string>;
+  private synchedLatestBlockNumber: AztecSingleton<number>;
+  private synchedProvenBlockNumber: AztecSingleton<number>;
+
+  private blockStream;
+
+  private slashEvents: SlashEvent[] = [];
+
+  protected slashFactoryContract?: GetContractReturnType<typeof SlashFactoryAbi, PublicClient<HttpTransport, Chain>> =
+    undefined;
+
+  // The amount to slash for a prune.
+  // Note that we set it to 0, such that no actual slashing will happen, but the event will be fired,
+  // showing that the slashing mechanism is working.
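(The slashing e2e test earlier in this patch pokes this field to `1n` through an `as any` cast so that the executed payload actually moves stake.)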
+  private slashingAmount: bigint = 0n;
+
+  constructor(
+    private config: SlasherConfig & L1ContractsConfig & L1ReaderConfig,
+    private store: AztecKVStore,
+    private l2BlockSource: L2BlockSource,
+    telemetry: TelemetryClient = new NoopTelemetryClient(),
+    private log = createLogger('slasher'),
+  ) {
+    super(telemetry, 'slasher');
+
+    this.blockStream = new L2BlockStream(l2BlockSource, this, this, createLogger('slasher:block_stream'), {
+      batchSize: config.blockRequestBatchSize,
+      pollIntervalMS: config.blockCheckIntervalMS,
+    });
+
+    this.synchedBlockHashes = store.openMap('slasher_block_hashes');
+    this.synchedLatestBlockNumber = store.openSingleton('slasher_last_l2_block');
+    this.synchedProvenBlockNumber = store.openSingleton('slasher_last_proven_l2_block');
+
+    if (config.l1Contracts.slashFactoryAddress && config.l1Contracts.slashFactoryAddress !== EthAddress.ZERO) {
+      const chain = createEthereumChain(config.l1RpcUrl, config.l1ChainId);
+      const publicClient = createPublicClient({
+        chain: chain.chainInfo,
+        transport: http(chain.rpcUrl),
+        pollingInterval: config.viemPollingIntervalMS,
+      });
+
+      this.slashFactoryContract = getContract({
+        address: getAddress(config.l1Contracts.slashFactoryAddress.toString()),
+        abi: SlashFactoryAbi,
+        client: publicClient,
+      });
+    } else {
+      this.log.warn('No slash factory address found, slashing will not be enabled');
+    }
+
+    this.log.info(`Slasher client initialized`);
+  }
+
+  // This is where we should put a bunch of the improvements mentioned earlier.
+  public async getSlashPayload(slotNumber: bigint): Promise<EthAddress | undefined> {
+    if (!this.slashFactoryContract) {
+      return undefined;
+    }
+
+    // As long as the slot is greater than the lifetime, we want to keep deleting the first element
+    // since it will not make sense to include anymore.
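(Worked example: with `slashingRoundSize = 10`, `handlePruneL2Blocks` further down gives a prune observed at slot 23 a lifetime of 23 + 7 + 10 = 40, so the sweep below discards that event once the current slot number exceeds 40.)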
+    while (this.slashEvents.length > 0 && this.slashEvents[0].lifetime < slotNumber) {
+      this.slashEvents.shift();
+    }
+
+    if (this.slashEvents.length == 0) {
+      return undefined;
+    }
+
+    const slashEvent = this.slashEvents[0];
+
+    const [payloadAddress, isDeployed] = await this.slashFactoryContract.read.getAddressAndIsDeployed([
+      slashEvent.epoch,
+      slashEvent.amount,
+    ]);
+
+    if (!isDeployed) {
+      // The proposal cannot be executed until it is deployed
+      this.log.verbose(`Voting on not yet deployed payload: ${payloadAddress}`);
+    }
+
+    return EthAddress.fromString(payloadAddress);
+  }
+
+  public getL2BlockHash(number: number): Promise<string | undefined> {
+    return Promise.resolve(this.synchedBlockHashes.get(number));
+  }
+
+  public getL2Tips(): Promise<L2Tips> {
+    const latestBlockNumber = this.getSyncedLatestBlockNum();
+    let latestBlockHash: string | undefined;
+    const provenBlockNumber = this.getSyncedProvenBlockNum();
+    let provenBlockHash: string | undefined;
+
+    if (latestBlockNumber > 0) {
+      latestBlockHash = this.synchedBlockHashes.get(latestBlockNumber);
+      if (typeof latestBlockHash === 'undefined') {
+        this.log.warn(`Block hash for latest block ${latestBlockNumber} not found`);
+        throw new Error();
+      }
+    }
+
+    if (provenBlockNumber > 0) {
+      provenBlockHash = this.synchedBlockHashes.get(provenBlockNumber);
+      if (typeof provenBlockHash === 'undefined') {
+        this.log.warn(`Block hash for proven block ${provenBlockNumber} not found`);
+        throw new Error();
+      }
+    }
+
+    return Promise.resolve({
+      latest: { hash: latestBlockHash!, number: latestBlockNumber },
+      proven: { hash: provenBlockHash!, number: provenBlockNumber },
+      finalized: { hash: provenBlockHash!, number: provenBlockNumber },
+    });
+  }
+
+  public async handleBlockStreamEvent(event: L2BlockStreamEvent): Promise<void> {
+    this.log.debug(`Handling block stream event ${event.type}`);
+    switch (event.type) {
+      case 'blocks-added':
+        await this.handleLatestL2Blocks(event.blocks);
+        break;
+      case 'chain-finalized':
+        // TODO (alexg): I think we can prune the block hashes map here
+        break;
+      case 'chain-proven': {
+        const from = this.getSyncedProvenBlockNum() + 1;
+        const limit = event.blockNumber - from + 1;
+        await this.handleProvenL2Blocks(await this.l2BlockSource.getBlocks(from, limit));
+        break;
+      }
+      case 'chain-pruned':
+        await this.handlePruneL2Blocks(event.blockNumber);
+        break;
+      default: {
+        const _: never = event;
+        break;
+      }
+    }
+  }
+
+  public async start() {
+    if (this.currentState === SlasherClientState.STOPPED) {
+      throw new Error('Slasher already stopped');
+    }
+    if (this.currentState !== SlasherClientState.IDLE) {
+      return this.syncPromise;
+    }
+
+    // get the current latest block numbers
+    this.latestBlockNumberAtStart = await this.l2BlockSource.getBlockNumber();
+    this.provenBlockNumberAtStart = await this.l2BlockSource.getProvenBlockNumber();
+
+    const syncedLatestBlock = this.getSyncedLatestBlockNum() + 1;
+    const syncedProvenBlock = this.getSyncedProvenBlockNum() + 1;
+
+    // if there are blocks to be retrieved, go to a synching state
+    if (syncedLatestBlock <= this.latestBlockNumberAtStart || syncedProvenBlock <= this.provenBlockNumberAtStart) {
+      this.setCurrentState(SlasherClientState.SYNCHING);
+      this.syncPromise = new Promise(resolve => {
+        this.syncResolve = resolve;
+      });
+      this.log.verbose(`Starting sync from ${syncedLatestBlock} (last proven ${syncedProvenBlock})`);
+    } else {
+      // if no blocks to be retrieved, go straight to running
+      this.setCurrentState(SlasherClientState.RUNNING);
+      this.syncPromise = Promise.resolve();
+      this.log.verbose(`Block ${syncedLatestBlock} (proven ${syncedProvenBlock}) already beyond current block`);
+    }
+
+    this.blockStream.start();
+    this.log.verbose(`Started block downloader from block ${syncedLatestBlock}`);
+
+    return this.syncPromise;
+  }
+
+  /**
+   * Allows consumers to stop the instance of the slasher client.
+   * 'ready' will now return 'false' and the running promise that keeps the client synced is interrupted.
+   */
+  public async stop() {
+    this.log.debug('Stopping Slasher client...');
+    await this.blockStream.stop();
+    this.log.debug('Stopped block downloader');
+    this.setCurrentState(SlasherClientState.STOPPED);
+    this.log.info('Slasher client stopped.');
+  }
+
+  /**
+   * Public function to check if the slasher client is fully synced and ready to receive txs.
+   * @returns True if the slasher client is ready to receive txs.
+   */
+  public isReady() {
+    return this.currentState === SlasherClientState.RUNNING;
+  }
+
+  /**
+   * Public function to check the latest block number that the slasher client is synced to.
+   * @returns Block number of latest L2 Block we've synced with.
+   */
+  public getSyncedLatestBlockNum() {
+    return this.synchedLatestBlockNumber.get() ?? INITIAL_L2_BLOCK_NUM - 1;
+  }
+
+  /**
+   * Public function to check the latest proven block number that the slasher client is synced to.
+   * @returns Block number of latest proven L2 Block we've synced with.
+   */
+  public getSyncedProvenBlockNum() {
+    return this.synchedProvenBlockNumber.get() ?? INITIAL_L2_BLOCK_NUM - 1;
+  }
+
+  /**
+   * Method to check the status of the slasher client.
+   * @returns Information about slasher client status: state & syncedToBlockNum.
+   */
+  public async getStatus(): Promise<SlasherSyncState> {
+    const blockNumber = this.getSyncedLatestBlockNum();
+    const blockHash =
+      blockNumber == 0
+        ? ''
+        : await this.l2BlockSource.getBlockHeader(blockNumber).then(header => header?.hash().toString());
+    return Promise.resolve({
+      state: this.currentState,
+      syncedToL2Block: { number: blockNumber, hash: blockHash },
+    } as SlasherSyncState);
+  }
+
+  /**
+   * Handles new blocks
+   * @param blocks - A list of blocks that the slasher client needs to store block hashes for
+   * @returns Empty promise.
+   */
+  private async handleLatestL2Blocks(blocks: L2Block[]): Promise<void> {
+    if (!blocks.length) {
+      return Promise.resolve();
+    }
+
+    const lastBlockNum = blocks[blocks.length - 1].number;
+    await Promise.all(blocks.map(block => this.synchedBlockHashes.set(block.number, block.hash().toString())));
+    await this.synchedLatestBlockNumber.set(lastBlockNum);
+    this.log.debug(`Synched to latest block ${lastBlockNum}`);
+    this.startServiceIfSynched();
+  }
+
+  /**
+   * Handles new proven blocks by updating the proven block number
+   * @param blocks - A list of proven L2 blocks.
+   * @returns Empty promise.
+   */
+  private async handleProvenL2Blocks(blocks: L2Block[]): Promise<void> {
+    if (!blocks.length) {
+      return Promise.resolve();
+    }
+    const lastBlockNum = blocks[blocks.length - 1].number;
+    await this.synchedProvenBlockNumber.set(lastBlockNum);
+    this.log.debug(`Synched to proven block ${lastBlockNum}`);
+
+    this.startServiceIfSynched();
+  }
+
+  private async handlePruneL2Blocks(latestBlock: number): Promise<void> {
+    const blockHeader = await this.l2BlockSource.getBlockHeader(latestBlock);
+    const slotNumber = blockHeader ? blockHeader.globalVariables.slotNumber.toBigInt() : BigInt(0);
+    const epochNumber = slotNumber / BigInt(this.config.aztecEpochDuration);
+    this.log.info(`Detected chain prune. 
Punishing the validators at epoch ${epochNumber}`); + + // Set the lifetime such that we have a full round that we could vote throughout. + const slotsIntoRound = slotNumber % BigInt(this.config.slashingRoundSize); + const toNext = slotsIntoRound == 0n ? 0n : BigInt(this.config.slashingRoundSize) - slotsIntoRound; + + const lifetime = slotNumber + toNext + BigInt(this.config.slashingRoundSize); + + this.slashEvents.push({ + epoch: epochNumber, + amount: this.slashingAmount, + lifetime, + }); + + await this.synchedLatestBlockNumber.set(latestBlock); + } + + private startServiceIfSynched() { + if ( + this.currentState === SlasherClientState.SYNCHING && + this.getSyncedLatestBlockNum() >= this.latestBlockNumberAtStart && + this.getSyncedProvenBlockNum() >= this.provenBlockNumberAtStart + ) { + this.log.debug(`Synched to blocks at start`); + this.setCurrentState(SlasherClientState.RUNNING); + if (this.syncResolve !== undefined) { + this.syncResolve(); + } + } + } + + /** + * Method to set the value of the current state. + * @param newState - New state value. + */ + private setCurrentState(newState: SlasherClientState) { + this.currentState = newState; + this.log.debug(`Moved to state ${SlasherClientState[this.currentState]}`); + } +} diff --git a/yarn-project/sequencer-client/tsconfig.json b/yarn-project/sequencer-client/tsconfig.json index 9a8615c0299..4161156625e 100644 --- a/yarn-project/sequencer-client/tsconfig.json +++ b/yarn-project/sequencer-client/tsconfig.json @@ -60,6 +60,9 @@ { "path": "../world-state" }, + { + "path": "../archiver" + }, { "path": "../kv-store" } diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock index e7d49f8e155..c4806a57b34 100644 --- a/yarn-project/yarn.lock +++ b/yarn-project/yarn.lock @@ -1157,6 +1157,7 @@ __metadata: version: 0.0.0-use.local resolution: "@aztec/sequencer-client@workspace:sequencer-client" dependencies: + "@aztec/archiver": "workspace:^" "@aztec/aztec.js": "workspace:^" "@aztec/bb-prover": "workspace:^" "@aztec/circuit-types": "workspace:^" From fb58c166f0c1e1ec60de567fc55df80b07f76894 Mon Sep 17 00:00:00 2001 From: just-mitch <68168980+just-mitch@users.noreply.github.com> Date: Fri, 3 Jan 2025 09:50:27 -0500 Subject: [PATCH 09/20] chore: bump rc-1 prover agents (#11033) --- spartan/aztec-network/values/rc-1.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/spartan/aztec-network/values/rc-1.yaml b/spartan/aztec-network/values/rc-1.yaml index fe51be81984..249fd2a6569 100644 --- a/spartan/aztec-network/values/rc-1.yaml +++ b/spartan/aztec-network/values/rc-1.yaml @@ -126,7 +126,7 @@ bootNode: storageSize: "100Gi" proverAgent: - replicas: 12 + replicas: 60 bb: hardwareConcurrency: 31 gke: From 671065e837c58bd5934c442316305580a0ed6c0e Mon Sep 17 00:00:00 2001 From: Charlie Lye Date: Fri, 3 Jan 2025 18:29:48 +0000 Subject: [PATCH 10/20] yolo fix noir-projects test run hash --- noir-projects/bootstrap.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/noir-projects/bootstrap.sh b/noir-projects/bootstrap.sh index e70da821e60..4b98d4fa7fb 100755 --- a/noir-projects/bootstrap.sh +++ b/noir-projects/bootstrap.sh @@ -46,7 +46,7 @@ case "$cmd" in exit ;; "hash") - cache_content_hash .rebuild_patterns + cache_content_hash .rebuild_patterns ../noir/.rebuild_patterns exit ;; *) From 6e94c1ac13d63826a8ecd29eec74c79c053a2a05 Mon Sep 17 00:00:00 2001 From: Charlie Lye Date: Fri, 3 Jan 2025 20:56:53 +0000 Subject: [PATCH 11/20] remove keep-alive. 
--- .github/ensure-builder/action.yml | 4 ++-- .github/ensure-tester/action.yml | 3 +-- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/.github/ensure-builder/action.yml b/.github/ensure-builder/action.yml index 6ff89d78471..32925ace844 100644 --- a/.github/ensure-builder/action.yml +++ b/.github/ensure-builder/action.yml @@ -70,7 +70,7 @@ runs: ec2_subnet_id: subnet-4cfabd25 ec2_security_group_id: sg-0ccd4e5df0dcca0c9 ec2_key_name: "build-instance" - ec2_instance_tags: '[{"Key": "Keep-Alive", "Value": "true"},{"Key": "Builder", "Value": "true"}]' + ec2_instance_tags: '[{"Key": "Builder", "Value": "true"}]' # This disambiguates from 'tester' - name: Set BUILDER_SPOT_IP and BUILDER_SPOT_KEY shell: bash @@ -100,4 +100,4 @@ runs: - name: Report Exit Code shell: bash if: steps.test.outputs.exit_code != '155' || inputs.spot_strategy == 'None' - run: exit ${{ steps.test.outputs.exit_code }} \ No newline at end of file + run: exit ${{ steps.test.outputs.exit_code }} diff --git a/.github/ensure-tester/action.yml b/.github/ensure-tester/action.yml index fcc896ff596..67a0d68c862 100644 --- a/.github/ensure-tester/action.yml +++ b/.github/ensure-tester/action.yml @@ -78,7 +78,6 @@ runs: ec2_subnet_id: subnet-4cfabd25 ec2_security_group_id: sg-0ccd4e5df0dcca0c9 ec2_key_name: "build-instance" - ec2_instance_tags: '[{"Key": "Keep-Alive", "Value": "true"}]' - name: Ensure Tester Cleanup uses: gacts/run-and-post-run@v1 @@ -107,4 +106,4 @@ runs: - name: Report Exit Code shell: bash if: steps.test.outputs.exit_code != '155' || inputs.spot_strategy == 'None' - run: exit ${{ steps.test.outputs.exit_code }} \ No newline at end of file + run: exit ${{ steps.test.outputs.exit_code }} From f6f2c1258d63b155059e70532c7e0c5aecfa6782 Mon Sep 17 00:00:00 2001 From: just-mitch <68168980+just-mitch@users.noreply.github.com> Date: Fri, 3 Jan 2025 16:40:51 -0500 Subject: [PATCH 12/20] chore: load in the big dashboard during metrics install (#11007) We now have a `values.tmp.yaml` in the metrics chart. This gets filled in with the full dashboard json when running an install of the metrics stack. --- spartan/metrics/.gitignore | 1 + spartan/metrics/install-kind.sh | 4 + spartan/metrics/install-prod.sh | 3 + spartan/metrics/values.tmp.yaml | 166 ++++++++++++++++ spartan/metrics/values.yaml | 332 -------------------------------- 5 files changed, 174 insertions(+), 332 deletions(-) create mode 100644 spartan/metrics/.gitignore create mode 100644 spartan/metrics/values.tmp.yaml delete mode 100644 spartan/metrics/values.yaml diff --git a/spartan/metrics/.gitignore b/spartan/metrics/.gitignore new file mode 100644 index 00000000000..7d101009614 --- /dev/null +++ b/spartan/metrics/.gitignore @@ -0,0 +1 @@ +values.yaml \ No newline at end of file diff --git a/spartan/metrics/install-kind.sh b/spartan/metrics/install-kind.sh index 3a9ecfb4ccf..5b1c9ce7900 100755 --- a/spartan/metrics/install-kind.sh +++ b/spartan/metrics/install-kind.sh @@ -10,6 +10,10 @@ if helm ls --namespace metrics | grep -q metrics; then exit 0 fi +# Inject the Aztec Networks dashboard into values.yaml +DASHBOARD_JSON=$(jq -c '.' 
grafana_dashboards/aztec-dashboard-all-in-one.json) +DASHBOARD_JSON=$DASHBOARD_JSON yq e '.grafana.dashboards.default."aztec-networks".json = strenv(DASHBOARD_JSON)' values.tmp.yaml > values.yaml + helm repo add open-telemetry https://open-telemetry.github.io/opentelemetry-helm-charts helm repo add grafana https://grafana.github.io/helm-charts helm repo add prometheus-community https://prometheus-community.github.io/helm-charts diff --git a/spartan/metrics/install-prod.sh b/spartan/metrics/install-prod.sh index a61cc2b8ef4..edb5af00aa7 100755 --- a/spartan/metrics/install-prod.sh +++ b/spartan/metrics/install-prod.sh @@ -3,4 +3,7 @@ set -eu cd "$(dirname "${BASH_SOURCE[0]}")" +DASHBOARD_JSON=$(jq -c '.' grafana_dashboards/aztec-dashboard-all-in-one.json) +DASHBOARD_JSON=$DASHBOARD_JSON yq e '.grafana.dashboards.default."aztec-networks".json = strenv(DASHBOARD_JSON)' values.tmp.yaml > values.yaml + helm upgrade metrics . -n metrics --values "./values/prod.yaml" --install --create-namespace $@ diff --git a/spartan/metrics/values.tmp.yaml b/spartan/metrics/values.tmp.yaml new file mode 100644 index 00000000000..f6eb0e506c1 --- /dev/null +++ b/spartan/metrics/values.tmp.yaml @@ -0,0 +1,166 @@ +opentelemetry-collector: + mode: deployment + + service: + enabled: true + + image: + repository: "otel/opentelemetry-collector-contrib" + + ports: + otlp-http: + enabled: true + containerPort: 4318 + servicePort: 4318 + hostPort: 4318 + protocol: TCP + otel-metrics: + enabled: true + containerPort: 8888 + servicePort: 8888 + hostPort: 8888 + protocol: TCP + aztec-metrics: + enabled: true + containerPort: 8889 + servicePort: 8889 + hostPort: 8889 + protocol: TCP + + presets: + kubernetesAttributes: + enabled: true + config: + extensions: + health_check: + endpoint: ${env:MY_POD_IP}:13133 + processors: + resource: + attributes: + - action: preserve + key: k8s.namespace.name + batch: {} + receivers: + otlp: + protocols: + http: + endpoint: ${env:MY_POD_IP}:4318 + grpc: + endpoint: ${env:MY_POD_IP}:4317 + service: + extensions: [health_check] + telemetry: + metrics: + address: ${env:MY_POD_IP}:8888 + pipelines: + logs: + receivers: + - otlp + processors: + - batch + exporters: + - otlphttp/logs + traces: + receivers: + - otlp + processors: + - batch + exporters: + - otlp/tempo + metrics: + receivers: + - otlp + processors: + - batch + exporters: + - prometheus + # - debug + +# Enable and configure the Loki subchart +# https://artifacthub.io/packages/helm/grafana/loki +# loki: +# Nothing set here, because we need to use values from the values directory; +# otherwise, things don't get overridden correctly. 
+ +# Enable and configure the Tempo subchart +# https://artifacthub.io/packages/helm/grafana/tempo +tempo: + minio: + enabled: true + mode: standalone + rootUser: grafana-tempo + rootPassword: supersecret + buckets: + # Default Tempo storage bucket + - name: tempo-traces + policy: none + purge: false + traces: + otlp: + grpc: + enabled: true + http: + enabled: true + zipkin: + enabled: false + jaeger: + thriftHttp: + enabled: false + opencensus: + enabled: false + +prometheus: + server: + global: + evaluation_interval: 15s + scrape_interval: 15s + serverFiles: + prometheus.yml: + scrape_configs: + - job_name: otel-collector + static_configs: + - targets: ["metrics-opentelemetry-collector.metrics:8888"] + - job_name: aztec + static_configs: + - targets: ["metrics-opentelemetry-collector.metrics:8889"] + - job_name: "kube-state-metrics" + static_configs: + - targets: + ["metrics-kube-state-metrics.metrics.svc.cluster.local:8080"] + +# Enable and configure Grafana +# https://artifacthub.io/packages/helm/grafana/grafana +grafana: + datasources: + datasources.yaml: + apiVersion: 1 + datasources: + - name: Loki + type: loki + url: http://metrics-loki.metrics:3100 + - name: Tempo + type: tempo + url: http://metrics-tempo.metrics:3100 + - name: Prometheus + type: prometheus + uid: spartan-metrics-prometheus + isDefault: true + url: http://metrics-prometheus-server.metrics:80 + dashboardProviders: + dashboardproviders.yaml: + apiVersion: 1 + providers: + - name: "default" + orgId: 1 + folder: "" + type: file + disableDeletion: false + editable: true + options: + path: /var/lib/grafana/dashboards/default + dashboards: + default: + # unfortunately, we can't use the `file` helper here, so we have to inline the dashboard + # json. This is a limitation of Helm. + # See the install scripts: we inject the dashboard json into a copy of this file, which is the + # version that actually gets helm installed. diff --git a/spartan/metrics/values.yaml b/spartan/metrics/values.yaml deleted file mode 100644 index df2ca87aa21..00000000000 --- a/spartan/metrics/values.yaml +++ /dev/null @@ -1,332 +0,0 @@ -opentelemetry-collector: - mode: deployment - - service: - enabled: true - - image: - repository: "otel/opentelemetry-collector-contrib" - - ports: - otlp-http: - enabled: true - containerPort: 4318 - servicePort: 4318 - hostPort: 4318 - protocol: TCP - otel-metrics: - enabled: true - containerPort: 8888 - servicePort: 8888 - hostPort: 8888 - protocol: TCP - aztec-metrics: - enabled: true - containerPort: 8889 - servicePort: 8889 - hostPort: 8889 - protocol: TCP - - presets: - kubernetesAttributes: - enabled: true - config: - extensions: - health_check: - endpoint: ${env:MY_POD_IP}:13133 - processors: - resource: - attributes: - - action: preserve - key: k8s.namespace.name - batch: {} - receivers: - otlp: - protocols: - http: - endpoint: ${env:MY_POD_IP}:4318 - grpc: - endpoint: ${env:MY_POD_IP}:4317 - service: - extensions: [health_check] - telemetry: - metrics: - address: ${env:MY_POD_IP}:8888 - pipelines: - logs: - receivers: - - otlp - processors: - - batch - exporters: - - otlphttp/logs - traces: - receivers: - - otlp - processors: - - batch - exporters: - - otlp/tempo - metrics: - receivers: - - otlp - processors: - - batch - exporters: - - prometheus - # - debug - -# Enable and configure the Loki subchart -# https://artifacthub.io/packages/helm/grafana/loki -# loki: -# Nothing set here, because we need to use values from the values directory; -# otherwise, things don't get overridden correctly. 
- -# Enable and configure the Tempo subchart -# https://artifacthub.io/packages/helm/grafana/tempo -tempo: - minio: - enabled: true - mode: standalone - rootUser: grafana-tempo - rootPassword: supersecret - buckets: - # Default Tempo storage bucket - - name: tempo-traces - policy: none - purge: false - traces: - otlp: - grpc: - enabled: true - http: - enabled: true - zipkin: - enabled: false - jaeger: - thriftHttp: - enabled: false - opencensus: - enabled: false - -prometheus: - server: - global: - evaluation_interval: 15s - scrape_interval: 15s - serverFiles: - prometheus.yml: - scrape_configs: - - job_name: otel-collector - static_configs: - - targets: ["metrics-opentelemetry-collector.metrics:8888"] - - job_name: aztec - static_configs: - - targets: ["metrics-opentelemetry-collector.metrics:8889"] - - job_name: "kube-state-metrics" - static_configs: - - targets: - ["metrics-kube-state-metrics.metrics.svc.cluster.local:8080"] - -# Enable and configure Grafana -# https://artifacthub.io/packages/helm/grafana/grafana -grafana: - datasources: - datasources.yaml: - apiVersion: 1 - datasources: - - name: Loki - type: loki - url: http://metrics-loki.metrics:3100 - - name: Tempo - type: tempo - url: http://metrics-tempo.metrics:3100 - - name: Prometheus - type: prometheus - uid: spartan-metrics-prometheus - isDefault: true - url: http://metrics-prometheus-server.metrics:80 - dashboardProviders: - dashboardproviders.yaml: - apiVersion: 1 - providers: - - name: "default" - orgId: 1 - folder: "" - type: file - disableDeletion: false - editable: true - options: - path: /var/lib/grafana/dashboards/default - dashboards: - default: - # unfortunately, we can't use the `file` helper here, so we have to inline the dashboard - # json. This is a limitation of Helm. - # See https://github.com/helm/helm/issues/1892 - spartan-dashboard: - json: | - { - "annotations": { - "list": [ - { - "builtIn": 1, - "datasource": { - "type": "grafana", - "uid": "-- Grafana --" - }, - "enable": true, - "hide": true, - "iconColor": "rgba(0, 211, 255, 1)", - "name": "Annotations & Alerts", - "type": "dashboard" - } - ] - }, - "editable": true, - "fiscalYearStartMonth": 0, - "graphTooltip": 0, - "id": 1, - "links": [], - "panels": [ - { - "datasource": { - "default": false, - "type": "prometheus", - "uid": "spartan-metrics-prometheus" - }, - "description": "", - "fieldConfig": { - "defaults": { - "color": { - "mode": "palette-classic" - }, - "custom": { - "axisBorderShow": false, - "axisCenteredZero": false, - "axisColorMode": "series", - "axisLabel": "", - "axisPlacement": "auto", - "barAlignment": 0, - "barWidthFactor": 0.6, - "drawStyle": "line", - "fillOpacity": 0, - "gradientMode": "none", - "hideFrom": { - "legend": false, - "tooltip": false, - "viz": false - }, - "insertNulls": false, - "lineInterpolation": "linear", - "lineWidth": 1, - "pointSize": 5, - "scaleDistribution": { - "type": "linear" - }, - "showPoints": "auto", - "spanNulls": false, - "stacking": { - "group": "A", - "mode": "none" - }, - "thresholdsStyle": { - "mode": "off" - } - }, - "mappings": [], - "thresholds": { - "mode": "absolute", - "steps": [ - { - "color": "green", - "value": null - }, - { - "color": "red", - "value": 80 - } - ] - } - }, - "overrides": [] - }, - "gridPos": { - "h": 8, - "w": 12, - "x": 0, - "y": 0 - }, - "id": 1, - "options": { - "legend": { - "calcs": [], - "displayMode": "list", - "placement": "bottom", - "showLegend": true - }, - "tooltip": { - "mode": "single", - "sort": "none" - } - }, - "targets": [ - { - "datasource": { - 
"type": "prometheus", - "uid": "spartan-metrics-prometheus" - }, - "editorMode": "code", - "expr": "aztec_archiver_block_height", - "legendFormat": "__auto", - "range": true, - "refId": "A" - } - ], - "title": "L2 Block Height", - "type": "timeseries" - } - ], - "schemaVersion": 39, - "tags": [], - "templating": { - "list": [ - { - "current": { - "selected": false, - "text": "smoke", - "value": "smoke" - }, - "datasource": { - "type": "prometheus", - "uid": "spartan-metrics-prometheus" - }, - "definition": "label_values(k8s_namespace_name)", - "hide": 0, - "includeAll": false, - "multi": false, - "name": "Deployment", - "options": [], - "query": { - "qryType": 1, - "query": "label_values(k8s_namespace_name)", - "refId": "PrometheusVariableQueryEditor-VariableQuery" - }, - "refresh": 1, - "regex": "", - "skipUrlSync": false, - "sort": 1, - "type": "query" - } - ] - }, - "time": { - "from": "now-6h", - "to": "now" - }, - "timepicker": {}, - "timezone": "browser", - "title": "Spartan Deployments", - "uid": "ae01y5sn1bls0a", - "version": 1, - "weekStart": "" - } From 55de1ce03d5b91e6778942591cf6d5bc56955f2c Mon Sep 17 00:00:00 2001 From: just-mitch <68168980+just-mitch@users.noreply.github.com> Date: Fri, 3 Jan 2025 16:51:34 -0500 Subject: [PATCH 13/20] chore: bump devnet prover agents (#11046) --- spartan/aztec-network/values/release-devnet.yaml | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/spartan/aztec-network/values/release-devnet.yaml b/spartan/aztec-network/values/release-devnet.yaml index a6bf3ce8c53..16a9ca389ca 100644 --- a/spartan/aztec-network/values/release-devnet.yaml +++ b/spartan/aztec-network/values/release-devnet.yaml @@ -18,11 +18,15 @@ bootNode: disabled: true proverAgent: - replicas: 1 + replicas: 10 + bb: + hardwareConcurrency: 31 + gke: + spotEnabled: true resources: requests: - memory: "4Gi" - cpu: "1" + memory: "116Gi" + cpu: "31" bot: followChain: "PENDING" From 547e55680f1383acd7b7673afb508e36ba09a5ae Mon Sep 17 00:00:00 2001 From: just-mitch <68168980+just-mitch@users.noreply.github.com> Date: Fri, 3 Jan 2025 18:28:22 -0500 Subject: [PATCH 14/20] fix: update schema naming (#11038) Fix here and await master build since I can't build an image locally. 
---
 yarn-project/circuit-types/src/interfaces/configs.ts | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/yarn-project/circuit-types/src/interfaces/configs.ts b/yarn-project/circuit-types/src/interfaces/configs.ts
index baafd364294..8d56779c6f2 100644
--- a/yarn-project/circuit-types/src/interfaces/configs.ts
+++ b/yarn-project/circuit-types/src/interfaces/configs.ts
@@ -60,6 +60,7 @@ export const SequencerConfigSchema = z.object({
   allowedInSetup: z.array(AllowedElementSchema).optional(),
   maxBlockSizeInBytes: z.number().optional(),
   enforceFees: z.boolean().optional(),
-  gerousiaPayload: schemas.EthAddress.optional(),
+  governanceProposerPayload: schemas.EthAddress.optional(),
   maxL1TxInclusionTimeIntoSlot: z.number().optional(),
+  enforceTimeTable: z.boolean().optional(),
 }) satisfies ZodFor<SequencerConfig>;

From 5e3183c9ab4e6668c7c0ac634246c29f14147c11 Mon Sep 17 00:00:00 2001
From: David Banks <47112877+dbanks12@users.noreply.github.com>
Date: Fri, 3 Jan 2025 19:22:23 -0500
Subject: [PATCH 15/20] chore: new test that you can register, deploy, and
 call a public function all in one tx (#11045)

---
 .../e2e_deploy_contract/deploy_method.test.ts | 29 ++++++++++++++++---
 ...ild_private_kernel_reset_private_inputs.ts |  2 +-
 2 files changed, 26 insertions(+), 5 deletions(-)

diff --git a/yarn-project/end-to-end/src/e2e_deploy_contract/deploy_method.test.ts b/yarn-project/end-to-end/src/e2e_deploy_contract/deploy_method.test.ts
index 5f2fe0781b4..443ea035333 100644
--- a/yarn-project/end-to-end/src/e2e_deploy_contract/deploy_method.test.ts
+++ b/yarn-project/end-to-end/src/e2e_deploy_contract/deploy_method.test.ts
@@ -1,5 +1,13 @@
 import { getDeployedTestAccountsWallets } from '@aztec/accounts/testing';
-import { AztecAddress, type Logger, type PXE, type Wallet, createPXEClient, makeFetch } from '@aztec/aztec.js';
+import {
+  AztecAddress,
+  BatchCall,
+  type Logger,
+  type PXE,
+  type Wallet,
+  createPXEClient,
+  makeFetch,
+} from '@aztec/aztec.js';
 import { CounterContract } from '@aztec/noir-contracts.js/Counter';
 import { StatefulTestContract } from '@aztec/noir-contracts.js/StatefulTest';
 import { TestContract } from '@aztec/noir-contracts.js/Test';
@@ -89,9 +97,22 @@ describe('e2e_deploy_contract deploy method', () => {
     await expect(TestContract.deploy(wallet).prove(opts)).rejects.toThrow(/no function calls needed/i);
   });

-  it.skip('publicly deploys and calls a public function in the same batched call', async () => {
-    // TODO(@spalladino): Requires being able to read a nullifier on the same tx it was emitted.
- }); + it('publicly deploys and calls a public contract in the same batched call', async () => { + const owner = wallet.getAddress(); + // Create a contract instance and make the PXE aware of it + logger.debug(`Initializing deploy method`); + const deployMethod = StatefulTestContract.deploy(wallet, owner, owner, 42); + logger.debug(`Creating request/calls to register and deploy contract`); + const deploy = await deployMethod.request(); + logger.debug(`Getting an instance of the not-yet-deployed contract to batch calls to`); + const contract = await StatefulTestContract.at(deployMethod.getInstance().address, wallet); + + // Batch registration, deployment, and public call into same TX + logger.debug(`Creating public calls to run in same batch as deployment`); + const init = contract.methods.increment_public_value(owner, 84).request(); + logger.debug(`Deploying a contract and calling a public function in the same batched call`); + await new BatchCall(wallet, [...deploy.calls, init]).send().wait(); + }, 300_000); it.skip('publicly deploys and calls a public function in a tx in the same block', async () => { // TODO(@spalladino): Requires being able to read a nullifier on the same block it was emitted. diff --git a/yarn-project/pxe/src/kernel_prover/hints/build_private_kernel_reset_private_inputs.ts b/yarn-project/pxe/src/kernel_prover/hints/build_private_kernel_reset_private_inputs.ts index b7435a4483b..fde4074171d 100644 --- a/yarn-project/pxe/src/kernel_prover/hints/build_private_kernel_reset_private_inputs.ts +++ b/yarn-project/pxe/src/kernel_prover/hints/build_private_kernel_reset_private_inputs.ts @@ -57,7 +57,7 @@ function getNullifierMembershipWitnessResolver(oracle: ProvingDataOracle) { return async (nullifier: Fr) => { const res = await oracle.getNullifierMembershipWitness(nullifier); if (!res) { - throw new Error(`Cannot find the leaf for nullifier ${nullifier.toBigInt()}.`); + throw new Error(`Cannot find the leaf for nullifier ${nullifier}.`); } const { index, siblingPath, leafPreimage } = res; From 94b6c8641d4dd5b2788bc91c735e82a48504400a Mon Sep 17 00:00:00 2001 From: Maddiaa <47148561+Maddiaa0@users.noreply.github.com> Date: Sat, 4 Jan 2025 09:07:42 +0800 Subject: [PATCH 16/20] feat(blobs): blob sink (#10079) fixes: https://github.com/AztecProtocol/aztec-packages/issues/10053 --- yarn-project/blob-sink/.eslintrc.cjs | 1 + yarn-project/blob-sink/README.md | 21 +++ yarn-project/blob-sink/package.json | 84 +++++++++ yarn-project/blob-sink/src/blob-sink.test.ts | 134 ++++++++++++++ .../src/blobstore/blob_store_test_suite.ts | 142 +++++++++++++++ .../src/blobstore/disk_blob_store.test.ts | 8 + .../src/blobstore/disk_blob_store.ts | 32 ++++ yarn-project/blob-sink/src/blobstore/index.ts | 3 + .../blob-sink/src/blobstore/interface.ts | 12 ++ .../src/blobstore/memory_blob_store.test.ts | 6 + .../src/blobstore/memory_blob_store.ts | 25 +++ yarn-project/blob-sink/src/config.ts | 7 + yarn-project/blob-sink/src/factory.ts | 27 +++ yarn-project/blob-sink/src/index.ts | 3 + yarn-project/blob-sink/src/metrics.ts | 27 +++ yarn-project/blob-sink/src/server.ts | 170 ++++++++++++++++++ yarn-project/blob-sink/src/types/api.ts | 49 +++++ .../src/types/blob_with_index.test.ts | 31 ++++ .../blob-sink/src/types/blob_with_index.ts | 51 ++++++ yarn-project/blob-sink/src/types/index.ts | 1 + yarn-project/blob-sink/tsconfig.json | 23 +++ .../structs/blobs/blob_public_inputs.test.ts | 4 +- yarn-project/end-to-end/package.json | 1 + .../composed/integration_l1_publisher.test.ts | 4 + 
 .../end-to-end/src/e2e_synching.test.ts       |  25 ++-
 .../src/fixtures/snapshot_manager.ts          |  32 ++++
 yarn-project/end-to-end/src/fixtures/utils.ts |  12 ++
 yarn-project/end-to-end/tsconfig.json         |   3 +
 yarn-project/foundation/src/blob/blob.test.ts |  20 ++-
 yarn-project/foundation/src/blob/index.ts     | 109 +++++++----
 yarn-project/foundation/src/config/env_var.ts |   2 +
 .../foundation/src/serialize/buffer_reader.ts |  14 ++
 .../foundation/src/serialize/serialize.ts     |   1 +
 .../tx_pool/aztec_kv_tx_pool.test.ts          |   2 +-
 yarn-project/package.json                     |   1 +
 yarn-project/sequencer-client/package.json    |   1 +
 .../sequencer-client/src/publisher/config.ts  |  10 ++
 .../src/publisher/l1-publisher.test.ts        |  77 +++++++-
 .../src/publisher/l1-publisher.ts             |  69 ++++++-
 yarn-project/telemetry-client/src/metrics.ts  |   3 +
 yarn-project/yarn.lock                        | 101 ++++++++++-
 41 files changed, 1288 insertions(+), 60 deletions(-)
 create mode 100644 yarn-project/blob-sink/.eslintrc.cjs
 create mode 100644 yarn-project/blob-sink/README.md
 create mode 100644 yarn-project/blob-sink/package.json
 create mode 100644 yarn-project/blob-sink/src/blob-sink.test.ts
 create mode 100644 yarn-project/blob-sink/src/blobstore/blob_store_test_suite.ts
 create mode 100644 yarn-project/blob-sink/src/blobstore/disk_blob_store.test.ts
 create mode 100644 yarn-project/blob-sink/src/blobstore/disk_blob_store.ts
 create mode 100644 yarn-project/blob-sink/src/blobstore/index.ts
 create mode 100644 yarn-project/blob-sink/src/blobstore/interface.ts
 create mode 100644 yarn-project/blob-sink/src/blobstore/memory_blob_store.test.ts
 create mode 100644 yarn-project/blob-sink/src/blobstore/memory_blob_store.ts
 create mode 100644 yarn-project/blob-sink/src/config.ts
 create mode 100644 yarn-project/blob-sink/src/factory.ts
 create mode 100644 yarn-project/blob-sink/src/index.ts
 create mode 100644 yarn-project/blob-sink/src/metrics.ts
 create mode 100644 yarn-project/blob-sink/src/server.ts
 create mode 100644 yarn-project/blob-sink/src/types/api.ts
 create mode 100644 yarn-project/blob-sink/src/types/blob_with_index.test.ts
 create mode 100644 yarn-project/blob-sink/src/types/blob_with_index.ts
 create mode 100644 yarn-project/blob-sink/src/types/index.ts
 create mode 100644 yarn-project/blob-sink/tsconfig.json

diff --git a/yarn-project/blob-sink/.eslintrc.cjs b/yarn-project/blob-sink/.eslintrc.cjs
new file mode 100644
index 00000000000..e659927475c
--- /dev/null
+++ b/yarn-project/blob-sink/.eslintrc.cjs
@@ -0,0 +1 @@
+module.exports = require('@aztec/foundation/eslint');
diff --git a/yarn-project/blob-sink/README.md b/yarn-project/blob-sink/README.md
new file mode 100644
index 00000000000..649e8eab867
--- /dev/null
+++ b/yarn-project/blob-sink/README.md
@@ -0,0 +1,21 @@
+## Blob Sink
+
+An HTTP API that loosely emulates the https://ethereum.github.io/beacon-APIs/?urls.primaryName=dev#/Beacon/getBlobSidecars API.
+We do not support all of the possible values of `block_id`, namely `genesis`, `head`, and `finalized`, as we are not using any of these values in our
+blobs integration.
+
+## When is this used?
+
+This service will run alongside end-to-end tests to capture the blob transactions that are sent alongside a `propose` transaction.
+
+### Why?
+
+Once we make the transition to blob transactions, we will need to be able to query for blobs. One way to do this is to run an entire L1 execution layer and consensus layer pair alongside all of our e2e tests and inside the sandbox. But this is a bit much, so instead the blob sink can be used to store and request blobs, without needing to run an entire execution and consensus client pair.
+
+### Other Use Cases
+
+Blobs are only held in the L1 consensus layer for a period of ~3 weeks; the blob sink can be used to store blobs for longer.
+
+### How?
+
+The blob sink is a simple HTTP server that can be run alongside the e2e tests. It will store the blobs in a local file system and provide an API to query for them.
diff --git a/yarn-project/blob-sink/package.json b/yarn-project/blob-sink/package.json
new file mode 100644
index 00000000000..7090bf99527
--- /dev/null
+++ b/yarn-project/blob-sink/package.json
@@ -0,0 +1,84 @@
+{
+  "name": "@aztec/blob-sink",
+  "version": "0.1.0",
+  "type": "module",
+  "exports": {
+    ".": "./dest/index.js"
+  },
+  "inherits": [
+    "../package.common.json"
+  ],
+  "scripts": {
+    "build": "yarn clean && tsc -b",
+    "build:dev": "tsc -b --watch",
+    "clean": "rm -rf ./dest .tsbuildinfo",
+    "formatting": "run -T prettier --check ./src && run -T eslint ./src",
+    "formatting:fix": "run -T eslint --fix ./src && run -T prettier -w ./src",
+    "test": "HARDWARE_CONCURRENCY=${HARDWARE_CONCURRENCY:-16} RAYON_NUM_THREADS=${RAYON_NUM_THREADS:-4} NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --passWithNoTests --maxWorkers=${JEST_MAX_WORKERS:-8}"
+  },
+  "jest": {
+    "moduleNameMapper": {
+      "^(\\.{1,2}/.*)\\.[cm]?js$": "$1"
+    },
+    "testRegex": "./src/.*\\.test\\.(js|mjs|ts)$",
+    "rootDir": "./src",
+    "transform": {
+      "^.+\\.tsx?$": [
+        "@swc/jest",
+        {
+          "jsc": {
+            "parser": {
+              "syntax": "typescript",
+              "decorators": true
+            },
+            "transform": {
+              "decoratorVersion": "2022-03"
+            }
+          }
+        }
+      ]
+    },
+    "extensionsToTreatAsEsm": [
+      ".ts"
+    ],
+    "reporters": [
+      "default"
+    ],
+    "testTimeout": 30000,
+    "setupFiles": [
+      "../../foundation/src/jest/setup.mjs"
+    ]
+  },
+  "dependencies": {
+    "@aztec/circuit-types": "workspace:^",
+    "@aztec/foundation": "workspace:^",
+    "@aztec/kv-store": "workspace:*",
+    "@aztec/telemetry-client": "workspace:*",
+    "express": "^4.21.1",
+    "source-map-support": "^0.5.21",
+    "tslib": "^2.4.0",
+    "zod": "^3.23.8"
+  },
+  "devDependencies": {
+    "@jest/globals": "^29.5.0",
+    "@types/jest": "^29.5.0",
+    "@types/memdown": "^3.0.0",
+    "@types/node": "^18.7.23",
+    "@types/source-map-support": "^0.5.10",
+    "@types/supertest": "^6.0.2",
+    "jest": "^29.5.0",
+    "jest-mock-extended": "^3.0.3",
+    "supertest": "^7.0.0",
+    "ts-node": "^10.9.1",
+    "typescript": "^5.0.4"
+  },
+  "files": [
+    "dest",
+    "src",
+    "!*.test.*"
+  ],
+  "types": "./dest/index.d.ts",
+  "engines": {
+    "node": ">=18"
+  }
+}
diff --git a/yarn-project/blob-sink/src/blob-sink.test.ts b/yarn-project/blob-sink/src/blob-sink.test.ts
new file mode 100644
index 00000000000..3107b953d47
--- /dev/null
+++ b/yarn-project/blob-sink/src/blob-sink.test.ts
@@ -0,0 +1,134 @@
+import { Blob } from '@aztec/foundation/blob';
+import { Fr } from '@aztec/foundation/fields';
+
+import request from 'supertest';
+
+import { BlobSinkServer } from './server.js';
+
+describe('BlobSinkService', () => {
+  let service: BlobSinkServer;
+
+  beforeEach(async () => {
+    service = new BlobSinkServer({
+      port: 0, // Using port 0 lets the OS assign a random available port
+    });
+    await service.start();
+  });
+
+  afterEach(async () => {
+    await service.stop();
+  });
+
+  describe('should store and retrieve a blob sidecar', () => {
+    const testFields = [Fr.random(), Fr.random(), Fr.random()];
+    const testFields2 = [Fr.random(), Fr.random(), Fr.random()];
+    const blob = Blob.fromFields(testFields);
+    const blob2 = Blob.fromFields(testFields2);
+    const blockId = '0x1234';
+
+    beforeEach(async () => {
+      // Post the blob
+      const postResponse = await request(service.getApp())
+        .post('/blob_sidecar')
+        .send({
+          // eslint-disable-next-line camelcase
+          block_id: blockId,
+          blobs: [
+            {
+              index: 0,
+              blob: blob.toBuffer(),
+            },
+            {
+              index: 1,
+              blob: blob2.toBuffer(),
+            },
+          ],
+        });
+
+      expect(postResponse.status).toBe(200);
+    });
+
+    it('should retrieve the blob', async () => {
+      // Retrieve the blob
+      const getResponse = await request(service.getApp()).get(`/eth/v1/beacon/blob_sidecars/${blockId}`);
+
+      expect(getResponse.status).toBe(200);
+
+      // Convert the response blob back to a Blob object and verify it matches
+      const retrievedBlobs = getResponse.body.data;
+
+      const retrievedBlob = Blob.fromBuffer(Buffer.from(retrievedBlobs[0].blob, 'hex'));
+      const retrievedBlob2 = Blob.fromBuffer(Buffer.from(retrievedBlobs[1].blob, 'hex'));
+      expect(retrievedBlob.fieldsHash.toString()).toBe(blob.fieldsHash.toString());
+      expect(retrievedBlob.commitment.toString('hex')).toBe(blob.commitment.toString('hex'));
+      expect(retrievedBlob2.fieldsHash.toString()).toBe(blob2.fieldsHash.toString());
+      expect(retrievedBlob2.commitment.toString('hex')).toBe(blob2.commitment.toString('hex'));
+    });
+
+    it('should retrieve specific indices', async () => {
+      // We can also request specific indices
+      const getWithIndices = await request(service.getApp()).get(
+        `/eth/v1/beacon/blob_sidecars/${blockId}?indices=0,1`,
+      );
+
+      expect(getWithIndices.status).toBe(200);
+      expect(getWithIndices.body.data.length).toBe(2);
+
+      const retrievedBlobs = getWithIndices.body.data;
+      const retrievedBlob = Blob.fromBuffer(Buffer.from(retrievedBlobs[0].blob, 'hex'));
+      const retrievedBlob2 = Blob.fromBuffer(Buffer.from(retrievedBlobs[1].blob, 'hex'));
+      expect(retrievedBlob.fieldsHash.toString()).toBe(blob.fieldsHash.toString());
+      expect(retrievedBlob.commitment.toString('hex')).toBe(blob.commitment.toString('hex'));
+      expect(retrievedBlob2.fieldsHash.toString()).toBe(blob2.fieldsHash.toString());
+      expect(retrievedBlob2.commitment.toString('hex')).toBe(blob2.commitment.toString('hex'));
+    });
+
+    it('should retrieve a single index', async () => {
+      const getWithIndices = await request(service.getApp()).get(`/eth/v1/beacon/blob_sidecars/${blockId}?indices=1`);
+
+      expect(getWithIndices.status).toBe(200);
+      expect(getWithIndices.body.data.length).toBe(1);
+
+      const retrievedBlobs = getWithIndices.body.data;
+      const retrievedBlob = Blob.fromBuffer(Buffer.from(retrievedBlobs[0].blob, 'hex'));
+      expect(retrievedBlob.fieldsHash.toString()).toBe(blob2.fieldsHash.toString());
+      expect(retrievedBlob.commitment.toString('hex')).toBe(blob2.commitment.toString('hex'));
+    });
+  });
+
+  it('should return an error if invalid indices are provided', async () => {
+    const blockId = '0x1234';
+
+    const response = await request(service.getApp()).get(`/eth/v1/beacon/blob_sidecars/${blockId}?indices=word`);
+    expect(response.status).toBe(400);
+    expect(response.body.error).toBe('Invalid indices parameter');
+  });
+
+  it('should return an error if the block ID is invalid (POST)', async () => {
+    const response = await request(service.getApp()).post('/blob_sidecar').send({
+      // eslint-disable-next-line camelcase
+      block_id: undefined,
+    });
+
+    expect(response.status).toBe(400);
+  });
+
+  it('should return an error if the block ID is invalid (GET)', async () => {
+    const response = await
request(service.getApp()).get('/eth/v1/beacon/blob_sidecars/invalid-id'); + + expect(response.status).toBe(400); + }); + + it('should return 404 for non-existent blob', async () => { + const response = await request(service.getApp()).get('/eth/v1/beacon/blob_sidecars/0x999999'); + + expect(response.status).toBe(404); + }); + + it('should reject negative block IDs', async () => { + const response = await request(service.getApp()).get('/eth/v1/beacon/blob_sidecars/-123'); + + expect(response.status).toBe(400); + expect(response.body.error).toBe('Invalid block_id parameter'); + }); +}); diff --git a/yarn-project/blob-sink/src/blobstore/blob_store_test_suite.ts b/yarn-project/blob-sink/src/blobstore/blob_store_test_suite.ts new file mode 100644 index 00000000000..2636ec726d9 --- /dev/null +++ b/yarn-project/blob-sink/src/blobstore/blob_store_test_suite.ts @@ -0,0 +1,142 @@ +import { Blob } from '@aztec/foundation/blob'; +import { Fr } from '@aztec/foundation/fields'; + +import { BlobWithIndex } from '../types/index.js'; +import { type BlobStore } from './interface.js'; + +export function describeBlobStore(getBlobStore: () => BlobStore) { + let blobStore: BlobStore; + + beforeEach(() => { + blobStore = getBlobStore(); + }); + + it('should store and retrieve a blob', async () => { + // Create a test blob with random fields + const testFields = [Fr.random(), Fr.random(), Fr.random()]; + const blob = Blob.fromFields(testFields); + const blockId = '0x12345'; + const blobWithIndex = new BlobWithIndex(blob, 0); + + // Store the blob + await blobStore.addBlobSidecars(blockId, [blobWithIndex]); + + // Retrieve the blob + const retrievedBlobs = await blobStore.getBlobSidecars(blockId); + const [retrievedBlob] = retrievedBlobs!; + + // Verify the blob was retrieved and matches + expect(retrievedBlob).toBeDefined(); + expect(retrievedBlob.blob.fieldsHash.toString()).toBe(blob.fieldsHash.toString()); + expect(retrievedBlob.blob.commitment.toString('hex')).toBe(blob.commitment.toString('hex')); + }); + + it('Should allow requesting a specific index of blob', async () => { + const testFields = [Fr.random(), Fr.random(), Fr.random()]; + const blob = Blob.fromFields(testFields); + const blockId = '0x12345'; + const blobWithIndex = new BlobWithIndex(blob, 0); + const blobWithIndex2 = new BlobWithIndex(blob, 1); + + await blobStore.addBlobSidecars(blockId, [blobWithIndex, blobWithIndex2]); + + const retrievedBlobs = await blobStore.getBlobSidecars(blockId, [0]); + const [retrievedBlob] = retrievedBlobs!; + + expect(retrievedBlob.blob.fieldsHash.toString()).toBe(blob.fieldsHash.toString()); + expect(retrievedBlob.blob.commitment.toString('hex')).toBe(blob.commitment.toString('hex')); + + const retrievedBlobs2 = await blobStore.getBlobSidecars(blockId, [1]); + const [retrievedBlob2] = retrievedBlobs2!; + + expect(retrievedBlob2.blob.fieldsHash.toString()).toBe(blob.fieldsHash.toString()); + expect(retrievedBlob2.blob.commitment.toString('hex')).toBe(blob.commitment.toString('hex')); + }); + + it('Differentiate between blockHash and slot', async () => { + const testFields = [Fr.random(), Fr.random(), Fr.random()]; + const testFieldsSlot = [Fr.random(), Fr.random(), Fr.random()]; + const blob = Blob.fromFields(testFields); + const blobSlot = Blob.fromFields(testFieldsSlot); + const blockId = '0x12345'; + const slot = '12345'; + const blobWithIndex = new BlobWithIndex(blob, 0); + const blobWithIndexSlot = new BlobWithIndex(blobSlot, 0); + + await blobStore.addBlobSidecars(blockId, [blobWithIndex]); + await 
blobStore.addBlobSidecars(slot, [blobWithIndexSlot]); + + const retrievedBlobs = await blobStore.getBlobSidecars(blockId, [0]); + const [retrievedBlob] = retrievedBlobs!; + + expect(retrievedBlob.blob.fieldsHash.toString()).toBe(blob.fieldsHash.toString()); + expect(retrievedBlob.blob.commitment.toString('hex')).toBe(blob.commitment.toString('hex')); + + const retrievedBlobs2 = await blobStore.getBlobSidecars(slot, [0]); + const [retrievedBlob2] = retrievedBlobs2!; + + expect(retrievedBlob2.blob.fieldsHash.toString()).toBe(blobSlot.fieldsHash.toString()); + expect(retrievedBlob2.blob.commitment.toString('hex')).toBe(blobSlot.commitment.toString('hex')); + }); + + it('should return undefined for non-existent blob', async () => { + const nonExistentBlob = await blobStore.getBlobSidecars('999999'); + expect(nonExistentBlob).toBeUndefined(); + }); + + it('should handle multiple blobs with different block IDs', async () => { + // Create two different blobs + const blob1 = Blob.fromFields([Fr.random(), Fr.random()]); + const blob2 = Blob.fromFields([Fr.random(), Fr.random(), Fr.random()]); + const blobWithIndex1 = new BlobWithIndex(blob1, 0); + const blobWithIndex2 = new BlobWithIndex(blob2, 0); + + // Store both blobs + await blobStore.addBlobSidecars('1', [blobWithIndex1]); + await blobStore.addBlobSidecars('2', [blobWithIndex2]); + + // Retrieve and verify both blobs + const retrieved1 = await blobStore.getBlobSidecars('1'); + const retrieved2 = await blobStore.getBlobSidecars('2'); + const [retrievedBlob1] = retrieved1!; + const [retrievedBlob2] = retrieved2!; + + expect(retrievedBlob1.blob.commitment.toString('hex')).toBe(blob1.commitment.toString('hex')); + expect(retrievedBlob2.blob.commitment.toString('hex')).toBe(blob2.commitment.toString('hex')); + }); + + it('should overwrite blob when using same block ID', async () => { + // Create two different blobs + const originalBlob = Blob.fromFields([Fr.random()]); + const newBlob = Blob.fromFields([Fr.random(), Fr.random()]); + const blockId = '1'; + const originalBlobWithIndex = new BlobWithIndex(originalBlob, 0); + const newBlobWithIndex = new BlobWithIndex(newBlob, 0); + + // Store original blob + await blobStore.addBlobSidecars(blockId, [originalBlobWithIndex]); + + // Overwrite with new blob + await blobStore.addBlobSidecars(blockId, [newBlobWithIndex]); + + // Retrieve and verify it's the new blob + const retrievedBlobs = await blobStore.getBlobSidecars(blockId); + const [retrievedBlob] = retrievedBlobs!; + expect(retrievedBlob.blob.commitment.toString('hex')).toBe(newBlob.commitment.toString('hex')); + expect(retrievedBlob.blob.commitment.toString('hex')).not.toBe(originalBlob.commitment.toString('hex')); + }); + + it('should handle multiple blobs with the same block ID', async () => { + const blob1 = Blob.fromFields([Fr.random()]); + const blob2 = Blob.fromFields([Fr.random()]); + const blobWithIndex1 = new BlobWithIndex(blob1, 0); + const blobWithIndex2 = new BlobWithIndex(blob2, 0); + + await blobStore.addBlobSidecars('1', [blobWithIndex1, blobWithIndex2]); + const retrievedBlobs = await blobStore.getBlobSidecars('1'); + const [retrievedBlob1, retrievedBlob2] = retrievedBlobs!; + + expect(retrievedBlob1.blob.commitment.toString('hex')).toBe(blob1.commitment.toString('hex')); + expect(retrievedBlob2.blob.commitment.toString('hex')).toBe(blob2.commitment.toString('hex')); + }); +} diff --git a/yarn-project/blob-sink/src/blobstore/disk_blob_store.test.ts b/yarn-project/blob-sink/src/blobstore/disk_blob_store.test.ts new file mode 
100644
index 00000000000..8b523dbaef1
--- /dev/null
+++ b/yarn-project/blob-sink/src/blobstore/disk_blob_store.test.ts
@@ -0,0 +1,8 @@
+import { openTmpStore } from '@aztec/kv-store/lmdb';
+
+import { describeBlobStore } from './blob_store_test_suite.js';
+import { DiskBlobStore } from './disk_blob_store.js';
+
+describe('DiskBlobStore', () => {
+  describeBlobStore(() => new DiskBlobStore(openTmpStore()));
+});
diff --git a/yarn-project/blob-sink/src/blobstore/disk_blob_store.ts b/yarn-project/blob-sink/src/blobstore/disk_blob_store.ts
new file mode 100644
index 00000000000..63e4dc10ab6
--- /dev/null
+++ b/yarn-project/blob-sink/src/blobstore/disk_blob_store.ts
@@ -0,0 +1,32 @@
+import { type AztecKVStore, type AztecMap } from '@aztec/kv-store';
+
+import { type BlobWithIndex, BlobsWithIndexes } from '../types/index.js';
+import { type BlobStore } from './interface.js';
+
+export class DiskBlobStore implements BlobStore {
+  blobs: AztecMap<string, Buffer>;
+
+  constructor(store: AztecKVStore) {
+    this.blobs = store.openMap('blobs');
+  }
+
+  public getBlobSidecars(blockId: string, indices?: number[]): Promise<BlobWithIndex[] | undefined> {
+    const blobBuffer = this.blobs.get(`${blockId}`);
+    if (!blobBuffer) {
+      return Promise.resolve(undefined);
+    }
+
+    const blobsWithIndexes = BlobsWithIndexes.fromBuffer(blobBuffer);
+    if (indices) {
+      // If indices are provided, return the blobs at the specified indices
+      return Promise.resolve(blobsWithIndexes.getBlobsFromIndices(indices));
+    }
+    // If no indices are provided, return all blobs
+    return Promise.resolve(blobsWithIndexes.blobs);
+  }
+
+  public async addBlobSidecars(blockId: string, blobSidecars: BlobWithIndex[]): Promise<void> {
+    await this.blobs.set(blockId, new BlobsWithIndexes(blobSidecars).toBuffer());
+    return Promise.resolve();
+  }
+}
diff --git a/yarn-project/blob-sink/src/blobstore/index.ts b/yarn-project/blob-sink/src/blobstore/index.ts
new file mode 100644
index 00000000000..fd3901930cf
--- /dev/null
+++ b/yarn-project/blob-sink/src/blobstore/index.ts
@@ -0,0 +1,3 @@
+export * from './memory_blob_store.js';
+export * from './disk_blob_store.js';
+export * from './interface.js';
diff --git a/yarn-project/blob-sink/src/blobstore/interface.ts b/yarn-project/blob-sink/src/blobstore/interface.ts
new file mode 100644
index 00000000000..27d7fac25c2
--- /dev/null
+++ b/yarn-project/blob-sink/src/blobstore/interface.ts
@@ -0,0 +1,12 @@
+import { type BlobWithIndex } from '../types/index.js';
+
+export interface BlobStore {
+  /**
+   * Get a blob by block id
+   */
+  getBlobSidecars: (blockId: string, indices?: number[]) => Promise<BlobWithIndex[] | undefined>;
+  /**
+   * Add a blob to the store
+   */
+  addBlobSidecars: (blockId: string, blobSidecars: BlobWithIndex[]) => Promise<void>;
+}
diff --git a/yarn-project/blob-sink/src/blobstore/memory_blob_store.test.ts b/yarn-project/blob-sink/src/blobstore/memory_blob_store.test.ts
new file mode 100644
index 00000000000..2f13926cd1a
--- /dev/null
+++ b/yarn-project/blob-sink/src/blobstore/memory_blob_store.test.ts
@@ -0,0 +1,6 @@
+import { describeBlobStore } from './blob_store_test_suite.js';
+import { MemoryBlobStore } from './memory_blob_store.js';
+
+describe('MemoryBlobStore', () => {
+  describeBlobStore(() => new MemoryBlobStore());
+});
diff --git a/yarn-project/blob-sink/src/blobstore/memory_blob_store.ts b/yarn-project/blob-sink/src/blobstore/memory_blob_store.ts
new file mode 100644
index 00000000000..efe013f9b01
--- /dev/null
+++ b/yarn-project/blob-sink/src/blobstore/memory_blob_store.ts
@@ -0,0 +1,25 @@
+import { type BlobWithIndex, BlobsWithIndexes } from
'../types/index.js';
+import { type BlobStore } from './interface.js';
+
+export class MemoryBlobStore implements BlobStore {
+  private blobs: Map<string, Buffer> = new Map();
+
+  public getBlobSidecars(blockId: string, indices?: number[]): Promise<BlobWithIndex[] | undefined> {
+    const blobBuffer = this.blobs.get(blockId);
+    if (!blobBuffer) {
+      return Promise.resolve(undefined);
+    }
+    const blobsWithIndexes = BlobsWithIndexes.fromBuffer(blobBuffer);
+    if (indices) {
+      // If indices are provided, return the blobs at the specified indices
+      return Promise.resolve(blobsWithIndexes.getBlobsFromIndices(indices));
+    }
+    // If no indices are provided, return all blobs
+    return Promise.resolve(blobsWithIndexes.blobs);
+  }
+
+  public addBlobSidecars(blockId: string, blobSidecars: BlobWithIndex[]): Promise<void> {
+    this.blobs.set(blockId, new BlobsWithIndexes(blobSidecars).toBuffer());
+    return Promise.resolve();
+  }
+}
diff --git a/yarn-project/blob-sink/src/config.ts b/yarn-project/blob-sink/src/config.ts
new file mode 100644
index 00000000000..e18311f9f1d
--- /dev/null
+++ b/yarn-project/blob-sink/src/config.ts
@@ -0,0 +1,7 @@
+import { type DataStoreConfig } from '@aztec/kv-store/config';
+
+export interface BlobSinkConfig {
+  port?: number;
+  dataStoreConfig?: DataStoreConfig;
+  otelMetricsCollectorUrl?: string;
+}
diff --git a/yarn-project/blob-sink/src/factory.ts b/yarn-project/blob-sink/src/factory.ts
new file mode 100644
index 00000000000..43a0df8e6c3
--- /dev/null
+++ b/yarn-project/blob-sink/src/factory.ts
@@ -0,0 +1,27 @@
+import { type AztecKVStore } from '@aztec/kv-store';
+import { createStore } from '@aztec/kv-store/lmdb';
+import { type TelemetryClient } from '@aztec/telemetry-client';
+
+import { type BlobSinkConfig } from './config.js';
+import { BlobSinkServer } from './server.js';
+
+// If data store settings are provided, the store is created and returned.
+// Otherwise, undefined is returned and an in-memory store will be used.
+async function getDataStoreConfig(config?: BlobSinkConfig): Promise<AztecKVStore | undefined> {
+  if (!config?.dataStoreConfig) {
+    return undefined;
+  }
+  return await createStore('blob-sink', config.dataStoreConfig);
+}
+
+/**
+ * Creates a blob sink service from the provided config.
+ */
+export async function createBlobSinkServer(
+  config?: BlobSinkConfig,
+  telemetry?: TelemetryClient,
+): Promise<BlobSinkServer> {
+  const store = await getDataStoreConfig(config);
+
+  return new BlobSinkServer(config, store, telemetry);
+}
diff --git a/yarn-project/blob-sink/src/index.ts b/yarn-project/blob-sink/src/index.ts
new file mode 100644
index 00000000000..25844130c2f
--- /dev/null
+++ b/yarn-project/blob-sink/src/index.ts
@@ -0,0 +1,3 @@
+export * from './server.js';
+export * from './config.js';
+export * from './factory.js';
diff --git a/yarn-project/blob-sink/src/metrics.ts b/yarn-project/blob-sink/src/metrics.ts
new file mode 100644
index 00000000000..28e2b6308c0
--- /dev/null
+++ b/yarn-project/blob-sink/src/metrics.ts
@@ -0,0 +1,27 @@
+import { type Histogram, Metrics, type TelemetryClient, type UpDownCounter } from '@aztec/telemetry-client';
+
+import { type BlobWithIndex } from './types/blob_with_index.js';
+
+export class BlobSinkMetrics {
+  /** The number of blobs in the blob store */
+  private objectsInBlobStore: UpDownCounter;
+
+  /** Tracks blob size */
+  private blobSize: Histogram;
+
+  constructor(telemetry: TelemetryClient) {
+    const name = 'BlobSink';
+    this.objectsInBlobStore = telemetry.getMeter(name).createUpDownCounter(Metrics.BLOB_SINK_OBJECTS_IN_BLOB_STORE, {
+      description: 'The current number of blobs in the blob store',
+    });
+
+    this.blobSize = telemetry.getMeter(name).createHistogram(Metrics.BLOB_SINK_BLOB_SIZE, {
+      description: 'The non-zero size of blobs in the blob store',
+    });
+  }
+
+  public recordBlobReceipt(blobs: BlobWithIndex[]) {
+    this.objectsInBlobStore.add(blobs.length);
+    blobs.forEach(b => this.blobSize.record(b.blob.getSize()));
+  }
+}
diff --git a/yarn-project/blob-sink/src/server.ts b/yarn-project/blob-sink/src/server.ts
new file mode 100644
index 00000000000..45c79f6991d
--- /dev/null
+++ b/yarn-project/blob-sink/src/server.ts
@@ -0,0 +1,170 @@
+import { Blob } from '@aztec/foundation/blob';
+import { type Logger, createLogger } from '@aztec/foundation/log';
+import { type AztecKVStore } from '@aztec/kv-store';
+import { type TelemetryClient } from '@aztec/telemetry-client';
+import { NoopTelemetryClient } from '@aztec/telemetry-client/noop';
+
+import express, { type Express, type Request, type Response, json } from 'express';
+import { type Server } from 'http';
+import { z } from 'zod';
+
+import { type BlobStore, DiskBlobStore } from './blobstore/index.js';
+import { MemoryBlobStore } from './blobstore/memory_blob_store.js';
+import { type BlobSinkConfig } from './config.js';
+import { BlobSinkMetrics } from './metrics.js';
+import { type PostBlobSidecarRequest, blockIdSchema, indicesSchema } from './types/api.js';
+import { BlobWithIndex } from './types/index.js';
+
+/**
+ * Example usage:
+ * const service = new BlobSinkServer({ port: 5052 });
+ * await service.start();
+ * ... later ...
+ * await service.stop();
+ */
+export class BlobSinkServer {
+  public readonly port: number;
+
+  private app: Express;
+  private server: Server | null = null;
+  private blobStore: BlobStore;
+  private metrics: BlobSinkMetrics;
+  private log: Logger = createLogger('aztec:blob-sink');
+
+  constructor(config?: BlobSinkConfig, store?: AztecKVStore, telemetry: TelemetryClient = new NoopTelemetryClient()) {
+    this.port = config?.port ?? 5052; // 5052 is the beacon chain default http port
+    this.app = express();
+
+    // Setup middleware
+    this.app.use(json({ limit: '1mb' })); // Increase the limit to allow for a blob to be sent
+
+    this.metrics = new BlobSinkMetrics(telemetry);
+
+    this.blobStore = store === undefined ? new MemoryBlobStore() : new DiskBlobStore(store);
+
+    // Setup routes
+    this.setupRoutes();
+  }
+
+  private setupRoutes() {
+    this.app.get('/eth/v1/beacon/blob_sidecars/:block_id', this.handleGetBlobSidecar.bind(this));
+    this.app.post('/blob_sidecar', this.handlePostBlobSidecar.bind(this));
+  }
+
+  private async handleGetBlobSidecar(req: Request, res: Response) {
+    // eslint-disable-next-line camelcase
+    const { block_id } = req.params;
+    const { indices } = req.query;
+
+    try {
+      // eslint-disable-next-line camelcase
+      const parsedBlockId = blockIdSchema.safeParse(block_id);
+      if (!parsedBlockId.success) {
+        res.status(400).json({
+          error: 'Invalid block_id parameter',
+        });
+        return;
+      }
+
+      const parsedIndices = indicesSchema.safeParse(indices);
+      if (!parsedIndices.success) {
+        res.status(400).json({
+          error: 'Invalid indices parameter',
+        });
+        return;
+      }
+
+      const blobs = await this.blobStore.getBlobSidecars(parsedBlockId.data.toString(), parsedIndices.data);
+
+      if (!blobs) {
+        res.status(404).json({ error: 'Blob not found' });
+        return;
+      }
+
+      res.json({
+        version: 'deneb',
+        data: blobs.map(blob => blob.toJSON()),
+      });
+    } catch (error) {
+      if (error instanceof z.ZodError) {
+        res.status(400).json({
+          error: 'Invalid block_id parameter',
+          details: error.errors,
+        });
+      } else {
+        res.status(500).json({
+          error: 'Internal server error',
+        });
+      }
+    }
+  }
+
+  private async handlePostBlobSidecar(req: Request, res: Response) {
+    // eslint-disable-next-line camelcase
+    const { block_id, blobs } = req.body;
+
+    try {
+      // eslint-disable-next-line camelcase
+      const parsedBlockId = blockIdSchema.parse(block_id);
+      if (!parsedBlockId) {
+        res.status(400).json({
+          error: 'Invalid block_id parameter',
+        });
+        return;
+      }
+
+      this.log.info(`Received blob sidecar for block ${parsedBlockId}`);
+
+      const blobObjects: BlobWithIndex[] = this.parseBlobData(blobs);
+
+      await this.blobStore.addBlobSidecars(parsedBlockId.toString(), blobObjects);
+      this.metrics.recordBlobReceipt(blobObjects);
+
+      this.log.info(`Blob sidecar stored successfully for block ${parsedBlockId}`);
+
+      res.json({ message: 'Blob sidecar stored successfully' });
+    } catch (error) {
+      res.status(400).json({
+        error: 'Invalid blob data',
+      });
+    }
+  }
+
+  private parseBlobData(blobs: PostBlobSidecarRequest['blobs']): BlobWithIndex[] {
+    return blobs.map(({ index, blob }) => new BlobWithIndex(Blob.fromBuffer(Buffer.from(blob.data)), index));
+  }
+
+  public start(): Promise<void> {
+    return new Promise(resolve => {
+      this.server = this.app.listen(this.port, () => {
+        this.log.info(`Server is running on http://localhost:${this.port}`);
+        resolve();
+      });
+    });
+  }
+
+  public stop(): Promise<void> {
+    this.log.info('Stopping blob sink');
+    return new Promise((resolve, reject) => {
+      if (!this.server) {
+        resolve();
+        this.log.info('Blob sink already stopped');
+        return;
+      }
+
+      this.server.close(err => {
+        if (err) {
+          reject(err);
+          return;
+        }
+        this.server = null;
+        this.log.info('Blob sink stopped');
+        resolve();
+      });
+    });
+  }
+
+  public getApp(): Express {
+    return this.app;
+  }
+}
diff --git a/yarn-project/blob-sink/src/types/api.ts b/yarn-project/blob-sink/src/types/api.ts
new file mode 100644
index 00000000000..cd408ecdedb
--- /dev/null
+++
b/yarn-project/blob-sink/src/types/api.ts
@@ -0,0 +1,49 @@
+import { z } from 'zod';
+
+export interface PostBlobSidecarRequest {
+  // eslint-disable-next-line camelcase
+  block_id: string;
+  blobs: Array<{
+    index: number;
+    blob: {
+      type: string;
+      data: string;
+    };
+  }>;
+}
+
+export const blockRootSchema = z
+  .string()
+  .regex(/^0x[0-9a-fA-F]{0,64}$/)
+  .max(66);
+export const slotSchema = z.number().int().positive();
+
+// Define the Zod schema for an array of numbers
+export const indicesSchema = z.optional(
+  z
+    .string()
+    .refine(str => str.split(',').every(item => !isNaN(Number(item))), {
+      message: 'All items in the query must be valid numbers.',
+    })
+    .transform(str => str.split(',').map(Number)),
+); // Convert to an array of numbers
+
+// Validation schemas
+// Block identifier. Can be one of: <slot>, <hex encoded blockRoot with 0x prefix>.
+// Note the spec https://ethereum.github.io/beacon-APIs/?urls.primaryName=dev#/Beacon/getBlobSidecars does allow for "head", "genesis", "finalized" as valid block ids,
+// but we explicitly do not support these values.
+export const blockIdSchema = blockRootSchema.or(slotSchema);
+
+export const postBlobSidecarSchema = z.object({
+  // eslint-disable-next-line camelcase
+  block_id: blockIdSchema,
+  blobs: z.array(
+    z.object({
+      index: z.number(),
+      blob: z.object({
+        type: z.string(),
+        data: z.string(),
+      }),
+    }),
+  ),
+});
diff --git a/yarn-project/blob-sink/src/types/blob_with_index.test.ts b/yarn-project/blob-sink/src/types/blob_with_index.test.ts
new file mode 100644
index 00000000000..d29c6b98b88
--- /dev/null
+++ b/yarn-project/blob-sink/src/types/blob_with_index.test.ts
@@ -0,0 +1,31 @@
+import { Blob } from '@aztec/foundation/blob';
+import { Fr } from '@aztec/foundation/fields';
+
+import { BlobWithIndex, BlobsWithIndexes } from './blob_with_index.js';
+
+describe('BlobWithIndex Serde', () => {
+  it('should serialize and deserialize', () => {
+    const blob = Blob.fromFields([Fr.random(), Fr.random(), Fr.random()]);
+    const blobWithIndex = new BlobWithIndex(blob, 0);
+    const serialized = blobWithIndex.toBuffer();
+
+    const deserialized = BlobWithIndex.fromBuffer(serialized);
+
+    expect(blobWithIndex).toEqual(deserialized);
+  });
+});
+
+describe('BlobsWithIndexes Serde', () => {
+  it('should serialize and deserialize', () => {
+    const blobs = [
+      new BlobWithIndex(Blob.fromFields([Fr.random(), Fr.random(), Fr.random()]), 0),
+      new BlobWithIndex(Blob.fromFields([Fr.random(), Fr.random(), Fr.random()]), 1),
+    ];
+    const blobsWithIndexes = new BlobsWithIndexes(blobs);
+
+    const serialized = blobsWithIndexes.toBuffer();
+    const deserialized = BlobsWithIndexes.fromBuffer(serialized);
+
+    expect(deserialized).toEqual(blobsWithIndexes);
+  });
+});
diff --git a/yarn-project/blob-sink/src/types/blob_with_index.ts b/yarn-project/blob-sink/src/types/blob_with_index.ts
new file mode 100644
index 00000000000..60446f2ff16
--- /dev/null
+++ b/yarn-project/blob-sink/src/types/blob_with_index.ts
@@ -0,0 +1,51 @@
+import { Blob } from '@aztec/foundation/blob';
+import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize';
+
+/** Serializes an array of blobs with their indexes to be stored at a given block id */
+export class BlobsWithIndexes {
+  constructor(public blobs: BlobWithIndex[]) {}
+
+  public toBuffer(): Buffer {
+    return serializeToBuffer(this.blobs.length, this.blobs);
+  }
+
+  public static fromBuffer(buffer: Buffer | BufferReader): BlobsWithIndexes {
+    const reader = BufferReader.asReader(buffer);
+    return new
BlobsWithIndexes(reader.readArray(reader.readNumber(), BlobWithIndex)); + } + + public getBlobsFromIndices(indices: number[]): BlobWithIndex[] { + return this.blobs.filter((_, index) => indices.includes(index)); + } +} + +/** We store blobs alongside their index in the block */ +export class BlobWithIndex { + constructor( + /** The blob */ + public blob: Blob, + /** The index of the blob in the block */ + public index: number, + ) {} + + public toBuffer(): Buffer { + return serializeToBuffer([this.blob, this.index]); + } + + public static fromBuffer(buffer: Buffer | BufferReader): BlobWithIndex { + const reader = BufferReader.asReader(buffer); + return new BlobWithIndex(reader.readObject(Blob), reader.readNumber()); + } + + // Follows the structure the beacon node api expects + public toJSON(): { blob: string; index: number; kzg_commitment: string; kzg_proof: string } { + return { + blob: this.blob.toBuffer().toString('hex'), + index: this.index, + // eslint-disable-next-line camelcase + kzg_commitment: this.blob.commitment.toString('hex'), + // eslint-disable-next-line camelcase + kzg_proof: this.blob.proof.toString('hex'), + }; + } +} diff --git a/yarn-project/blob-sink/src/types/index.ts b/yarn-project/blob-sink/src/types/index.ts new file mode 100644 index 00000000000..396b8fc805e --- /dev/null +++ b/yarn-project/blob-sink/src/types/index.ts @@ -0,0 +1 @@ +export * from './blob_with_index.js'; diff --git a/yarn-project/blob-sink/tsconfig.json b/yarn-project/blob-sink/tsconfig.json new file mode 100644 index 00000000000..535eabe5863 --- /dev/null +++ b/yarn-project/blob-sink/tsconfig.json @@ -0,0 +1,23 @@ +{ + "extends": "..", + "compilerOptions": { + "outDir": "dest", + "rootDir": "src", + "tsBuildInfoFile": ".tsbuildinfo" + }, + "references": [ + { + "path": "../circuit-types" + }, + { + "path": "../foundation" + }, + { + "path": "../kv-store" + }, + { + "path": "../telemetry-client" + } + ], + "include": ["src"] +} diff --git a/yarn-project/circuits.js/src/structs/blobs/blob_public_inputs.test.ts b/yarn-project/circuits.js/src/structs/blobs/blob_public_inputs.test.ts index 7624b088a5e..aa996cae5a9 100644 --- a/yarn-project/circuits.js/src/structs/blobs/blob_public_inputs.test.ts +++ b/yarn-project/circuits.js/src/structs/blobs/blob_public_inputs.test.ts @@ -20,7 +20,7 @@ describe('BlobPublicInputs', () => { }); it('converts correctly from Blob class', () => { - const blob = new Blob(Array(400).fill(new Fr(3))); + const blob = Blob.fromFields(Array(400).fill(new Fr(3))); const converted = BlobPublicInputs.fromBlob(blob); expect(converted.z).toEqual(blob.challengeZ); expect(Buffer.from(converted.y.toString(16), 'hex')).toEqual(blob.evaluationY); @@ -55,7 +55,7 @@ describe('BlockBlobPublicInputs', () => { }); it('converts correctly from Blob class', () => { - const blobs = Array.from({ length: BLOBS_PER_BLOCK }, (_, i) => new Blob(Array(400).fill(new Fr(i + 1)))); + const blobs = Array.from({ length: BLOBS_PER_BLOCK }, (_, i) => Blob.fromFields(Array(400).fill(new Fr(i + 1)))); const converted = BlockBlobPublicInputs.fromBlobs(blobs); converted.inner.forEach((blobPI, i) => { expect(blobPI.z).toEqual(blobs[i].challengeZ); diff --git a/yarn-project/end-to-end/package.json b/yarn-project/end-to-end/package.json index b84ebb2f27e..a9b1f9d2d68 100644 --- a/yarn-project/end-to-end/package.json +++ b/yarn-project/end-to-end/package.json @@ -32,6 +32,7 @@ "@aztec/aztec-node": "workspace:^", "@aztec/aztec.js": "workspace:^", "@aztec/bb-prover": "workspace:^", + "@aztec/blob-sink": "workspace:^", 
"@aztec/bot": "workspace:^", "@aztec/circuit-types": "workspace:^", "@aztec/circuits.js": "workspace:^", diff --git a/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts b/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts index 7b61fc01c02..de355e1fae6 100644 --- a/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts +++ b/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts @@ -71,6 +71,9 @@ config.l1RpcUrl = config.l1RpcUrl || 'http://127.0.0.1:8545'; const numberOfConsecutiveBlocks = 2; +const BLOB_SINK_PORT = 5052; +const BLOB_SINK_URL = `http://localhost:${BLOB_SINK_PORT}`; + describe('L1Publisher integration', () => { let publicClient: PublicClient; let walletClient: WalletClient; @@ -182,6 +185,7 @@ describe('L1Publisher integration', () => { l1ChainId: 31337, viemPollingIntervalMS: 100, ethereumSlotDuration: config.ethereumSlotDuration, + blobSinkUrl: BLOB_SINK_URL, }, new NoopTelemetryClient(), ); diff --git a/yarn-project/end-to-end/src/e2e_synching.test.ts b/yarn-project/end-to-end/src/e2e_synching.test.ts index 50857c56baf..a618bc6ec2e 100644 --- a/yarn-project/end-to-end/src/e2e_synching.test.ts +++ b/yarn-project/end-to-end/src/e2e_synching.test.ts @@ -360,13 +360,23 @@ describe('e2e_synching', () => { return; } - const { teardown, logger, deployL1ContractsValues, config, cheatCodes, aztecNode, sequencer, watcher, pxe } = - await setup(0, { - salt: SALT, - l1StartTime: START_TIME, - skipProtocolContracts: true, - assumeProvenThrough, - }); + const { + teardown, + logger, + deployL1ContractsValues, + config, + cheatCodes, + aztecNode, + sequencer, + watcher, + pxe, + blobSink, + } = await setup(0, { + salt: SALT, + l1StartTime: START_TIME, + skipProtocolContracts: true, + assumeProvenThrough, + }); await (aztecNode as any).stop(); await (sequencer as any).stop(); @@ -383,6 +393,7 @@ describe('e2e_synching', () => { l1ChainId: 31337, viemPollingIntervalMS: 100, ethereumSlotDuration: ETHEREUM_SLOT_DURATION, + blobSinkUrl: `http://localhost:${blobSink?.port ?? 
5052}`, }, new NoopTelemetryClient(), ); diff --git a/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts b/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts index 929768285c7..42663cf4cc0 100644 --- a/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts +++ b/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts @@ -14,6 +14,7 @@ import { type Wallet, } from '@aztec/aztec.js'; import { deployInstance, registerContractClass } from '@aztec/aztec.js/deployment'; +import { type BlobSinkServer, createBlobSinkServer } from '@aztec/blob-sink'; import { type DeployL1ContractsArgs, createL1Clients, getL1ContractsConfigEnvVars, l1Artifacts } from '@aztec/ethereum'; import { EthCheatCodesWithState, startAnvil } from '@aztec/ethereum/test'; import { asyncMap } from '@aztec/foundation/async-map'; @@ -29,6 +30,7 @@ import { type Anvil } from '@viem/anvil'; import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'fs'; import { copySync, removeSync } from 'fs-extra/esm'; import fs from 'fs/promises'; +import getPort from 'get-port'; import { tmpdir } from 'os'; import path, { join } from 'path'; import { type Hex, getContract } from 'viem'; @@ -53,6 +55,7 @@ export type SubsystemsContext = { watcher: AnvilTestWatcher; cheatCodes: CheatCodes; dateProvider: TestDateProvider; + blobSink: BlobSinkServer; directoryToCleanup?: string; }; @@ -254,6 +257,7 @@ async function teardown(context: SubsystemsContext | undefined) { await context.bbConfig?.cleanup(); await context.anvil.stop(); await context.watcher.stop(); + await context.blobSink.stop(); if (context.directoryToCleanup) { await fs.rm(context.directoryToCleanup, { recursive: true, force: true }); } @@ -278,6 +282,8 @@ async function setupFromFresh( ): Promise { logger.verbose(`Initializing state...`); + const blobSinkPort = await getPort(); + // Fetch the AztecNode config. // TODO: For some reason this is currently the union of a bunch of subsystems. That needs fixing. const aztecNodeConfig: AztecNodeConfig & SetupOptions = { ...getConfigEnvVars(), ...opts }; @@ -291,6 +297,17 @@ async function setupFromFresh( } else { aztecNodeConfig.dataDirectory = statePath; } + aztecNodeConfig.blobSinkUrl = `http://localhost:${blobSinkPort}`; + + // Setup blob sink service + const blobSink = await createBlobSinkServer({ + port: blobSinkPort, + dataStoreConfig: { + dataDirectory: aztecNodeConfig.dataDirectory, + dataStoreMapSizeKB: aztecNodeConfig.dataStoreMapSizeKB, + }, + }); + await blobSink.start(); // Start anvil. We go via a wrapper script to ensure if the parent dies, anvil dies. logger.verbose('Starting anvil...'); @@ -407,6 +424,7 @@ async function setupFromFresh( watcher, cheatCodes, dateProvider, + blobSink, directoryToCleanup, }; } @@ -420,12 +438,25 @@ async function setupFromState(statePath: string, logger: Logger): Promise Promise; }; @@ -382,6 +387,11 @@ export async function setup( return await setupWithRemoteEnvironment(publisherHdAccount!, config, logger, numberOfAccounts); } + // Blob sink service - blobs get posted here and served from here + const blobSinkPort = await getPort(); + const blobSink = await createBlobSinkServer({ port: blobSinkPort }); + config.blobSinkUrl = `http://127.0.0.1:${blobSinkPort}`; + const deployL1ContractsValues = opts.deployL1ContractsValues ?? 
(await setupL1Contracts(config.l1RpcUrl, publisherHdAccount!, logger, opts, chain)); @@ -494,6 +504,7 @@ export async function setup( await anvil?.stop(); await watcher.stop(); + await blobSink?.stop(); if (directoryToCleanup) { logger.verbose(`Cleaning up data directory at ${directoryToCleanup}`); @@ -514,6 +525,7 @@ export async function setup( sequencer, watcher, dateProvider, + blobSink, teardown, }; } diff --git a/yarn-project/end-to-end/tsconfig.json b/yarn-project/end-to-end/tsconfig.json index 08932fbdb4a..a8117b5a5db 100644 --- a/yarn-project/end-to-end/tsconfig.json +++ b/yarn-project/end-to-end/tsconfig.json @@ -21,6 +21,9 @@ { "path": "../bb-prover" }, + { + "path": "../blob-sink" + }, { "path": "../bot" }, diff --git a/yarn-project/foundation/src/blob/blob.test.ts b/yarn-project/foundation/src/blob/blob.test.ts index e4a5746ec06..da4caa8fc74 100644 --- a/yarn-project/foundation/src/blob/blob.test.ts +++ b/yarn-project/foundation/src/blob/blob.test.ts @@ -78,15 +78,19 @@ describe('blob', () => { // This test ensures that the Blob class correctly matches the c-kzg lib // The values here are used to test Noir's blob evaluation in noir-projects/noir-protocol-circuits/crates/blob/src/blob.nr -> test_400 const blobItems = Array(400).fill(new Fr(3)); - const ourBlob = new Blob(blobItems); + const ourBlob = Blob.fromFields(blobItems); const blobItemsHash = poseidon2Hash(Array(400).fill(new Fr(3))); expect(blobItemsHash).toEqual(ourBlob.fieldsHash); - expect(blobToKzgCommitment(ourBlob.data)).toEqual(ourBlob.commitment); + + // We add zeros before getting commitment as we do not store the blob along with + // all of the zeros + const dataWithZeros = Buffer.concat([ourBlob.data], BYTES_PER_BLOB); + expect(blobToKzgCommitment(dataWithZeros)).toEqual(ourBlob.commitment); const z = poseidon2Hash([blobItemsHash, ...ourBlob.commitmentToFields()]); expect(z).toEqual(ourBlob.challengeZ); - const res = computeKzgProof(ourBlob.data, ourBlob.challengeZ.toBuffer()); + const res = computeKzgProof(dataWithZeros, ourBlob.challengeZ.toBuffer()); expect(res[0]).toEqual(ourBlob.proof); expect(res[1]).toEqual(ourBlob.evaluationY); @@ -112,8 +116,9 @@ describe('blob', () => { const blobItemsHash = poseidon2Hash(blobItems); const blobs = Blob.getBlobs(blobItems); blobs.forEach(ourBlob => { - // const ourBlob = new Blob(blobItems.slice(j * FIELD_ELEMENTS_PER_BLOB, (j + 1) * FIELD_ELEMENTS_PER_BLOB), blobItemsHash); + // const ourBlob = Blob.fromFields(blobItems.slice(j * FIELD_ELEMENTS_PER_BLOB, (j + 1) * FIELD_ELEMENTS_PER_BLOB), blobItemsHash); expect(blobItemsHash).toEqual(ourBlob.fieldsHash); + expect(blobToKzgCommitment(ourBlob.data)).toEqual(ourBlob.commitment); const z = poseidon2Hash([blobItemsHash, ...ourBlob.commitmentToFields()]); @@ -132,4 +137,11 @@ describe('blob', () => { expect(isValid).toBe(true); }); }); + + it('Should serialise and deserialise a blob', () => { + const blob = Blob.fromFields([Fr.random(), Fr.random(), Fr.random()]); + const blobBuffer = blob.toBuffer(); + const deserialisedBlob = Blob.fromBuffer(blobBuffer); + expect(blob.fieldsHash.equals(deserialisedBlob.fieldsHash)).toBe(true); + }); }); diff --git a/yarn-project/foundation/src/blob/index.ts b/yarn-project/foundation/src/blob/index.ts index 6c1651f4c56..dddb124f1d3 100644 --- a/yarn-project/foundation/src/blob/index.ts +++ b/yarn-project/foundation/src/blob/index.ts @@ -3,7 +3,7 @@ import type { Blob as BlobBuffer } from 'c-kzg'; import { poseidon2Hash, sha256 } from '../crypto/index.js'; import { Fr } from 
'../fields/index.js';
-import { serializeToBuffer } from '../serialize/index.js';
+import { BufferReader, serializeToBuffer } from '../serialize/index.js';
 
 // Importing directly from 'c-kzg' does not work, ignoring import/no-named-as-default-member err:
 /* eslint-disable import/no-named-as-default-member */
@@ -36,48 +36,47 @@ export const VERSIONED_HASH_VERSION_KZG = 0x01;
  * A class to create, manage, and prove EVM blobs.
  */
 export class Blob {
-  /** The blob to be broadcast on L1 in bytes form. */
-  public readonly data: BlobBuffer;
-  /** The hash of all tx effects inside the blob. Used in generating the challenge z and proving that we have included all required effects. */
-  public readonly fieldsHash: Fr;
-  /** Challenge point z (= H(H(tx_effects), kzgCommmitment). Used such that p(z) = y. */
-  public readonly challengeZ: Fr;
-  /** Evaluation y = p(z), where p() is the blob polynomial. BLS12 field element, rep. as BigNum in nr, bigint in ts. */
-  public readonly evaluationY: Buffer;
-  /** Commitment to the blob C. Used in compressed BLS12 point format (48 bytes). */
-  public readonly commitment: Buffer;
-  /** KZG opening proof for y = p(z). The commitment to quotient polynomial Q, used in compressed BLS12 point format (48 bytes). */
-  public readonly proof: Buffer;
-
   constructor(
-    /** All fields to be broadcast in the blob. */
-    fields: Fr[],
-    /** If we want to broadcast more fields than fit into a blob, we hash those and used it as the fieldsHash across all blobs.
-     * This is much simpler and cheaper in the circuit to do, but MUST BE CHECKED before injecting here.
-     */
-    multiBlobFieldsHash?: Fr,
-  ) {
+    /** The blob to be broadcast on L1 in bytes form. */
+    public readonly data: BlobBuffer,
+    /** The hash of all tx effects inside the blob. Used in generating the challenge z and proving that we have included all required effects. */
+    public readonly fieldsHash: Fr,
+    /** Challenge point z (= H(H(tx_effects), kzgCommitment)). Used such that p(z) = y. */
+    public readonly challengeZ: Fr,
+    /** Evaluation y = p(z), where p() is the blob polynomial. BLS12 field element, rep. as BigNum in nr, bigint in ts. */
+    public readonly evaluationY: Buffer,
+    /** Commitment to the blob C. Used in compressed BLS12 point format (48 bytes). */
+    public readonly commitment: Buffer,
+    /** KZG opening proof for y = p(z). The commitment to quotient polynomial Q, used in compressed BLS12 point format (48 bytes). */
+    public readonly proof: Buffer,
+  ) {}
+
+  static fromFields(fields: Fr[], multiBlobFieldsHash?: Fr): Blob {
     if (fields.length > FIELD_ELEMENTS_PER_BLOB) {
       throw new Error(
         `Attempted to overfill blob with ${fields.length} elements. The maximum is ${FIELD_ELEMENTS_PER_BLOB}`,
      );
    }
-    this.data = Buffer.concat([serializeToBuffer(fields)], BYTES_PER_BLOB);
+    const dataWithoutZeros = serializeToBuffer(fields);
+    const data = Buffer.concat([dataWithoutZeros], BYTES_PER_BLOB);
+
     // This matches the output of SpongeBlob.squeeze() in the blob circuit
-    this.fieldsHash = multiBlobFieldsHash ? multiBlobFieldsHash : poseidon2Hash(fields);
-    this.commitment = Buffer.from(blobToKzgCommitment(this.data));
-    this.challengeZ = poseidon2Hash([this.fieldsHash, ...this.commitmentToFields()]);
-    const res = computeKzgProof(this.data, this.challengeZ.toBuffer());
-    if (!verifyKzgProof(this.commitment, this.challengeZ.toBuffer(), res[1], res[0])) {
+    const fieldsHash = multiBlobFieldsHash ? multiBlobFieldsHash : poseidon2Hash(fields);
+    const commitment = Buffer.from(blobToKzgCommitment(data));
+    const challengeZ = poseidon2Hash([fieldsHash, ...commitmentToFields(commitment)]);
+    const res = computeKzgProof(data, challengeZ.toBuffer());
+    if (!verifyKzgProof(commitment, challengeZ.toBuffer(), res[1], res[0])) {
       throw new Error(`KZG proof did not verify.`);
     }
-    this.proof = Buffer.from(res[0]);
-    this.evaluationY = Buffer.from(res[1]);
+    const proof = Buffer.from(res[0]);
+    const evaluationY = Buffer.from(res[1]);
+
+    return new Blob(dataWithoutZeros, fieldsHash, challengeZ, evaluationY, commitment, proof);
   }
 
   // 48 bytes encoded in fields as [Fr, Fr] = [0->31, 31->48]
   commitmentToFields(): [Fr, Fr] {
-    return [new Fr(this.commitment.subarray(0, 31)), new Fr(this.commitment.subarray(31, 48))];
+    return commitmentToFields(this.commitment);
   }
 
   // Returns ethereum's versioned blob hash, following kzg_to_versioned_hash: https://eips.ethereum.org/EIPS/eip-4844#helpers
@@ -93,6 +92,49 @@ export class Blob {
     return hash;
   }
 
+  toBuffer(): Buffer {
+    return Buffer.from(
+      serializeToBuffer(
+        this.data.length,
+        this.data,
+        this.fieldsHash,
+        this.challengeZ,
+        this.evaluationY.length,
+        this.evaluationY,
+        this.commitment.length,
+        this.commitment,
+        this.proof.length,
+        this.proof,
+      ),
+    );
+  }
+
+  static fromBuffer(buf: Buffer | BufferReader): Blob {
+    const reader = BufferReader.asReader(buf);
+    return new Blob(
+      reader.readUint8Array(),
+      reader.readObject(Fr),
+      reader.readObject(Fr),
+      reader.readBuffer(),
+      reader.readBuffer(),
+      reader.readBuffer(),
+    );
+  }
+
+  /**
+   * Pad the blob data to its full size before posting
+   */
+  get dataWithZeros(): BlobBuffer {
+    return Buffer.concat([this.data], BYTES_PER_BLOB);
+  }
+
+  /**
+   * Get the size of the blob in bytes
+   */
+  getSize() {
+    return this.data.length;
+  }
+
   // Returns a proof of opening of the blob to verify on L1 using the point evaluation precompile:
   // * input[:32] - versioned_hash
   // * input[32:64] - z
@@ -145,8 +187,13 @@
     const res = [];
     for (let i = 0; i < numBlobs; i++) {
       const end = fields.length < (i + 1) * FIELD_ELEMENTS_PER_BLOB ? fields.length : (i + 1) * FIELD_ELEMENTS_PER_BLOB;
-      res.push(new Blob(fields.slice(i * FIELD_ELEMENTS_PER_BLOB, end), multiBlobFieldsHash));
+      res.push(Blob.fromFields(fields.slice(i * FIELD_ELEMENTS_PER_BLOB, end), multiBlobFieldsHash));
     }
     return res;
   }
 }
+
+// 48 bytes encoded in fields as [Fr, Fr] = [0->31, 31->48]
+function commitmentToFields(commitment: Buffer): [Fr, Fr] {
+  return [new Fr(commitment.subarray(0, 31)), new Fr(commitment.subarray(31, 48))];
+}
diff --git a/yarn-project/foundation/src/config/env_var.ts b/yarn-project/foundation/src/config/env_var.ts
index 4ce23b9946e..3fa142074e0 100644
--- a/yarn-project/foundation/src/config/env_var.ts
+++ b/yarn-project/foundation/src/config/env_var.ts
@@ -100,6 +100,7 @@ export type EnvVar =
   | 'P2P_UDP_ANNOUNCE_ADDR'
   | 'P2P_UDP_LISTEN_ADDR'
   | 'PEER_ID_PRIVATE_KEY'
+  | 'PROVER_BLOB_SINK_URL'
   | 'PROOF_VERIFIER_L1_START_BLOCK'
   | 'PROOF_VERIFIER_POLL_INTERVAL_MS'
   | 'PROVER_AGENT_ENABLED'
@@ -136,6 +137,7 @@ export type EnvVar =
   | 'REGISTRY_CONTRACT_ADDRESS'
   | 'ROLLUP_CONTRACT_ADDRESS'
   | 'SEQ_ALLOWED_SETUP_FN'
+  | 'SEQ_BLOB_SINK_URL'
   | 'SEQ_MAX_BLOCK_SIZE_IN_BYTES'
   | 'SEQ_MAX_TX_PER_BLOCK'
   | 'SEQ_MIN_TX_PER_BLOCK'
diff --git a/yarn-project/foundation/src/serialize/buffer_reader.ts b/yarn-project/foundation/src/serialize/buffer_reader.ts
index 7abe3f59336..84b2ea86277 100644
--- a/yarn-project/foundation/src/serialize/buffer_reader.ts
+++ b/yarn-project/foundation/src/serialize/buffer_reader.ts
@@ -307,6 +307,20 @@ export class BufferReader {
     return this.readBytes(size);
   }
 
+  /**
+   * Reads a length-prefixed Uint8Array from the current position of the reader and advances the index.
+   * The method first reads the size (number) of bytes to be read, and then returns
+   * a Uint8Array of that size containing the bytes. Useful for reading variable-length
+   * binary data encoded in (size, data) format.
+   *
+   * @returns A Uint8Array containing the read bytes.
+   */
+  public readUint8Array(): Uint8Array {
+    const size = this.readNumber();
+    this.#rangeCheck(size);
+    return this.readBytes(size);
+  }
+
   /**
    * Reads and constructs a map object from the current buffer using the provided deserializer.
    * The method reads the number of entries in the map, followed by iterating through each key-value pair.
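The `readUint8Array` helper added above is the read side of the length-prefixed `(size, data)` framing that the new `Blob.toBuffer` emits. A minimal standalone sketch of that convention (the 4-byte big-endian size prefix mirrors what foundation's `readNumber` does, but is an assumption here, and the helper names are hypothetical):

```ts
// Standalone sketch of the (size, data) framing used by Blob.toBuffer /
// BufferReader.readUint8Array. Assumes a 4-byte big-endian length prefix.
function writeLengthPrefixed(data: Uint8Array): Buffer {
  const size = Buffer.alloc(4);
  size.writeUInt32BE(data.length, 0);
  return Buffer.concat([size, Buffer.from(data)]);
}

function readLengthPrefixed(buf: Buffer, offset = 0): { data: Uint8Array; next: number } {
  const size = buf.readUInt32BE(offset); // what readNumber() does
  const start = offset + 4;
  if (start + size > buf.length) {
    throw new Error('Out of bounds'); // what #rangeCheck(size) guards against
  }
  return { data: new Uint8Array(buf.subarray(start, start + size)), next: start + size };
}

// Round-trip: the reader recovers exactly the bytes that were written.
const bytes = new Uint8Array([0xaa, 0xbb, 0xcc]);
const framed = writeLengthPrefixed(bytes);
const { data } = readLengthPrefixed(framed);
console.assert(Buffer.from(data).equals(Buffer.from(bytes)));
```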
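Stepping back to the `Blob` change above: blobs are now stored in their trimmed form (`dataWithoutZeros`) and only padded back to full size on demand. A small sketch of the padding trick that `get dataWithZeros()` relies on (`BYTES_PER_BLOB` is assumed to be the usual EIP-4844 value of 4096 fields × 32 bytes; the variable names are illustrative):

```ts
// Sketch of the storage/submission split from the Blob diff above.
const BYTES_PER_BLOB = 4096 * 32; // assumed EIP-4844 blob size

const storedData = Buffer.from([0x01, 0x02, 0x03]); // trimmed form kept in the store

// Buffer.concat with an explicit totalLength zero-fills the tail, which is
// exactly how dataWithZeros rebuilds the full-size blob for KZG / L1 posting.
const dataWithZeros = Buffer.concat([storedData], BYTES_PER_BLOB);

console.assert(dataWithZeros.length === BYTES_PER_BLOB);
console.assert(dataWithZeros.subarray(0, 3).equals(storedData));
console.assert(dataWithZeros[3] === 0); // padding bytes are zero
```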
diff --git a/yarn-project/foundation/src/serialize/serialize.ts b/yarn-project/foundation/src/serialize/serialize.ts index 6698a7081e2..fc2638ac3e7 100644 --- a/yarn-project/foundation/src/serialize/serialize.ts +++ b/yarn-project/foundation/src/serialize/serialize.ts @@ -109,6 +109,7 @@ export function deserializeField(buf: Buffer, offset = 0) { export type Bufferable = | boolean | Buffer + | Uint8Array | number | bigint | string diff --git a/yarn-project/p2p/src/mem_pools/tx_pool/aztec_kv_tx_pool.test.ts b/yarn-project/p2p/src/mem_pools/tx_pool/aztec_kv_tx_pool.test.ts index dfc5df7f105..cb3abd077e3 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool/aztec_kv_tx_pool.test.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool/aztec_kv_tx_pool.test.ts @@ -4,7 +4,7 @@ import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import { AztecKVTxPool } from './aztec_kv_tx_pool.js'; import { describeTxPool } from './tx_pool_test_suite.js'; -describe('In-Memory TX pool', () => { +describe('KV TX pool', () => { let txPool: AztecKVTxPool; beforeEach(() => { txPool = new AztecKVTxPool(openTmpStore(), new NoopTelemetryClient()); diff --git a/yarn-project/package.json b/yarn-project/package.json index 0a2e6fbc9eb..68b32f53c60 100644 --- a/yarn-project/package.json +++ b/yarn-project/package.json @@ -26,6 +26,7 @@ "aztec-node", "validator-client", "bb-prover", + "blob-sink", "bot", "builder", "pxe", diff --git a/yarn-project/sequencer-client/package.json b/yarn-project/sequencer-client/package.json index 6721e3080b5..fb874975daa 100644 --- a/yarn-project/sequencer-client/package.json +++ b/yarn-project/sequencer-client/package.json @@ -64,6 +64,7 @@ "@types/node": "^18.7.23", "concurrently": "^7.6.0", "eslint": "^8.37.0", + "express": "^4.21.1", "jest": "^29.5.0", "jest-mock-extended": "^3.0.3", "levelup": "^5.1.1", diff --git a/yarn-project/sequencer-client/src/publisher/config.ts b/yarn-project/sequencer-client/src/publisher/config.ts index 367f2aa6677..d77efa57ca2 100644 --- a/yarn-project/sequencer-client/src/publisher/config.ts +++ b/yarn-project/sequencer-client/src/publisher/config.ts @@ -24,6 +24,11 @@ export type PublisherConfig = L1TxUtilsConfig & { * The interval to wait between publish retries. */ l1PublishRetryIntervalMS: number; + + /** + * The URL of the blob sink. 
+ */ + blobSinkUrl?: string; }; export const getTxSenderConfigMappings: ( @@ -72,6 +77,11 @@ export const getPublisherConfigMappings: ( description: 'The interval to wait between publish retries.', }, ...l1TxUtilsConfigMappings, + blobSinkUrl: { + env: `${scope}_BLOB_SINK_URL`, + description: 'The URL of the blob sink.', + parseEnv: (val?: string) => val, + }, }); export function getPublisherConfigFromEnv(scope: 'PROVER' | 'SEQ'): PublisherConfig { diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts index 64ac88119d5..689c03c71d1 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts @@ -13,6 +13,9 @@ import { sleep } from '@aztec/foundation/sleep'; import { RollupAbi } from '@aztec/l1-artifacts'; import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; +import { jest } from '@jest/globals'; +import express, { json } from 'express'; +import { type Server } from 'http'; import { type MockProxy, mock } from 'jest-mock-extended'; import { type GetTransactionReceiptReturnType, @@ -68,6 +71,9 @@ class MockRollupContract { } } +const BLOB_SINK_PORT = 5052; +const BLOB_SINK_URL = `http://localhost:${BLOB_SINK_PORT}`; + describe('L1Publisher', () => { let rollupContractRead: MockProxy; let rollupContractWrite: MockProxy; @@ -85,11 +91,16 @@ describe('L1Publisher', () => { let blockHash: Buffer; let body: Buffer; + let mockBlobSinkServer: Server | undefined = undefined; + + // An l1 publisher with some private methods exposed let publisher: L1Publisher; const GAS_GUESS = 300_000n; beforeEach(() => { + mockBlobSinkServer = undefined; + l2Block = L2Block.random(42); header = l2Block.header.toBuffer(); @@ -112,6 +123,7 @@ describe('L1Publisher', () => { publicClient = mock(); l1TxUtils = mock(); const config = { + blobSinkUrl: BLOB_SINK_URL, l1RpcUrl: `http://127.0.0.1:8545`, l1ChainId: 1, publisherPrivateKey: `0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80`, @@ -138,19 +150,66 @@ describe('L1Publisher', () => { (l1TxUtils as any).estimateGas.mockResolvedValue(GAS_GUESS); }); + const closeServer = (server: Server): Promise => { + return new Promise((resolve, reject) => { + server.close(err => { + if (err) { + reject(err); + return; + } + resolve(); + }); + }); + }; + + afterEach(async () => { + if (mockBlobSinkServer) { + await closeServer(mockBlobSinkServer); + mockBlobSinkServer = undefined; + } + }); + + // Run a mock blob sink in the background, and test that the correct data is sent to it + const runBlobSinkServer = (blobs: Blob[]) => { + const app = express(); + app.use(json({ limit: '10mb' })); + + app.post('/blob_sidecar', (req, res) => { + const blobsBuffers = req.body.blobs.map((b: { index: number; blob: { type: string; data: string } }) => + Blob.fromBuffer(Buffer.from(b.blob.data)), + ); + + expect(blobsBuffers).toEqual(blobs); + res.status(200).send(); + }); + + return new Promise(resolve => { + mockBlobSinkServer = app.listen(BLOB_SINK_PORT, () => { + // Resolve when the server is listening + resolve(); + }); + }); + }; + it('publishes and propose l2 block to l1', async () => { rollupContractRead.archive.mockResolvedValue(l2Block.header.lastArchive.root.toString() as `0x${string}`); rollupContractWrite.propose.mockResolvedValueOnce(proposeTxHash); - const result = await publisher.proposeL2Block(l2Block); + const kzg = Blob.getViemKzgInstance(); - 
expect(result).toEqual(true); + const expectedBlobs = Blob.getBlobs(l2Block.body.toBlobFields()); - const kzg = Blob.getViemKzgInstance(); + // Check the blobs were forwarded to the blob sink service + const sendToBlobSinkSpy = jest.spyOn(publisher as any, 'sendBlobsToBlobSink'); - const blobs = Blob.getBlobs(l2Block.body.toBlobFields()); + // Expect the blob sink server to receive the blobs + await runBlobSinkServer(expectedBlobs); - const blobInput = Blob.getEthBlobEvaluationInputs(blobs); + const result = await publisher.proposeL2Block(l2Block); + + expect(result).toEqual(true); + + const blobInput = Blob.getEthBlobEvaluationInputs(expectedBlobs); const args = [ { @@ -173,8 +232,14 @@ describe('L1Publisher', () => { data: encodeFunctionData({ abi: rollupContract.abi, functionName: 'propose', args }), }, { fixedGas: GAS_GUESS + L1Publisher.PROPOSE_GAS_GUESS }, - { blobs: blobs.map(b => b.data), kzg, maxFeePerBlobGas: 10000000000n }, + { blobs: expectedBlobs.map(b => b.dataWithZeros), kzg, maxFeePerBlobGas: 10000000000n }, ); + + expect(sendToBlobSinkSpy).toHaveBeenCalledTimes(1); + // If this does not return true, then the mocked server will have errored, and + // the expects that run there will have failed + const returnValuePromise = sendToBlobSinkSpy.mock.results[0].value; + expect(await returnValuePromise).toBe(true); }); it('does not retry if sending a propose tx fails', async () => { diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts index c27d4710031..1d82c69b33d 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts @@ -95,6 +95,8 @@ export type MinimalTransactionReceipt = { logs: any[]; /** Block number in which this tx was mined. */ blockNumber: bigint; + /** The block hash in which this tx was mined */ + blockHash: `0x${string}`; }; /** Arguments to the process method of the rollup contract */ @@ -175,6 +177,8 @@ export class L1Publisher { protected account: PrivateKeyAccount; protected ethereumSlotDuration: bigint; + private blobSinkUrl: string | undefined; + // @note - with blobs, the below estimate seems too large. // Total used for full block from int_l1_pub e2e test: 1m (of which 86k is 1x blob) // Total used for emptier block from above test: 429k (of which 84k is 1x blob) @@ -189,6 +193,7 @@ export class L1Publisher { ) { this.sleepTimeMs = config?.l1PublishRetryIntervalMS ?? 60_000; this.ethereumSlotDuration = BigInt(config.ethereumSlotDuration); + this.blobSinkUrl = config.blobSinkUrl; this.metrics = new L1PublisherMetrics(client, 'L1Publisher'); const { l1RpcUrl: rpcUrl, l1ChainId: chainId, publisherPrivateKey, l1Contracts } = config; @@ -594,15 +599,18 @@ export class L1Publisher { const consensusPayload = new ConsensusPayload(block.header, block.archive.root, txHashes ?? []); const digest = getHashedSignaturePayload(consensusPayload, SignatureDomainSeperator.blockAttestation); + + const blobs = Blob.getBlobs(block.body.toBlobFields()); const proposeTxArgs = { header: block.header.toBuffer(), archive: block.archive.root.toBuffer(), blockHash: block.header.hash().toBuffer(), body: block.body.toBuffer(), - blobs: Blob.getBlobs(block.body.toBlobFields()), + blobs, attestations, txHashes: txHashes ?? 
[],
     };
 
+    // Publish body and propose block (if not already published)
     if (this.interrupted) {
       this.log.verbose('L2 block data syncing interrupted while processing blocks.', ctx);
@@ -647,6 +655,12 @@ export class L1Publisher {
     };
     this.log.verbose(`Published L2 block to L1 rollup contract`, { ...stats, ...ctx });
     this.metrics.recordProcessBlockTx(timer.ms(), stats);
+
+    // Send the blobs to the blob sink
+    this.sendBlobsToBlobSink(receipt.blockHash, blobs).catch(_err => {
+      this.log.error('Failed to send blobs to blob sink');
+    });
+
     return true;
   }
 
@@ -661,7 +675,7 @@
         address: this.rollupContract.address,
       },
       {
-        blobs: proposeTxArgs.blobs.map(b => b.data),
+        blobs: proposeTxArgs.blobs.map(b => b.dataWithZeros),
         kzg,
         maxFeePerBlobGas: 10000000000n,
       },
@@ -966,7 +980,7 @@
       },
       {},
       {
-        blobs: encodedData.blobs.map(b => b.data),
+        blobs: encodedData.blobs.map(b => b.dataWithZeros),
         kzg,
         maxFeePerBlobGas: 10000000000n, //This is 10 gwei, taken from DEFAULT_MAX_FEE_PER_GAS
       },
@@ -1056,7 +1070,7 @@
         fixedGas: gas,
       },
       {
-        blobs: encodedData.blobs.map(b => b.data),
+        blobs: encodedData.blobs.map(b => b.dataWithZeros),
         kzg,
         maxFeePerBlobGas: 10000000000n, //This is 10 gwei, taken from DEFAULT_MAX_FEE_PER_GAS
       },
@@ -1095,7 +1109,7 @@
       },
       { fixedGas: gas },
       {
-        blobs: encodedData.blobs.map(b => b.data),
+        blobs: encodedData.blobs.map(b => b.dataWithZeros),
         kzg,
         maxFeePerBlobGas: 10000000000n, //This is 10 gwei, taken from DEFAULT_MAX_FEE_PER_GAS
       },
@@ -1137,6 +1151,7 @@
       gasPrice: receipt.effectiveGasPrice,
       logs: receipt.logs,
       blockNumber: receipt.blockNumber,
+      blockHash: receipt.blockHash,
     };
   }
 
@@ -1152,9 +1167,51 @@
   protected async sleepOrInterrupted() {
     await this.interruptibleSleep.sleep(this.sleepTimeMs);
   }
+
+  /**
+   * Send blobs to the blob sink.
+   *
+   * If a blob sink url is configured, we send the blobs there. For now we use
+   * the blockHash as the identifier for the blobs; in the future this will move
+   * to the beacon block id, which takes a bit more work to calculate and will
+   * need to be mocked in e2e tests.
+   */
+  protected async sendBlobsToBlobSink(blockHash: string, blobs: Blob[]): Promise<boolean> {
+    // TODO(md): for now we are assuming the indexes of the blobs will be 0, 1, 2,
+    // when in reality they will not; but for testing purposes this is fine
+    if (!this.blobSinkUrl) {
+      this.log.verbose('No blob sink url configured');
+      return false;
+    }
+
+    this.log.verbose(`Sending ${blobs.length} blobs to blob sink`);
+    try {
+      const res = await fetch(`${this.blobSinkUrl}/blob_sidecar`, {
+        method: 'POST',
+        headers: {
+          'Content-Type': 'application/json',
+        },
+        body: JSON.stringify({
+          // eslint-disable-next-line camelcase
+          block_id: blockHash,
+          blobs: blobs.map((b, i) => ({ blob: b.toBuffer(), index: i })),
+        }),
+      });
+
+      if (res.ok) {
+        return true;
+      }
+
+      this.log.error('Failed to send blobs to blob sink', res.status);
+      return false;
+    } catch (err) {
+      this.log.error(`Error sending blobs to blob sink`, err);
+      return false;
+    }
+  }
 }
 
-/**
+/*
diff --git a/yarn-project/telemetry-client/src/metrics.ts b/yarn-project/telemetry-client/src/metrics.ts index 5e2d33befe8..d68b39399c6 100644 --- a/yarn-project/telemetry-client/src/metrics.ts +++ b/yarn-project/telemetry-client/src/metrics.ts @@ -6,6 +6,9 @@ * @see {@link https://opentelemetry.io/docs/specs/semconv/general/metrics/ | OpenTelemetry Metrics} for naming conventions. */ +export const BLOB_SINK_OBJECTS_IN_BLOB_STORE = 'aztec.blob_sink.objects_in_blob_store'; +export const BLOB_SINK_BLOB_SIZE = 'aztec.blob_sink.blob_size'; + /** How long it takes to simulate a circuit */ export const CIRCUIT_SIMULATION_DURATION = 'aztec.circuit.simulation.duration'; export const CIRCUIT_SIMULATION_INPUT_SIZE = 'aztec.circuit.simulation.input_size'; diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock index c4806a57b34..e1fa34cd008 100644 --- a/yarn-project/yarn.lock +++ b/yarn-project/yarn.lock @@ -334,6 +334,32 @@ __metadata: languageName: node linkType: soft +"@aztec/blob-sink@workspace:^, @aztec/blob-sink@workspace:blob-sink": + version: 0.0.0-use.local + resolution: "@aztec/blob-sink@workspace:blob-sink" + dependencies: + "@aztec/circuit-types": "workspace:^" + "@aztec/foundation": "workspace:^" + "@aztec/kv-store": "workspace:*" + "@aztec/telemetry-client": "workspace:*" + "@jest/globals": "npm:^29.5.0" + "@types/jest": "npm:^29.5.0" + "@types/memdown": "npm:^3.0.0" + "@types/node": "npm:^18.7.23" + "@types/source-map-support": "npm:^0.5.10" + "@types/supertest": "npm:^6.0.2" + express: "npm:^4.21.1" + jest: "npm:^29.5.0" + jest-mock-extended: "npm:^3.0.3" + source-map-support: "npm:^0.5.21" + supertest: "npm:^7.0.0" + ts-node: "npm:^10.9.1" + tslib: "npm:^2.4.0" + typescript: "npm:^5.0.4" + zod: "npm:^3.23.8" + languageName: unknown + linkType: soft + "@aztec/bot@workspace:^, @aztec/bot@workspace:bot": version: 0.0.0-use.local resolution: "@aztec/bot@workspace:bot" @@ -531,6 +557,7 @@ __metadata: "@aztec/aztec-node": "workspace:^" "@aztec/aztec.js": "workspace:^" "@aztec/bb-prover": "workspace:^" + "@aztec/blob-sink": "workspace:^" "@aztec/bot": "workspace:^" "@aztec/circuit-types": "workspace:^" "@aztec/circuits.js": "workspace:^" @@ -795,7 +822,7 @@ __metadata: languageName: unknown linkType: soft -"@aztec/kv-store@workspace:^, @aztec/kv-store@workspace:kv-store": +"@aztec/kv-store@workspace:*, @aztec/kv-store@workspace:^, @aztec/kv-store@workspace:kv-store": version: 0.0.0-use.local resolution: "@aztec/kv-store@workspace:kv-store" dependencies: @@ -1188,6 +1215,7 @@ __metadata: "@types/node": "npm:^18.7.23" concurrently: "npm:^7.6.0" eslint: "npm:^8.37.0" + express: "npm:^4.21.1" jest: "npm:^29.5.0" jest-mock-extended: "npm:^3.0.3" levelup: "npm:^5.1.1" @@ -1240,7 +1268,7 @@ __metadata: languageName: unknown linkType: soft -"@aztec/telemetry-client@workspace:^, @aztec/telemetry-client@workspace:telemetry-client": +"@aztec/telemetry-client@workspace:*, @aztec/telemetry-client@workspace:^, @aztec/telemetry-client@workspace:telemetry-client": version: 0.0.0-use.local resolution: "@aztec/telemetry-client@workspace:telemetry-client" dependencies: @@ -5713,6 +5741,18 @@ __metadata: languageName: node linkType: hard +"@types/superagent@npm:^8.1.0": + version: 8.1.9 + resolution: "@types/superagent@npm:8.1.9" + dependencies: + "@types/cookiejar": "npm:^2.1.5" + "@types/methods": "npm:^1.1.4" + "@types/node": "npm:*" + form-data: "npm:^4.0.0" + checksum: 10/6d9687b0bc3d693b900ef76000b02437a70879c3219b28606879c086d786bb1e48429813e72e32dd0aafc94c053a78a2aa8be67c45bc8e6b968ca62d6d5cc554 + 
languageName: node + linkType: hard + "@types/supertest@npm:^2.0.12": version: 2.0.16 resolution: "@types/supertest@npm:2.0.16" @@ -5722,6 +5762,16 @@ __metadata: languageName: node linkType: hard +"@types/supertest@npm:^6.0.2": + version: 6.0.2 + resolution: "@types/supertest@npm:6.0.2" + dependencies: + "@types/methods": "npm:^1.1.4" + "@types/superagent": "npm:^8.1.0" + checksum: 10/4b67fb2d1bfbb7ff0a7dfaaf190cdf2e0014522615fb2dc53c214bdac95b4ee42696dd1df13332c90a7765cc52934c9cc0c428bf0f9e8189167aef01042e7448 + languageName: node + linkType: hard + "@types/wrap-ansi@npm:^3.0.0": version: 3.0.0 resolution: "@types/wrap-ansi@npm:3.0.0" @@ -10699,7 +10749,7 @@ __metadata: languageName: node linkType: hard -"express@npm:^4.19.2": +"express@npm:^4.19.2, express@npm:^4.21.1": version: 4.21.1 resolution: "express@npm:4.21.1" dependencies: @@ -11072,6 +11122,17 @@ __metadata: languageName: node linkType: hard +"formidable@npm:^3.5.1": + version: 3.5.2 + resolution: "formidable@npm:3.5.2" + dependencies: + dezalgo: "npm:^1.0.4" + hexoid: "npm:^2.0.0" + once: "npm:^1.4.0" + checksum: 10/b9d87af44be8ba82f8f4955c240e65c559aedb84fecce6b294d97b256db66e6a20d50e799776fdf29ee46cb83857231d12c416c735696b18d3895b85620704f4 + languageName: node + linkType: hard + "forwarded@npm:0.2.0": version: 0.2.0 resolution: "forwarded@npm:0.2.0" @@ -11670,6 +11731,13 @@ __metadata: languageName: node linkType: hard +"hexoid@npm:^2.0.0": + version: 2.0.0 + resolution: "hexoid@npm:2.0.0" + checksum: 10/73d8e135bdd9326d0fa9ea05356741d48a3e67fbd3b2ce14c4f7b523a1cdabe70fa42f2c53447244886a0aecdf7873d4124abc30093a72d15188805f7a7ee406 + languageName: node + linkType: hard + "hmac-drbg@npm:^1.0.1": version: 1.0.1 resolution: "hmac-drbg@npm:1.0.1" @@ -18300,6 +18368,23 @@ __metadata: languageName: node linkType: hard +"superagent@npm:^9.0.1": + version: 9.0.2 + resolution: "superagent@npm:9.0.2" + dependencies: + component-emitter: "npm:^1.3.0" + cookiejar: "npm:^2.1.4" + debug: "npm:^4.3.4" + fast-safe-stringify: "npm:^2.1.1" + form-data: "npm:^4.0.0" + formidable: "npm:^3.5.1" + methods: "npm:^1.1.2" + mime: "npm:2.6.0" + qs: "npm:^6.11.0" + checksum: 10/d3c0c9051ceec84d5b431eaa410ad81bcd53255cea57af1fc66d683a24c34f3ba4761b411072a9bf489a70e3d5b586a78a0e6f2eac6a561067e7d196ddab0907 + languageName: node + linkType: hard + "supertest@npm:^6.3.3": version: 6.3.4 resolution: "supertest@npm:6.3.4" @@ -18310,6 +18395,16 @@ __metadata: languageName: node linkType: hard +"supertest@npm:^7.0.0": + version: 7.0.0 + resolution: "supertest@npm:7.0.0" + dependencies: + methods: "npm:^1.1.2" + superagent: "npm:^9.0.1" + checksum: 10/73bf2a37e13856a1b3e6a37b9df5cec8e506aa0360a5f5ecd989d1f4b0edf168883e306012e81e371d5252c17d4c7bef4ba30633dbf3877cbf52fc7af51cca9b + languageName: node + linkType: hard + "supports-color@npm:^2.0.0": version: 2.0.0 resolution: "supports-color@npm:2.0.0" From 351d7c258479573cd835f14fcdb384ccd2eea1a2 Mon Sep 17 00:00:00 2001 From: AztecBot Date: Sat, 4 Jan 2025 02:22:42 +0000 Subject: [PATCH 17/20] git subrepo push --branch=master barretenberg subrepo: subdir: "barretenberg" merged: "2354a10f3f" upstream: origin: "https://github.com/AztecProtocol/barretenberg" branch: "master" commit: "2354a10f3f" git-subrepo: version: "0.4.6" origin: "???" commit: "???" 
[skip ci] --- barretenberg/.gitrepo | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/barretenberg/.gitrepo b/barretenberg/.gitrepo index 6a44a1aa0c7..287677752e5 100644 --- a/barretenberg/.gitrepo +++ b/barretenberg/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = https://github.com/AztecProtocol/barretenberg branch = master - commit = e50b5c03a005fc294414b3b9c103bb17e42598fa - parent = 9eaa10983b26616876099896accb0e3093ae8d20 + commit = 2354a10f3f13cbe651d9cc4a5f09ce29ee67b07a + parent = 94b6c8641d4dd5b2788bc91c735e82a48504400a method = merge cmdver = 0.4.6 From 694343df2cee4285a004399962c4219e0d739a82 Mon Sep 17 00:00:00 2001 From: AztecBot Date: Sat, 4 Jan 2025 02:23:20 +0000 Subject: [PATCH 18/20] chore: replace relative paths to noir-protocol-circuits --- noir-projects/aztec-nr/aztec/Nargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/noir-projects/aztec-nr/aztec/Nargo.toml b/noir-projects/aztec-nr/aztec/Nargo.toml index 7a1f1af5863..f5b64704d0d 100644 --- a/noir-projects/aztec-nr/aztec/Nargo.toml +++ b/noir-projects/aztec-nr/aztec/Nargo.toml @@ -5,4 +5,4 @@ compiler_version = ">=0.18.0" type = "lib" [dependencies] -protocol_types = { path = "../../noir-protocol-circuits/crates/types" } +protocol_types = { git="https://github.com/AztecProtocol/aztec-packages", tag="aztec-packages-v0.69.0", directory="noir-projects/noir-protocol-circuits/crates/types" } From 501eab71b98388e7a01c6d3e1467d4fc2bb71ad6 Mon Sep 17 00:00:00 2001 From: AztecBot Date: Sat, 4 Jan 2025 02:23:20 +0000 Subject: [PATCH 19/20] git_subrepo.sh: Fix parent in .gitrepo file. [skip ci] --- noir-projects/aztec-nr/.gitrepo | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/noir-projects/aztec-nr/.gitrepo b/noir-projects/aztec-nr/.gitrepo index 28f80a81d36..412fa231ad3 100644 --- a/noir-projects/aztec-nr/.gitrepo +++ b/noir-projects/aztec-nr/.gitrepo @@ -9,4 +9,4 @@ remote = https://github.com/AztecProtocol/aztec-nr commit = e852135a829883f177a69ccbd1df29fd445fbfc1 method = merge cmdver = 0.4.6 - parent = 33475e46f8265f8ff7e6d8980a46b240c1af8656 + parent = 9c747da2de1c613c7c5ad7853a15f218aff0afda From 3b983a45c023020e3a384a52053c9991ebe40595 Mon Sep 17 00:00:00 2001 From: AztecBot Date: Sat, 4 Jan 2025 02:23:26 +0000 Subject: [PATCH 20/20] git subrepo push --branch=master noir-projects/aztec-nr subrepo: subdir: "noir-projects/aztec-nr" merged: "1a0b33e347" upstream: origin: "https://github.com/AztecProtocol/aztec-nr" branch: "master" commit: "1a0b33e347" git-subrepo: version: "0.4.6" origin: "???" commit: "???" 
[skip ci] --- noir-projects/aztec-nr/.gitrepo | 4 ++-- noir-projects/aztec-nr/aztec/Nargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/noir-projects/aztec-nr/.gitrepo b/noir-projects/aztec-nr/.gitrepo index 412fa231ad3..e63b1697f86 100644 --- a/noir-projects/aztec-nr/.gitrepo +++ b/noir-projects/aztec-nr/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = https://github.com/AztecProtocol/aztec-nr branch = master - commit = e852135a829883f177a69ccbd1df29fd445fbfc1 + commit = 1a0b33e3472ffa6bad9729adb16862fda90a80b1 method = merge cmdver = 0.4.6 - parent = 9c747da2de1c613c7c5ad7853a15f218aff0afda + parent = 715d4f1471524c9ccdee8038d629e1e1f6819172 diff --git a/noir-projects/aztec-nr/aztec/Nargo.toml b/noir-projects/aztec-nr/aztec/Nargo.toml index f5b64704d0d..7a1f1af5863 100644 --- a/noir-projects/aztec-nr/aztec/Nargo.toml +++ b/noir-projects/aztec-nr/aztec/Nargo.toml @@ -5,4 +5,4 @@ compiler_version = ">=0.18.0" type = "lib" [dependencies] -protocol_types = { git="https://github.com/AztecProtocol/aztec-packages", tag="aztec-packages-v0.69.0", directory="noir-projects/noir-protocol-circuits/crates/types" } +protocol_types = { path = "../../noir-protocol-circuits/crates/types" }
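For reference, the blob-sink surface introduced in this series can be exercised end to end. A hypothetical round-trip against a locally running sink, using only calls that appear in the diffs above (`createBlobSinkServer`, `Blob.fromFields`, the POST/GET routes); the port, block id, and script scaffolding are illustrative:

```ts
import { createBlobSinkServer } from '@aztec/blob-sink';
import { Blob } from '@aztec/foundation/blob';
import { Fr } from '@aztec/foundation/fields';

async function demo() {
  // In-memory sink (no dataStoreConfig) on the beacon-default port used above.
  const sink = await createBlobSinkServer({ port: 5052 });
  await sink.start();

  const blob = Blob.fromFields([Fr.random(), Fr.random()]);

  // POST: same payload shape L1Publisher.sendBlobsToBlobSink produces.
  await fetch('http://localhost:5052/blob_sidecar', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    // eslint-disable-next-line camelcase
    body: JSON.stringify({ block_id: '0xabcd', blobs: [{ blob: blob.toBuffer(), index: 0 }] }),
  });

  // GET: `indices` is the comma-separated filter parsed by indicesSchema.
  const res = await fetch('http://localhost:5052/eth/v1/beacon/blob_sidecars/0xabcd?indices=0');
  const { data } = await res.json(); // deneb-style [{ blob, index, kzg_commitment, kzg_proof }]

  await sink.stop();
  return data;
}
```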
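Finally, a small sketch of how the new publisher option resolves from the environment, per `getPublisherConfigMappings` above. The lookup helper is hypothetical; only the `SEQ_BLOB_SINK_URL` / `PROVER_BLOB_SINK_URL` names and the unset-means-no-op behaviour come from the diffs:

```ts
// Hypothetical: resolving blobSinkUrl the way the per-scope config mapping describes.
process.env.SEQ_BLOB_SINK_URL = 'http://localhost:5052'; // e.g. set by the operator

function blobSinkUrlFor(scope: 'PROVER' | 'SEQ'): string | undefined {
  return process.env[`${scope}_BLOB_SINK_URL`]; // parseEnv is the identity here
}

// Left unset, blobSinkUrl stays undefined and sendBlobsToBlobSink returns false
// without making a request.
console.log(blobSinkUrlFor('SEQ')); // http://localhost:5052
console.log(blobSinkUrlFor('PROVER')); // undefined
```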