Merge pull request #3749 from ethereum/dev
Release `v1.5.0-alpha.2`
hwwhww authored May 7, 2024
2 parents c254443 + e96c070 commit 2c1f677
Showing 20 changed files with 289 additions and 391 deletions.
80 changes: 37 additions & 43 deletions .github/workflows/run-tests.yml
@@ -24,41 +24,42 @@ on:
     - cron: '0 0 * * *'
 
 jobs:
-  preclear:
-    runs-on: self-hosted
-    if: always()
-    steps:
-      - name: 'Cleanup build folder'
-        run: |
-          ls -la ./
-          rm -rf ./* || true
-          rm -rf ./.??* || true
-          ls -la ./
   table_of_contents:
-    runs-on: self-hosted
-    needs: preclear
+    runs-on: [self-hosted-ghr-custom, size-s-x64, profile-consensusSpecs]
     steps:
-      - name: Checkout this repo
-        uses: actions/[email protected]
+      - name: Checkout repository
+        uses: actions/checkout@v4
+      - name: Setup Node.js
+        uses: actions/setup-node@v4
+        with:
+          node-version: '20'
+          cache: ''
       - name: Check table of contents
-        run: sudo npm install -g [email protected] && make check_toc
+        run: npm install -g [email protected] && make check_toc
 
   codespell:
-    runs-on: self-hosted
-    needs: preclear
+    runs-on: [self-hosted-ghr-custom, size-s-x64, profile-consensusSpecs]
     steps:
-      - name: Checkout this repo
-        uses: actions/[email protected]
+      - name: Checkout repository
+        uses: actions/checkout@v4
+      - name: Setup Python
+        uses: actions/setup-python@v5
+        with:
+          python-version: '3.10'
+          cache: ''
       - name: Check codespell
-        run: pip install 'codespell<3.0.0,>=2.0.0' --user && make codespell
+        run: make codespell
 
   lint:
-    runs-on: self-hosted
-    needs: preclear
+    runs-on: [self-hosted-ghr-custom, size-l-x64, profile-consensusSpecs]
     steps:
-      - name: Checkout this repo
-        uses: actions/[email protected]
+      - name: Checkout repository
+        uses: actions/checkout@v4
+      - name: Setup Python
+        uses: actions/setup-python@v5
+        with:
+          python-version: '3.10'
+          cache: ''
       - name: Install pyspec requirements
         run: make install_test
       - name: Run linter for pyspec
@@ -67,14 +68,19 @@ jobs:
         run: make lint_generators
 
   pyspec-tests:
-    runs-on: self-hosted
-    needs: [preclear,lint,codespell,table_of_contents]
+    runs-on: [self-hosted-ghr-custom, size-xl-x64, profile-consensusSpecs]
+    needs: [lint,codespell,table_of_contents]
     strategy:
       matrix:
         version: ["phase0", "altair", "bellatrix", "capella", "deneb", "electra", "whisk", "eip7594"]
     steps:
-      - name: Checkout this repo
-        uses: actions/[email protected]
+      - name: Checkout repository
+        uses: actions/checkout@v4
+      - name: Setup Python
+        uses: actions/setup-python@v5
+        with:
+          python-version: '3.10'
+          cache: ''
       - name: set TEST_PRESET_TYPE
         if: github.event.inputs.test_preset_type != ''
         run: |
@@ -95,20 +101,8 @@ jobs:
         run: make install_test
       - name: test-${{ matrix.version }}
         run: make citest fork=${{ matrix.version }} TEST_PRESET_TYPE=${{env.spec_test_preset_type}}
-      - uses: actions/upload-artifact@v3
+      - uses: actions/upload-artifact@v4
         if: always()
         with:
-          name: test-${{ matrix.version }}
+          name: test-reports-${{ matrix.version }}
           path: tests/core/pyspec/test-reports
-
-  cleanup:
-    runs-on: self-hosted
-    needs: [preclear,pyspec-tests,codespell,lint,table_of_contents]
-    if: always()
-    steps:
-      - name: 'Cleanup build folder'
-        run: |
-          ls -la ./
-          rm -rf ./* || true
-          rm -rf ./.??* || true
-          ls -la ./
8 changes: 4 additions & 4 deletions Makefile
@@ -107,13 +107,13 @@ pyspec:
 
 # check the setup tool requirements
 preinstallation:
-	python3 -m venv venv; . venv/bin/activate; \
+	python3 -m venv venv && \
+	. venv/bin/activate && \
 	python3 -m pip install -r requirements_preinstallation.txt
 
 # installs the packages to run pyspec tests
 install_test: preinstallation
-	python3 -m venv venv; . venv/bin/activate; \
-	python3 -m pip install -e .[lint]; python3 -m pip install -e .[test]
+	. venv/bin/activate && \
+	python3 -m pip install -e .[lint,test]
 
 # Testing against `minimal` or `mainnet` config by default
 test: pyspec
19 changes: 10 additions & 9 deletions specs/_features/eip7594/das-core.md
@@ -105,19 +105,20 @@ class DataColumnSidecar(Container):
 def get_custody_columns(node_id: NodeID, custody_subnet_count: uint64) -> Sequence[ColumnIndex]:
     assert custody_subnet_count <= DATA_COLUMN_SIDECAR_SUBNET_COUNT
 
-    subnet_ids = []
-    i = 0
+    subnet_ids: List[uint64] = []
+    current_id = uint256(node_id)
     while len(subnet_ids) < custody_subnet_count:
-        if node_id == UINT256_MAX:
-            node_id = 0
-
         subnet_id = (
-            bytes_to_uint64(hash(uint_to_bytes(uint256(node_id + i)))[0:8])
+            bytes_to_uint64(hash(uint_to_bytes(uint256(current_id)))[0:8])
             % DATA_COLUMN_SIDECAR_SUBNET_COUNT
         )
         if subnet_id not in subnet_ids:
            subnet_ids.append(subnet_id)
+        if current_id == UINT256_MAX:
+            # Overflow prevention
+            current_id = NodeID(0)
+        current_id += 1
-        i += 1
 
     assert len(subnet_ids) == len(set(subnet_ids))
 
     columns_per_subnet = NUMBER_OF_COLUMNS // DATA_COLUMN_SIDECAR_SUBNET_COUNT
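For intuition, the reworked loop can be exercised with plain Python integers. The sketch below is an illustration only, not spec code: `sha256` stands in for the spec's `hash`, the subnet count is a made-up value, and SSZ's little-endian serialization conventions are assumed.

```python
import hashlib

UINT256_MAX = 2**256 - 1
DATA_COLUMN_SIDECAR_SUBNET_COUNT = 32  # illustrative value, not the spec constant

def sketch_custody_subnets(node_id: int, custody_subnet_count: int) -> list:
    assert custody_subnet_count <= DATA_COLUMN_SIDECAR_SUBNET_COUNT
    subnet_ids = []
    current_id = node_id
    while len(subnet_ids) < custody_subnet_count:
        # uint_to_bytes on a uint256 serializes as 32 bytes little-endian in the pyspec
        digest = hashlib.sha256(current_id.to_bytes(32, "little")).digest()
        subnet_id = int.from_bytes(digest[:8], "little") % DATA_COLUMN_SIDECAR_SUBNET_COUNT
        if subnet_id not in subnet_ids:
            subnet_ids.append(subnet_id)
        if current_id == UINT256_MAX:
            current_id = 0  # wrap instead of overflowing uint256
        current_id += 1
    return subnet_ids

# The wrap-around path near the top of the uint256 range now terminates cleanly:
assert len(sketch_custody_subnets(UINT256_MAX - 1, 4)) == 4
```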
@@ -154,10 +155,10 @@ def recover_matrix(cells_dict: Dict[Tuple[BlobIndex, CellID], Cell], blob_count:
     This helper demonstrates how to apply ``recover_all_cells``.
     The data structure for storing cells is implementation-dependent.
     """
-    extended_matrix = []
+    extended_matrix: List[Cell] = []
     for blob_index in range(blob_count):
         cell_ids = [cell_id for b_index, cell_id in cells_dict.keys() if b_index == blob_index]
-        cells = [cells_dict[(blob_index, cell_id)] for cell_id in cell_ids]
+        cells = [cells_dict[(BlobIndex(blob_index), cell_id)] for cell_id in cell_ids]
 
         all_cells_for_row = recover_all_cells(cell_ids, cells)
         extended_matrix.extend(all_cells_for_row)
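As a usage sketch (an illustration, not spec code): given a hypothetical mapping `my_cells` from the cell IDs a node holds for blob 0 to their `Cell` values, covering at least half of the extended blob, the full row can be rebuilt:

```python
cells_dict = {
    (BlobIndex(0), CellID(cell_id)): cell
    for cell_id, cell in my_cells.items()  # my_cells is hypothetical
}
extended_matrix = recover_matrix(cells_dict, blob_count=1)
assert len(extended_matrix) == CELLS_PER_EXT_BLOB  # one full extended row per blob
```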
92 changes: 87 additions & 5 deletions specs/_features/eip7594/p2p-interface.md
@@ -25,6 +25,7 @@
   - [The Req/Resp domain](#the-reqresp-domain)
     - [Messages](#messages)
       - [DataColumnSidecarsByRoot v1](#datacolumnsidecarsbyroot-v1)
+      - [DataColumnSidecarsByRange v1](#datacolumnsidecarsbyrange-v1)
   - [The discovery domain: discv5](#the-discovery-domain-discv5)
     - [ENR structure](#enr-structure)
       - [`custody_subnet_count`](#custody_subnet_count)
@@ -71,15 +72,17 @@ def verify_data_column_sidecar_kzg_proofs(sidecar: DataColumnSidecar) -> bool:
     """
     assert sidecar.index < NUMBER_OF_COLUMNS
     assert len(sidecar.column) == len(sidecar.kzg_commitments) == len(sidecar.kzg_proofs)
-    row_ids = [RowIndex(i) for i in range(len(sidecar.column))]
+
+    row_indices = [RowIndex(i) for i in range(len(sidecar.column))]
+    column_indices = [sidecar.index] * len(sidecar.column)
 
     # KZG batch verifies that the cells match the corresponding commitments and proofs
     return verify_cell_kzg_proof_batch(
-        row_commitments=sidecar.kzg_commitments,
-        row_indices=row_ids,  # all rows
-        column_indices=[sidecar.index],
+        row_commitments_bytes=sidecar.kzg_commitments,
+        row_indices=row_indices,  # all rows
+        column_indices=column_indices,  # specific column
         cells=sidecar.column,
-        proofs=sidecar.kzg_proofs,
+        proofs_bytes=sidecar.kzg_proofs,
     )
 ```
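Beyond the renamed keyword arguments, the change makes the three index lists line up element-wise: `column_indices` now repeats the sidecar's single column once per row. A quick illustration with made-up values, for a sidecar with three commitments at `index == 7`:

```python
row_indices = [RowIndex(0), RowIndex(1), RowIndex(2)]  # one entry per row
column_indices = [ColumnIndex(7)] * 3                  # sidecar.index, repeated per row
```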

@@ -198,6 +201,85 @@ Clients SHOULD include a sidecar in the response as soon as it passes the gossip
 Clients SHOULD NOT respond with sidecars related to blocks that fail gossip validation rules.
 Clients SHOULD NOT respond with sidecars related to blocks that fail the beacon chain state transition.
 
+##### DataColumnSidecarsByRange v1
+
+**Protocol ID:** `/eth2/beacon_chain/req/data_column_sidecars_by_range/1/`
+
+The `<context-bytes>` field is calculated as `context = compute_fork_digest(fork_version, genesis_validators_root)`:
+
+[1]: # (eth2spec: skip)
+
+| `fork_version`           | Chunk SSZ type              |
+|--------------------------|-----------------------------|
+| `EIP7594_FORK_VERSION`   | `eip7594.DataColumnSidecar` |
+
+Request Content:
+```
+(
+  start_slot: Slot
+  count: uint64
+  columns: List[ColumnIndex]
+)
+```
+
+Response Content:
+```
+(
+  List[DataColumnSidecar, MAX_REQUEST_DATA_COLUMN_SIDECARS]
+)
+```
+
+Requests data column sidecars in the slot range `[start_slot, start_slot + count)` of the given `columns`, leading up to the current head block as selected by fork choice.
+
+Before consuming the next response chunk, the response reader SHOULD verify that the data column sidecar is well-formatted, has a valid inclusion proof, and is correct w.r.t. the expected KZG commitments through `verify_data_column_sidecar_kzg_proofs`.
+
+`DataColumnSidecarsByRange` is primarily used to sync data columns that may have been missed on gossip and to sync within the `MIN_EPOCHS_FOR_DATA_COLUMN_SIDECARS_REQUESTS` window.
+
+The request MUST be encoded as an SSZ-container.
+
+The response MUST consist of zero or more `response_chunk`.
+Each _successful_ `response_chunk` MUST contain a single `DataColumnSidecar` payload.
+
+Let `data_column_serve_range` be `[max(current_epoch - MIN_EPOCHS_FOR_DATA_COLUMN_SIDECARS_REQUESTS, EIP7594_FORK_EPOCH), current_epoch]`.
+Clients MUST keep a record of data column sidecars seen on the epoch range `data_column_serve_range`,
+where `current_epoch` is defined by the current wall-clock time,
+and clients MUST support serving requests of data columns on this range.
+
+Peers that are unable to reply to data column sidecar requests within the
+range `data_column_serve_range` SHOULD respond with error code `3: ResourceUnavailable`.
+Such peers that are unable to successfully reply to this range of requests MAY get descored
+or disconnected at any time.
+
+*Note*: The above requirement implies that nodes that start from a recent weak subjectivity checkpoint
+MUST backfill the local data columns database to at least the range `data_column_serve_range`
+to be fully compliant with `DataColumnSidecarsByRange` requests.
+
+*Note*: Although clients that bootstrap from a weak subjectivity checkpoint can begin
+participating in the networking immediately, other peers MAY
+disconnect and/or temporarily ban such an un-synced or semi-synced client.
+
+Clients MUST respond with at least the data column sidecars of the first blob-carrying block that exists in the range, if they have it, and no more than `MAX_REQUEST_DATA_COLUMN_SIDECARS` sidecars.
+
+Clients MUST include all data column sidecars of each block from which they include data column sidecars.
+
+The following data column sidecars, where they exist, MUST be sent in `(slot, column_index)` order.
+
+Slots that do not contain known data columns MUST be skipped, mimicking the behaviour
+of the `BlocksByRange` request. Only response chunks with known data columns should
+therefore be sent.
+
+Clients MAY limit the number of data column sidecars in the response.
+
+The response MUST contain no more than `count * NUMBER_OF_COLUMNS` data column sidecars.
+
+Clients MUST respond with data column sidecars from their view of the current fork choice
+-- that is, data column sidecars as included by blocks from the single chain defined by the current head.
+Of note, blocks from slots before finalization MUST lead to the finalized block reported in the `Status` handshake.
+
+Clients MUST respond with data column sidecars that are consistent from a single chain within the context of the request.
+
+After the initial data column sidecar, clients MAY stop in the process of responding if their fork choice changes the view of the chain in the context of the request.
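For illustration, the request schema above corresponds to an SSZ container of roughly the following shape. This is a sketch only: the class name and the `NUMBER_OF_COLUMNS` list limit are assumptions, since the schema above leaves the limit implicit.

```python
class DataColumnSidecarsByRangeRequest(Container):
    start_slot: Slot
    count: uint64
    columns: List[ColumnIndex, NUMBER_OF_COLUMNS]  # assumed bound
```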
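Likewise, the `data_column_serve_range` bookkeeping can be read as the following sketch (the helper name is hypothetical, not spec-defined):

```python
def compute_data_column_serve_range(current_epoch: Epoch) -> Tuple[Epoch, Epoch]:
    # Inclusive bounds: sidecars for epochs in [start, current_epoch] MUST be served
    start = max(int(current_epoch) - MIN_EPOCHS_FOR_DATA_COLUMN_SIDECARS_REQUESTS, EIP7594_FORK_EPOCH)
    return Epoch(start), current_epoch
```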

### The discovery domain: discv5

#### ENR structure
10 changes: 5 additions & 5 deletions specs/_features/eip7594/polynomial-commitments-sampling.md
@@ -243,7 +243,7 @@ def divide_polynomialcoeff(a: PolynomialCoeff, b: PolynomialCoeff) -> Polynomial
     Long polynomial division for two coefficient form polynomials ``a`` and ``b``
     """
     a = a.copy()  # Make a copy since `a` is passed by reference
-    o = []
+    o: List[BLSFieldElement] = []
     apos = len(a) - 1
     bpos = len(b) - 1
     diff = apos - bpos
@@ -441,7 +441,7 @@ def compute_cells_and_kzg_proofs(blob: Blob) -> Tuple[
     proofs = []
 
     for i in range(CELLS_PER_EXT_BLOB):
-        coset = coset_for_cell(i)
+        coset = coset_for_cell(CellID(i))
         proof, ys = compute_kzg_proof_multi_impl(polynomial_coeff, coset)
         cells.append(coset_evals_to_cell(ys))
         proofs.append(proof)
@@ -470,7 +470,7 @@ def compute_cells(blob: Blob) -> Vector[Cell, CELLS_PER_EXT_BLOB]:
     for cell_id in range(CELLS_PER_EXT_BLOB):
         start = cell_id * FIELD_ELEMENTS_PER_CELL
         end = (cell_id + 1) * FIELD_ELEMENTS_PER_CELL
-        cells.append(coset_evals_to_cell(extended_data_rbo[start:end]))
+        cells.append(coset_evals_to_cell(CosetEvals(extended_data_rbo[start:end])))
     return cells
 ```
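The `start`/`end` arithmetic above simply partitions the reverse-bit-ordered extension into `CELLS_PER_EXT_BLOB` contiguous chunks of `FIELD_ELEMENTS_PER_CELL` entries each. A toy illustration with made-up sizes, not the spec's constants:

```python
FIELD_ELEMENTS_PER_CELL = 4  # toy value
CELLS_PER_EXT_BLOB = 8       # toy value
extended_data_rbo = list(range(FIELD_ELEMENTS_PER_CELL * CELLS_PER_EXT_BLOB))
cells = [
    extended_data_rbo[cell_id * FIELD_ELEMENTS_PER_CELL:(cell_id + 1) * FIELD_ELEMENTS_PER_CELL]
    for cell_id in range(CELLS_PER_EXT_BLOB)
]
assert cells[1] == [4, 5, 6, 7]
```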

@@ -572,7 +572,7 @@ def construct_vanishing_polynomial(missing_cell_ids: Sequence[CellID]) -> Tuple[
     ])
 
     # Extend vanishing polynomial to full domain using the closed form of the vanishing polynomial over a coset
-    zero_poly_coeff = [0] * FIELD_ELEMENTS_PER_EXT_BLOB
+    zero_poly_coeff = [BLSFieldElement(0)] * FIELD_ELEMENTS_PER_EXT_BLOB
     for i, coeff in enumerate(short_zero_poly):
         zero_poly_coeff[i * FIELD_ELEMENTS_PER_CELL] = coeff
 
@@ -690,7 +690,7 @@ def recover_all_cells(cell_ids: Sequence[CellID], cells: Sequence[Cell]) -> Sequ
     # Convert cells to coset evals
     cosets_evals = [cell_to_coset_evals(cell) for cell in cells]
 
-    missing_cell_ids = [cell_id for cell_id in range(CELLS_PER_EXT_BLOB) if cell_id not in cell_ids]
+    missing_cell_ids = [CellID(cell_id) for cell_id in range(CELLS_PER_EXT_BLOB) if cell_id not in cell_ids]
     zero_poly_coeff, zero_poly_eval = construct_vanishing_polynomial(missing_cell_ids)
 
     eval_shifted_extended_evaluation, eval_shifted_zero_poly, shift_inv = recover_shifted_data(
4 changes: 2 additions & 2 deletions specs/electra/beacon-chain.md
@@ -514,8 +514,8 @@ def is_partially_withdrawable_validator(validator: Validator, balance: Gwei) ->
 #### `get_committee_indices`
 
 ```python
-def get_committee_indices(commitee_bits: Bitvector) -> Sequence[CommitteeIndex]:
-    return [CommitteeIndex(index) for index, bit in enumerate(commitee_bits) if bit]
+def get_committee_indices(committee_bits: Bitvector) -> Sequence[CommitteeIndex]:
+    return [CommitteeIndex(index) for index, bit in enumerate(committee_bits) if bit]
 ```
 
 #### `get_validator_max_effective_balance`
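A quick usage sketch for the corrected helper (an illustration only; the four-bit width is arbitrary, whereas the spec passes committee bitvectors sized `MAX_COMMITTEES_PER_SLOT`):

```python
bits = Bitvector[4](True, False, True, False)  # toy width, for illustration
assert get_committee_indices(bits) == [CommitteeIndex(0), CommitteeIndex(2)]
```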
4 changes: 2 additions & 2 deletions specs/phase0/p2p-interface.md
@@ -356,7 +356,7 @@ to subscribing nodes (typically validators) to be included in future blocks.
 We define the following variables for convenience:
 - `aggregate_and_proof = signed_aggregate_and_proof.message`
 - `aggregate = aggregate_and_proof.aggregate`
-- `index = aggregate.index`
+- `index = aggregate.data.index`
 - `aggregation_bits = attestation.aggregation_bits`
 
 The following validations MUST pass before forwarding the `signed_aggregate_and_proof` on the network.
@@ -436,7 +436,7 @@ The `beacon_attestation_{subnet_id}` topics are used to propagate unaggregated a
 to the subnet `subnet_id` (typically beacon and persistent committees) to be aggregated before being gossiped to `beacon_aggregate_and_proof`.
 
 We define the following variables for convenience:
-- `index = attestation.index`
+- `index = attestation.data.index`
 - `aggregation_bits = attestation.aggregation_bits`
 
 The following validations MUST pass before forwarding the `attestation` on the subnet.
2 changes: 1 addition & 1 deletion tests/core/pyspec/eth2spec/VERSION.txt
@@ -1 +1 @@
-1.5.0-alpha.1
+1.5.0-alpha.2