add peerdas metrics by Katya #44

Open · wants to merge 6 commits into base: das
1 change: 1 addition & 0 deletions Cargo.lock


1 change: 1 addition & 0 deletions eip_7594/Cargo.toml
@@ -20,6 +20,7 @@ thiserror = { workspace = true }
try_from_iterator = { workspace = true }
typenum = { workspace = true }
types = { workspace = true }
prometheus_metrics = { workspace = true }

[dev-dependencies]
duplicate = { workspace = true }
31 changes: 29 additions & 2 deletions eip_7594/src/lib.rs
@@ -5,8 +5,10 @@ use helper_functions::predicates::is_valid_merkle_branch;
use itertools::Itertools;
use kzg as _;
use num_traits::One as _;
use prometheus_metrics::Metrics;
use sha2::{Digest as _, Sha256};
use ssz::{ByteVector, ContiguousList, ContiguousVector, SszHash, Uint256};
use std::sync::Arc;
use thiserror::Error;
use try_from_iterator::TryFromIterator as _;
use typenum::Unsigned;
@@ -56,7 +58,14 @@ pub enum ExtendedSampleError {
AllowedFailtureOutOfRange { allowed_failures: u64 },
}

pub fn verify_kzg_proofs<P: Preset>(data_column_sidecar: &DataColumnSidecar<P>) -> Result<bool> {
pub fn verify_kzg_proofs<P: Preset>(
    data_column_sidecar: &DataColumnSidecar<P>,
    metrics: &Option<Arc<Metrics>>,
) -> Result<bool> {
    // Bind the timer outside the `if let` so it is dropped at the end of the
    // function and the histogram records the whole verification time.
    let _timer = metrics
        .as_ref()
        .map(|metrics| metrics.data_column_sidecar_verification_times.start_timer());

let DataColumnSidecar {
index,
column,
@@ -118,7 +127,12 @@ pub fn verify_kzg_proofs<P: Preset>(data_column_sidecar: &DataColumnSidecar<P>)

pub fn verify_sidecar_inclusion_proof<P: Preset>(
data_column_sidecar: &DataColumnSidecar<P>,
    metrics: &Option<Arc<Metrics>>,
) -> bool {
    // Bind the timer outside the `if let` so it measures the whole function,
    // not just the lifetime of that block.
    let _timer = metrics
        .as_ref()
        .map(|metrics| metrics.data_column_sidecar_inclusion_proof_verification.start_timer());

let DataColumnSidecar {
kzg_commitments,
signed_block_header,
@@ -198,7 +212,14 @@ fn get_data_columns_for_subnet(subnet_id: SubnetId) -> impl Iterator<Item = Colu
*
* This helper demonstrates the relationship between blobs and the matrix of cells/proofs.
*/
pub fn compute_matrix(blobs: Vec<CKzgBlob>) -> Result<Vec<MatrixEntry>> {
pub fn compute_matrix(
    blobs: Vec<CKzgBlob>,
    metrics: &Option<Arc<Metrics>>,
) -> Result<Vec<MatrixEntry>> {
    // Bind the timer outside the `if let` so it measures the whole computation.
    let _timer = metrics
        .as_ref()
        .map(|metrics| metrics.data_column_sidecar_computation.start_timer());

let kzg_settings = settings();

let mut matrix = vec![];
@@ -222,10 +243,16 @@ pub fn compute_matrix(blobs: Vec<CKzgBlob>) -> Result<Vec<MatrixEntry>> {
*
* This helper demonstrates how to apply ``recover_cells_and_kzg_proofs``.
*/
// TODO: implement reconstructed_columns metric
pub fn recover_matrix(
    partial_matrix: Vec<MatrixEntry>,
    blob_count: usize,
    metrics: &Option<Arc<Metrics>>,
) -> Result<Vec<MatrixEntry>> {
    // Bind the timer outside the `if let` so it measures the whole reconstruction.
    let _timer = metrics
        .as_ref()
        .map(|metrics| metrics.columns_reconstruction_time.start_timer());

let kzg_settings = settings();

let mut matrix = vec![];
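
A note on the timer pattern used in these helpers: `prometheus::HistogramTimer` observes its elapsed time when it is dropped, so the guard has to stay bound for the whole function rather than inside an `if let` block. A minimal standalone sketch of that pattern, assuming only the `prometheus` crate; the `Metrics` struct, field, and metric name below are illustrative and not the ones defined in this PR:

use std::sync::Arc;

use prometheus::{histogram_opts, Histogram};

struct Metrics {
    verification_times: Histogram,
}

fn verify(metrics: &Option<Arc<Metrics>>) -> bool {
    // The guard lives until the function returns, so the histogram observes
    // the full runtime; a timer created inside `if let Some(metrics) = ...`
    // would be dropped at the end of that block and record roughly zero.
    let _timer = metrics
        .as_ref()
        .map(|metrics| metrics.verification_times.start_timer());

    // ... verification work would go here ...
    true
}

fn main() -> Result<(), prometheus::Error> {
    let metrics = Some(Arc::new(Metrics {
        verification_times: Histogram::with_opts(histogram_opts!(
            "example_verification_seconds",
            "Example verification runtime"
        ))?,
    }));

    verify(&metrics);
    Ok(())
}
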
5 changes: 1 addition & 4 deletions fork_choice_control/src/mutator.rs
@@ -1153,10 +1153,7 @@ where
if let Some(gossip_id) = origin.gossip_id() {
P2pMessage::Accept(gossip_id).send(&self.p2p_tx);
}

if let Some(metrics) = self.metrics.as_ref() {
metrics.verified_gossip_data_column_sidecar.inc();
}

self.accept_data_column_sidecar(&wait_group, data_column_sidecar);
}
Ok(DataColumnSidecarAction::Ignore) => {
27 changes: 20 additions & 7 deletions fork_choice_store/src/store.rs
@@ -1799,7 +1799,11 @@ impl<P: Preset> Store<P> {
mut verifier: impl Verifier + Send,
metrics: &Option<Arc<Metrics>>,
) -> Result<DataColumnSidecarAction<P>> {
let _timer = metrics
if let Some(metrics) = metrics.as_ref() {
metrics.data_column_sidecars_submitted_for_processing.inc();
}

let _data_column_sidecar_verification_timer = metrics
.as_ref()
.map(|metrics| metrics.data_column_sidecar_verification_times.start_timer());

@@ -1860,19 +1864,24 @@ impl<P: Preset> Store<P> {

// [REJECT] The sidecar's kzg_commitments field inclusion proof is valid as verified by verify_data_column_sidecar_inclusion_proof(sidecar).
ensure!(
verify_sidecar_inclusion_proof(&data_column_sidecar),
verify_sidecar_inclusion_proof(&data_column_sidecar, metrics),
Error::DataColumnSidecarInvalidInclusionProof {
data_column_sidecar
}
);

// [REJECT] The sidecar's column data is valid as verified by verify_data_column_sidecar_kzg_proofs(sidecar).
verify_kzg_proofs(&data_column_sidecar).map_err(|error| {
Error::DataColumnSidecarInvalid {
data_column_sidecar: data_column_sidecar.clone_arc(),
error,
verify_kzg_proofs(
&data_column_sidecar,
metrics
)
.map_err(|error| {
Error::DataColumnSidecarInvalid {
data_column_sidecar: data_column_sidecar.clone_arc(),
error,
}
}
})?;
)?;

// [REJECT] The sidecar's block's parent (defined by block_header.parent_root) passes validation.
// Part 1/2:
@@ -1963,6 +1972,10 @@ impl<P: Preset> Store<P> {
}
);

if let Some(metrics) = metrics.as_ref() {
metrics.verified_gossip_data_column_sidecar.inc();
}

Ok(DataColumnSidecarAction::Accept(data_column_sidecar))
}

84 changes: 77 additions & 7 deletions prometheus_metrics/src/metrics.rs
@@ -49,11 +49,19 @@ pub struct Metrics {
pub received_sync_contribution_subsets: IntCounter,
pub received_aggregated_attestation_subsets: IntCounter,

// Custody Subnets / Data Column Verification times
// Custody Subnets / PeerDAS
column_subnet_peers: IntGaugeVec,
pub data_column_sidecars_submitted_for_processing: IntCounter,
pub verified_gossip_data_column_sidecar: IntCounter,
pub data_column_sidecar_verification_times: Histogram,

pub reconstructed_columns: IntCounter, // TODO
pub columns_reconstruction_time: Histogram,
pub data_column_sidecar_computation: Histogram,
pub data_column_sidecar_inclusion_proof_verification: Histogram,
pub data_column_sidecar_kzg_verification_single: Histogram, // TODO?
pub data_column_sidecar_kzg_verification_batch: Histogram,
pub beacon_custody_columns_count_total: IntCounter, // TODO

// Extra Network stats
gossip_block_slot_start_delay_time: Histogram,

@@ -266,21 +274,62 @@ impl Metrics {
"Number of received aggregated attestations that are subsets of already known aggregates"
)?,

// Custody Subnets / PeerDAS
column_subnet_peers: IntGaugeVec::new(
opts!("PEERS_PER_COLUMN_SUBNET", "Number of connected peers per column subnet"),
&["subnet_id"],
)?,

data_column_sidecars_submitted_for_processing: IntCounter::new(
"beacon_data_column_sidecar_processing_requests_total",
"Number of data column sidecars submitted for processing"
)?,

verified_gossip_data_column_sidecar: IntCounter::new(
"VERIFIED_GOSSIP_DATA_COLUMN_SIDECAR",
"Number of gossip data column sidecar verified for propagation"
"beacon_data_column_sidecar_processing_successes_total",
"Number of data column sidecars verified for gossip"
)?,

data_column_sidecar_verification_times: Histogram::with_opts(histogram_opts!(
"DATA_COLUMN_SIDECAR_VERIFICATION_TIMES",
"Time takes to verify a data column sidecar"
"beacon_data_column_sidecar_gossip_verification_seconds",
"Full runtime of data column sidecars gossip verification"
))?,

reconstructed_columns: IntCounter::new(
"beacon_data_availability_reconstructed_columns_total",
"Total count of reconstructed columns"
)?,

columns_reconstruction_time: Histogram::with_opts(histogram_opts!(
"beacon_data_availability_reconstruction_time_seconds",
"Time taken to reconstruct columns"
))?,

data_column_sidecar_computation: Histogram::with_opts(histogram_opts!(
"beacon_data_column_sidecar_computation_seconds",
"Time taken to compute data column sidecar, including cells, proofs and inclusion proof"
))?,

data_column_sidecar_inclusion_proof_verification: Histogram::with_opts(histogram_opts!(
"beacon_data_column_sidecar_inclusion_proof_verification_seconds",
"Time taken to verify data column sidecar inclusion proof"
))?,

data_column_sidecar_kzg_verification_single: Histogram::with_opts(histogram_opts!(
"beacon_kzg_verification_data_column_single_seconds",
"Runtime of single data column kzg verification"
))?,

data_column_sidecar_kzg_verification_batch: Histogram::with_opts(histogram_opts!(
"beacon_kzg_verification_data_column_batch_seconds",
"Runtime of batched data column kzg verification"
))?,

beacon_custody_columns_count_total: IntCounter::new(
"beacon_custody_columns_count_total",
"Total count of columns in custody within the data availability boundary"
)?,

// Extra Network stats
gossip_block_slot_start_delay_time: Histogram::with_opts(histogram_opts!(
"beacon_block_gossip_slot_start_delay_time",
@@ -766,10 +815,30 @@ impl Metrics {
self.received_aggregated_attestation_subsets.clone(),
))?;
default_registry.register(Box::new(self.column_subnet_peers.clone()))?;
default_registry.register(Box::new(self.data_column_sidecars_submitted_for_processing.clone()))?;
default_registry.register(Box::new(self.verified_gossip_data_column_sidecar.clone()))?;
default_registry.register(Box::new(
self.data_column_sidecar_verification_times.clone(),
))?;
default_registry.register(Box::new(self.reconstructed_columns.clone()))?;
default_registry.register(Box::new(
self.columns_reconstruction_time.clone(),
))?;
default_registry.register(Box::new(
self.data_column_sidecar_computation.clone(),
))?;
default_registry.register(Box::new(
self.data_column_sidecar_inclusion_proof_verification.clone(),
))?;
default_registry.register(Box::new(
self.data_column_sidecar_kzg_verification_single.clone(),
))?;
default_registry.register(Box::new(
self.data_column_sidecar_kzg_verification_batch.clone(),
))?;
default_registry.register(Box::new(
self.beacon_custody_columns_count_total.clone(),
))?;
default_registry.register(Box::new(self.gossip_block_slot_start_delay_time.clone()))?;
default_registry.register(Box::new(self.mutator_attestations.clone()))?;
default_registry.register(Box::new(self.mutator_aggregate_and_proofs.clone()))?;
@@ -997,6 +1066,7 @@ impl Metrics {
}
}

// Custody Subnets / PeerDAS
pub fn set_column_subnet_peers(&self, subnet_id: &str, num_peers: usize) {
match self
.column_subnet_peers
@@ -1011,7 +1081,7 @@
}
}
}

// Extra Network stats
pub fn observe_block_duration_to_slot(&self, block_slot_timestamp: UnixSeconds) {
match helpers::duration_from_now_to(block_slot_timestamp) {
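
For reference on the metric renames above (for example `VERIFIED_GOSSIP_DATA_COLUMN_SIDECAR` becoming `beacon_data_column_sidecar_processing_successes_total`): the new names follow the usual Prometheus conventions of lowercase snake_case with a `_total` suffix for counters and a `_seconds` suffix for duration histograms. A standalone sketch of the construction and registration pattern used in metrics.rs, with hypothetical metric names rather than the exact ones from this PR:

use prometheus::{default_registry, histogram_opts, opts, Histogram, IntCounter, IntGaugeVec};

fn main() -> Result<(), prometheus::Error> {
    // Counter: monotonically increasing, named with a `_total` suffix.
    let reconstructed_columns = IntCounter::new(
        "example_reconstructed_columns_total",
        "Total count of reconstructed columns",
    )?;

    // Histogram: duration in seconds, named with a `_seconds` suffix.
    let reconstruction_time = Histogram::with_opts(histogram_opts!(
        "example_columns_reconstruction_seconds",
        "Time taken to reconstruct columns"
    ))?;

    // Labeled gauge: one time series per column subnet, keyed by `subnet_id`.
    let column_subnet_peers = IntGaugeVec::new(
        opts!("example_peers_per_column_subnet", "Connected peers per column subnet"),
        &["subnet_id"],
    )?;

    // Register against the default registry, mirroring the register calls in the diff.
    let registry = default_registry();
    registry.register(Box::new(reconstructed_columns.clone()))?;
    registry.register(Box::new(reconstruction_time.clone()))?;
    registry.register(Box::new(column_subnet_peers.clone()))?;

    // Typical updates: increment a counter, time a scope, set a labeled gauge.
    reconstructed_columns.inc();
    let _timer = reconstruction_time.start_timer();
    column_subnet_peers.with_label_values(&["3"]).set(8);

    Ok(())
}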