Skip to content

Commit

Permalink
Merge branch 'development' of github.com:tari-project/sha-p2pool into…
Browse files Browse the repository at this point in the history
… development
  • Loading branch information
stringhandler committed Oct 21, 2024
2 parents 3c666ac + bb3e9d5 commit dddf35b
Show file tree
Hide file tree
Showing 8 changed files with 160 additions and 33 deletions.
3 changes: 3 additions & 0 deletions src/cli/args.rs
Original file line number Diff line number Diff line change
Expand Up @@ -113,6 +113,9 @@ pub struct StartArgs {

#[arg(long)]
pub peer_publish_interval: Option<u64>,

#[arg(long)]
pub debug_print_chain: bool,
}

#[derive(Clone, Parser, Debug)]
Expand Down
1 change: 1 addition & 0 deletions src/cli/commands/util.rs
Original file line number Diff line number Diff line change
Expand Up @@ -96,6 +96,7 @@ pub async fn server(
.unwrap_or_else(|| "tari-p2pool".to_string()),
);
config_builder.with_peer_publish_interval(args.peer_publish_interval);
config_builder.with_debug_print_chain(args.debug_print_chain);
if let Some(stats_server_port) = args.stats_server_port {
config_builder.with_stats_server_port(stats_server_port);
}
Expand Down
5 changes: 5 additions & 0 deletions src/server/config.rs
Original file line number Diff line number Diff line change
Expand Up @@ -155,6 +155,11 @@ impl ConfigBuilder {
self
}

/// Enables or disables periodic debug printing of the share chain graph
/// for the P2P service. Returns `&mut Self` for builder-style chaining.
pub fn with_debug_print_chain(&mut self, enabled: bool) -> &mut Self {
    self.config.p2p_service.debug_print_chain = enabled;
    self
}

pub fn build(&self) -> Config {
self.config.clone()
}
Expand Down
80 changes: 79 additions & 1 deletion src/server/p2p/network.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,12 +4,14 @@
use std::{
collections::HashMap,
fmt::Display,
fs,
hash::Hash,
io::Write,
num::NonZeroU32,
ops::ControlFlow,
path::PathBuf,
sync::Arc,
time::{Duration, Instant},
time::{Duration, Instant, SystemTime, UNIX_EPOCH},
};

use convert_case::{Case, Casing};
Expand Down Expand Up @@ -152,6 +154,7 @@ pub struct Config {
pub user_agent: String,
pub grey_list_clear_interval: Duration,
pub is_seed_peer: bool,
pub debug_print_chain: bool,
}

impl Default for Config {
Expand All @@ -169,6 +172,7 @@ impl Default for Config {
user_agent: "tari-p2pool".to_string(),
grey_list_clear_interval: Duration::from_secs(2 * 60),
is_seed_peer: false,
debug_print_chain: false,
}
}
}
Expand Down Expand Up @@ -1233,6 +1237,14 @@ where S: ShareChain
publish_peer_info_interval.set_missed_tick_behavior(MissedTickBehavior::Skip);

let mut grey_list_clear_interval = tokio::time::interval(self.config.grey_list_clear_interval);

let mut debug_chain_graph = if self.config.debug_print_chain {
tokio::time::interval(Duration::from_secs(30))
} else {
// only once a day, but even then will be skipped
tokio::time::interval(Duration::from_secs(60 * 60 * 24))
};
debug_chain_graph.set_missed_tick_behavior(MissedTickBehavior::Skip);
// TODO: Not sure why this is done on a loop instead of just once....
// let mut kademlia_bootstrap_interval = tokio::time::interval(Duration::from_secs(12 * 60 * 60));
// kademlia_bootstrap_interval.set_missed_tick_behavior(MissedTickBehavior::Skip);
Expand Down Expand Up @@ -1324,10 +1336,76 @@ where S: ShareChain
_ = grey_list_clear_interval.tick() => {
self.network_peer_store.clear_grey_list();
},
_ = debug_chain_graph.tick() => {
if self.config.debug_print_chain {
self.print_debug_chain_graph().await;
}
},
}
}
}

async fn print_debug_chain_graph(&self) {
self.print_debug_chain_graph_inner(&self.share_chain_random_x, "randomx")
.await;
self.print_debug_chain_graph_inner(&self.share_chain_sha3x, "sha3x")
.await;
}

/// Debug-only helper: dumps the given share chain as a Graphviz `digraph`
/// into a timestamped file named `{prefix}_blocks_{unix_secs}.txt`.
///
/// Each block becomes a node labelled "height - short_hash"; a solid edge
/// points to its parent and dotted edges point to (at most 3 of) its uncles,
/// with a summary node for any remaining uncles.
///
/// `unwrap`/`expect` are acceptable here: this runs only when the
/// `debug_print_chain` flag is set, never in normal operation.
async fn print_debug_chain_graph_inner(&self, chain: &S, prefix: &str) {
    let time = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_secs();

    let mut file = fs::OpenOptions::new()
        .create(true)
        .write(true)
        .append(true)
        .open(format!("{}_blocks_{}.txt", prefix, time))
        .unwrap();

    // NOTE: use `write_all`, not `write` — `write` may write fewer bytes
    // than supplied and its return value was previously ignored
    // (clippy::unused_io_amount), which could silently truncate the graph.
    file.write_all(b"@startuml\n").unwrap();
    file.write_all(b"digraph B {\n").unwrap();
    let blocks = chain.all_blocks().await.expect("errored");
    for b in blocks {
        // Hoist the hex encoding: `to_hex()` allocates a fresh String and
        // was previously recomputed up to four times per block.
        let hash_hex = b.hash.to_hex();
        let short = &hash_hex[0..8];
        file.write_all(format!("B{} [label=\"{} - {}\"]\n", short, &b.height, short).as_bytes())
            .unwrap();
        file.write_all(format!("B{} -> B{}\n", short, &b.prev_hash.to_hex()[0..8]).as_bytes())
            .unwrap();
        // Render at most 3 uncles per block to keep the graph readable.
        for u in b.uncles.iter().take(3) {
            file.write_all(format!("B{} -> B{} [style=dotted]\n", short, &u.1.to_hex()[0..8]).as_bytes())
                .unwrap();
        }
        if b.uncles.len() > 3 {
            file.write_all(
                format!(
                    "B{} -> B{}others [style=dotted, label=\"{} others\"]\n",
                    short,
                    short,
                    b.uncles.len() - 3
                )
                .as_bytes(),
            )
            .unwrap();
        }
    }

    file.write_all(b"}\n").unwrap();
}

async fn parse_seed_peers(&mut self) -> Result<HashMap<PeerId, Multiaddr>, Error> {
let mut seed_peers_result = HashMap::new();

Expand Down
2 changes: 2 additions & 0 deletions src/sharechain/error.rs
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,8 @@ pub enum Error {
BlockParentDoesNotExist { missing_parents: Vec<(u64, FixedHash)> },
#[error("Missing block validation params!")]
MissingBlockValidationParams,
#[error("Block is not an uncle of the main chain. Height: {height}, Hash: {hash}")]
UncleInMainChain { height: u64, hash: FixedHash },
}

#[derive(Error, Debug)]
Expand Down
40 changes: 29 additions & 11 deletions src/sharechain/in_memory.rs
Original file line number Diff line number Diff line change
Expand Up @@ -154,7 +154,7 @@ impl InMemoryShareChain {
}

// Check if already added.
if let Some(level) = p2_chain.get_at_height(new_block_p2pool_height) {
if let Some(level) = p2_chain.level_at_height(new_block_p2pool_height) {
if level.blocks.contains_key(&block.hash) {
info!(target: LOG_TARGET, "[{:?}] ✅ Block already added: {:?}", self.pow_algo, block.height);
return Ok(());
Expand Down Expand Up @@ -226,7 +226,7 @@ impl InMemoryShareChain {
);
for uncle in cur_block.uncles.iter() {
let uncle_block = p2_chain
.get_at_height(uncle.0)
.level_at_height(uncle.0)
.ok_or_else(|| Error::UncleBlockNotFound)?
.blocks
.get(&uncle.1)
Expand All @@ -249,7 +249,7 @@ impl InMemoryShareChain {
);
for uncle in cur_block.uncles.iter() {
let uncle_block = p2_chain
.get_at_height(uncle.0)
.level_at_height(uncle.0)
.ok_or_else(|| Error::UncleBlockNotFound)?
.blocks
.get(&uncle.1)
Expand Down Expand Up @@ -368,7 +368,7 @@ impl ShareChain for InMemoryShareChain {
);
for uncle in new_tip_block.uncles.iter() {
let uncle_block = chain_read_lock
.get_at_height(uncle.0)
.level_at_height(uncle.0)
.ok_or_else(|| Error::UncleBlockNotFound)?
.blocks
.get(&uncle.1)
Expand Down Expand Up @@ -430,19 +430,25 @@ impl ShareChain for InMemoryShareChain {
let mut excluded_uncles = vec![];
let mut uncles = vec![];
for height in new_height.saturating_sub(3)..new_height {
let older_level = chain_read_lock.get_at_height(height).ok_or(Error::BlockLevelNotFound)?;
excluded_uncles.push(older_level.chain_block.clone());
let older_level = chain_read_lock
.level_at_height(height)
.ok_or(Error::BlockLevelNotFound)?;
let chain_block = older_level.block_in_main_chain().ok_or(Error::BlockNotFound)?;
// Blocks in the main chain can't be uncles
excluded_uncles.push(chain_block.hash);
for uncle in chain_block.uncles.iter() {
excluded_uncles.push(uncle.1);
}
for block in older_level.blocks.iter() {
if !excluded_uncles.contains(&block.0) {
uncles.push((height, block.0.clone()));
}
uncles.push((height, block.0.clone()));
}
}

// Remove excluded.
for excluded in excluded_uncles.iter() {
uncles.retain(|uncle| &uncle.1 != excluded);
}

Ok(P2Block::builder()
.with_timestamp(EpochTime::now())
.with_prev_hash(last_block_hash)
Expand All @@ -458,7 +464,7 @@ impl ShareChain for InMemoryShareChain {
let mut blocks = Vec::new();

for block in requested_blocks {
if let Some(level) = p2_chain_read_lock.get_at_height(block.0) {
if let Some(level) = p2_chain_read_lock.level_at_height(block.0) {
if let Some(block) = level.blocks.get(&block.1) {
blocks.push(block.clone());
} else {
Expand Down Expand Up @@ -562,6 +568,18 @@ impl ShareChain for InMemoryShareChain {
let difficulty = chain_read_lock.lwma.get_difficulty().unwrap_or(Difficulty::min());
cmp::max(min, cmp::min(max, difficulty))
}

// For debugging only
// For debugging only: collects every block stored on every level of the
// in-memory chain (not just the main-chain blocks) into a single Vec.
async fn all_blocks(&self) -> Result<Vec<Arc<P2Block>>, Error> {
    let chain_read_lock = self.p2_chain.read().await;
    let mut collected = Vec::new();
    for level in &chain_read_lock.levels {
        collected.extend(level.blocks.values().cloned());
    }
    Ok(collected)
}
}

#[cfg(test)]
Expand Down Expand Up @@ -703,7 +721,7 @@ pub mod test {
.p2_chain
.read()
.await
.get_at_height(i as u64 - 2)
.level_at_height(i as u64 - 2)
.unwrap()
.chain_block;
// lets create an uncle block
Expand Down
2 changes: 2 additions & 0 deletions src/sharechain/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -131,4 +131,6 @@ pub(crate) trait ShareChain: Send + Sync + 'static {
async fn miners_with_shares(&self, squad: Squad) -> Result<HashMap<String, (u64, Vec<u8>)>, Error>;

async fn get_target_difficulty(&self, height: u64) -> Difficulty;

async fn all_blocks(&self) -> Result<Vec<Arc<P2Block>>, Error>;
}
Loading

0 comments on commit dddf35b

Please sign in to comment.