Skip to content

Commit

Permalink
clippy
Browse files Browse the repository at this point in the history
  • Loading branch information
stringhandler committed Dec 9, 2024
1 parent 4c37810 commit 3b7aa71
Show file tree
Hide file tree
Showing 4 changed files with 4 additions and 93 deletions.
1 change: 1 addition & 0 deletions src/server/grpc/p2pool.rs
Original file line number Diff line number Diff line change
Expand Up @@ -86,6 +86,7 @@ where S: ShareChain
impl<S> ShaP2PoolGrpc<S>
where S: ShareChain
{
#[allow(clippy::too_many_arguments)]
pub async fn new(
local_peer_id: PeerId,
base_node_address: String,
Expand Down
1 change: 0 additions & 1 deletion src/server/p2p/network.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1319,7 +1319,6 @@ where S: ShareChain
}
} else {
warn!(target: SYNC_REQUEST_LOG_TARGET, squad = &self.config.squad; "Received a response for a request that we didn't send: {peer:?} -> {response:?}");
return;
}
},
},
Expand Down
6 changes: 3 additions & 3 deletions src/server/p2p/peer_store.rs
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ use std::{
use anyhow::Error;
use libp2p::PeerId;
use log::warn;
use tari_core::proof_of_work::{AccumulatedDifficulty, PowAlgorithm};
use tari_core::proof_of_work::PowAlgorithm;
use tari_utilities::epoch_time::EpochTime;

use crate::server::{http::stats_collector::StatsBroadcastClient, p2p::messages::PeerInfo, PROTOCOL_VERSION};
Expand Down Expand Up @@ -138,15 +138,15 @@ impl PeerStore {
if achieved_pow > max_pow {
max_pow = achieved_pow;
max_height = record.peer_info.current_random_x_height;
peer_with_highest = Some(record.peer_id.clone());
peer_with_highest = Some(record.peer_id);
}
},
PowAlgorithm::Sha3x => {
let achieved_pow = record.peer_info.current_sha3x_pow;
if achieved_pow > max_pow {
max_pow = achieved_pow;
max_height = record.peer_info.current_sha3x_height;
peer_with_highest = Some(record.peer_id.clone());
peer_with_highest = Some(record.peer_id);
}
},
}
Expand Down
89 changes: 0 additions & 89 deletions src/sharechain/p2chain.rs
Original file line number Diff line number Diff line change
Expand Up @@ -46,8 +46,6 @@ const LOG_TARGET: &str = "tari::p2pool::sharechain::chain";
pub const SAFETY_MARGIN: u64 = 20;
// this is the max extra length the chain can grow in front of our tip
pub const MAX_EXTRA_SYNC: u64 = 2000;
// this is the max blocks we store that are more than MAX_EXTRA_SYNC in front of our tip
pub const MAX_SYNC_STORE: usize = 200;
// this is the max missing parents we allow to process before we stop processing a chain and wait for more parents
pub const MAX_MISSING_PARENTS: usize = 100;

Expand Down Expand Up @@ -132,8 +130,6 @@ pub struct P2Chain {
share_window: u64,
current_tip: u64,
pub lwma: LinearWeightedMovingAverage,
sync_store: HashMap<FixedHash, Arc<P2Block>>,
sync_store_fifo_list: VecDeque<FixedHash>,
}

impl P2Chain {
Expand Down Expand Up @@ -194,8 +190,6 @@ impl P2Chain {
share_window,
current_tip: 0,
lwma,
sync_store: HashMap::new(),
sync_store_fifo_list: VecDeque::new(),
}
}

Expand Down Expand Up @@ -654,89 +648,6 @@ impl P2Chain {
}

pub fn add_block_to_chain(&mut self, block: Arc<P2Block>) -> Result<ChainAddResult, ShareChainError> {
let new_block_height = block.height;
let block_hash = block.hash;

// lets check where this is, do we need to store it in the sync store
// let first_index = self.lowest_level_height().unwrap_or(0);
// if new_block_height >= first_index + self.total_size + SAFETY_MARGIN + MAX_EXTRA_SYNC {
// if self.sync_store.len() > MAX_SYNC_STORE {
// // lets remove the oldest block
// if let Some(hash) = self.sync_store_fifo_list.pop_back() {
// self.sync_store.remove(&hash);
// }
// }
// self.sync_store.insert(block_hash, block.clone());
// self.sync_store_fifo_list.push_front(block_hash);

// // lets see how long a chain we can build with this block
// let mut current_block_hash = block.prev_hash;
// let mut blocks_to_add = vec![block.hash];

// while let Some(parent) = self.sync_store.get(&current_block_hash) {
// blocks_to_add.push(current_block_hash);
// current_block_hash = parent.prev_hash;
// }
// // lets go forward
// current_block_hash = block.hash;
// 'outer_loop: loop {
// for orphan_block in &self.sync_store {
// if orphan_block.1.prev_hash == current_block_hash {
// blocks_to_add.push(current_block_hash);
// current_block_hash = orphan_block.1.hash;
// continue 'outer_loop;
// }
// }
// break 'outer_loop;
// }

// let mut new_tip = ChainAddResult::default();
// if blocks_to_add.len() > 150 {
// // we have a potential long chain, lets see if we can do anything with it.
// for block in &blocks_to_add {
// let p2_block = self
// .sync_store
// .get(block)
// .ok_or(ShareChainError::BlockNotFound)?
// .clone();
// match self.add_block_inner(p2_block) {
// Err(e) => return Err(e),
// Ok(tip) => {
// new_tip.combine(tip);
// },
// }
// }
// }

// let mut is_parent_in_main_chain = false;
// if let Some(parent_block) = self.get_block_at_height(new_block_height.saturating_sub(1),
// &block.prev_hash) { is_parent_in_main_chain =
// self.level_at_height(parent_block.height).unwrap().chain_block == block.prev_hash;
// } else {
// new_tip
// .missing_blocks
// .insert(block.prev_hash, new_block_height.saturating_sub(1));
// }
// // now lets check the uncles
// for uncle in &block.uncles {
// if self.get_block_at_height(uncle.0, &uncle.1).is_some() {
// if let Some(level) = self.level_at_height(uncle.0) {
// if level.chain_block == uncle.1 && is_parent_in_main_chain {
// // Uncle in main chain is ok if this block is not on the main chain
// return Err(ShareChainError::UncleInMainChain {
// height: uncle.0,
// hash: uncle.1,
// });
// }
// }
// } else {
// new_tip.missing_blocks.insert(uncle.1, uncle.0);
// }
// }

// return Ok(new_tip);
// }

// Uncle cannot be the same as prev_hash
if block.uncles.iter().any(|(_, hash)| hash == &block.prev_hash) {
return Err(ShareChainError::InvalidBlock {
Expand Down

0 comments on commit 3b7aa71

Please sign in to comment.