Skip to content

Commit

Permalink
Testing parallel octree
Browse files — browse the repository at this point in the history
  • Loading branch information
tbetcke committed Oct 4, 2024
1 parent baa5bb9 commit 6f01f6c
Show file tree
Hide file tree
Showing 4 changed files with 162 additions and 74 deletions.
75 changes: 64 additions & 11 deletions examples/mpi_complete_tree.rs
Original file line number Diff line number Diff line change
@@ -1,10 +1,11 @@
//! Test the computation of a global bounding box across MPI ranks.
//! Test the computation of a complete octree.
use bempp_octree::{
constants::DEEPEST_LEVEL,
octree::{complete_tree, is_complete_linear_tree, linearize, points_to_morton},
tools::generate_random_points,
morton::MortonKey,
octree::{is_complete_linear_and_balanced, KeyType, Octree},
tools::{gather_to_all, generate_random_points},
};
use itertools::Itertools;
use mpi::traits::Communicator;
use rand::prelude::*;
use rand_chacha::ChaCha8Rng;
Expand All @@ -20,21 +21,73 @@ pub fn main() {
let mut rng = ChaCha8Rng::seed_from_u64(comm.rank() as u64);

// Create `npoints` per rank.
let npoints = 10;
let npoints = 10000;

// Generate random points.

let points = generate_random_points(npoints, &mut rng, &comm);

// Compute the Morton keys on the deepest level
let (keys, _) = points_to_morton(&points, DEEPEST_LEVEL as usize, &comm);
let tree = Octree::new(&points, 15, 50, &comm);

let linear_keys = linearize(&keys, &mut rng, &comm);
// We now check that each node of the tree has all its neighbors available.

// Generate a complete tree
let distributed_complete_tree = complete_tree(&linear_keys, &comm);
let leaf_tree = tree.leaf_tree();
let all_keys = tree.all_keys();

assert!(is_complete_linear_tree(&distributed_complete_tree, &comm));
assert!(is_complete_linear_and_balanced(leaf_tree, &comm));
for &key in leaf_tree {
let mut parent = key;
while parent.level() > 0 {
// Check that the key itself is there.
assert!(all_keys.contains_key(&key));
// Check that all its neighbours are there.
for neighbor in parent.neighbours().iter().filter(|&key| key.is_valid()) {
if !all_keys.contains_key(neighbor) {
println!(
"Missing neighbor: {}. Key type {:#?}",
neighbor,
all_keys.get(&parent).unwrap()
);
}
assert!(all_keys.contains_key(neighbor));
}
parent = parent.parent();
// Check that the parent is there.
assert!(all_keys.contains_key(&parent));
}
}

// At the end check that the root of the tree is also contained.
assert!(all_keys.contains_key(&MortonKey::root()));

// Count the number of ghosts on each rank

// Count the number of global keys on each rank.

// Assert that all ghosts are from a different rank and count them.

let nghosts = all_keys

[CI annotation — GitHub Actions / Rust style checks, examples/mpi_complete_tree.rs line 69: unused variable: `nghosts`]
.iter()
.filter_map(|(_, &value)| {
if let KeyType::Ghost(rank) = value {
assert!(rank != comm.size() as usize);
Some(rank)
} else {
None
}
})
.count();

let nglobal = all_keys
.iter()
.filter(|(_, &value)| matches!(value, KeyType::Global))
.count();

// Assert that all globals across all ranks have the same count.

let nglobals = gather_to_all(std::slice::from_ref(&nglobal), &comm);

assert_eq!(nglobals.iter().unique().count(), 1);

if comm.rank() == 0 {
println!("Distributed tree is complete and linear.");
Expand Down
6 changes: 4 additions & 2 deletions src/octree.rs
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ use crate::{
};

/// Stores what type of key it is.
#[derive(PartialEq, Eq, Hash, Copy, Clone)]
#[derive(PartialEq, Eq, Hash, Copy, Clone, Debug)]
pub enum KeyType {
/// A local leaf.
LocalLeaf,
Expand Down Expand Up @@ -55,7 +55,10 @@ impl<'o, C: CommunicatorCollectives> Octree<'o, C> {
let linear_keys = linearize(&point_keys, &mut rng, comm);

// Compute the first version of the coarse tree without load balancing.
// We want to ensure that it is 2:1 balanced.
let coarse_tree = compute_coarse_tree(&linear_keys, comm);

let coarse_tree = balance(&coarse_tree, &mut rng, comm);
debug_assert!(is_complete_linear_tree(&coarse_tree, comm));

// We now compute the weights for the initial coarse tree.
Expand All @@ -66,7 +69,6 @@ impl<'o, C: CommunicatorCollectives> Octree<'o, C> {
// that is used from now on.

let coarse_tree = load_balance(&coarse_tree, &weights, comm);

// We also want to redistribute the fine keys with respect to the load balanced coarse trees.

let fine_keys =
Expand Down
151 changes: 90 additions & 61 deletions src/octree/parallel.rs
Original file line number Diff line number Diff line change
Expand Up @@ -559,6 +559,13 @@ pub fn balance<R: Rng, C: CommunicatorCollectives>(
rng: &mut R,
comm: &C,
) -> Vec<MortonKey> {
// Treat the case that the length of the keys is one and is only the root.
// This would lead to an empty output below as we only iterate up to level 1.

if linear_keys.len() == 1 && *linear_keys.first().unwrap() == MortonKey::root() {
return vec![MortonKey::root()];
}

let deepest_level = deepest_level(linear_keys, comm);

// Start with keys at deepest level
Expand Down Expand Up @@ -602,7 +609,6 @@ pub fn balance<R: Rng, C: CommunicatorCollectives>(
);

work_list = new_work_list;
// Now extend the work list with the
}

let result = linearize(&result, rng, comm);
Expand Down Expand Up @@ -653,6 +659,10 @@ pub fn redistribute_points_with_respect_to_coarse_tree<C: CommunicatorCollective
coarse_tree: &[MortonKey],
comm: &C,
) -> (Vec<Point>, Vec<MortonKey>) {
if comm.size() == 1 {
return (points.to_vec(), morton_keys_for_points.to_vec());
}

pub fn argsort<T: Ord + Copy>(arr: &[T]) -> Vec<usize> {
let mut sort_indices = (0..arr.len()).collect_vec();
sort_indices.sort_unstable_by_key(|&index| arr[index]);
Expand Down Expand Up @@ -878,28 +888,32 @@ pub fn generate_all_keys<C: CommunicatorCollectives>(
let mut all_keys = HashMap::<MortonKey, KeyType>::new();
let leaf_keys: HashSet<MortonKey> = HashSet::from_iter(leaf_tree.iter().copied());

let mut global_keys = HashSet::<MortonKey>::new();
// If size == 1 we simply create locally the keys, so don't need to treat the global keys.

// First deal with the parents of the coarse tree. These are different
// as they may exist on multiple nodes, so receive a different label.
if size > 1 {
let mut global_keys = HashSet::<MortonKey>::new();

for &key in coarse_tree {
let mut parent = key.parent();
while parent.level() > 0 && !all_keys.contains_key(&parent) {
global_keys.insert(parent);
parent = parent.parent();
// First deal with the parents of the coarse tree. These are different
// as they may exist on multiple nodes, so receive a different label.

for &key in coarse_tree {
let mut parent = key.parent();
while parent.level() > 0 && !all_keys.contains_key(&parent) {
global_keys.insert(parent);
parent = parent.parent();
}
}
}

// We now send around the parents of the coarse tree to every node. These will
// be global keys.
// We now send around the parents of the coarse tree to every node. These will
// be global keys.

let global_keys = gather_to_all(&global_keys.iter().copied().collect_vec(), comm);
let global_keys = gather_to_all(&global_keys.iter().copied().collect_vec(), comm);

// We can now insert the global keys into `all_keys` with the `Global` label.
// We can now insert the global keys into `all_keys` with the `Global` label.

for &key in &global_keys {
all_keys.entry(key).or_insert(KeyType::Global);
for &key in &global_keys {
all_keys.entry(key).or_insert(KeyType::Global);
}
}

// We now deal with the fine leafs and their ancestors.
Expand All @@ -917,58 +931,73 @@ pub fn generate_all_keys<C: CommunicatorCollectives>(
}
}

// This maps from rank to the keys that we want to send to the ranks
let mut rank_send_ghost = HashMap::<usize, Vec<KeyWithRank>>::new();
for index in 0..size - 1 {
rank_send_ghost.insert(index, Vec::<KeyWithRank>::new());
}
// Need to explicitly add the root at the end.
all_keys.entry(MortonKey::root()).or_insert(KeyType::Global);

for (&key, &status) in all_keys.iter() {
// We need not send around global keys to neighbors.
if status == KeyType::Global {
continue;
}
for &neighbor in key.neighbours().iter().filter(|&&key| key.is_valid()) {
// If the neighbour is a global key then continue.
if let Some(&value) = all_keys.get(&neighbor) {
if value == KeyType::Global {
continue;
}
}
// Get rank of the neighbour
let neighbor_rank = get_key_index(coarse_tree_bounds, neighbor);
rank_send_ghost
.entry(neighbor_rank)
.and_modify(|keys| keys.push(KeyWithRank { key, rank }));
}
}
// We only need to deal with ghosts if the size is larger than 1.

// We now know which key needs to be sent to which rank.
// Turn to array, get the counts and send around.
if size > 1 {
// This maps from rank to the keys that we want to send to the ranks

let (arr, counts) = {
let mut arr = Vec::<KeyWithRank>::new();
let mut counts = Vec::<i32>::new();
for index in 0..size - 1 {
let keys = rank_send_ghost.get(&index).unwrap();
arr.extend(keys.iter());
counts.push(keys.len() as i32);
let mut rank_send_ghost = HashMap::<usize, Vec<KeyWithRank>>::new();
for index in 0..size {
rank_send_ghost.insert(index, Vec::<KeyWithRank>::new());
}
(arr, counts)
};

// These are all the keys that are neighbors to our keys. We now go through
// and store those that do not live on our tree as into `all_keys` with a label
// of `Ghost`.
let ghost_keys = redistribute(&arr, &counts, comm);
let mut send_to_all = Vec::<KeyWithRank>::new();

for key in &ghost_keys {
if key.rank == rank {
// Don't need to add the keys that are already on the rank.
continue;
for (&key, &status) in all_keys.iter() {
// We need not send around global keys to neighbors.
if status == KeyType::Global {
continue;
}
for &neighbor in key.neighbours().iter().filter(|&&key| key.is_valid()) {
// If the neighbour is a global key then continue.
if all_keys
.get(&neighbor)
.is_some_and(|&value| value == KeyType::Global)
{
// Global keys exist on all nodes, so need to send their neighbors to all nodes.
send_to_all.push(KeyWithRank { key, rank });
} else {
// Get rank of the neighbour
let neighbor_rank = get_key_index(coarse_tree_bounds, neighbor);
rank_send_ghost
.entry(neighbor_rank)
.and_modify(|keys| keys.push(KeyWithRank { key, rank }));
}
}
}

let send_ghost_to_all = gather_to_all(&send_to_all, comm);
// We now know which key needs to be sent to which rank.
// Turn to array, get the counts and send around.

let (arr, counts) = {
let mut arr = Vec::<KeyWithRank>::new();
let mut counts = Vec::<i32>::new();
for index in 0..size {
let keys = rank_send_ghost.get(&index).unwrap();
arr.extend(keys.iter());
counts.push(keys.len() as i32);
}
(arr, counts)
};

// These are all the keys that are neighbors to our keys. We now go through
// and store those that do not live on our tree as into `all_keys` with a label
// of `Ghost`.
let mut ghost_keys = redistribute(&arr, &counts, comm);
// Add the neighbors of any global key.
ghost_keys.extend(send_ghost_to_all.iter());

for key in &ghost_keys {
if key.rank == rank {
// Don't need to add the keys that are already on the rank.
continue;
}
all_keys.insert(key.key, KeyType::Ghost(key.rank));
}
debug_assert!(!all_keys.contains_key(&key.key));
all_keys.insert(key.key, KeyType::Ghost(key.rank));
}

all_keys
Expand Down
4 changes: 4 additions & 0 deletions src/tools.rs
Original file line number Diff line number Diff line change
Expand Up @@ -168,6 +168,10 @@ pub fn communicate_back<T: Equivalence, C: CommunicatorCollectives>(
let rank = comm.rank();
let size = comm.size();

if size == 1 {
return None;
}

if rank == size - 1 {
comm.process_at_rank(rank - 1).send(arr.first().unwrap());
None
Expand Down

0 comments on commit 6f01f6c

Please sign in to comment.