From 98892bacc9df86405c06967373d0fb1577022a55 Mon Sep 17 00:00:00 2001 From: Elliott Pierce Date: Tue, 4 Feb 2025 23:19:12 -0500 Subject: [PATCH 01/45] basic structure --- crates/bevy_utils/src/attomic_staging.rs | 208 +++++++++++++++++++++++ crates/bevy_utils/src/lib.rs | 3 + 2 files changed, 211 insertions(+) create mode 100644 crates/bevy_utils/src/attomic_staging.rs diff --git a/crates/bevy_utils/src/attomic_staging.rs b/crates/bevy_utils/src/attomic_staging.rs new file mode 100644 index 0000000000000..41def0a9ea132 --- /dev/null +++ b/crates/bevy_utils/src/attomic_staging.rs @@ -0,0 +1,208 @@ +//! Provides an abstracted system for staging modifications attomically. + +use bevy_platform_support::sync::{RwLock, RwLockReadGuard, RwLockWriteGuard}; + +/// Signifies that this type represents staged changes to [`Cold`](Self::Cold). +pub trait StagedChanges { + /// The more compact data structure that these changes compact into. + type Cold; + + /// This applies these changes to the passed [`Cold`](Self::Cold). When this is done, there should be no more staged changes, and [`any_staged`](Self::any_staged) should be false. + fn apply_staged(&mut self, storage: &mut Self::Cold); + + /// Returns true if and only if there are staged changes that could be applied. + fn any_staged(&self) -> bool; +} + +/// A struct that allows staging changes while reading from cold storage. +pub struct Stager<'a, T: StagedChanges> { + /// The storage that is read optimized. + pub cold: &'a T::Cold, + /// The staged changes. + pub staged: &'a mut T, +} + +/// A struct that allows accessing changes while reading from cold storage. +pub struct StagedRef<'a, T: StagedChanges> { + /// The storage that is read optimized. + pub cold: &'a T::Cold, + /// The staged changes. 
+ pub staged: &'a T, +} + +/// A locked version of [`Stager`] +pub struct StagerLocked<'a, T: StagedChanges> { + cold: RwLockReadGuard<'a, T::Cold>, + staged: RwLockWriteGuard<'a, T>, +} + +/// A locked version of [`StagedRef`] +pub struct StagedRefLocked<'a, T: StagedChanges> { + cold: RwLockReadGuard<'a, T::Cold>, + staged: RwLockReadGuard<'a, T>, +} + +/// A general purpose enum for representing data that may or may not need to be staged. +pub enum MaybeStaged { + /// There is staging necessary. + Staged(S), + /// There is no staging necessary. + Cold(C), +} + +/// A struct that allows read-optimized operations while still allowing mutation. +#[derive(Default)] +pub struct StageOnWrite { + /// Cold data is read optimized. This lives behind a [`RwLock`], but it is only written to for applying changes in + /// a non-blocking way. In other worlds this locks, but almost never blocks. (It can technically block if a thread + /// tries to read from it while it is having changes applied, but that is extremely rare.) + cold: RwLock, + /// Staged data stores recent modifications to cold. It's [`RwLock`] coordinates mutations. + staged: RwLock, +} + +impl StageOnWrite { + /// Creates a new [`StageOnWrite`] + #[inline] + pub fn new(current: T::Cold) -> Self { + Self { + cold: RwLock::new(current), + staged: RwLock::default(), + } + } +} + +impl StageOnWrite { + /// Gets the inner cold data if it is safe. If [`any_staged`](Self::any_staged) is known to be false, this can be safely unwrapped. + #[inline] + pub fn full(&mut self) -> Option<&mut T::Cold> { + if self.staged.get_mut().unwrap().any_staged() { + None + } else { + Some(self.cold.get_mut().unwrap()) + } + } + + /// Gets the inner cold data if it is safe. If [`any_staged`](Self::any_staged) is known to be false, this can be safely unwrapped. 
+ #[inline] + pub fn full_locked(&self) -> Option<&RwLock> { + if self.any_staged() { + None + } else { + Some(&self.cold) + } + } + + /// Returns true if and only if there are staged changes that could be applied. + #[inline] + pub fn any_staged(&self) -> bool { + self.staged.read().unwrap().any_staged() + } + + /// Applies any staged changes before returning the full value with all changes applied. Immediately after this, [`any_staged`](Self::any_staged) will be false. + #[inline] + pub fn apply_staged_for_full(&mut self) -> &mut T::Cold { + let staged = self.staged.get_mut().unwrap(); + let cold = self.cold.get_mut().unwrap(); + if staged.any_staged() { + staged.apply_staged(cold); + } + cold + } + + /// A version of [`apply_staged_for_full`](Self::apply_staged_for_full) that locks (and may block). + /// Returns `None` if no changes needed to be made, and the stage could be skipped. + #[inline] + pub fn apply_staged_lock(&self) -> Option> { + let mut staged = self.staged.write().unwrap(); + if staged.any_staged() { + let mut cold = self.cold.write().unwrap(); + staged.apply_staged(&mut cold); + Some(cold) + } else { + None + } + } + + /// A version of [`apply_staged_for_full`](Self::apply_staged_for_full) that locks and never blocks. + /// If a read on another thread is immediately hit, that may block, but this will not. Returns `None` + /// if either their were no changes to be made and the stage could be skipped, or if the operation would block. + #[inline] + pub fn apply_staged_non_blocking(&self) -> Option> { + let mut staged = self.staged.write().unwrap(); + if staged.any_staged() { + let mut cold = self.cold.try_write().ok()?; + staged.apply_staged(&mut cold); + Some(cold) + } else { + None + } + } + + /// Constructs a [`Stager`] that will stage changes. 
+ #[inline] + pub fn stage(&mut self) -> Stager<'_, T> { + Stager { + cold: self.cold.get_mut().unwrap(), + staged: self.staged.get_mut().unwrap(), + } + } + + /// Constructs a [`StagerLocked`], locking internally. + #[inline] + pub fn stage_lock(&self) -> StagerLocked<'_, T> { + StagerLocked { + cold: self.cold.read().unwrap(), + staged: self.staged.write().unwrap(), + } + } + + /// Constructs a [`Stager`] that will stage changes. + #[inline] + pub fn read(&mut self) -> StagedRef<'_, T> { + StagedRef { + cold: self.cold.get_mut().unwrap(), + staged: self.staged.get_mut().unwrap(), + } + } + + /// Constructs a [`StagerLocked`], locking internally. + #[inline] + pub fn read_lock(&self) -> StagedRefLocked<'_, T> { + StagedRefLocked { + cold: self.cold.read().unwrap(), + staged: self.staged.read().unwrap(), + } + } +} + +impl StagerLocked<'_, T> { + /// Allows a user to view this as a [`Stager`]. + #[inline] + pub fn as_stager(&mut self) -> Stager<'_, T> { + Stager { + cold: &self.cold, + staged: &mut self.staged, + } + } + + /// Allows a user to view this as a [`StagedRef`]. + #[inline] + pub fn as_staged_ref(&self) -> StagedRef<'_, T> { + StagedRef { + cold: &self.cold, + staged: &self.staged, + } + } +} + +impl StagedRefLocked<'_, T> { + /// Allows a user to view this as a [`StagedRef`]. 
+ #[inline] + pub fn as_staged_ref(&self) -> StagedRef<'_, T> { + StagedRef { + cold: &self.cold, + staged: &self.staged, + } + } +} diff --git a/crates/bevy_utils/src/lib.rs b/crates/bevy_utils/src/lib.rs index 4b7d91a40eee2..4796f25a73500 100644 --- a/crates/bevy_utils/src/lib.rs +++ b/crates/bevy_utils/src/lib.rs @@ -22,6 +22,7 @@ pub mod prelude { pub use crate::default; } +pub mod attomic_staging; pub mod synccell; pub mod syncunsafecell; @@ -187,3 +188,5 @@ mod tests { ); } } + +impl attomic_staging::Stager<'_, T> {} From de042ac7c47a1851f6cfe0a9a46d74639a3dfff7 Mon Sep 17 00:00:00 2001 From: Elliott Pierce Date: Tue, 4 Feb 2025 23:20:08 -0500 Subject: [PATCH 02/45] removed dead test --- crates/bevy_utils/src/lib.rs | 2 -- 1 file changed, 2 deletions(-) diff --git a/crates/bevy_utils/src/lib.rs b/crates/bevy_utils/src/lib.rs index 4796f25a73500..4f07403cfca03 100644 --- a/crates/bevy_utils/src/lib.rs +++ b/crates/bevy_utils/src/lib.rs @@ -188,5 +188,3 @@ mod tests { ); } } - -impl attomic_staging::Stager<'_, T> {} From 011dd433c505347a6f54f45c003af6d32f4c8659 Mon Sep 17 00:00:00 2001 From: Elliott Pierce Date: Tue, 4 Feb 2025 23:36:45 -0500 Subject: [PATCH 03/45] added some utils --- crates/bevy_utils/src/attomic_staging.rs | 50 +++++++++++++++++++++++- 1 file changed, 48 insertions(+), 2 deletions(-) diff --git a/crates/bevy_utils/src/attomic_staging.rs b/crates/bevy_utils/src/attomic_staging.rs index 41def0a9ea132..2c2c98cbb158c 100644 --- a/crates/bevy_utils/src/attomic_staging.rs +++ b/crates/bevy_utils/src/attomic_staging.rs @@ -1,5 +1,7 @@ //! Provides an abstracted system for staging modifications attomically. +use core::ops::{Deref, DerefMut}; + use bevy_platform_support::sync::{RwLock, RwLockReadGuard, RwLockWriteGuard}; /// Signifies that this type represents staged changes to [`Cold`](Self::Cold). 
@@ -44,10 +46,19 @@ pub struct StagedRefLocked<'a, T: StagedChanges> { /// A general purpose enum for representing data that may or may not need to be staged. pub enum MaybeStaged { - /// There is staging necessary. - Staged(S), /// There is no staging necessary. Cold(C), + /// There is staging necessary. + Staged(S), +} + +/// A general purpose enum for representing data that may or may not need to be locked. +/// This is very useful when synchronizing staging. +pub enum MaybeLocked { + /// There is a lock held + Locked(L), + /// There is no lock held + Free(F), } /// A struct that allows read-optimized operations while still allowing mutation. @@ -174,6 +185,21 @@ impl StageOnWrite { staged: self.staged.read().unwrap(), } } + + /// Runs different logic depending on if additional changes are already staged. This can be faster than greedily applying staged changes if there are already staged changes. + pub fn maybe_stage( + &mut self, + for_full: impl FnOnce(&mut T::Cold) -> C, + for_staged: impl FnOnce(Stager) -> S, + ) -> MaybeStaged { + let staged = self.staged.get_mut().unwrap(); + let cold = self.cold.get_mut().unwrap(); + if staged.any_staged() { + MaybeStaged::Staged(for_staged(Stager { cold, staged })) + } else { + MaybeStaged::Cold(for_full(cold)) + } + } } impl StagerLocked<'_, T> { @@ -206,3 +232,23 @@ impl StagedRefLocked<'_, T> { } } } + +impl, F> Deref for MaybeLocked { + type Target = F; + + fn deref(&self) -> &Self::Target { + match self { + MaybeLocked::Locked(v) => v, + MaybeLocked::Free(v) => v, + } + } +} + +impl + DerefMut, F> DerefMut for MaybeLocked { + fn deref_mut(&mut self) -> &mut Self::Target { + match self { + MaybeLocked::Locked(v) => v, + MaybeLocked::Free(v) => v, + } + } +} From b15153809212dbe15d6e6345f92258a2290da0ca Mon Sep 17 00:00:00 2001 From: Elliott Pierce Date: Tue, 4 Feb 2025 23:45:28 -0500 Subject: [PATCH 04/45] scoping functions --- crates/bevy_utils/src/attomic_staging.rs | 20 ++++++++++++++++++++ 1 file changed, 20 
insertions(+) diff --git a/crates/bevy_utils/src/attomic_staging.rs b/crates/bevy_utils/src/attomic_staging.rs index 2c2c98cbb158c..4bdc4d4f62848 100644 --- a/crates/bevy_utils/src/attomic_staging.rs +++ b/crates/bevy_utils/src/attomic_staging.rs @@ -168,6 +168,20 @@ impl StageOnWrite { } } + /// Easily run a stager function to stage changes easily. + #[inline] + pub fn stage_scope_locked(&self, f: impl FnOnce(&mut Stager) -> R) -> R { + f(&mut self.stage_lock().as_stager()) + } + + /// Easily run a stager function to stage changes easily. Then, try to apply those staged changes if it can do so without blocking. + #[inline] + pub fn stage_scope_locked_eager(&self, f: impl FnOnce(&mut Stager) -> R) -> R { + let v = f(&mut self.stage_lock().as_stager()); + self.apply_staged_non_blocking(); + v + } + /// Constructs a [`Stager`] that will stage changes. #[inline] pub fn read(&mut self) -> StagedRef<'_, T> { @@ -186,6 +200,12 @@ impl StageOnWrite { } } + /// Easily run a [`StagedRef`] function. + #[inline] + pub fn read_scope_locked(&self, f: impl FnOnce(&StagedRef) -> R) -> R { + f(&self.read_lock().as_staged_ref()) + } + /// Runs different logic depending on if additional changes are already staged. This can be faster than greedily applying staged changes if there are already staged changes. pub fn maybe_stage( &mut self, From 57f2500abf5161c18b224828ae8290973b4d1e8d Mon Sep 17 00:00:00 2001 From: Elliott Pierce Date: Tue, 4 Feb 2025 23:47:50 -0500 Subject: [PATCH 05/45] removed maybe locked both cold and staged are behind locks, so this is redundant --- crates/bevy_utils/src/attomic_staging.rs | 31 ------------------------ 1 file changed, 31 deletions(-) diff --git a/crates/bevy_utils/src/attomic_staging.rs b/crates/bevy_utils/src/attomic_staging.rs index 4bdc4d4f62848..7f60754e6a1ea 100644 --- a/crates/bevy_utils/src/attomic_staging.rs +++ b/crates/bevy_utils/src/attomic_staging.rs @@ -1,7 +1,5 @@ //! 
Provides an abstracted system for staging modifications attomically. -use core::ops::{Deref, DerefMut}; - use bevy_platform_support::sync::{RwLock, RwLockReadGuard, RwLockWriteGuard}; /// Signifies that this type represents staged changes to [`Cold`](Self::Cold). @@ -52,15 +50,6 @@ pub enum MaybeStaged { Staged(S), } -/// A general purpose enum for representing data that may or may not need to be locked. -/// This is very useful when synchronizing staging. -pub enum MaybeLocked { - /// There is a lock held - Locked(L), - /// There is no lock held - Free(F), -} - /// A struct that allows read-optimized operations while still allowing mutation. #[derive(Default)] pub struct StageOnWrite { @@ -252,23 +241,3 @@ impl StagedRefLocked<'_, T> { } } } - -impl, F> Deref for MaybeLocked { - type Target = F; - - fn deref(&self) -> &Self::Target { - match self { - MaybeLocked::Locked(v) => v, - MaybeLocked::Free(v) => v, - } - } -} - -impl + DerefMut, F> DerefMut for MaybeLocked { - fn deref_mut(&mut self) -> &mut Self::Target { - match self { - MaybeLocked::Locked(v) => v, - MaybeLocked::Free(v) => v, - } - } -} From 4001876d5fb85a947a0d278955b5387be609a8f5 Mon Sep 17 00:00:00 2001 From: Elliott Pierce Date: Tue, 4 Feb 2025 23:50:02 -0500 Subject: [PATCH 06/45] dedupe code --- crates/bevy_utils/src/attomic_staging.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/bevy_utils/src/attomic_staging.rs b/crates/bevy_utils/src/attomic_staging.rs index 7f60754e6a1ea..d8f8778339045 100644 --- a/crates/bevy_utils/src/attomic_staging.rs +++ b/crates/bevy_utils/src/attomic_staging.rs @@ -166,7 +166,7 @@ impl StageOnWrite { /// Easily run a stager function to stage changes easily. Then, try to apply those staged changes if it can do so without blocking. 
#[inline] pub fn stage_scope_locked_eager(&self, f: impl FnOnce(&mut Stager) -> R) -> R { - let v = f(&mut self.stage_lock().as_stager()); + let v = self.stage_scope_locked(f); self.apply_staged_non_blocking(); v } From 024b1e1dca76e180bf4fc9e0b13c166476ac86bf Mon Sep 17 00:00:00 2001 From: Elliott Pierce Date: Tue, 4 Feb 2025 23:53:54 -0500 Subject: [PATCH 07/45] locked scope --- crates/bevy_utils/src/attomic_staging.rs | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/crates/bevy_utils/src/attomic_staging.rs b/crates/bevy_utils/src/attomic_staging.rs index d8f8778339045..c4c5799fbe6b9 100644 --- a/crates/bevy_utils/src/attomic_staging.rs +++ b/crates/bevy_utils/src/attomic_staging.rs @@ -157,13 +157,13 @@ impl StageOnWrite { } } - /// Easily run a stager function to stage changes easily. + /// Easily run a stager function to stage changes. #[inline] pub fn stage_scope_locked(&self, f: impl FnOnce(&mut Stager) -> R) -> R { f(&mut self.stage_lock().as_stager()) } - /// Easily run a stager function to stage changes easily. Then, try to apply those staged changes if it can do so without blocking. + /// Easily run a stager function to stage changes. Then, try to apply those staged changes if it can do so without blocking. #[inline] pub fn stage_scope_locked_eager(&self, f: impl FnOnce(&mut Stager) -> R) -> R { let v = self.stage_scope_locked(f); @@ -171,6 +171,12 @@ impl StageOnWrite { v } + /// Easily run a stager function to stage changes and return locked data. + #[inline] + pub fn stage_locked_scope(&self, f: impl FnOnce(StagerLocked) -> R) -> R { + f(self.stage_lock()) + } + /// Constructs a [`Stager`] that will stage changes. 
#[inline] pub fn read(&mut self) -> StagedRef<'_, T> { From 4574bd25518850e8d2e77410437bd458982da077 Mon Sep 17 00:00:00 2001 From: Elliott Pierce Date: Tue, 4 Feb 2025 23:55:49 -0500 Subject: [PATCH 08/45] clone for ref --- crates/bevy_utils/src/attomic_staging.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/crates/bevy_utils/src/attomic_staging.rs b/crates/bevy_utils/src/attomic_staging.rs index c4c5799fbe6b9..270bb22af73c6 100644 --- a/crates/bevy_utils/src/attomic_staging.rs +++ b/crates/bevy_utils/src/attomic_staging.rs @@ -23,6 +23,7 @@ pub struct Stager<'a, T: StagedChanges> { } /// A struct that allows accessing changes while reading from cold storage. +#[derive(Clone, Copy)] pub struct StagedRef<'a, T: StagedChanges> { /// The storage that is read optimized. pub cold: &'a T::Cold, From 33374f35a8efa041a2d20258e316c0a9146965e2 Mon Sep 17 00:00:00 2001 From: Elliott Pierce Date: Wed, 5 Feb 2025 00:00:13 -0500 Subject: [PATCH 09/45] Arcs --- crates/bevy_utils/src/attomic_staging.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/crates/bevy_utils/src/attomic_staging.rs b/crates/bevy_utils/src/attomic_staging.rs index 270bb22af73c6..f441497ffd745 100644 --- a/crates/bevy_utils/src/attomic_staging.rs +++ b/crates/bevy_utils/src/attomic_staging.rs @@ -1,6 +1,6 @@ //! Provides an abstracted system for staging modifications attomically. -use bevy_platform_support::sync::{RwLock, RwLockReadGuard, RwLockWriteGuard}; +use bevy_platform_support::sync::{Arc, RwLock, RwLockReadGuard, RwLockWriteGuard}; /// Signifies that this type represents staged changes to [`Cold`](Self::Cold). pub trait StagedChanges { @@ -62,6 +62,9 @@ pub struct StageOnWrite { staged: RwLock, } +/// A type alias for putting [`StageOnWrite`] in an [`Arc`]. 
+pub type AttomicStageOnWrite = Arc>; + impl StageOnWrite { /// Creates a new [`StageOnWrite`] #[inline] From 068cc8dc71d47d7b46073eb63449ba0e971d6c1c Mon Sep 17 00:00:00 2001 From: Elliott Pierce Date: Wed, 5 Feb 2025 21:36:32 -0500 Subject: [PATCH 10/45] generalized stage locking --- crates/bevy_utils/src/attomic_staging.rs | 41 ++++++++++++++++++------ 1 file changed, 31 insertions(+), 10 deletions(-) diff --git a/crates/bevy_utils/src/attomic_staging.rs b/crates/bevy_utils/src/attomic_staging.rs index f441497ffd745..a9d1b70126ce4 100644 --- a/crates/bevy_utils/src/attomic_staging.rs +++ b/crates/bevy_utils/src/attomic_staging.rs @@ -1,5 +1,7 @@ //! Provides an abstracted system for staging modifications attomically. +use core::ops::Deref; + use bevy_platform_support::sync::{Arc, RwLock, RwLockReadGuard, RwLockWriteGuard}; /// Signifies that this type represents staged changes to [`Cold`](Self::Cold). @@ -14,6 +16,9 @@ pub trait StagedChanges { fn any_staged(&self) -> bool; } +/// A trait that signifies that it holds an immutable reference to a cold type (ie. [`StagedChanges::Cold`]). +pub trait ColdStorage: Deref {} + /// A struct that allows staging changes while reading from cold storage. pub struct Stager<'a, T: StagedChanges> { /// The storage that is read optimized. @@ -23,7 +28,7 @@ pub struct Stager<'a, T: StagedChanges> { } /// A struct that allows accessing changes while reading from cold storage. -#[derive(Clone, Copy)] +#[derive(Copy)] pub struct StagedRef<'a, T: StagedChanges> { /// The storage that is read optimized. 
pub cold: &'a T::Cold, @@ -32,14 +37,14 @@ pub struct StagedRef<'a, T: StagedChanges> { } /// A locked version of [`Stager`] -pub struct StagerLocked<'a, T: StagedChanges> { - cold: RwLockReadGuard<'a, T::Cold>, +pub struct StagerLocked<'a, T: StagedChanges, C: ColdStorage> { + cold: C, staged: RwLockWriteGuard<'a, T>, } /// A locked version of [`StagedRef`] -pub struct StagedRefLocked<'a, T: StagedChanges> { - cold: RwLockReadGuard<'a, T::Cold>, +pub struct StagedRefLocked<'a, T: StagedChanges, C: ColdStorage> { + cold: C, staged: RwLockReadGuard<'a, T>, } @@ -154,7 +159,7 @@ impl StageOnWrite { /// Constructs a [`StagerLocked`], locking internally. #[inline] - pub fn stage_lock(&self) -> StagerLocked<'_, T> { + pub fn stage_lock(&self) -> StagerLocked<'_, T, RwLockReadGuard<'_, T::Cold>> { StagerLocked { cold: self.cold.read().unwrap(), staged: self.staged.write().unwrap(), @@ -177,7 +182,10 @@ impl StageOnWrite { /// Easily run a stager function to stage changes and return locked data. #[inline] - pub fn stage_locked_scope(&self, f: impl FnOnce(StagerLocked) -> R) -> R { + pub fn stage_locked_scope( + &self, + f: impl FnOnce(StagerLocked>) -> R, + ) -> R { f(self.stage_lock()) } @@ -192,7 +200,7 @@ impl StageOnWrite { /// Constructs a [`StagerLocked`], locking internally. #[inline] - pub fn read_lock(&self) -> StagedRefLocked<'_, T> { + pub fn read_lock(&self) -> StagedRefLocked<'_, T, RwLockReadGuard<'_, T::Cold>> { StagedRefLocked { cold: self.cold.read().unwrap(), staged: self.staged.read().unwrap(), @@ -221,7 +229,7 @@ impl StageOnWrite { } } -impl StagerLocked<'_, T> { +impl> StagerLocked<'_, T, C> { /// Allows a user to view this as a [`Stager`]. #[inline] pub fn as_stager(&mut self) -> Stager<'_, T> { @@ -241,7 +249,7 @@ impl StagerLocked<'_, T> { } } -impl StagedRefLocked<'_, T> { +impl> StagedRefLocked<'_, T, C> { /// Allows a user to view this as a [`StagedRef`]. 
#[inline] pub fn as_staged_ref(&self) -> StagedRef<'_, T> { @@ -251,3 +259,16 @@ impl StagedRefLocked<'_, T> { } } } + +impl<'a, T: StagedChanges> Clone for StagedRef<'a, T> { + fn clone(&self) -> Self { + Self { + staged: self.staged, + cold: self.cold, + } + } +} + +impl ColdStorage for RwLockReadGuard<'_, T::Cold> {} + +impl ColdStorage for &'_ T::Cold {} From 1ad1c4a6d4a3458c4c85d200a19892b3401d4585 Mon Sep 17 00:00:00 2001 From: Elliott Pierce Date: Wed, 5 Feb 2025 21:40:43 -0500 Subject: [PATCH 11/45] Separate atomic operations --- crates/bevy_utils/src/attomic_staging.rs | 24 +++++++++++------------- 1 file changed, 11 insertions(+), 13 deletions(-) diff --git a/crates/bevy_utils/src/attomic_staging.rs b/crates/bevy_utils/src/attomic_staging.rs index a9d1b70126ce4..44f6fd86e027e 100644 --- a/crates/bevy_utils/src/attomic_staging.rs +++ b/crates/bevy_utils/src/attomic_staging.rs @@ -59,6 +59,14 @@ pub enum MaybeStaged { /// A struct that allows read-optimized operations while still allowing mutation. #[derive(Default)] pub struct StageOnWrite { + /// Cold data is read optimized. + cold: T::Cold, + /// Staged data stores recent modifications to cold. It's [`RwLock`] coordinates mutations. + staged: RwLock, +} + +#[derive(Default)] +struct AttomicStageOnWriteInner { /// Cold data is read optimized. This lives behind a [`RwLock`], but it is only written to for applying changes in /// a non-blocking way. In other worlds this locks, but almost never blocks. (It can technically block if a thread /// tries to read from it while it is having changes applied, but that is extremely rare.) @@ -67,19 +75,9 @@ pub struct StageOnWrite { staged: RwLock, } -/// A type alias for putting [`StageOnWrite`] in an [`Arc`]. 
-pub type AttomicStageOnWrite = Arc>; - -impl StageOnWrite { - /// Creates a new [`StageOnWrite`] - #[inline] - pub fn new(current: T::Cold) -> Self { - Self { - cold: RwLock::new(current), - staged: RwLock::default(), - } - } -} +/// A struct that allows read-optimized operations while still allowing mutation. +#[derive(Clone)] +pub struct AttomicStageOnWrite(Arc>); impl StageOnWrite { /// Gets the inner cold data if it is safe. If [`any_staged`](Self::any_staged) is known to be false, this can be safely unwrapped. From 92b5c797d17e1863c28be544179e4568366ea0fa Mon Sep 17 00:00:00 2001 From: Elliott Pierce Date: Wed, 5 Feb 2025 21:41:38 -0500 Subject: [PATCH 12/45] rename --- .../bevy_utils/src/{attomic_staging.rs => atomic_staging.rs} | 4 ++-- crates/bevy_utils/src/lib.rs | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) rename crates/bevy_utils/src/{attomic_staging.rs => atomic_staging.rs} (98%) diff --git a/crates/bevy_utils/src/attomic_staging.rs b/crates/bevy_utils/src/atomic_staging.rs similarity index 98% rename from crates/bevy_utils/src/attomic_staging.rs rename to crates/bevy_utils/src/atomic_staging.rs index 44f6fd86e027e..170031e56178c 100644 --- a/crates/bevy_utils/src/attomic_staging.rs +++ b/crates/bevy_utils/src/atomic_staging.rs @@ -66,7 +66,7 @@ pub struct StageOnWrite { } #[derive(Default)] -struct AttomicStageOnWriteInner { +struct AtomicStageOnWriteInner { /// Cold data is read optimized. This lives behind a [`RwLock`], but it is only written to for applying changes in /// a non-blocking way. In other worlds this locks, but almost never blocks. (It can technically block if a thread /// tries to read from it while it is having changes applied, but that is extremely rare.) @@ -77,7 +77,7 @@ struct AttomicStageOnWriteInner { /// A struct that allows read-optimized operations while still allowing mutation. 
#[derive(Clone)] -pub struct AttomicStageOnWrite(Arc>); +pub struct AtomicStageOnWrite(Arc>); impl StageOnWrite { /// Gets the inner cold data if it is safe. If [`any_staged`](Self::any_staged) is known to be false, this can be safely unwrapped. diff --git a/crates/bevy_utils/src/lib.rs b/crates/bevy_utils/src/lib.rs index 4f07403cfca03..4767b44fda103 100644 --- a/crates/bevy_utils/src/lib.rs +++ b/crates/bevy_utils/src/lib.rs @@ -22,7 +22,7 @@ pub mod prelude { pub use crate::default; } -pub mod attomic_staging; +pub mod atomic_staging; pub mod synccell; pub mod syncunsafecell; From b2e89de30aa8e8fbdc6d30e8a6731ad565390abc Mon Sep 17 00:00:00 2001 From: Elliott Pierce Date: Wed, 5 Feb 2025 21:55:02 -0500 Subject: [PATCH 13/45] redid with a focus on non-atomic --- crates/bevy_utils/src/atomic_staging.rs | 147 +++++++++++++++--------- 1 file changed, 92 insertions(+), 55 deletions(-) diff --git a/crates/bevy_utils/src/atomic_staging.rs b/crates/bevy_utils/src/atomic_staging.rs index 170031e56178c..d7227deff54b5 100644 --- a/crates/bevy_utils/src/atomic_staging.rs +++ b/crates/bevy_utils/src/atomic_staging.rs @@ -86,44 +86,114 @@ impl StageOnWrite { if self.staged.get_mut().unwrap().any_staged() { None } else { - Some(self.cold.get_mut().unwrap()) + Some(&mut self.cold) } } - /// Gets the inner cold data if it is safe. If [`any_staged`](Self::any_staged) is known to be false, this can be safely unwrapped. + /// Applies any staged changes before returning the full value with all changes applied. Immediately after this, [`any_staged`](Self::any_staged) will be false. #[inline] - pub fn full_locked(&self) -> Option<&RwLock> { - if self.any_staged() { - None - } else { - Some(&self.cold) + pub fn apply_staged_for_full(&mut self) -> &mut T::Cold { + let staged = self.staged.get_mut().unwrap(); + if staged.any_staged() { + staged.apply_staged(&mut self.cold); } + &mut self.cold } /// Returns true if and only if there are staged changes that could be applied. 
#[inline] - pub fn any_staged(&self) -> bool { - self.staged.read().unwrap().any_staged() + pub fn any_staged(&mut self) -> bool { + self.staged.get_mut().unwrap().any_staged() } - /// Applies any staged changes before returning the full value with all changes applied. Immediately after this, [`any_staged`](Self::any_staged) will be false. + /// Constructs a [`Stager`] that will stage changes. #[inline] - pub fn apply_staged_for_full(&mut self) -> &mut T::Cold { + pub fn stage(&mut self) -> Stager<'_, T> { + Stager { + cold: &mut self.cold, + staged: self.staged.get_mut().unwrap(), + } + } + + /// Constructs a [`StagerLocked`], locking internally. + #[inline] + pub fn stage_lock(&self) -> StagerLocked<'_, T, &T::Cold> { + StagerLocked { + cold: &self.cold, + staged: self.staged.write().unwrap(), + } + } + + /// Constructs a [`Stager`] that will stage changes. + #[inline] + pub fn read(&mut self) -> StagedRef<'_, T> { + StagedRef { + cold: &self.cold, + staged: self.staged.get_mut().unwrap(), + } + } + + /// Constructs a [`StagedRefLocked`], locking internally. + #[inline] + pub fn read_lock(&self) -> StagedRefLocked<'_, T, &T::Cold> { + StagedRefLocked { + cold: &self.cold, + staged: self.staged.read().unwrap(), + } + } + + /// Runs different logic depending on if additional changes are already staged. This can be faster than greedily applying staged changes if there are already staged changes. + pub fn maybe_stage( + &mut self, + for_full: impl FnOnce(&mut T::Cold) -> C, + for_staged: impl FnOnce(Stager) -> S, + ) -> MaybeStaged { let staged = self.staged.get_mut().unwrap(); - let cold = self.cold.get_mut().unwrap(); + let cold = &mut self.cold; if staged.any_staged() { - staged.apply_staged(cold); + MaybeStaged::Staged(for_staged(Stager { cold, staged })) + } else { + MaybeStaged::Cold(for_full(cold)) } - cold + } + + /// Easily run a stager function to stage changes. 
+ #[inline] + pub fn stage_scope_locked(&self, f: impl FnOnce(&mut Stager) -> R) -> R { + f(&mut self.stage_lock().as_stager()) + } + + /// Easily run a [`StagedRef`] function. + #[inline] + pub fn read_scope_locked(&self, f: impl FnOnce(&StagedRef) -> R) -> R { + f(&self.read_lock().as_staged_ref()) + } +} + +impl AtomicStageOnWrite { + /// Gets the inner cold data if it is safe. If [`any_staged`](Self::any_staged) is known to be false, this can be safely unwrapped. + #[inline] + pub fn full_locked(&self) -> Option<&RwLock> { + if self.any_staged() { + None + } else { + Some(&self.0.cold) + } + } + + /// Returns true if and only if there are staged changes that could be applied. + #[inline] + pub fn any_staged(&self) -> bool { + self.0.staged.read().unwrap().any_staged() } /// A version of [`apply_staged_for_full`](Self::apply_staged_for_full) that locks (and may block). /// Returns `None` if no changes needed to be made, and the stage could be skipped. #[inline] pub fn apply_staged_lock(&self) -> Option> { - let mut staged = self.staged.write().unwrap(); + let mut staged = self.0.staged.write().unwrap(); if staged.any_staged() { - let mut cold = self.cold.write().unwrap(); + let mut cold = self.0.cold.write().unwrap(); staged.apply_staged(&mut cold); Some(cold) } else { @@ -136,9 +206,9 @@ impl StageOnWrite { /// if either their were no changes to be made and the stage could be skipped, or if the operation would block. #[inline] pub fn apply_staged_non_blocking(&self) -> Option> { - let mut staged = self.staged.write().unwrap(); + let mut staged = self.0.staged.write().unwrap(); if staged.any_staged() { - let mut cold = self.cold.try_write().ok()?; + let mut cold = self.0.cold.try_write().ok()?; staged.apply_staged(&mut cold); Some(cold) } else { @@ -146,21 +216,12 @@ impl StageOnWrite { } } - /// Constructs a [`Stager`] that will stage changes. 
- #[inline] - pub fn stage(&mut self) -> Stager<'_, T> { - Stager { - cold: self.cold.get_mut().unwrap(), - staged: self.staged.get_mut().unwrap(), - } - } - /// Constructs a [`StagerLocked`], locking internally. #[inline] pub fn stage_lock(&self) -> StagerLocked<'_, T, RwLockReadGuard<'_, T::Cold>> { StagerLocked { - cold: self.cold.read().unwrap(), - staged: self.staged.write().unwrap(), + cold: self.0.cold.read().unwrap(), + staged: self.0.staged.write().unwrap(), } } @@ -187,21 +248,12 @@ impl StageOnWrite { f(self.stage_lock()) } - /// Constructs a [`Stager`] that will stage changes. - #[inline] - pub fn read(&mut self) -> StagedRef<'_, T> { - StagedRef { - cold: self.cold.get_mut().unwrap(), - staged: self.staged.get_mut().unwrap(), - } - } - /// Constructs a [`StagerLocked`], locking internally. #[inline] pub fn read_lock(&self) -> StagedRefLocked<'_, T, RwLockReadGuard<'_, T::Cold>> { StagedRefLocked { - cold: self.cold.read().unwrap(), - staged: self.staged.read().unwrap(), + cold: self.0.cold.read().unwrap(), + staged: self.0.staged.read().unwrap(), } } @@ -210,21 +262,6 @@ impl StageOnWrite { pub fn read_scope_locked(&self, f: impl FnOnce(&StagedRef) -> R) -> R { f(&self.read_lock().as_staged_ref()) } - - /// Runs different logic depending on if additional changes are already staged. This can be faster than greedily applying staged changes if there are already staged changes. 
- pub fn maybe_stage( - &mut self, - for_full: impl FnOnce(&mut T::Cold) -> C, - for_staged: impl FnOnce(Stager) -> S, - ) -> MaybeStaged { - let staged = self.staged.get_mut().unwrap(); - let cold = self.cold.get_mut().unwrap(); - if staged.any_staged() { - MaybeStaged::Staged(for_staged(Stager { cold, staged })) - } else { - MaybeStaged::Cold(for_full(cold)) - } - } } impl> StagerLocked<'_, T, C> { From dc0fccfd5932d90381c771671060ff406ba5a1e9 Mon Sep 17 00:00:00 2001 From: Elliott Pierce Date: Wed, 5 Feb 2025 22:57:26 -0500 Subject: [PATCH 14/45] implemented atomic stage on write --- crates/bevy_utils/src/atomic_staging.rs | 147 ++++++++++++++++-------- 1 file changed, 99 insertions(+), 48 deletions(-) diff --git a/crates/bevy_utils/src/atomic_staging.rs b/crates/bevy_utils/src/atomic_staging.rs index d7227deff54b5..b5210d7130f26 100644 --- a/crates/bevy_utils/src/atomic_staging.rs +++ b/crates/bevy_utils/src/atomic_staging.rs @@ -90,7 +90,8 @@ impl StageOnWrite { } } - /// Applies any staged changes before returning the full value with all changes applied. Immediately after this, [`any_staged`](Self::any_staged) will be false. + /// Applies any staged changes before returning the full value with all changes applied. + /// Immediately after this, [`any_staged`](Self::any_staged) will be false. #[inline] pub fn apply_staged_for_full(&mut self) -> &mut T::Cold { let staged = self.staged.get_mut().unwrap(); @@ -142,16 +143,17 @@ impl StageOnWrite { } } - /// Runs different logic depending on if additional changes are already staged. This can be faster than greedily applying staged changes if there are already staged changes. + /// Runs different logic depending on if additional changes are already staged. + /// This can be faster than greedily applying staged changes if there are already staged changes. 
pub fn maybe_stage( &mut self, for_full: impl FnOnce(&mut T::Cold) -> C, - for_staged: impl FnOnce(Stager) -> S, + for_staged: impl FnOnce(&mut Stager) -> S, ) -> MaybeStaged { let staged = self.staged.get_mut().unwrap(); let cold = &mut self.cold; if staged.any_staged() { - MaybeStaged::Staged(for_staged(Stager { cold, staged })) + MaybeStaged::Staged(for_staged(&mut Stager { cold, staged })) } else { MaybeStaged::Cold(for_full(cold)) } @@ -173,88 +175,137 @@ impl StageOnWrite { impl AtomicStageOnWrite { /// Gets the inner cold data if it is safe. If [`any_staged`](Self::any_staged) is known to be false, this can be safely unwrapped. #[inline] - pub fn full_locked(&self) -> Option<&RwLock> { - if self.any_staged() { + pub fn full_locked(&mut self) -> Option> { + if self.0.staged.read().unwrap().any_staged() { None } else { - Some(&self.0.cold) + Some(self.0.cold.write().unwrap()) } } - /// Returns true if and only if there are staged changes that could be applied. + /// Applies any staged changes before returning the full value with all changes applied. + /// Immediately after this, [`any_staged`](Self::any_staged) will be false. #[inline] - pub fn any_staged(&self) -> bool { - self.0.staged.read().unwrap().any_staged() + pub fn apply_staged_for_full_locked(&mut self) -> RwLockWriteGuard<'_, T::Cold> { + let mut staged = self.0.staged.write().unwrap(); + let mut cold = self.0.cold.write().unwrap(); + if staged.any_staged() { + staged.apply_staged(&mut cold); + } + cold } - /// A version of [`apply_staged_for_full`](Self::apply_staged_for_full) that locks (and may block). - /// Returns `None` if no changes needed to be made, and the stage could be skipped. + /// Gets the inner cold data if it is safe. 
#[inline] - pub fn apply_staged_lock(&self) -> Option> { - let mut staged = self.0.staged.write().unwrap(); + pub fn full_non_blocking(&mut self) -> Option> { + let staged = self.0.staged.try_read().ok()?; if staged.any_staged() { - let mut cold = self.0.cold.write().unwrap(); - staged.apply_staged(&mut cold); - Some(cold) - } else { None + } else { + self.0.cold.try_write().ok() } } - /// A version of [`apply_staged_for_full`](Self::apply_staged_for_full) that locks and never blocks. - /// If a read on another thread is immediately hit, that may block, but this will not. Returns `None` - /// if either their were no changes to be made and the stage could be skipped, or if the operation would block. + /// Applies any staged changes before returning the full value with all changes applied. #[inline] - pub fn apply_staged_non_blocking(&self) -> Option> { - let mut staged = self.0.staged.write().unwrap(); + pub fn apply_staged_for_full_non_blocking(&mut self) -> Option> { + let mut cold = self.0.cold.try_write().ok()?; + match self.0.staged.try_write() { + Ok(mut staged) => { + if staged.any_staged() { + staged.apply_staged(&mut cold); + } + Some(cold) + } + Err(_) => { + let staged = self.0.staged.read().unwrap(); + if staged.any_staged() { + None + } else { + Some(cold) + } + } + } + } + + /// If possible applies any staged changes. Returns true if it can guarantee there are no more staged changes. + #[inline] + pub fn apply_staged_non_blocking(&mut self) -> bool { + let Ok(mut staged) = self.0.staged.try_write() else { + return false; + }; if staged.any_staged() { - let mut cold = self.0.cold.try_write().ok()?; + let Ok(mut cold) = self.0.cold.try_write() else { + return false; + }; staged.apply_staged(&mut cold); - Some(cold) + true } else { - None + false } } + /// Returns true if and only if there are staged changes that could be applied. 
+ #[inline] + pub fn any_staged(&self) -> bool { + self.0.staged.read().unwrap().any_staged() + } + /// Constructs a [`StagerLocked`], locking internally. #[inline] - pub fn stage_lock(&self) -> StagerLocked<'_, T, RwLockReadGuard<'_, T::Cold>> { + pub fn stage_lock(&mut self) -> StagerLocked<'_, T, RwLockReadGuard<'_, T::Cold>> { StagerLocked { cold: self.0.cold.read().unwrap(), staged: self.0.staged.write().unwrap(), } } - /// Easily run a stager function to stage changes. + /// Constructs a [`StagedRefLocked`], locking internally. #[inline] - pub fn stage_scope_locked(&self, f: impl FnOnce(&mut Stager) -> R) -> R { - f(&mut self.stage_lock().as_stager()) + pub fn read_lock(&self) -> StagedRefLocked<'_, T, RwLockReadGuard<'_, T::Cold>> { + StagedRefLocked { + cold: self.0.cold.read().unwrap(), + staged: self.0.staged.read().unwrap(), + } } - /// Easily run a stager function to stage changes. Then, try to apply those staged changes if it can do so without blocking. - #[inline] - pub fn stage_scope_locked_eager(&self, f: impl FnOnce(&mut Stager) -> R) -> R { - let v = self.stage_scope_locked(f); - self.apply_staged_non_blocking(); - v + /// Runs different logic depending on if additional changes are already staged and if using cold would block. + /// This *can* be faster than greedily applying staged changes if there are already staged changes. 
+ pub fn maybe_stage( + &mut self, + for_full: impl FnOnce(&mut T::Cold) -> C, + for_staged: impl FnOnce(&mut Stager) -> S, + ) -> MaybeStaged { + let mut staged = self.0.staged.write().unwrap(); + if staged.any_staged() { + let cold = self.0.cold.read().unwrap(); + MaybeStaged::Staged(for_staged(&mut Stager { + cold: &cold, + staged: &mut staged, + })) + } else if let Ok(mut cold) = self.0.cold.try_write() { + MaybeStaged::Cold(for_full(&mut cold)) + } else { + let cold = self.0.cold.read().unwrap(); + MaybeStaged::Staged(for_staged(&mut Stager { + cold: &cold, + staged: &mut staged, + })) + } } - /// Easily run a stager function to stage changes and return locked data. + /// Easily run a stager function to stage changes. #[inline] - pub fn stage_locked_scope( - &self, - f: impl FnOnce(StagerLocked>) -> R, - ) -> R { - f(self.stage_lock()) + pub fn stage_scope_locked(&mut self, f: impl FnOnce(&mut Stager) -> R) -> R { + f(&mut self.stage_lock().as_stager()) } - /// Constructs a [`StagerLocked`], locking internally. + /// Easily run a stager function to stage changes. Then, tries to apply those changes if doing so wouldn't lock. #[inline] - pub fn read_lock(&self) -> StagedRefLocked<'_, T, RwLockReadGuard<'_, T::Cold>> { - StagedRefLocked { - cold: self.0.cold.read().unwrap(), - staged: self.0.staged.read().unwrap(), - } + pub fn stage_scope_locked_eager(&mut self, f: impl FnOnce(&mut Stager) -> R) -> R { + let result = self.stage_scope_locked(f); + self.apply_staged_non_blocking(); + result } /// Easily run a [`StagedRef`] function. 
From b2a08552ca38a34a2c1f420026f893bc56dce43e Mon Sep 17 00:00:00 2001 From: Elliott Pierce Date: Wed, 5 Feb 2025 23:07:31 -0500 Subject: [PATCH 15/45] docs pass --- crates/bevy_utils/src/atomic_staging.rs | 22 ++++++++++++++++++---- 1 file changed, 18 insertions(+), 4 deletions(-) diff --git a/crates/bevy_utils/src/atomic_staging.rs b/crates/bevy_utils/src/atomic_staging.rs index b5210d7130f26..f8f1f756cee36 100644 --- a/crates/bevy_utils/src/atomic_staging.rs +++ b/crates/bevy_utils/src/atomic_staging.rs @@ -56,7 +56,11 @@ pub enum MaybeStaged { Staged(S), } -/// A struct that allows read-optimized operations while still allowing mutation. +/// A struct that allows read-optimized operations while still allowing mutation. When mutattions are made, +/// they are staged. Then, at user-defined times, they are applied to the read-optimized storage. This allows muttations +/// through [`RwLock`]s without needing to constantly lock old or cold data. +/// +/// This is not designed for atomic use (ie. in an [`Arc`]). See [`AtomicStageOnWrite`] for that. #[derive(Default)] pub struct StageOnWrite { /// Cold data is read optimized. @@ -75,7 +79,12 @@ struct AtomicStageOnWriteInner { staged: RwLock, } -/// A struct that allows read-optimized operations while still allowing mutation. +/// A version of [`StageOnWrite`] designed for atomic use. See [`StageOnWrite`] for details. +/// +/// This type includes a baked in [`Arc`], so it can be shared similarly. Many of it's methods take `&mut self` even though +/// it doesn't technically need the mutation. This is done to signify that the methods involve a state change of the data and to prevent deadlocks. +/// Because everything that involves a write lock requires `&mut self`, it is impossible to deadlock because doing so would require another lock guard +/// with the same lifetime, which rust will complaine about. If you do not want this behavior, see [`AtomicStageOnWriteInner`]. 
#[derive(Clone)] pub struct AtomicStageOnWrite(Arc>); @@ -102,6 +111,7 @@ impl StageOnWrite { } /// Returns true if and only if there are staged changes that could be applied. + /// If you only have a immutable reference, consider using [`read_scope_locked`] with [`StagedChanges::any_staged`]. #[inline] pub fn any_staged(&mut self) -> bool { self.staged.get_mut().unwrap().any_staged() @@ -174,6 +184,8 @@ impl StageOnWrite { impl AtomicStageOnWrite { /// Gets the inner cold data if it is safe. If [`any_staged`](Self::any_staged) is known to be false, this can be safely unwrapped. + /// + /// Note that this **Blocks**, so generally prefer [`full_non_blocking`](Self::full_non_blocking). #[inline] pub fn full_locked(&mut self) -> Option> { if self.0.staged.read().unwrap().any_staged() { @@ -185,6 +197,8 @@ impl AtomicStageOnWrite { /// Applies any staged changes before returning the full value with all changes applied. /// Immediately after this, [`any_staged`](Self::any_staged) will be false. + /// + /// Note that this **Blocks**, so generally prefer [`apply_staged_for_full_non_blocking`](Self::apply_staged_for_full_non_blocking). #[inline] pub fn apply_staged_for_full_locked(&mut self) -> RwLockWriteGuard<'_, T::Cold> { let mut staged = self.0.staged.write().unwrap(); @@ -269,8 +283,8 @@ impl AtomicStageOnWrite { } } - /// Runs different logic depending on if additional changes are already staged and if using cold would block. - /// This *can* be faster than greedily applying staged changes if there are already staged changes. + /// Runs different logic depending on if additional changes are already staged and if using cold directly would block. + /// This *can* be faster than greedily applying staged changes if there are no staged changes and no reads from cold. 
pub fn maybe_stage( &mut self, for_full: impl FnOnce(&mut T::Cold) -> C, From b6d1a240567a6bb01901fb327359cc6fac63b8b1 Mon Sep 17 00:00:00 2001 From: Elliott Pierce Date: Wed, 5 Feb 2025 23:20:57 -0500 Subject: [PATCH 16/45] constructors --- crates/bevy_utils/src/atomic_staging.rs | 27 ++++++++++++++++++++++++- 1 file changed, 26 insertions(+), 1 deletion(-) diff --git a/crates/bevy_utils/src/atomic_staging.rs b/crates/bevy_utils/src/atomic_staging.rs index f8f1f756cee36..4b9d6c71d58dd 100644 --- a/crates/bevy_utils/src/atomic_staging.rs +++ b/crates/bevy_utils/src/atomic_staging.rs @@ -5,7 +5,7 @@ use core::ops::Deref; use bevy_platform_support::sync::{Arc, RwLock, RwLockReadGuard, RwLockWriteGuard}; /// Signifies that this type represents staged changes to [`Cold`](Self::Cold). -pub trait StagedChanges { +pub trait StagedChanges: Default { /// The more compact data structure that these changes compact into. type Cold; @@ -89,6 +89,14 @@ struct AtomicStageOnWriteInner { pub struct AtomicStageOnWrite(Arc>); impl StageOnWrite { + /// Constructs a new [`StageOnWrite`] with the given value and no staged changes. + pub fn new(value: T::Cold) -> Self { + Self { + cold: value, + staged: RwLock::default(), + } + } + /// Gets the inner cold data if it is safe. If [`any_staged`](Self::any_staged) is known to be false, this can be safely unwrapped. #[inline] pub fn full(&mut self) -> Option<&mut T::Cold> { @@ -183,6 +191,14 @@ impl StageOnWrite { } impl AtomicStageOnWrite { + /// Constructs a new [`AtomicStageOnWrite`] with the given value and no staged changes. + pub fn new(value: T::Cold) -> Self { + Self(Arc::new(AtomicStageOnWriteInner { + cold: RwLock::new(value), + staged: RwLock::default(), + })) + } + /// Gets the inner cold data if it is safe. If [`any_staged`](Self::any_staged) is known to be false, this can be safely unwrapped. /// /// Note that this **Blocks**, so generally prefer [`full_non_blocking`](Self::full_non_blocking). 
@@ -369,6 +385,15 @@ impl<'a, T: StagedChanges> Clone for StagedRef<'a, T> { } } +impl Default for AtomicStageOnWrite +where + T::Cold: Default, +{ + fn default() -> Self { + Self::new(T::Cold::default()) + } +} + impl ColdStorage for RwLockReadGuard<'_, T::Cold> {} impl ColdStorage for &'_ T::Cold {} From 1952035ee073cea43cb11d2907166fad061c8f1d Mon Sep 17 00:00:00 2001 From: Elliott Pierce Date: Wed, 5 Feb 2025 23:24:06 -0500 Subject: [PATCH 17/45] more docs --- crates/bevy_utils/src/atomic_staging.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/crates/bevy_utils/src/atomic_staging.rs b/crates/bevy_utils/src/atomic_staging.rs index 4b9d6c71d58dd..69bf7bbff22a3 100644 --- a/crates/bevy_utils/src/atomic_staging.rs +++ b/crates/bevy_utils/src/atomic_staging.rs @@ -19,7 +19,7 @@ pub trait StagedChanges: Default { /// A trait that signifies that it holds an immutable reference to a cold type (ie. [`StagedChanges::Cold`]). pub trait ColdStorage: Deref {} -/// A struct that allows staging changes while reading from cold storage. +/// A struct that allows staging changes while reading from cold storage. Generally, staging changes should be implemented on this type. pub struct Stager<'a, T: StagedChanges> { /// The storage that is read optimized. pub cold: &'a T::Cold, @@ -27,7 +27,7 @@ pub struct Stager<'a, T: StagedChanges> { pub staged: &'a mut T, } -/// A struct that allows accessing changes while reading from cold storage. +/// A struct that allows accessing changes while reading from cold storage. Generally, reading data should be implemented on this type. #[derive(Copy)] pub struct StagedRef<'a, T: StagedChanges> { /// The storage that is read optimized. @@ -36,13 +36,13 @@ pub struct StagedRef<'a, T: StagedChanges> { pub staged: &'a T, } -/// A locked version of [`Stager`] +/// A locked version of [`Stager`]. Use this to hold a lock guard while using [`StagerLocked::as_stager`] or similar. 
pub struct StagerLocked<'a, T: StagedChanges, C: ColdStorage> { cold: C, staged: RwLockWriteGuard<'a, T>, } -/// A locked version of [`StagedRef`] +/// A locked version of [`StagedRef`] Use this to hold a lock guard while using [`StagerLocked::as_staged_ref`]. pub struct StagedRefLocked<'a, T: StagedChanges, C: ColdStorage> { cold: C, staged: RwLockReadGuard<'a, T>, From a452de05aeff5891d74732dee7b8f36e3347a0d9 Mon Sep 17 00:00:00 2001 From: Elliott Pierce Date: Wed, 5 Feb 2025 23:37:07 -0500 Subject: [PATCH 18/45] basic test example --- crates/bevy_utils/src/atomic_staging.rs | 44 +++++++++++++++++++++++++ 1 file changed, 44 insertions(+) diff --git a/crates/bevy_utils/src/atomic_staging.rs b/crates/bevy_utils/src/atomic_staging.rs index 69bf7bbff22a3..65dc094c7dcee 100644 --- a/crates/bevy_utils/src/atomic_staging.rs +++ b/crates/bevy_utils/src/atomic_staging.rs @@ -397,3 +397,47 @@ where impl ColdStorage for RwLockReadGuard<'_, T::Cold> {} impl ColdStorage for &'_ T::Cold {} + +#[cfg(test)] +mod tests { + use bevy_platform_support::{collections::HashMap, prelude::Vec}; + + use super::*; + + #[derive(Default)] + struct StagedNumVec { + added: Vec, + changed: HashMap, + } + + impl StagedChanges for StagedNumVec { + type Cold = Vec; + + fn apply_staged(&mut self, storage: &mut Self::Cold) { + storage.append(&mut self.added); + for (index, new) in self.changed.drain() { + storage[index] = new; + } + } + + fn any_staged(&self) -> bool { + !self.added.is_empty() || !self.changed.is_empty() + } + } + + // This is commented out, as it intentionally does not compile. This demonstrates how `AtomicStageOnWrite` prevents deadlock using the borrow checker. 
+ // #[test] + // fn test_no_compile_for_deadlock() { + // let mut stage_on_write = AtomicStageOnWrite::::default(); + // let reading = stage_on_write.read_lock(); + // stage_on_write.apply_staged_non_blocking(); + // } + + #[test] + fn test_simple_stage() { + let mut data = StageOnWrite::::default(); + data.stage_scope_locked(|stager| stager.staged.added.push(5)); + let full = data.apply_staged_for_full(); + assert_eq!(full[0], 5); + } +} From 7bd0695f1d46fffd99259b7da8b9df90b44e165f Mon Sep 17 00:00:00 2001 From: Elliott Pierce Date: Wed, 5 Feb 2025 23:39:16 -0500 Subject: [PATCH 19/45] deadlocking warnings --- crates/bevy_utils/src/atomic_staging.rs | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/crates/bevy_utils/src/atomic_staging.rs b/crates/bevy_utils/src/atomic_staging.rs index 65dc094c7dcee..7d588e2faac63 100644 --- a/crates/bevy_utils/src/atomic_staging.rs +++ b/crates/bevy_utils/src/atomic_staging.rs @@ -135,6 +135,10 @@ impl StageOnWrite { } /// Constructs a [`StagerLocked`], locking internally. + /// + /// # Deadlocks + /// + /// This deadlocks if there are any other lock guards on this thread for this value. #[inline] pub fn stage_lock(&self) -> StagerLocked<'_, T, &T::Cold> { StagerLocked { @@ -153,6 +157,10 @@ impl StageOnWrite { } /// Constructs a [`StagedRefLocked`], locking internally. + /// + /// # Deadlocks + /// + /// This deadlocks if there are any write lock guards on this thread for this value. #[inline] pub fn read_lock(&self) -> StagedRefLocked<'_, T, &T::Cold> { StagedRefLocked { @@ -178,12 +186,20 @@ impl StageOnWrite { } /// Easily run a stager function to stage changes. + /// + /// # Deadlocks + /// + /// This deadlocks if there are any other lock guards on this thread for this value. #[inline] pub fn stage_scope_locked(&self, f: impl FnOnce(&mut Stager) -> R) -> R { f(&mut self.stage_lock().as_stager()) } /// Easily run a [`StagedRef`] function. 
+ /// + /// # Deadlocks + /// + /// This deadlocks if there are any write lock guards on this thread for this value. #[inline] pub fn read_scope_locked(&self, f: impl FnOnce(&StagedRef) -> R) -> R { f(&self.read_lock().as_staged_ref()) From 5fa5d58f21416a250106f8e30e7ff4d1f79c23ea Mon Sep 17 00:00:00 2001 From: Elliott Pierce Date: Thu, 6 Feb 2025 00:01:05 -0500 Subject: [PATCH 20/45] module docs --- crates/bevy_utils/src/atomic_staging.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/bevy_utils/src/atomic_staging.rs b/crates/bevy_utils/src/atomic_staging.rs index 7d588e2faac63..8ca0da9c24cfa 100644 --- a/crates/bevy_utils/src/atomic_staging.rs +++ b/crates/bevy_utils/src/atomic_staging.rs @@ -1,4 +1,4 @@ -//! Provides an abstracted system for staging modifications attomically. +//! Provides an abstracted system for staging modifications to data structures that rarely change. See [`StageOnWrite`] as a starting point. use core::ops::Deref; From 296ed0343bca232aaee82d6366e748a7b6943ef0 Mon Sep 17 00:00:00 2001 From: Elliott Pierce Date: Thu, 6 Feb 2025 00:01:49 -0500 Subject: [PATCH 21/45] rename to just staging This offers more than just atomic staging options now. 
--- crates/bevy_utils/src/lib.rs | 2 +- crates/bevy_utils/src/{atomic_staging.rs => staging.rs} | 0 2 files changed, 1 insertion(+), 1 deletion(-) rename crates/bevy_utils/src/{atomic_staging.rs => staging.rs} (100%) diff --git a/crates/bevy_utils/src/lib.rs b/crates/bevy_utils/src/lib.rs index 4767b44fda103..41fdc1529612e 100644 --- a/crates/bevy_utils/src/lib.rs +++ b/crates/bevy_utils/src/lib.rs @@ -22,7 +22,7 @@ pub mod prelude { pub use crate::default; } -pub mod atomic_staging; +pub mod staging; pub mod synccell; pub mod syncunsafecell; diff --git a/crates/bevy_utils/src/atomic_staging.rs b/crates/bevy_utils/src/staging.rs similarity index 100% rename from crates/bevy_utils/src/atomic_staging.rs rename to crates/bevy_utils/src/staging.rs From 026c0ea4f4c0d5dcb445c9e093ab0d5936e3b1e1 Mon Sep 17 00:00:00 2001 From: Elliott Pierce Date: Thu, 6 Feb 2025 00:09:03 -0500 Subject: [PATCH 22/45] fixed no_std hopefully --- crates/bevy_utils/src/staging.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/crates/bevy_utils/src/staging.rs b/crates/bevy_utils/src/staging.rs index 8ca0da9c24cfa..be4f00ac93427 100644 --- a/crates/bevy_utils/src/staging.rs +++ b/crates/bevy_utils/src/staging.rs @@ -2,7 +2,8 @@ use core::ops::Deref; -use bevy_platform_support::sync::{Arc, RwLock, RwLockReadGuard, RwLockWriteGuard}; +use alloc::sync::Arc; +use bevy_platform_support::sync::{RwLock, RwLockReadGuard, RwLockWriteGuard}; /// Signifies that this type represents staged changes to [`Cold`](Self::Cold). 
pub trait StagedChanges: Default { From 364165c087248fa4bf0f66fe9ae46a43f4081b55 Mon Sep 17 00:00:00 2001 From: Elliott Pierce Date: Thu, 6 Feb 2025 00:19:17 -0500 Subject: [PATCH 23/45] moved to platform_support Hopefully this also fixes no_std --- crates/bevy_platform_support/src/lib.rs | 2 ++ crates/{bevy_utils => bevy_platform_support}/src/staging.rs | 6 +++--- crates/bevy_utils/src/lib.rs | 1 - 3 files changed, 5 insertions(+), 4 deletions(-) rename crates/{bevy_utils => bevy_platform_support}/src/staging.rs (98%) diff --git a/crates/bevy_platform_support/src/lib.rs b/crates/bevy_platform_support/src/lib.rs index eada254595ab4..e24bafb36a5ef 100644 --- a/crates/bevy_platform_support/src/lib.rs +++ b/crates/bevy_platform_support/src/lib.rs @@ -21,6 +21,8 @@ pub mod time; #[cfg(feature = "alloc")] pub mod collections; +#[cfg(feature = "alloc")] +pub mod staging; /// Frequently used items which would typically be included in most contexts. /// diff --git a/crates/bevy_utils/src/staging.rs b/crates/bevy_platform_support/src/staging.rs similarity index 98% rename from crates/bevy_utils/src/staging.rs rename to crates/bevy_platform_support/src/staging.rs index be4f00ac93427..12be4594028ce 100644 --- a/crates/bevy_utils/src/staging.rs +++ b/crates/bevy_platform_support/src/staging.rs @@ -2,8 +2,8 @@ use core::ops::Deref; -use alloc::sync::Arc; -use bevy_platform_support::sync::{RwLock, RwLockReadGuard, RwLockWriteGuard}; +use crate::alloc::sync::Arc; +use crate::sync::{RwLock, RwLockReadGuard, RwLockWriteGuard}; /// Signifies that this type represents staged changes to [`Cold`](Self::Cold). 
pub trait StagedChanges: Default { @@ -417,7 +417,7 @@ impl ColdStorage for &'_ T::Cold {} #[cfg(test)] mod tests { - use bevy_platform_support::{collections::HashMap, prelude::Vec}; + use crate::{collections::HashMap, prelude::Vec}; use super::*; diff --git a/crates/bevy_utils/src/lib.rs b/crates/bevy_utils/src/lib.rs index 41fdc1529612e..4b7d91a40eee2 100644 --- a/crates/bevy_utils/src/lib.rs +++ b/crates/bevy_utils/src/lib.rs @@ -22,7 +22,6 @@ pub mod prelude { pub use crate::default; } -pub mod staging; pub mod synccell; pub mod syncunsafecell; From b3267db0cc2883b394bc46a4c266af94d7c3a816 Mon Sep 17 00:00:00 2001 From: ElliottjPierce <79881080+ElliottjPierce@users.noreply.github.com> Date: Thu, 6 Feb 2025 00:32:34 -0500 Subject: [PATCH 24/45] More granular alloc feature cfgs Great suggestions. IDK why I didn't do this to start with. Co-authored-by: Zachary Harrold --- crates/bevy_platform_support/src/lib.rs | 1 - crates/bevy_platform_support/src/staging.rs | 12 +++++++++--- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/crates/bevy_platform_support/src/lib.rs b/crates/bevy_platform_support/src/lib.rs index e24bafb36a5ef..57616c78985df 100644 --- a/crates/bevy_platform_support/src/lib.rs +++ b/crates/bevy_platform_support/src/lib.rs @@ -21,7 +21,6 @@ pub mod time; #[cfg(feature = "alloc")] pub mod collections; -#[cfg(feature = "alloc")] pub mod staging; /// Frequently used items which would typically be included in most contexts. diff --git a/crates/bevy_platform_support/src/staging.rs b/crates/bevy_platform_support/src/staging.rs index 12be4594028ce..a40bf11d631ba 100644 --- a/crates/bevy_platform_support/src/staging.rs +++ b/crates/bevy_platform_support/src/staging.rs @@ -1,9 +1,10 @@ //! Provides an abstracted system for staging modifications to data structures that rarely change. See [`StageOnWrite`] as a starting point. 
+use crate::sync::{RwLock, RwLockReadGuard, RwLockWriteGuard}; use core::ops::Deref; -use crate::alloc::sync::Arc; -use crate::sync::{RwLock, RwLockReadGuard, RwLockWriteGuard}; +#[cfg(feature = "alloc")] +use crate::sync::Arc; /// Signifies that this type represents staged changes to [`Cold`](Self::Cold). pub trait StagedChanges: Default { @@ -61,7 +62,8 @@ pub enum MaybeStaged { /// they are staged. Then, at user-defined times, they are applied to the read-optimized storage. This allows muttations /// through [`RwLock`]s without needing to constantly lock old or cold data. /// -/// This is not designed for atomic use (ie. in an [`Arc`]). See [`AtomicStageOnWrite`] for that. +/// This is not designed for atomic use (ie. in an `Arc`). +#[cfg_attr(feature = "alloc", doc = "See [`AtomicStageOnWrite`] for that.")] #[derive(Default)] pub struct StageOnWrite { /// Cold data is read optimized. @@ -70,6 +72,7 @@ pub struct StageOnWrite { staged: RwLock, } +#[cfg(feature = "alloc")] #[derive(Default)] struct AtomicStageOnWriteInner { /// Cold data is read optimized. This lives behind a [`RwLock`], but it is only written to for applying changes in @@ -86,6 +89,7 @@ struct AtomicStageOnWriteInner { /// it doesn't technically need the mutation. This is done to signify that the methods involve a state change of the data and to prevent deadlocks. /// Because everything that involves a write lock requires `&mut self`, it is impossible to deadlock because doing so would require another lock guard /// with the same lifetime, which rust will complaine about. If you do not want this behavior, see [`AtomicStageOnWriteInner`]. +#[cfg(feature = "alloc")] #[derive(Clone)] pub struct AtomicStageOnWrite(Arc>); @@ -207,6 +211,7 @@ impl StageOnWrite { } } +#[cfg(feature = "alloc")] impl AtomicStageOnWrite { /// Constructs a new [`AtomicStageOnWrite`] with the given value and no staged changes. 
pub fn new(value: T::Cold) -> Self { @@ -402,6 +407,7 @@ impl<'a, T: StagedChanges> Clone for StagedRef<'a, T> { } } +#[cfg(feature = "alloc")] impl Default for AtomicStageOnWrite where T::Cold: Default, From 6d450614b5deb517db7b176ca77387c5553c0f72 Mon Sep 17 00:00:00 2001 From: ElliottjPierce <79881080+ElliottjPierce@users.noreply.github.com> Date: Thu, 6 Feb 2025 11:07:55 -0500 Subject: [PATCH 25/45] More great suggestions from bushrat Mostly optimizing docs for better diff views in the future. Great idea. Co-authored-by: Zachary Harrold --- crates/bevy_platform_support/src/staging.rs | 60 ++++++++++++--------- 1 file changed, 36 insertions(+), 24 deletions(-) diff --git a/crates/bevy_platform_support/src/staging.rs b/crates/bevy_platform_support/src/staging.rs index a40bf11d631ba..2891f503d4df8 100644 --- a/crates/bevy_platform_support/src/staging.rs +++ b/crates/bevy_platform_support/src/staging.rs @@ -1,4 +1,5 @@ -//! Provides an abstracted system for staging modifications to data structures that rarely change. See [`StageOnWrite`] as a starting point. +//! Provides an abstracted system for staging modifications to data structures that rarely change. +//! See [`StageOnWrite`] as a starting point. use crate::sync::{RwLock, RwLockReadGuard, RwLockWriteGuard}; use core::ops::Deref; @@ -11,17 +12,19 @@ pub trait StagedChanges: Default { /// The more compact data structure that these changes compact into. type Cold; - /// This applies these changes to the passed [`Cold`](Self::Cold). When this is done, there should be no more staged changes, and [`any_staged`](Self::any_staged) should be false. + /// This applies these changes to the passed [`Cold`](Self::Cold). + /// When this is done, there should be no more staged changes, and [`any_staged`](Self::any_staged) should be `false`. fn apply_staged(&mut self, storage: &mut Self::Cold); - /// Returns true if and only if there are staged changes that could be applied. 
+ /// Returns `true` if and only if there are staged changes that could be applied. fn any_staged(&self) -> bool; } /// A trait that signifies that it holds an immutable reference to a cold type (ie. [`StagedChanges::Cold`]). pub trait ColdStorage: Deref {} -/// A struct that allows staging changes while reading from cold storage. Generally, staging changes should be implemented on this type. +/// A struct that allows staging changes while reading from cold storage. +/// Generally, staging changes should be implemented on this type. pub struct Stager<'a, T: StagedChanges> { /// The storage that is read optimized. pub cold: &'a T::Cold, @@ -29,7 +32,8 @@ pub struct Stager<'a, T: StagedChanges> { pub staged: &'a mut T, } -/// A struct that allows accessing changes while reading from cold storage. Generally, reading data should be implemented on this type. +/// A struct that allows accessing changes while reading from cold storage. +/// Generally, reading data should be implemented on this type. #[derive(Copy)] pub struct StagedRef<'a, T: StagedChanges> { /// The storage that is read optimized. @@ -38,13 +42,15 @@ pub struct StagedRef<'a, T: StagedChanges> { pub staged: &'a T, } -/// A locked version of [`Stager`]. Use this to hold a lock guard while using [`StagerLocked::as_stager`] or similar. +/// A locked version of [`Stager`]. +/// Use this to hold a lock guard while using [`StagerLocked::as_stager`] or similar. pub struct StagerLocked<'a, T: StagedChanges, C: ColdStorage> { cold: C, staged: RwLockWriteGuard<'a, T>, } -/// A locked version of [`StagedRef`] Use this to hold a lock guard while using [`StagerLocked::as_staged_ref`]. +/// A locked version of [`StagedRef`]. +/// Use this to hold a lock guard while using [`StagerLocked::as_staged_ref`]. 
pub struct StagedRefLocked<'a, T: StagedChanges, C: ColdStorage> { cold: C, staged: RwLockReadGuard<'a, T>, @@ -58,9 +64,10 @@ pub enum MaybeStaged { Staged(S), } -/// A struct that allows read-optimized operations while still allowing mutation. When mutattions are made, -/// they are staged. Then, at user-defined times, they are applied to the read-optimized storage. This allows muttations -/// through [`RwLock`]s without needing to constantly lock old or cold data. +/// A struct that allows read-optimized operations while still allowing mutation. +/// When mutations are made, they are staged. +/// Then, at user-defined times, they are applied to the read-optimized storage. +/// This allows mutations through [`RwLock`]s without needing to constantly lock old or cold data. /// /// This is not designed for atomic use (ie. in an `Arc`). #[cfg_attr(feature = "alloc", doc = "See [`AtomicStageOnWrite`] for that.")] @@ -75,20 +82,21 @@ pub struct StageOnWrite { #[cfg(feature = "alloc")] #[derive(Default)] struct AtomicStageOnWriteInner { - /// Cold data is read optimized. This lives behind a [`RwLock`], but it is only written to for applying changes in - /// a non-blocking way. In other worlds this locks, but almost never blocks. (It can technically block if a thread - /// tries to read from it while it is having changes applied, but that is extremely rare.) + /// Cold data is read optimized. + /// This lives behind a [`RwLock`], but it is only written to for applying changes in a non-blocking way. + /// This will only block if a thread tries to read from it while it is having changes applied, but that is extremely rare. cold: RwLock, - /// Staged data stores recent modifications to cold. It's [`RwLock`] coordinates mutations. + /// Staged data stores recent modifications to cold. + /// Its [`RwLock`] coordinates mutations. staged: RwLock, } -/// A version of [`StageOnWrite`] designed for atomic use. See [`StageOnWrite`] for details.
+/// A version of [`StageOnWrite`] designed for atomic use. +/// See [`StageOnWrite`] for details. /// -/// This type includes a baked in [`Arc`], so it can be shared similarly. Many of it's methods take `&mut self` even though -/// it doesn't technically need the mutation. This is done to signify that the methods involve a state change of the data and to prevent deadlocks. -/// Because everything that involves a write lock requires `&mut self`, it is impossible to deadlock because doing so would require another lock guard -/// with the same lifetime, which rust will complaine about. If you do not want this behavior, see [`AtomicStageOnWriteInner`]. +/// This type includes a baked in [`Arc`], so it can be shared across threads. +/// Many of it's methods take `&mut self` to ensure access is exclusive, preventing possible deadlocks. +/// If you do not want this behavior, see [`AtomicStageOnWriteInner`]. #[cfg(feature = "alloc")] #[derive(Clone)] pub struct AtomicStageOnWrite(Arc>); @@ -102,7 +110,8 @@ impl StageOnWrite { } } - /// Gets the inner cold data if it is safe. If [`any_staged`](Self::any_staged) is known to be false, this can be safely unwrapped. + /// Gets the inner cold data if it is safe. + /// If [`any_staged`](Self::any_staged) is known to be false, this can be safely unwrapped. #[inline] pub fn full(&mut self) -> Option<&mut T::Cold> { if self.staged.get_mut().unwrap().any_staged() { @@ -221,7 +230,8 @@ impl AtomicStageOnWrite { })) } - /// Gets the inner cold data if it is safe. If [`any_staged`](Self::any_staged) is known to be false, this can be safely unwrapped. + /// Gets the inner cold data if it is safe. + /// If [`any_staged`](Self::any_staged) is known to be false, this can be safely unwrapped. /// /// Note that this **Blocks**, so generally prefer [`full_non_blocking`](Self::full_non_blocking). #[inline] @@ -280,7 +290,8 @@ impl AtomicStageOnWrite { } } - /// If possible applies any staged changes. 
Returns true if it can guarantee there are no more staged changes. + /// If possible applies any staged changes. + /// Returns true if it can guarantee there are no more staged changes. #[inline] pub fn apply_staged_non_blocking(&mut self) -> bool { let Ok(mut staged) = self.0.staged.try_write() else { @@ -352,7 +363,8 @@ impl AtomicStageOnWrite { f(&mut self.stage_lock().as_stager()) } - /// Easily run a stager function to stage changes. Then, tries to apply those changes if doing so wouldn't lock. + /// Easily run a stager function to stage changes. + /// Then, tries to apply those changes if doing so wouldn't lock. #[inline] pub fn stage_scope_locked_eager(&mut self, f: impl FnOnce(&mut Stager) -> R) -> R { let result = self.stage_scope_locked(f); @@ -461,6 +473,6 @@ mod tests { let mut data = StageOnWrite::::default(); data.stage_scope_locked(|stager| stager.staged.added.push(5)); let full = data.apply_staged_for_full(); - assert_eq!(full[0], 5); + assert_eq!(full[..], &[5]); } } From 94d0655677189e96c6a4ade6cd631f1cffd56988 Mon Sep 17 00:00:00 2001 From: Elliott Pierce Date: Thu, 6 Feb 2025 11:19:07 -0500 Subject: [PATCH 26/45] cleaned up from review --- crates/bevy_platform_support/src/staging.rs | 23 ++++++++++++--------- 1 file changed, 13 insertions(+), 10 deletions(-) diff --git a/crates/bevy_platform_support/src/staging.rs b/crates/bevy_platform_support/src/staging.rs index 2891f503d4df8..6fcd1489d6e0e 100644 --- a/crates/bevy_platform_support/src/staging.rs +++ b/crates/bevy_platform_support/src/staging.rs @@ -95,8 +95,19 @@ struct AtomicStageOnWriteInner { /// See [`StageOnWrite`] for details. /// /// This type includes a baked in [`Arc`], so it can be shared across threads. +/// /// Many of it's methods take `&mut self` to ensure access is exclusive, preventing possible deadlocks. -/// If you do not want this behavior, see [`AtomicStageOnWriteInner`]. 
+/// This doesn not guarantee there are no deadlocks when working with multiple clones of this on the same thread. +/// Here's an example: +/// +/// ```compile_fail +/// use ... +/// let mut stage_on_write = AtomicStageOnWrite::::default(); +/// let reading = stage_on_write.read_lock(); +/// stage_on_write.apply_staged_non_blocking(); +/// ``` +/// +/// Remember to use [`apply_staged_non_blocking`](Self::apply_staged_non_blocking) or similar methods periodically as a best practice. #[cfg(feature = "alloc")] #[derive(Clone)] pub struct AtomicStageOnWrite(Arc>); @@ -460,19 +471,11 @@ mod tests { } } - // This is commented out, as it intentionally does not compile. This demonstrates how `AtomicStageOnWrite` prevents deadlock using the borrow checker. - // #[test] - // fn test_no_compile_for_deadlock() { - // let mut stage_on_write = AtomicStageOnWrite::::default(); - // let reading = stage_on_write.read_lock(); - // stage_on_write.apply_staged_non_blocking(); - // } - #[test] fn test_simple_stage() { let mut data = StageOnWrite::::default(); data.stage_scope_locked(|stager| stager.staged.added.push(5)); let full = data.apply_staged_for_full(); - assert_eq!(full[..], &[5]); + assert_eq!(&full[..], &[5]); } } From 885014c49c3bedbd3a3cc04f91dee26bca72d3bf Mon Sep 17 00:00:00 2001 From: Elliott Pierce Date: Thu, 6 Feb 2025 11:21:20 -0500 Subject: [PATCH 27/45] tiny docs clarification --- crates/bevy_platform_support/src/staging.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/bevy_platform_support/src/staging.rs b/crates/bevy_platform_support/src/staging.rs index 6fcd1489d6e0e..b8ae2dbe99980 100644 --- a/crates/bevy_platform_support/src/staging.rs +++ b/crates/bevy_platform_support/src/staging.rs @@ -121,7 +121,7 @@ impl StageOnWrite { } } - /// Gets the inner cold data if it is safe. + /// Gets the inner cold data if there are no staged changes. /// If [`any_staged`](Self::any_staged) is known to be false, this can be safely unwrapped. 
#[inline] pub fn full(&mut self) -> Option<&mut T::Cold> { @@ -241,7 +241,7 @@ impl AtomicStageOnWrite { })) } - /// Gets the inner cold data if it is safe. + /// Gets the inner cold data if there are no staged changes. /// If [`any_staged`](Self::any_staged) is known to be false, this can be safely unwrapped. /// /// Note that this **Blocks**, so generally prefer [`full_non_blocking`](Self::full_non_blocking). @@ -268,7 +268,7 @@ impl AtomicStageOnWrite { cold } - /// Gets the inner cold data if it is safe. + /// Gets the inner cold data if there are no staged changes and nobody is reading from the cold data. #[inline] pub fn full_non_blocking(&mut self) -> Option> { let staged = self.0.staged.try_read().ok()?; From c96660aab69a4ab057a6526463367dd7d0e412e8 Mon Sep 17 00:00:00 2001 From: Elliott Pierce Date: Thu, 6 Feb 2025 11:26:01 -0500 Subject: [PATCH 28/45] Clear poison instead of crashing This opens up the possibility that invalid data is created. For example, if a thread crashes while actively staging a change, there might be an incomplete staged change. However, in the context that this will be used, that seems unlikely, and this behavior gives more flexibility, etc. --- crates/bevy_platform_support/src/staging.rs | 86 +++++++++++++++------ 1 file changed, 64 insertions(+), 22 deletions(-) diff --git a/crates/bevy_platform_support/src/staging.rs b/crates/bevy_platform_support/src/staging.rs index b8ae2dbe99980..d578abbfd6a34 100644 --- a/crates/bevy_platform_support/src/staging.rs +++ b/crates/bevy_platform_support/src/staging.rs @@ -1,7 +1,7 @@ //! Provides an abstracted system for staging modifications to data structures that rarely change. //! See [`StageOnWrite`] as a starting point. 
-use crate::sync::{RwLock, RwLockReadGuard, RwLockWriteGuard}; +use crate::sync::{PoisonError, RwLock, RwLockReadGuard, RwLockWriteGuard}; use core::ops::Deref; #[cfg(feature = "alloc")] @@ -125,7 +125,12 @@ impl StageOnWrite { /// If [`any_staged`](Self::any_staged) is known to be false, this can be safely unwrapped. #[inline] pub fn full(&mut self) -> Option<&mut T::Cold> { - if self.staged.get_mut().unwrap().any_staged() { + if self + .staged + .get_mut() + .unwrap_or_else(PoisonError::into_inner) + .any_staged() + { None } else { Some(&mut self.cold) @@ -136,7 +141,10 @@ impl StageOnWrite { /// Immediately after this, [`any_staged`](Self::any_staged) will be false. #[inline] pub fn apply_staged_for_full(&mut self) -> &mut T::Cold { - let staged = self.staged.get_mut().unwrap(); + let staged = self + .staged + .get_mut() + .unwrap_or_else(PoisonError::into_inner); if staged.any_staged() { staged.apply_staged(&mut self.cold); } @@ -147,7 +155,10 @@ impl StageOnWrite { /// If you only have a immutable reference, consider using [`read_scope_locked`] with [`StagedChanges::any_staged`]. #[inline] pub fn any_staged(&mut self) -> bool { - self.staged.get_mut().unwrap().any_staged() + self.staged + .get_mut() + .unwrap_or_else(PoisonError::into_inner) + .any_staged() } /// Constructs a [`Stager`] that will stage changes. 
@@ -155,7 +166,10 @@ impl StageOnWrite { pub fn stage(&mut self) -> Stager<'_, T> { Stager { cold: &mut self.cold, - staged: self.staged.get_mut().unwrap(), + staged: self + .staged + .get_mut() + .unwrap_or_else(PoisonError::into_inner), } } @@ -168,7 +182,7 @@ impl StageOnWrite { pub fn stage_lock(&self) -> StagerLocked<'_, T, &T::Cold> { StagerLocked { cold: &self.cold, - staged: self.staged.write().unwrap(), + staged: self.staged.write().unwrap_or_else(PoisonError::into_inner), } } @@ -177,7 +191,10 @@ impl StageOnWrite { pub fn read(&mut self) -> StagedRef<'_, T> { StagedRef { cold: &self.cold, - staged: self.staged.get_mut().unwrap(), + staged: self + .staged + .get_mut() + .unwrap_or_else(PoisonError::into_inner), } } @@ -190,7 +207,7 @@ impl StageOnWrite { pub fn read_lock(&self) -> StagedRefLocked<'_, T, &T::Cold> { StagedRefLocked { cold: &self.cold, - staged: self.staged.read().unwrap(), + staged: self.staged.read().unwrap_or_else(PoisonError::into_inner), } } @@ -201,7 +218,10 @@ impl StageOnWrite { for_full: impl FnOnce(&mut T::Cold) -> C, for_staged: impl FnOnce(&mut Stager) -> S, ) -> MaybeStaged { - let staged = self.staged.get_mut().unwrap(); + let staged = self + .staged + .get_mut() + .unwrap_or_else(PoisonError::into_inner); let cold = &mut self.cold; if staged.any_staged() { MaybeStaged::Staged(for_staged(&mut Stager { cold, staged })) @@ -247,10 +267,16 @@ impl AtomicStageOnWrite { /// Note that this **Blocks**, so generally prefer [`full_non_blocking`](Self::full_non_blocking). 
#[inline] pub fn full_locked(&mut self) -> Option> { - if self.0.staged.read().unwrap().any_staged() { + if self + .0 + .staged + .read() + .unwrap_or_else(PoisonError::into_inner) + .any_staged() + { None } else { - Some(self.0.cold.write().unwrap()) + Some(self.0.cold.write().unwrap_or_else(PoisonError::into_inner)) } } @@ -260,8 +286,12 @@ impl AtomicStageOnWrite { /// Note that this **Blocks**, so generally prefer [`apply_staged_for_full_non_blocking`](Self::apply_staged_for_full_non_blocking). #[inline] pub fn apply_staged_for_full_locked(&mut self) -> RwLockWriteGuard<'_, T::Cold> { - let mut staged = self.0.staged.write().unwrap(); - let mut cold = self.0.cold.write().unwrap(); + let mut staged = self + .0 + .staged + .write() + .unwrap_or_else(PoisonError::into_inner); + let mut cold = self.0.cold.write().unwrap_or_else(PoisonError::into_inner); if staged.any_staged() { staged.apply_staged(&mut cold); } @@ -291,7 +321,7 @@ impl AtomicStageOnWrite { Some(cold) } Err(_) => { - let staged = self.0.staged.read().unwrap(); + let staged = self.0.staged.read().unwrap_or_else(PoisonError::into_inner); if staged.any_staged() { None } else { @@ -322,15 +352,23 @@ impl AtomicStageOnWrite { /// Returns true if and only if there are staged changes that could be applied. #[inline] pub fn any_staged(&self) -> bool { - self.0.staged.read().unwrap().any_staged() + self.0 + .staged + .read() + .unwrap_or_else(PoisonError::into_inner) + .any_staged() } /// Constructs a [`StagerLocked`], locking internally. 
#[inline] pub fn stage_lock(&mut self) -> StagerLocked<'_, T, RwLockReadGuard<'_, T::Cold>> { StagerLocked { - cold: self.0.cold.read().unwrap(), - staged: self.0.staged.write().unwrap(), + cold: self.0.cold.read().unwrap_or_else(PoisonError::into_inner), + staged: self + .0 + .staged + .write() + .unwrap_or_else(PoisonError::into_inner), } } @@ -338,8 +376,8 @@ impl AtomicStageOnWrite { #[inline] pub fn read_lock(&self) -> StagedRefLocked<'_, T, RwLockReadGuard<'_, T::Cold>> { StagedRefLocked { - cold: self.0.cold.read().unwrap(), - staged: self.0.staged.read().unwrap(), + cold: self.0.cold.read().unwrap_or_else(PoisonError::into_inner), + staged: self.0.staged.read().unwrap_or_else(PoisonError::into_inner), } } @@ -350,9 +388,13 @@ impl AtomicStageOnWrite { for_full: impl FnOnce(&mut T::Cold) -> C, for_staged: impl FnOnce(&mut Stager) -> S, ) -> MaybeStaged { - let mut staged = self.0.staged.write().unwrap(); + let mut staged = self + .0 + .staged + .write() + .unwrap_or_else(PoisonError::into_inner); if staged.any_staged() { - let cold = self.0.cold.read().unwrap(); + let cold = self.0.cold.read().unwrap_or_else(PoisonError::into_inner); MaybeStaged::Staged(for_staged(&mut Stager { cold: &cold, staged: &mut staged, @@ -360,7 +402,7 @@ impl AtomicStageOnWrite { } else if let Ok(mut cold) = self.0.cold.try_write() { MaybeStaged::Cold(for_full(&mut cold)) } else { - let cold = self.0.cold.read().unwrap(); + let cold = self.0.cold.read().unwrap_or_else(PoisonError::into_inner); MaybeStaged::Staged(for_staged(&mut Stager { cold: &cold, staged: &mut staged, From 9e48a805d1afbc1c029102f3f5343fa1585c808c Mon Sep 17 00:00:00 2001 From: Elliott Pierce Date: Thu, 6 Feb 2025 11:35:33 -0500 Subject: [PATCH 29/45] moved back to bevy_utils lol --- crates/bevy_platform_support/src/lib.rs | 1 - crates/bevy_utils/src/lib.rs | 1 + crates/{bevy_platform_support => bevy_utils}/src/staging.rs | 6 +++--- 3 files changed, 4 insertions(+), 4 deletions(-) rename 
crates/{bevy_platform_support => bevy_utils}/src/staging.rs (98%) diff --git a/crates/bevy_platform_support/src/lib.rs b/crates/bevy_platform_support/src/lib.rs index 57616c78985df..eada254595ab4 100644 --- a/crates/bevy_platform_support/src/lib.rs +++ b/crates/bevy_platform_support/src/lib.rs @@ -21,7 +21,6 @@ pub mod time; #[cfg(feature = "alloc")] pub mod collections; -pub mod staging; /// Frequently used items which would typically be included in most contexts. /// diff --git a/crates/bevy_utils/src/lib.rs b/crates/bevy_utils/src/lib.rs index 4b7d91a40eee2..41fdc1529612e 100644 --- a/crates/bevy_utils/src/lib.rs +++ b/crates/bevy_utils/src/lib.rs @@ -22,6 +22,7 @@ pub mod prelude { pub use crate::default; } +pub mod staging; pub mod synccell; pub mod syncunsafecell; diff --git a/crates/bevy_platform_support/src/staging.rs b/crates/bevy_utils/src/staging.rs similarity index 98% rename from crates/bevy_platform_support/src/staging.rs rename to crates/bevy_utils/src/staging.rs index d578abbfd6a34..e5e90f4ea966e 100644 --- a/crates/bevy_platform_support/src/staging.rs +++ b/crates/bevy_utils/src/staging.rs @@ -1,11 +1,11 @@ //! Provides an abstracted system for staging modifications to data structures that rarely change. //! See [`StageOnWrite`] as a starting point. -use crate::sync::{PoisonError, RwLock, RwLockReadGuard, RwLockWriteGuard}; +use bevy_platform_support::sync::{PoisonError, RwLock, RwLockReadGuard, RwLockWriteGuard}; use core::ops::Deref; #[cfg(feature = "alloc")] -use crate::sync::Arc; +use bevy_platform_support::sync::Arc; /// Signifies that this type represents staged changes to [`Cold`](Self::Cold). 
pub trait StagedChanges: Default { @@ -488,7 +488,7 @@ impl ColdStorage for &'_ T::Cold {} #[cfg(test)] mod tests { - use crate::{collections::HashMap, prelude::Vec}; + use bevy_platform_support::{collections::HashMap, prelude::Vec}; use super::*; From 1794045bfcc8ba9eb7898221d1f02457896ae761 Mon Sep 17 00:00:00 2001 From: Elliott Pierce Date: Thu, 6 Feb 2025 14:36:43 -0500 Subject: [PATCH 30/45] mostly working example --- crates/bevy_utils/src/staging.rs | 402 +++++++++++++++++++++++++++++-- 1 file changed, 385 insertions(+), 17 deletions(-) diff --git a/crates/bevy_utils/src/staging.rs b/crates/bevy_utils/src/staging.rs index e5e90f4ea966e..3f9a522639839 100644 --- a/crates/bevy_utils/src/staging.rs +++ b/crates/bevy_utils/src/staging.rs @@ -2,7 +2,7 @@ //! See [`StageOnWrite`] as a starting point. use bevy_platform_support::sync::{PoisonError, RwLock, RwLockReadGuard, RwLockWriteGuard}; -use core::ops::Deref; +use core::ops::{Deref, DerefMut}; #[cfg(feature = "alloc")] use bevy_platform_support::sync::Arc; @@ -23,6 +23,27 @@ pub trait StagedChanges: Default { /// A trait that signifies that it holds an immutable reference to a cold type (ie. [`StagedChanges::Cold`]). pub trait ColdStorage: Deref {} +pub trait StagableWritesTypes { + type Staging: StagedChanges; + type ColdStorage<'a>: Deref::Cold> + where + ::Cold: 'a; +} + +pub trait StagableWrites: StagableWritesTypes { + /// Allows raw access to reading cold storage, which may still have unapplied staged changes that make this out of date. + /// Only use this if you really know what you are doing. + /// This must never deadlock if there is already a [`Self::ColdStorage`] for this value on this thread. + #[doc(hidden)] + fn raw_read_cold(&self) -> Self::ColdStorage<'_>; + + /// Allows raw access to reading staged changes, which may be missing context of cold storage. + /// Only use this if you really know what you are doing. 
+ /// This must never deadlock if there is already a read for this value on this thread. + #[doc(hidden)] + fn raw_read_staged(&self) -> RwLockReadGuard<'_, Self::Staging>; +} + /// A struct that allows staging changes while reading from cold storage. /// Generally, staging changes should be implemented on this type. pub struct Stager<'a, T: StagedChanges> { @@ -44,16 +65,18 @@ pub struct StagedRef<'a, T: StagedChanges> { /// A locked version of [`Stager`]. /// Use this to hold a lock guard while using [`StagerLocked::as_stager`] or similar. -pub struct StagerLocked<'a, T: StagedChanges, C: ColdStorage> { - cold: C, - staged: RwLockWriteGuard<'a, T>, +pub struct StagerLocked<'a, T: StagableWrites> { + inner: &'a T, + pub cold: T::ColdStorage<'a>, + pub staged: RwLockWriteGuard<'a, T::Staging>, } /// A locked version of [`StagedRef`]. /// Use this to hold a lock guard while using [`StagerLocked::as_staged_ref`]. -pub struct StagedRefLocked<'a, T: StagedChanges, C: ColdStorage> { - cold: C, - staged: RwLockReadGuard<'a, T>, +pub struct StagedRefLocked<'a, T: StagableWrites> { + inner: &'a T, + pub cold: T::ColdStorage<'a>, + pub staged: RwLockReadGuard<'a, T::Staging>, } /// A general purpose enum for representing data that may or may not need to be staged. 
@@ -112,6 +135,44 @@ struct AtomicStageOnWriteInner { #[derive(Clone)] pub struct AtomicStageOnWrite(Arc>); +impl StagableWritesTypes for StageOnWrite { + type Staging = T; + + type ColdStorage<'a> + = &'a T::Cold + where + T::Cold: 'a; +} + +impl StagableWritesTypes for AtomicStageOnWrite { + type Staging = T; + + type ColdStorage<'a> + = RwLockReadGuard<'a, T::Cold> + where + T::Cold: 'a; +} + +impl StagableWrites for StageOnWrite { + fn raw_read_cold(&self) -> Self::ColdStorage<'_> { + &self.cold + } + + fn raw_read_staged(&self) -> RwLockReadGuard<'_, Self::Staging> { + self.staged.read().unwrap_or_else(PoisonError::into_inner) + } +} + +impl StagableWrites for AtomicStageOnWrite { + fn raw_read_cold(&self) -> Self::ColdStorage<'_> { + self.0.cold.read().unwrap_or_else(PoisonError::into_inner) + } + + fn raw_read_staged(&self) -> RwLockReadGuard<'_, Self::Staging> { + self.0.staged.read().unwrap_or_else(PoisonError::into_inner) + } +} + impl StageOnWrite { /// Constructs a new [`StageOnWrite`] with the given value and no staged changes. pub fn new(value: T::Cold) -> Self { @@ -152,7 +213,7 @@ impl StageOnWrite { } /// Returns true if and only if there are staged changes that could be applied. - /// If you only have a immutable reference, consider using [`read_scope_locked`] with [`StagedChanges::any_staged`]. + /// If you only have a immutable reference, consider using [`read_scope_locked`](Self::read_scope_locked) with [`StagedChanges::any_staged`]. #[inline] pub fn any_staged(&mut self) -> bool { self.staged @@ -179,8 +240,9 @@ impl StageOnWrite { /// /// This deadlocks if there are any other lock guards on this thread for this value. 
#[inline] - pub fn stage_lock(&self) -> StagerLocked<'_, T, &T::Cold> { + pub fn stage_lock(&self) -> StagerLocked<'_, Self> { StagerLocked { + inner: self, cold: &self.cold, staged: self.staged.write().unwrap_or_else(PoisonError::into_inner), } @@ -204,8 +266,9 @@ impl StageOnWrite { /// /// This deadlocks if there are any write lock guards on this thread for this value. #[inline] - pub fn read_lock(&self) -> StagedRefLocked<'_, T, &T::Cold> { + pub fn read_lock(&self) -> StagedRefLocked<'_, Self> { StagedRefLocked { + inner: self, cold: &self.cold, staged: self.staged.read().unwrap_or_else(PoisonError::into_inner), } @@ -361,8 +424,9 @@ impl AtomicStageOnWrite { /// Constructs a [`StagerLocked`], locking internally. #[inline] - pub fn stage_lock(&mut self) -> StagerLocked<'_, T, RwLockReadGuard<'_, T::Cold>> { + pub fn stage_lock(&mut self) -> StagerLocked<'_, Self> { StagerLocked { + inner: self, cold: self.0.cold.read().unwrap_or_else(PoisonError::into_inner), staged: self .0 @@ -374,8 +438,9 @@ impl AtomicStageOnWrite { /// Constructs a [`StagedRefLocked`], locking internally. #[inline] - pub fn read_lock(&self) -> StagedRefLocked<'_, T, RwLockReadGuard<'_, T::Cold>> { + pub fn read_lock(&self) -> StagedRefLocked<'_, Self> { StagedRefLocked { + inner: self, cold: self.0.cold.read().unwrap_or_else(PoisonError::into_inner), staged: self.0.staged.read().unwrap_or_else(PoisonError::into_inner), } @@ -432,10 +497,10 @@ impl AtomicStageOnWrite { } } -impl> StagerLocked<'_, T, C> { +impl<'a, T: StagableWrites> StagerLocked<'a, T> { /// Allows a user to view this as a [`Stager`]. #[inline] - pub fn as_stager(&mut self) -> Stager<'_, T> { + pub fn as_stager(&mut self) -> Stager<'_, T::Staging> { Stager { cold: &self.cold, staged: &mut self.staged, @@ -444,23 +509,47 @@ impl> StagerLocked<'_, T, C> { /// Allows a user to view this as a [`StagedRef`]. 
#[inline] - pub fn as_staged_ref(&self) -> StagedRef<'_, T> { + pub fn as_staged_ref(&self) -> StagedRef<'_, T::Staging> { StagedRef { cold: &self.cold, staged: &self.staged, } } + + /// Releases the lock, returning the underlying [`StagableWrites`] structure. + #[inline] + pub fn release(self) -> &'a T { + self.inner + } } -impl> StagedRefLocked<'_, T, C> { +impl<'a, T: StagableWrites> StagedRefLocked<'a, T> { /// Allows a user to view this as a [`StagedRef`]. #[inline] - pub fn as_staged_ref(&self) -> StagedRef<'_, T> { + pub fn as_staged_ref(&self) -> StagedRef<'_, T::Staging> { StagedRef { cold: &self.cold, staged: &self.staged, } } + + /// Releases the lock, returning the underlying [`StagableWrites`] structure. + #[inline] + pub fn release(self) -> &'a T { + self.inner + } + + /// Allows returning a reference to the locked staged data without releasing its lock. + #[inline] + pub fn get_staged_guard(&self) -> RwLockReadGuard<'a, T::Staging> { + self.inner.raw_read_staged() + } + + /// Allows returning a reference to the cold data without releasing its lock (it it has one). 
+ #[inline] + pub fn get_cold_guard(&self) -> T::ColdStorage<'a> { + self.inner.raw_read_cold() + } } impl<'a, T: StagedChanges> Clone for StagedRef<'a, T> { @@ -472,6 +561,16 @@ impl<'a, T: StagedChanges> Clone for StagedRef<'a, T> { } } +impl<'a, T: StagableWrites> Clone for StagedRefLocked<'a, T> { + fn clone(&self) -> Self { + Self { + inner: self.inner, + staged: self.get_staged_guard(), + cold: self.get_cold_guard(), + } + } +} + #[cfg(feature = "alloc")] impl Default for AtomicStageOnWrite where @@ -486,6 +585,26 @@ impl ColdStorage for RwLockReadGuard<'_, T::Cold> {} impl ColdStorage for &'_ T::Cold {} +impl> Deref for MaybeStaged { + type Target = C::Target; + + fn deref(&self) -> &Self::Target { + match self { + MaybeStaged::Cold(c) => c, + MaybeStaged::Staged(s) => s, + } + } +} + +impl> DerefMut for MaybeStaged { + fn deref_mut(&mut self) -> &mut Self::Target { + match self { + MaybeStaged::Cold(c) => c, + MaybeStaged::Staged(s) => s, + } + } +} + #[cfg(test)] mod tests { use bevy_platform_support::{collections::HashMap, prelude::Vec}; @@ -521,3 +640,252 @@ mod tests { assert_eq!(&full[..], &[5]); } } + +mod example { + use core::mem::take; + use core::ops::{Deref, DerefMut}; + use std::sync::RwLockReadGuard; + + use crate as bevy_utils; + use bevy_platform_support::collections::HashMap; + use bevy_platform_support::prelude::String; + use bevy_utils::staging::{MaybeStaged, StagedChanges, StagedRef}; + + use super::{ColdStorage, StagableWrites, StageOnWrite, StagedRefLocked, Stager}; + + /// Stores some arbitrary player data. 
+ #[derive(Debug, Clone)] + struct PlayerData { + name: String, + secs_in_game: u32, + id: PlayerId, + } + + /// A unique id per player + #[derive(Hash, Debug, Clone, Copy, PartialEq, Eq)] + struct PlayerId(u32); + + /// The standard collection of players + #[derive(Default, Debug)] + struct Players(HashMap); + + /// When a change is made to a player + #[derive(Default, Debug)] + struct StagedPlayerChanges { + replacements: HashMap, + additional_time_played: HashMap, + } + + impl StagedChanges for StagedPlayerChanges { + type Cold = Players; + + fn apply_staged(&mut self, storage: &mut Self::Cold) { + for replaced in self.replacements.drain() { + storage.0.insert(replaced.0, replaced.1); + } + for (id, new_time) in self.additional_time_played.iter_mut() { + if let Some(player) = storage.0.get_mut(id) { + player.secs_in_game += take(new_time); + } + } + } + + fn any_staged(&self) -> bool { + !self.replacements.is_empty() || !self.additional_time_played.is_empty() + } + } + + /// Allows read only access to player data. 
+ trait PlayerAccess { + fn get_name(&self, id: PlayerId) -> Option>; + fn get_secs_in_game(&self, id: PlayerId) -> Option; + } + + impl PlayerAccess for Players { + fn get_name(&self, id: PlayerId) -> Option> { + self.0.get(&id).map(|player| player.name.as_str()) + } + + fn get_secs_in_game(&self, id: PlayerId) -> Option { + self.0.get(&id).map(|player| player.secs_in_game) + } + } + + impl PlayerAccess for StagedRef<'_, StagedPlayerChanges> { + fn get_name(&self, id: PlayerId) -> Option> { + if let Some(staged) = self.staged.replacements.get(&id) { + Some(MaybeStaged::Staged(staged.name.as_str())) + } else { + self.cold.get_name(id).map(MaybeStaged::Cold) + } + } + + fn get_secs_in_game(&self, id: PlayerId) -> Option { + let base = if let Some(staged) = self.staged.replacements.get(&id) { + Some(staged.secs_in_game) + } else { + self.cold.0.get(&id).map(|player| player.secs_in_game) + }?; + let additional = self + .staged + .additional_time_played + .get(&id) + .copied() + .unwrap_or_default(); + Some(base + additional) + } + } + + /// Allows read only access to player data. 
+ trait PlayerAccessMut { + fn get_name_mut(&mut self, id: PlayerId) -> Option>; + fn add_secs_in_game(&mut self, id: PlayerId, secs: u32); + fn add(&mut self, name: String) -> PlayerId; + } + + impl PlayerAccessMut for Players { + fn get_name_mut(&mut self, id: PlayerId) -> Option> { + self.0.get_mut(&id).map(|player| player.name.as_mut_str()) + } + + fn add_secs_in_game(&mut self, id: PlayerId, secs: u32) { + if let Some(player) = self.0.get_mut(&id) { + player.secs_in_game += secs; + } + } + + fn add(&mut self, name: String) -> PlayerId { + let id = PlayerId(self.0.len() as u32); + self.0.insert( + id, + PlayerData { + name, + secs_in_game: 0, + id, + }, + ); + id + } + } + + impl PlayerAccessMut for Stager<'_, StagedPlayerChanges> { + fn get_name_mut(&mut self, id: PlayerId) -> Option> { + if !self.cold.0.contains_key(&id) && !self.staged.replacements.contains_key(&id) { + return None; + } + + let player = self + .staged + .replacements + .entry(id) + .or_insert_with(|| self.cold.0.get(&id).cloned().unwrap()); + Some(player.name.as_mut_str()) + } + + fn add_secs_in_game(&mut self, id: PlayerId, secs: u32) { + *self.staged.additional_time_played.entry(id).or_default() += secs; + } + + fn add(&mut self, name: String) -> PlayerId { + let id = PlayerId((self.cold.0.len() + self.staged.replacements.len()) as u32); + self.staged.replacements.insert( + id, + PlayerData { + name, + secs_in_game: 0, + id, + }, + ); + id + } + } + + struct LockedNameStagedRef<'a> { + staged: RwLockReadGuard<'a, StagedPlayerChanges>, + // must be valid + id: PlayerId, + } + + struct LockedNameColdRef<'a, T: StagableWrites + 'a> { + cold: T::ColdStorage<'a>, + // must be valid + id: PlayerId, + } + + impl Deref for LockedNameStagedRef<'_> { + type Target = str; + + fn deref(&self) -> &Self::Target { + self.staged + .replacements + .get(&self.id) + .unwrap() + .name + .as_str() + } + } + + impl<'a, T: StagableWrites + 'a> Deref for LockedNameColdRef<'a, T> { + type Target = str; + + fn 
deref(&self) -> &Self::Target { + self.cold.deref().0.get(&self.id).unwrap().name.as_str() + } + } + + impl> PlayerAccess for StagedRefLocked<'_, T> { + fn get_name(&self, id: PlayerId) -> Option> { + let this = self.clone(); + if this.staged.replacements.contains_key(&id) { + Some(MaybeStaged::Staged(LockedNameStagedRef { + staged: this.get_staged_guard(), + id, + })) + } else if this.cold.0.contains_key(&id) { + Some(MaybeStaged::Cold(LockedNameColdRef:: { + cold: this.get_cold_guard(), + id, + })) + } else { + None + } + } + + fn get_secs_in_game(&self, id: PlayerId) -> Option { + self.as_staged_ref().get_secs_in_game(id) + } + } + + pub struct PlayerRegistry { + players: StageOnWrite, + } + + impl PlayerRegistry { + /// Runs relatively rarely + pub fn player_joined(&self, name: String) -> PlayerId { + self.players.stage_scope_locked(|stager| stager.add(name)) + } + + /// Runs very often + pub fn get_name<'a>(&'a self, id: PlayerId) -> Option + 'a> { + { + let this = self.players.read_lock(); + if this.staged.replacements.contains_key(&id) { + Some(MaybeStaged::Staged(LockedNameStagedRef { + staged: this.get_staged_guard(), + id, + })) + } else if this.cold.0.contains_key(&id) { + Some(MaybeStaged::Cold(LockedNameColdRef::< + StageOnWrite, + > { + cold: this.get_cold_guard(), + id, + })) + } else { + None + } + } + } + } +} From 7be807c5795d5eabdf6f5800dd2b3d4e595747a8 Mon Sep 17 00:00:00 2001 From: Elliott Pierce Date: Thu, 6 Feb 2025 14:48:37 -0500 Subject: [PATCH 31/45] finished example --- crates/bevy_utils/src/staging.rs | 64 ++++++++++++++++---------------- 1 file changed, 31 insertions(+), 33 deletions(-) diff --git a/crates/bevy_utils/src/staging.rs b/crates/bevy_utils/src/staging.rs index 3f9a522639839..e8683698f3bf7 100644 --- a/crates/bevy_utils/src/staging.rs +++ b/crates/bevy_utils/src/staging.rs @@ -46,6 +46,7 @@ pub trait StagableWrites: StagableWritesTypes { /// A struct that allows staging changes while reading from cold storage. 
/// Generally, staging changes should be implemented on this type. +#[derive(Debug)] pub struct Stager<'a, T: StagedChanges> { /// The storage that is read optimized. pub cold: &'a T::Cold, @@ -55,7 +56,7 @@ pub struct Stager<'a, T: StagedChanges> { /// A struct that allows accessing changes while reading from cold storage. /// Generally, reading data should be implemented on this type. -#[derive(Copy)] +#[derive(Copy, Debug)] pub struct StagedRef<'a, T: StagedChanges> { /// The storage that is read optimized. pub cold: &'a T::Cold, @@ -65,6 +66,7 @@ pub struct StagedRef<'a, T: StagedChanges> { /// A locked version of [`Stager`]. /// Use this to hold a lock guard while using [`StagerLocked::as_stager`] or similar. +#[derive(Debug)] pub struct StagerLocked<'a, T: StagableWrites> { inner: &'a T, pub cold: T::ColdStorage<'a>, @@ -73,6 +75,7 @@ pub struct StagerLocked<'a, T: StagableWrites> { /// A locked version of [`StagedRef`]. /// Use this to hold a lock guard while using [`StagerLocked::as_staged_ref`]. +#[derive(Debug)] pub struct StagedRefLocked<'a, T: StagableWrites> { inner: &'a T, pub cold: T::ColdStorage<'a>, @@ -80,6 +83,7 @@ pub struct StagedRefLocked<'a, T: StagableWrites> { } /// A general purpose enum for representing data that may or may not need to be staged. +#[derive(Debug, Clone, PartialEq, Eq)] pub enum MaybeStaged { /// There is no staging necessary. Cold(C), @@ -94,7 +98,7 @@ pub enum MaybeStaged { /// /// This is not designed for atomic use (ie. in an `Arc`). #[cfg_attr(feature = "alloc", doc = "See [`AtomicStageOnWrite`] for that.")] -#[derive(Default)] +#[derive(Default, Debug)] pub struct StageOnWrite { /// Cold data is read optimized. cold: T::Cold, @@ -103,7 +107,7 @@ pub struct StageOnWrite { } #[cfg(feature = "alloc")] -#[derive(Default)] +#[derive(Default, Debug)] struct AtomicStageOnWriteInner { /// Cold data is read optimized. /// This lives behind a [`RwLock`], but it is only written to for applying changes in a non-blocking way. 
@@ -649,13 +653,14 @@ mod example { use crate as bevy_utils; use bevy_platform_support::collections::HashMap; use bevy_platform_support::prelude::String; - use bevy_utils::staging::{MaybeStaged, StagedChanges, StagedRef}; - - use super::{ColdStorage, StagableWrites, StageOnWrite, StagedRefLocked, Stager}; + use bevy_utils::staging::{ + MaybeStaged, StagableWrites, StageOnWrite, StagedChanges, StagedRef, StagedRefLocked, + Stager, StagerLocked, + }; /// Stores some arbitrary player data. #[derive(Debug, Clone)] - struct PlayerData { + pub struct PlayerData { name: String, secs_in_game: u32, id: PlayerId, @@ -663,15 +668,15 @@ mod example { /// A unique id per player #[derive(Hash, Debug, Clone, Copy, PartialEq, Eq)] - struct PlayerId(u32); + pub struct PlayerId(u32); /// The standard collection of players #[derive(Default, Debug)] - struct Players(HashMap); + pub struct Players(HashMap); /// When a change is made to a player #[derive(Default, Debug)] - struct StagedPlayerChanges { + pub struct StagedPlayerChanges { replacements: HashMap, additional_time_played: HashMap, } @@ -833,29 +838,7 @@ mod example { } } - impl> PlayerAccess for StagedRefLocked<'_, T> { - fn get_name(&self, id: PlayerId) -> Option> { - let this = self.clone(); - if this.staged.replacements.contains_key(&id) { - Some(MaybeStaged::Staged(LockedNameStagedRef { - staged: this.get_staged_guard(), - id, - })) - } else if this.cold.0.contains_key(&id) { - Some(MaybeStaged::Cold(LockedNameColdRef:: { - cold: this.get_cold_guard(), - id, - })) - } else { - None - } - } - - fn get_secs_in_game(&self, id: PlayerId) -> Option { - self.as_staged_ref().get_secs_in_game(id) - } - } - + #[derive(Debug, Default)] pub struct PlayerRegistry { players: StageOnWrite, } @@ -887,5 +870,20 @@ mod example { } } } + + /// Cleans up internal data to make reading faster. + pub fn clean(&mut self) { + self.players.apply_staged_for_full(); + } + + /// Allows reading in bulk without extra locking. 
+ pub fn bulk_read(&self) -> StagedRefLocked<'_, StageOnWrite> { + self.players.read_lock() + } + + /// Allows writing in bulk without extra locking. + pub fn bulk_write(&self) -> StagerLocked<'_, StageOnWrite> { + self.players.stage_lock() + } } } From 0d2990a18630b7aaa197e8274aaf09c909969fa9 Mon Sep 17 00:00:00 2001 From: Elliott Pierce Date: Thu, 6 Feb 2025 15:04:53 -0500 Subject: [PATCH 32/45] better docs --- crates/bevy_utils/src/staging.rs | 531 ++++++++++++++++--------------- 1 file changed, 268 insertions(+), 263 deletions(-) diff --git a/crates/bevy_utils/src/staging.rs b/crates/bevy_utils/src/staging.rs index e8683698f3bf7..9588b7de6cb06 100644 --- a/crates/bevy_utils/src/staging.rs +++ b/crates/bevy_utils/src/staging.rs @@ -1,5 +1,250 @@ //! Provides an abstracted system for staging modifications to data structures that rarely change. //! See [`StageOnWrite`] as a starting point. +//! +//! Here's an example of this utility in action for registering players. This is a bit contrived, but it should communicate the idea. +//! +//! ``` +//! use core::mem::take; +//! use std::sync::RwLockReadGuard; +//! use core::ops::{Deref, DerefMut}; +//! +//! use crate as bevy_utils; +//! use bevy_platform_support::collections::HashMap; +//! use bevy_platform_support::prelude::String; +//! use bevy_utils::staging::{ +//! MaybeStaged, StagableWrites, StageOnWrite, StagedChanges, StagedRef, StagedRefLocked, +//! Stager, StagerLocked, +//! }; +//! +//! /// Stores some arbitrary player data. +//! #[derive(Debug, Clone)] +//! pub struct PlayerData { +//! name: String, +//! secs_in_game: u32, +//! id: PlayerId, +//! } +//! +//! /// A unique id per player +//! #[derive(Hash, Debug, Clone, Copy, PartialEq, Eq)] +//! pub struct PlayerId(u32); +//! +//! /// The standard collection of players +//! #[derive(Default, Debug)] +//! pub struct Players(HashMap); +//! +//! /// When a change is made to a player +//! #[derive(Default, Debug)] +//! pub struct StagedPlayerChanges { +//! 
replacements: HashMap, +//! additional_time_played: HashMap, +//! } +//! +//! impl StagedChanges for StagedPlayerChanges { +//! type Cold = Players; +//! +//! fn apply_staged(&mut self, storage: &mut Self::Cold) { +//! for replaced in self.replacements.drain() { +//! storage.0.insert(replaced.0, replaced.1); +//! } +//! for (id, new_time) in self.additional_time_played.iter_mut() { +//! if let Some(player) = storage.0.get_mut(id) { +//! player.secs_in_game += take(new_time); +//! } +//! } +//! } +//! +//! fn any_staged(&self) -> bool { +//! !self.replacements.is_empty() || !self.additional_time_played.is_empty() +//! } +//! } +//! +//! /// Allows read only access to player data. +//! trait PlayerAccess { +//! fn get_name(&self, id: PlayerId) -> Option>; +//! fn get_secs_in_game(&self, id: PlayerId) -> Option; +//! } +//! +//! impl PlayerAccess for Players { +//! fn get_name(&self, id: PlayerId) -> Option> { +//! self.0.get(&id).map(|player| player.name.as_str()) +//! } +//! +//! fn get_secs_in_game(&self, id: PlayerId) -> Option { +//! self.0.get(&id).map(|player| player.secs_in_game) +//! } +//! } +//! +//! impl PlayerAccess for StagedRef<'_, StagedPlayerChanges> { +//! fn get_name(&self, id: PlayerId) -> Option> { +//! if let Some(staged) = self.staged.replacements.get(&id) { +//! Some(MaybeStaged::Staged(staged.name.as_str())) +//! } else { +//! self.cold.get_name(id).map(MaybeStaged::Cold) +//! } +//! } +//! +//! fn get_secs_in_game(&self, id: PlayerId) -> Option { +//! let base = if let Some(staged) = self.staged.replacements.get(&id) { +//! Some(staged.secs_in_game) +//! } else { +//! self.cold.0.get(&id).map(|player| player.secs_in_game) +//! }?; +//! let additional = self +//! .staged +//! .additional_time_played +//! .get(&id) +//! .copied() +//! .unwrap_or_default(); +//! Some(base + additional) +//! } +//! } +//! +//! /// Allows mutable access to player data. +//! trait PlayerAccessMut { +//! fn get_name_mut(&mut self, id: PlayerId) -> Option>; +//! 
fn add_secs_in_game(&mut self, id: PlayerId, secs: u32); +//! fn add(&mut self, name: String) -> PlayerId; +//! } +//! +//! impl PlayerAccessMut for Players { +//! fn get_name_mut(&mut self, id: PlayerId) -> Option> { +//! self.0.get_mut(&id).map(|player| player.name.as_mut_str()) +//! } +//! +//! fn add_secs_in_game(&mut self, id: PlayerId, secs: u32) { +//! if let Some(player) = self.0.get_mut(&id) { +//! player.secs_in_game += secs; +//! } +//! } +//! +//! fn add(&mut self, name: String) -> PlayerId { +//! let id = PlayerId(self.0.len() as u32); +//! self.0.insert( +//! id, +//! PlayerData { +//! name, +//! secs_in_game: 0, +//! id, +//! }, +//! ); +//! id +//! } +//! } +//! +//! impl PlayerAccessMut for Stager<'_, StagedPlayerChanges> { +//! fn get_name_mut(&mut self, id: PlayerId) -> Option> { +//! if !self.cold.0.contains_key(&id) && !self.staged.replacements.contains_key(&id) { +//! return None; +//! } +//! +//! let player = self +//! .staged +//! .replacements +//! .entry(id) +//! .or_insert_with(|| self.cold.0.get(&id).cloned().unwrap()); +//! Some(player.name.as_mut_str()) +//! } +//! +//! fn add_secs_in_game(&mut self, id: PlayerId, secs: u32) { +//! *self.staged.additional_time_played.entry(id).or_default() += secs; +//! } +//! +//! fn add(&mut self, name: String) -> PlayerId { +//! let id = PlayerId((self.cold.0.len() + self.staged.replacements.len()) as u32); +//! self.staged.replacements.insert( +//! id, +//! PlayerData { +//! name, +//! secs_in_game: 0, +//! id, +//! }, +//! ); +//! id +//! } +//! } +//! +//! struct LockedNameStagedRef<'a> { +//! staged: RwLockReadGuard<'a, StagedPlayerChanges>, +//! // must be valid +//! id: PlayerId, +//! } +//! +//! struct LockedNameColdRef<'a, T: StagableWrites + 'a> { +//! cold: T::ColdStorage<'a>, +//! // must be valid +//! id: PlayerId, +//! } +//! +//! impl Deref for LockedNameStagedRef<'_> { +//! type Target = str; +//! +//! fn deref(&self) -> &Self::Target { +//! self.staged +//! .replacements +//! 
.get(&self.id) +//! .unwrap() +//! .name +//! .as_str() +//! } +//! } +//! +//! impl<'a, T: StagableWrites + 'a> Deref for LockedNameColdRef<'a, T> { +//! type Target = str; +//! +//! fn deref(&self) -> &Self::Target { +//! self.cold.deref().0.get(&self.id).unwrap().name.as_str() +//! } +//! } +//! +//! #[derive(Debug, Default)] +//! pub struct PlayerRegistry { +//! players: StageOnWrite, +//! } +//! +//! impl PlayerRegistry { +//! /// Runs relatively rarely +//! pub fn player_joined(&self, name: String) -> PlayerId { +//! self.players.stage_scope_locked(|stager| stager.add(name)) +//! } +//! +//! /// Runs very often +//! pub fn get_name<'a>(&'a self, id: PlayerId) -> Option + 'a> { +//! { +//! let this = self.players.read_lock(); +//! if this.staged.replacements.contains_key(&id) { +//! Some(MaybeStaged::Staged(LockedNameStagedRef { +//! staged: this.get_staged_guard(), +//! id, +//! })) +//! } else if this.cold.0.contains_key(&id) { +//! Some(MaybeStaged::Cold(LockedNameColdRef::< +//! StageOnWrite, +//! > { +//! cold: this.get_cold_guard(), +//! id, +//! })) +//! } else { +//! None +//! } +//! } +//! } +//! +//! /// Cleans up internal data to make reading faster. +//! pub fn clean(&mut self) { +//! self.players.apply_staged_for_full(); +//! } +//! +//! /// Allows reading in bulk without extra locking. +//! pub fn bulk_read(&self) -> StagedRefLocked<'_, StageOnWrite> { +//! self.players.read_lock() +//! } +//! +//! /// Allows writing in bulk without extra locking. +//! pub fn bulk_write(&self) -> StagerLocked<'_, StageOnWrite> { +//! self.players.stage_lock() +//! } +//! } +//! ``` use bevy_platform_support::sync::{PoisonError, RwLock, RwLockReadGuard, RwLockWriteGuard}; use core::ops::{Deref, DerefMut}; @@ -20,27 +265,30 @@ pub trait StagedChanges: Default { fn any_staged(&self) -> bool; } -/// A trait that signifies that it holds an immutable reference to a cold type (ie. [`StagedChanges::Cold`]). 
-pub trait ColdStorage: Deref {} - +/// This trait defines relevant types for [`StagableWrites`]. +/// See [`this github issue`](https://github.com/rust-lang/rust/issues/87479) for why this needs to be separate. pub trait StagableWritesTypes { + /// This is the type that will store staged changes. type Staging: StagedChanges; - type ColdStorage<'a>: Deref::Cold> + /// This is the type that will store [`Cold`](StagedChanges::Cold) for [`Staging`](Self::Staging). + /// This is left generalized so that it can be put in a lock or otherwise if necessary. + type ColdRef<'a>: Deref::Cold> where ::Cold: 'a; } +/// This trait generallizes the stage on write concept. pub trait StagableWrites: StagableWritesTypes { /// Allows raw access to reading cold storage, which may still have unapplied staged changes that make this out of date. - /// Only use this if you really know what you are doing. + /// Use this to return data attached to a lock guard when one such guard is already in existence. + /// /// This must never deadlock if there is already a [`Self::ColdStorage`] for this value on this thread. - #[doc(hidden)] - fn raw_read_cold(&self) -> Self::ColdStorage<'_>; + fn raw_read_cold(&self) -> Self::ColdRef<'_>; /// Allows raw access to reading staged changes, which may be missing context of cold storage. - /// Only use this if you really know what you are doing. + /// Use this to return data attached to a lock guard when one such guard is already in existence. + /// /// This must never deadlock if there is already a read for this value on this thread. - #[doc(hidden)] fn raw_read_staged(&self) -> RwLockReadGuard<'_, Self::Staging>; } @@ -69,7 +317,9 @@ pub struct StagedRef<'a, T: StagedChanges> { #[derive(Debug)] pub struct StagerLocked<'a, T: StagableWrites> { inner: &'a T, - pub cold: T::ColdStorage<'a>, + /// The storage that is read optimized. + pub cold: T::ColdRef<'a>, + /// The staged changes. 
pub staged: RwLockWriteGuard<'a, T::Staging>, } @@ -78,7 +328,9 @@ pub struct StagerLocked<'a, T: StagableWrites> { #[derive(Debug)] pub struct StagedRefLocked<'a, T: StagableWrites> { inner: &'a T, - pub cold: T::ColdStorage<'a>, + /// The storage that is read optimized. + pub cold: T::ColdRef<'a>, + /// The staged changes. pub staged: RwLockReadGuard<'a, T::Staging>, } @@ -142,7 +394,7 @@ pub struct AtomicStageOnWrite(Arc>) impl StagableWritesTypes for StageOnWrite { type Staging = T; - type ColdStorage<'a> + type ColdRef<'a> = &'a T::Cold where T::Cold: 'a; @@ -151,14 +403,14 @@ impl StagableWritesTypes for StageOnWrite { impl StagableWritesTypes for AtomicStageOnWrite { type Staging = T; - type ColdStorage<'a> + type ColdRef<'a> = RwLockReadGuard<'a, T::Cold> where T::Cold: 'a; } impl StagableWrites for StageOnWrite { - fn raw_read_cold(&self) -> Self::ColdStorage<'_> { + fn raw_read_cold(&self) -> Self::ColdRef<'_> { &self.cold } @@ -168,7 +420,7 @@ impl StagableWrites for StageOnWrite { } impl StagableWrites for AtomicStageOnWrite { - fn raw_read_cold(&self) -> Self::ColdStorage<'_> { + fn raw_read_cold(&self) -> Self::ColdRef<'_> { self.0.cold.read().unwrap_or_else(PoisonError::into_inner) } @@ -551,7 +803,7 @@ impl<'a, T: StagableWrites> StagedRefLocked<'a, T> { /// Allows returning a reference to the cold data without releasing its lock (it it has one). 
#[inline] - pub fn get_cold_guard(&self) -> T::ColdStorage<'a> { + pub fn get_cold_guard(&self) -> T::ColdRef<'a> { self.inner.raw_read_cold() } } @@ -585,10 +837,6 @@ where } } -impl ColdStorage for RwLockReadGuard<'_, T::Cold> {} - -impl ColdStorage for &'_ T::Cold {} - impl> Deref for MaybeStaged { type Target = C::Target; @@ -644,246 +892,3 @@ mod tests { assert_eq!(&full[..], &[5]); } } - -mod example { - use core::mem::take; - use core::ops::{Deref, DerefMut}; - use std::sync::RwLockReadGuard; - - use crate as bevy_utils; - use bevy_platform_support::collections::HashMap; - use bevy_platform_support::prelude::String; - use bevy_utils::staging::{ - MaybeStaged, StagableWrites, StageOnWrite, StagedChanges, StagedRef, StagedRefLocked, - Stager, StagerLocked, - }; - - /// Stores some arbitrary player data. - #[derive(Debug, Clone)] - pub struct PlayerData { - name: String, - secs_in_game: u32, - id: PlayerId, - } - - /// A unique id per player - #[derive(Hash, Debug, Clone, Copy, PartialEq, Eq)] - pub struct PlayerId(u32); - - /// The standard collection of players - #[derive(Default, Debug)] - pub struct Players(HashMap); - - /// When a change is made to a player - #[derive(Default, Debug)] - pub struct StagedPlayerChanges { - replacements: HashMap, - additional_time_played: HashMap, - } - - impl StagedChanges for StagedPlayerChanges { - type Cold = Players; - - fn apply_staged(&mut self, storage: &mut Self::Cold) { - for replaced in self.replacements.drain() { - storage.0.insert(replaced.0, replaced.1); - } - for (id, new_time) in self.additional_time_played.iter_mut() { - if let Some(player) = storage.0.get_mut(id) { - player.secs_in_game += take(new_time); - } - } - } - - fn any_staged(&self) -> bool { - !self.replacements.is_empty() || !self.additional_time_played.is_empty() - } - } - - /// Allows read only access to player data. 
- trait PlayerAccess { - fn get_name(&self, id: PlayerId) -> Option>; - fn get_secs_in_game(&self, id: PlayerId) -> Option; - } - - impl PlayerAccess for Players { - fn get_name(&self, id: PlayerId) -> Option> { - self.0.get(&id).map(|player| player.name.as_str()) - } - - fn get_secs_in_game(&self, id: PlayerId) -> Option { - self.0.get(&id).map(|player| player.secs_in_game) - } - } - - impl PlayerAccess for StagedRef<'_, StagedPlayerChanges> { - fn get_name(&self, id: PlayerId) -> Option> { - if let Some(staged) = self.staged.replacements.get(&id) { - Some(MaybeStaged::Staged(staged.name.as_str())) - } else { - self.cold.get_name(id).map(MaybeStaged::Cold) - } - } - - fn get_secs_in_game(&self, id: PlayerId) -> Option { - let base = if let Some(staged) = self.staged.replacements.get(&id) { - Some(staged.secs_in_game) - } else { - self.cold.0.get(&id).map(|player| player.secs_in_game) - }?; - let additional = self - .staged - .additional_time_played - .get(&id) - .copied() - .unwrap_or_default(); - Some(base + additional) - } - } - - /// Allows read only access to player data. 
- trait PlayerAccessMut { - fn get_name_mut(&mut self, id: PlayerId) -> Option>; - fn add_secs_in_game(&mut self, id: PlayerId, secs: u32); - fn add(&mut self, name: String) -> PlayerId; - } - - impl PlayerAccessMut for Players { - fn get_name_mut(&mut self, id: PlayerId) -> Option> { - self.0.get_mut(&id).map(|player| player.name.as_mut_str()) - } - - fn add_secs_in_game(&mut self, id: PlayerId, secs: u32) { - if let Some(player) = self.0.get_mut(&id) { - player.secs_in_game += secs; - } - } - - fn add(&mut self, name: String) -> PlayerId { - let id = PlayerId(self.0.len() as u32); - self.0.insert( - id, - PlayerData { - name, - secs_in_game: 0, - id, - }, - ); - id - } - } - - impl PlayerAccessMut for Stager<'_, StagedPlayerChanges> { - fn get_name_mut(&mut self, id: PlayerId) -> Option> { - if !self.cold.0.contains_key(&id) && !self.staged.replacements.contains_key(&id) { - return None; - } - - let player = self - .staged - .replacements - .entry(id) - .or_insert_with(|| self.cold.0.get(&id).cloned().unwrap()); - Some(player.name.as_mut_str()) - } - - fn add_secs_in_game(&mut self, id: PlayerId, secs: u32) { - *self.staged.additional_time_played.entry(id).or_default() += secs; - } - - fn add(&mut self, name: String) -> PlayerId { - let id = PlayerId((self.cold.0.len() + self.staged.replacements.len()) as u32); - self.staged.replacements.insert( - id, - PlayerData { - name, - secs_in_game: 0, - id, - }, - ); - id - } - } - - struct LockedNameStagedRef<'a> { - staged: RwLockReadGuard<'a, StagedPlayerChanges>, - // must be valid - id: PlayerId, - } - - struct LockedNameColdRef<'a, T: StagableWrites + 'a> { - cold: T::ColdStorage<'a>, - // must be valid - id: PlayerId, - } - - impl Deref for LockedNameStagedRef<'_> { - type Target = str; - - fn deref(&self) -> &Self::Target { - self.staged - .replacements - .get(&self.id) - .unwrap() - .name - .as_str() - } - } - - impl<'a, T: StagableWrites + 'a> Deref for LockedNameColdRef<'a, T> { - type Target = str; - - fn 
deref(&self) -> &Self::Target { - self.cold.deref().0.get(&self.id).unwrap().name.as_str() - } - } - - #[derive(Debug, Default)] - pub struct PlayerRegistry { - players: StageOnWrite, - } - - impl PlayerRegistry { - /// Runs relatively rarely - pub fn player_joined(&self, name: String) -> PlayerId { - self.players.stage_scope_locked(|stager| stager.add(name)) - } - - /// Runs very often - pub fn get_name<'a>(&'a self, id: PlayerId) -> Option + 'a> { - { - let this = self.players.read_lock(); - if this.staged.replacements.contains_key(&id) { - Some(MaybeStaged::Staged(LockedNameStagedRef { - staged: this.get_staged_guard(), - id, - })) - } else if this.cold.0.contains_key(&id) { - Some(MaybeStaged::Cold(LockedNameColdRef::< - StageOnWrite, - > { - cold: this.get_cold_guard(), - id, - })) - } else { - None - } - } - } - - /// Cleans up internal data to make reading faster. - pub fn clean(&mut self) { - self.players.apply_staged_for_full(); - } - - /// Allows reading in bulk without extra locking. - pub fn bulk_read(&self) -> StagedRefLocked<'_, StageOnWrite> { - self.players.read_lock() - } - - /// Allows writing in bulk without extra locking. - pub fn bulk_write(&self) -> StagerLocked<'_, StageOnWrite> { - self.players.stage_lock() - } - } -} From 18c4ab2969dc10d6a774d8689d7f763a2dc7f3b8 Mon Sep 17 00:00:00 2001 From: Elliott Pierce Date: Thu, 6 Feb 2025 15:09:15 -0500 Subject: [PATCH 33/45] fixed docs --- crates/bevy_utils/src/staging.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/bevy_utils/src/staging.rs b/crates/bevy_utils/src/staging.rs index 9588b7de6cb06..3ccfafb90c914 100644 --- a/crates/bevy_utils/src/staging.rs +++ b/crates/bevy_utils/src/staging.rs @@ -282,7 +282,7 @@ pub trait StagableWrites: StagableWritesTypes { /// Allows raw access to reading cold storage, which may still have unapplied staged changes that make this out of date. 
/// Use this to return data attached to a lock guard when one such guard is already in existence. /// - /// This must never deadlock if there is already a [`Self::ColdStorage`] for this value on this thread. + /// This must never deadlock if there is already a read for this value on this thread. fn raw_read_cold(&self) -> Self::ColdRef<'_>; /// Allows raw access to reading staged changes, which may be missing context of cold storage. From 3660a570fa532c7833538fcdcb4fbb69d9ca51b0 Mon Sep 17 00:00:00 2001 From: Elliott Pierce Date: Thu, 6 Feb 2025 16:10:54 -0500 Subject: [PATCH 34/45] implemented StagableWritesCore --- crates/bevy_utils/src/staging.rs | 127 ++++++++++++++++++++++++++++--- 1 file changed, 118 insertions(+), 9 deletions(-) diff --git a/crates/bevy_utils/src/staging.rs b/crates/bevy_utils/src/staging.rs index 3ccfafb90c914..d77e2641aba16 100644 --- a/crates/bevy_utils/src/staging.rs +++ b/crates/bevy_utils/src/staging.rs @@ -246,7 +246,9 @@ //! } //! ``` -use bevy_platform_support::sync::{PoisonError, RwLock, RwLockReadGuard, RwLockWriteGuard}; +use bevy_platform_support::sync::{ + PoisonError, RwLock, RwLockReadGuard, RwLockWriteGuard, TryLockError, +}; use core::ops::{Deref, DerefMut}; #[cfg(feature = "alloc")] @@ -270,15 +272,19 @@ pub trait StagedChanges: Default { pub trait StagableWritesTypes { /// This is the type that will store staged changes. type Staging: StagedChanges; - /// This is the type that will store [`Cold`](StagedChanges::Cold) for [`Staging`](Self::Staging). + /// This is the type that will reference [`Cold`](StagedChanges::Cold) for [`Staging`](Self::Staging). /// This is left generalized so that it can be put in a lock or otherwise if necessary. type ColdRef<'a>: Deref::Cold> where ::Cold: 'a; + /// This is the type that will mutably reference [`Cold`](StagedChanges::Cold) for [`Staging`](Self::Staging). + /// This is left generalized so that it can be put in a lock or otherwise if necessary. 
+ type ColdMut<'a>: Deref::Cold> + where + ::Cold: 'a; } -/// This trait generallizes the stage on write concept. -pub trait StagableWrites: StagableWritesTypes { +pub trait StagableWritesCore: StagableWritesTypes { /// Allows raw access to reading cold storage, which may still have unapplied staged changes that make this out of date. /// Use this to return data attached to a lock guard when one such guard is already in existence. /// @@ -290,8 +296,29 @@ pub trait StagableWrites: StagableWritesTypes { /// /// This must never deadlock if there is already a read for this value on this thread. fn raw_read_staged(&self) -> RwLockReadGuard<'_, Self::Staging>; + + /// Same as [`raw_read_cold`](StagableWritesCore::raw_read_cold), but never blocks. + fn raw_read_cold_non_blocking(&self) -> Option>; + + /// Same as [`raw_read_staged`](StagableWritesCore::raw_read_staged), but never blocks. + fn raw_read_staged_non_blocking(&self) -> Option>; + + /// Allows raw access to reading staged changes, which may be missing context of cold storage. + fn raw_write_staged(&self) -> RwLockWriteGuard<'_, Self::Staging>; + + /// Same as [`raw_write_staged`](StagableWritesCore::raw_write_staged), but never blocks. + fn raw_write_staged_non_blocking(&self) -> Option>; + + /// Same as [`raw_write_cold`](StagableWritesCore::raw_write_cold), but never locks. + fn raw_write_cold_mut(&mut self) -> &mut ::Cold; + + /// Same as [`raw_write_staged`](StagableWritesCore::raw_write_staged), but never locks. + fn raw_write_staged_mut(&mut self) -> &mut Self::Staging; } +/// This trait generallizes the stage on write concept. +pub trait StagableWrites: StagableWritesTypes {} + /// A struct that allows staging changes while reading from cold storage. /// Generally, staging changes should be implemented on this type. 
#[derive(Debug)] @@ -398,18 +425,28 @@ impl StagableWritesTypes for StageOnWrite { = &'a T::Cold where T::Cold: 'a; + + type ColdMut<'a> + = &'a mut T::Cold + where + ::Cold: 'a; } -impl StagableWritesTypes for AtomicStageOnWrite { +impl StagableWritesTypes for AtomicStageOnWriteInner { type Staging = T; type ColdRef<'a> = RwLockReadGuard<'a, T::Cold> where T::Cold: 'a; + + type ColdMut<'a> + = RwLockWriteGuard<'a, T::Cold> + where + T::Cold: 'a; } -impl StagableWrites for StageOnWrite { +impl StagableWritesCore for StageOnWrite { fn raw_read_cold(&self) -> Self::ColdRef<'_> { &self.cold } @@ -417,15 +454,87 @@ impl StagableWrites for StageOnWrite { fn raw_read_staged(&self) -> RwLockReadGuard<'_, Self::Staging> { self.staged.read().unwrap_or_else(PoisonError::into_inner) } + + fn raw_read_cold_non_blocking(&self) -> Option> { + Some(&self.cold) + } + + fn raw_read_staged_non_blocking(&self) -> Option> { + match self.staged.try_read() { + Ok(read) => Some(read), + Err(TryLockError::Poisoned(poison)) => Some(poison.into_inner()), + Err(TryLockError::WouldBlock) => None, + } + } + + fn raw_write_staged(&self) -> RwLockWriteGuard<'_, Self::Staging> { + self.staged.write().unwrap_or_else(PoisonError::into_inner) + } + + fn raw_write_staged_non_blocking(&self) -> Option> { + match self.staged.try_write() { + Ok(read) => Some(read), + Err(TryLockError::Poisoned(poison)) => Some(poison.into_inner()), + Err(TryLockError::WouldBlock) => None, + } + } + + fn raw_write_cold_mut(&mut self) -> &mut ::Cold { + &mut self.cold + } + + fn raw_write_staged_mut(&mut self) -> &mut Self::Staging { + self.staged + .get_mut() + .unwrap_or_else(PoisonError::into_inner) + } } -impl StagableWrites for AtomicStageOnWrite { +impl StagableWritesCore for AtomicStageOnWriteInner { fn raw_read_cold(&self) -> Self::ColdRef<'_> { - self.0.cold.read().unwrap_or_else(PoisonError::into_inner) + self.cold.read().unwrap_or_else(PoisonError::into_inner) } fn raw_read_staged(&self) -> RwLockReadGuard<'_, 
Self::Staging> { - self.0.staged.read().unwrap_or_else(PoisonError::into_inner) + self.staged.read().unwrap_or_else(PoisonError::into_inner) + } + + fn raw_read_cold_non_blocking(&self) -> Option> { + match self.cold.try_read() { + Ok(read) => Some(read), + Err(TryLockError::Poisoned(poison)) => Some(poison.into_inner()), + Err(TryLockError::WouldBlock) => None, + } + } + + fn raw_read_staged_non_blocking(&self) -> Option> { + match self.staged.try_read() { + Ok(read) => Some(read), + Err(TryLockError::Poisoned(poison)) => Some(poison.into_inner()), + Err(TryLockError::WouldBlock) => None, + } + } + + fn raw_write_staged(&self) -> RwLockWriteGuard<'_, Self::Staging> { + self.staged.write().unwrap_or_else(PoisonError::into_inner) + } + + fn raw_write_staged_non_blocking(&self) -> Option> { + match self.staged.try_write() { + Ok(read) => Some(read), + Err(TryLockError::Poisoned(poison)) => Some(poison.into_inner()), + Err(TryLockError::WouldBlock) => None, + } + } + + fn raw_write_cold_mut(&mut self) -> &mut ::Cold { + self.cold.get_mut().unwrap_or_else(PoisonError::into_inner) + } + + fn raw_write_staged_mut(&mut self) -> &mut Self::Staging { + self.staged + .get_mut() + .unwrap_or_else(PoisonError::into_inner) } } From 08cba28fcf312bc4dd95ef789c8cd278fee9adc8 Mon Sep 17 00:00:00 2001 From: Elliott Pierce Date: Thu, 6 Feb 2025 16:49:38 -0500 Subject: [PATCH 35/45] finished StagableWritesCore --- crates/bevy_utils/src/staging.rs | 514 +++++++++++++------------------ 1 file changed, 220 insertions(+), 294 deletions(-) diff --git a/crates/bevy_utils/src/staging.rs b/crates/bevy_utils/src/staging.rs index d77e2641aba16..71c5b199f4a8b 100644 --- a/crates/bevy_utils/src/staging.rs +++ b/crates/bevy_utils/src/staging.rs @@ -251,9 +251,6 @@ use bevy_platform_support::sync::{ }; use core::ops::{Deref, DerefMut}; -#[cfg(feature = "alloc")] -use bevy_platform_support::sync::Arc; - /// Signifies that this type represents staged changes to [`Cold`](Self::Cold). 
pub trait StagedChanges: Default { /// The more compact data structure that these changes compact into. @@ -269,7 +266,7 @@ pub trait StagedChanges: Default { /// This trait defines relevant types for [`StagableWrites`]. /// See [`this github issue`](https://github.com/rust-lang/rust/issues/87479) for why this needs to be separate. -pub trait StagableWritesTypes { +pub trait StagableWritesTypes: Sized { /// This is the type that will store staged changes. type Staging: StagedChanges; /// This is the type that will reference [`Cold`](StagedChanges::Cold) for [`Staging`](Self::Staging). @@ -284,6 +281,7 @@ pub trait StagableWritesTypes { ::Cold: 'a; } +/// This trait generallizes the stage on write concept. pub trait StagableWritesCore: StagableWritesTypes { /// Allows raw access to reading cold storage, which may still have unapplied staged changes that make this out of date. /// Use this to return data attached to a lock guard when one such guard is already in existence. @@ -309,11 +307,142 @@ pub trait StagableWritesCore: StagableWritesTypes { /// Same as [`raw_write_staged`](StagableWritesCore::raw_write_staged), but never blocks. fn raw_write_staged_non_blocking(&self) -> Option>; + /// Allows raw access to both staged and cold data. + fn raw_write_both_mut( + &mut self, + ) -> ( + &mut Self::Staging, + &mut ::Cold, + ); + /// Same as [`raw_write_cold`](StagableWritesCore::raw_write_cold), but never locks. - fn raw_write_cold_mut(&mut self) -> &mut ::Cold; + #[inline] + fn raw_write_cold_mut(&mut self) -> &mut ::Cold { + self.raw_write_both_mut().1 + } /// Same as [`raw_write_staged`](StagableWritesCore::raw_write_staged), but never locks. - fn raw_write_staged_mut(&mut self) -> &mut Self::Staging; + #[inline] + fn raw_write_staged_mut(&mut self) -> &mut Self::Staging { + self.raw_write_both_mut().0 + } + + /// Gets the inner cold data if there are no staged changes. + /// If [`any_staged`](Self::any_staged) is known to be false, this can be safely unwrapped. 
+ #[inline] + fn full(&mut self) -> Option<&mut ::Cold> { + if self.raw_write_staged_mut().any_staged() { + None + } else { + Some(self.raw_write_cold_mut()) + } + } + + /// Applies any staged changes before returning the full value with all changes applied. + /// Immediately after this, [`any_staged`](Self::any_staged) will be false. + #[inline] + fn apply_staged_for_full(&mut self) -> &mut ::Cold { + let (staged, cold) = self.raw_write_both_mut(); + if staged.any_staged() { + staged.apply_staged(cold); + } + cold + } + + /// Returns true if and only if there are staged changes that could be applied. + #[inline] + fn any_staged(&mut self) -> bool { + self.raw_write_staged_mut().any_staged() + } + + /// Same as [`any_staged`](StagableWritesCore::any_staged), but locks and works without mutible access. + #[inline] + fn any_staged_ref(&self) -> bool { + self.raw_read_staged().any_staged() + } + + /// Constructs a [`Stager`] that will stage changes. + #[inline] + fn stage(&mut self) -> Stager<'_, Self::Staging> { + let writes = self.raw_write_both_mut(); + Stager { + cold: writes.1, + staged: writes.0, + } + } + + /// Constructs a [`StagerLocked`], locking internally. + /// + /// # Deadlocks + /// + /// This deadlocks if there are any other lock guards on this thread for this value. + #[inline] + fn stage_lock(&self) -> StagerLocked<'_, Self> { + StagerLocked { + inner: self, + cold: self.raw_read_cold(), + staged: self.raw_write_staged(), + } + } + + /// Constructs a [`Stager`] that will stage changes. + #[inline] + fn read(&mut self) -> StagedRef<'_, Self::Staging> { + let writes = self.raw_write_both_mut(); + StagedRef { + cold: writes.1, + staged: writes.0, + } + } + + /// Constructs a [`StagedRefLocked`], locking internally. + /// + /// # Deadlocks + /// + /// This deadlocks if there are any write lock guards on this thread for this value. 
+ #[inline] + fn read_lock(&self) -> StagedRefLocked<'_, Self> { + StagedRefLocked { + inner: self, + cold: self.raw_read_cold(), + staged: self.raw_read_staged(), + } + } + + /// Runs different logic depending on if additional changes are already staged. + /// This can be faster than greedily applying staged changes if there are already staged changes. + fn maybe_stage( + &mut self, + for_full: impl FnOnce(&mut as Deref>::Target) -> C, + for_staged: impl FnOnce(&mut Stager) -> S, + ) -> MaybeStaged { + let (staged, cold) = self.raw_write_both_mut(); + if staged.any_staged() { + MaybeStaged::Staged(for_staged(&mut Stager { cold, staged })) + } else { + MaybeStaged::Cold(for_full(cold)) + } + } + + /// Easily run a stager function to stage changes. + /// + /// # Deadlocks + /// + /// This deadlocks if there are any other lock guards on this thread for this value. + #[inline] + fn stage_scope_locked(&self, f: impl FnOnce(&mut Stager) -> R) -> R { + f(&mut self.stage_lock().as_stager()) + } + + /// Easily run a [`StagedRef`] function. + /// + /// # Deadlocks + /// + /// This deadlocks if there are any write lock guards on this thread for this value. + #[inline] + fn read_scope_locked(&self, f: impl FnOnce(&StagedRef) -> R) -> R { + f(&self.read_lock().as_staged_ref()) + } } /// This trait generallizes the stage on write concept. @@ -342,7 +471,7 @@ pub struct StagedRef<'a, T: StagedChanges> { /// A locked version of [`Stager`]. /// Use this to hold a lock guard while using [`StagerLocked::as_stager`] or similar. #[derive(Debug)] -pub struct StagerLocked<'a, T: StagableWrites> { +pub struct StagerLocked<'a, T: StagableWritesCore> { inner: &'a T, /// The storage that is read optimized. pub cold: T::ColdRef<'a>, @@ -353,7 +482,7 @@ pub struct StagerLocked<'a, T: StagableWrites> { /// A locked version of [`StagedRef`]. /// Use this to hold a lock guard while using [`StagerLocked::as_staged_ref`]. 
#[derive(Debug)] -pub struct StagedRefLocked<'a, T: StagableWrites> { +pub struct StagedRefLocked<'a, T: StagableWritesCore> { inner: &'a T, /// The storage that is read optimized. pub cold: T::ColdRef<'a>, @@ -385,9 +514,12 @@ pub struct StageOnWrite { staged: RwLock, } +/// A version of [`StageOnWrite`] designed for atomic use. +/// It functions fully without needing `&mut self`. +/// See [`StageOnWrite`] for details. #[cfg(feature = "alloc")] #[derive(Default, Debug)] -struct AtomicStageOnWriteInner { +pub struct AtomicStageOnWriteInner { /// Cold data is read optimized. /// This lives behind a [`RwLock`], but it is only written to for applying changes in a non-blocking way. /// This will only block if a thread tries to read from it while it is having changes applied, but that is extremely rare. @@ -397,26 +529,26 @@ struct AtomicStageOnWriteInner { staged: RwLock, } -/// A version of [`StageOnWrite`] designed for atomic use. -/// See [`StageOnWrite`] for details. -/// -/// This type includes a baked in [`Arc`], so it can be shared across threads. -/// -/// Many of it's methods take `&mut self` to ensure access is exclusive, preventing possible deadlocks. -/// This doesn not guarantee there are no deadlocks when working with multiple clones of this on the same thread. -/// Here's an example: -/// -/// ```compile_fail -/// use ... -/// let mut stage_on_write = AtomicStageOnWrite::::default(); -/// let reading = stage_on_write.read_lock(); -/// stage_on_write.apply_staged_non_blocking(); -/// ``` -/// -/// Remember to use [`apply_staged_non_blocking`](Self::apply_staged_non_blocking) or similar methods periodically as a best practice. -#[cfg(feature = "alloc")] -#[derive(Clone)] -pub struct AtomicStageOnWrite(Arc>); +// /// A version of [`StageOnWrite`] designed for atomic use. +// /// See [`StageOnWrite`] for details. +// /// +// /// This type includes a baked in [`Arc`], so it can be shared across threads. 
+// /// +// /// Many of it's methods take `&mut self` to ensure access is exclusive, preventing possible deadlocks. +// /// This doesn not guarantee there are no deadlocks when working with multiple clones of this on the same thread. +// /// Here's an example: +// /// +// /// ```compile_fail +// /// use ... +// /// let mut stage_on_write = AtomicStageOnWrite::::default(); +// /// let reading = stage_on_write.read_lock(); +// /// stage_on_write.apply_staged_non_blocking(); +// /// ``` +// /// +// /// Remember to use [`apply_staged_non_blocking`](Self::apply_staged_non_blocking) or similar methods periodically as a best practice. +// #[cfg(feature = "alloc")] +// #[derive(Clone)] +// pub struct AtomicStageOnWrite(Arc>); impl StagableWritesTypes for StageOnWrite { type Staging = T; @@ -447,18 +579,22 @@ impl StagableWritesTypes for AtomicStageOnWriteInner { } impl StagableWritesCore for StageOnWrite { + #[inline] fn raw_read_cold(&self) -> Self::ColdRef<'_> { &self.cold } + #[inline] fn raw_read_staged(&self) -> RwLockReadGuard<'_, Self::Staging> { self.staged.read().unwrap_or_else(PoisonError::into_inner) } + #[inline] fn raw_read_cold_non_blocking(&self) -> Option> { Some(&self.cold) } + #[inline] fn raw_read_staged_non_blocking(&self) -> Option> { match self.staged.try_read() { Ok(read) => Some(read), @@ -467,10 +603,12 @@ impl StagableWritesCore for StageOnWrite { } } + #[inline] fn raw_write_staged(&self) -> RwLockWriteGuard<'_, Self::Staging> { self.staged.write().unwrap_or_else(PoisonError::into_inner) } + #[inline] fn raw_write_staged_non_blocking(&self) -> Option> { match self.staged.try_write() { Ok(read) => Some(read), @@ -479,26 +617,34 @@ impl StagableWritesCore for StageOnWrite { } } - fn raw_write_cold_mut(&mut self) -> &mut ::Cold { - &mut self.cold - } - - fn raw_write_staged_mut(&mut self) -> &mut Self::Staging { - self.staged - .get_mut() - .unwrap_or_else(PoisonError::into_inner) + #[inline] + fn raw_write_both_mut( + &mut self, + ) -> ( + &mut 
Self::Staging, + &mut ::Cold, + ) { + ( + self.staged + .get_mut() + .unwrap_or_else(PoisonError::into_inner), + &mut self.cold, + ) } } impl StagableWritesCore for AtomicStageOnWriteInner { + #[inline] fn raw_read_cold(&self) -> Self::ColdRef<'_> { self.cold.read().unwrap_or_else(PoisonError::into_inner) } + #[inline] fn raw_read_staged(&self) -> RwLockReadGuard<'_, Self::Staging> { self.staged.read().unwrap_or_else(PoisonError::into_inner) } + #[inline] fn raw_read_cold_non_blocking(&self) -> Option> { match self.cold.try_read() { Ok(read) => Some(read), @@ -507,6 +653,7 @@ impl StagableWritesCore for AtomicStageOnWriteInner { } } + #[inline] fn raw_read_staged_non_blocking(&self) -> Option> { match self.staged.try_read() { Ok(read) => Some(read), @@ -515,10 +662,12 @@ impl StagableWritesCore for AtomicStageOnWriteInner { } } + #[inline] fn raw_write_staged(&self) -> RwLockWriteGuard<'_, Self::Staging> { self.staged.write().unwrap_or_else(PoisonError::into_inner) } + #[inline] fn raw_write_staged_non_blocking(&self) -> Option> { match self.staged.try_write() { Ok(read) => Some(read), @@ -527,14 +676,19 @@ impl StagableWritesCore for AtomicStageOnWriteInner { } } - fn raw_write_cold_mut(&mut self) -> &mut ::Cold { - self.cold.get_mut().unwrap_or_else(PoisonError::into_inner) - } - - fn raw_write_staged_mut(&mut self) -> &mut Self::Staging { - self.staged - .get_mut() - .unwrap_or_else(PoisonError::into_inner) + #[inline] + fn raw_write_both_mut( + &mut self, + ) -> ( + &mut Self::Staging, + &mut ::Cold, + ) { + ( + self.staged + .get_mut() + .unwrap_or_else(PoisonError::into_inner), + self.cold.get_mut().unwrap_or_else(PoisonError::into_inner), + ) } } @@ -546,202 +700,34 @@ impl StageOnWrite { staged: RwLock::default(), } } - - /// Gets the inner cold data if there are no staged changes. - /// If [`any_staged`](Self::any_staged) is known to be false, this can be safely unwrapped. 
- #[inline] - pub fn full(&mut self) -> Option<&mut T::Cold> { - if self - .staged - .get_mut() - .unwrap_or_else(PoisonError::into_inner) - .any_staged() - { - None - } else { - Some(&mut self.cold) - } - } - - /// Applies any staged changes before returning the full value with all changes applied. - /// Immediately after this, [`any_staged`](Self::any_staged) will be false. - #[inline] - pub fn apply_staged_for_full(&mut self) -> &mut T::Cold { - let staged = self - .staged - .get_mut() - .unwrap_or_else(PoisonError::into_inner); - if staged.any_staged() { - staged.apply_staged(&mut self.cold); - } - &mut self.cold - } - - /// Returns true if and only if there are staged changes that could be applied. - /// If you only have a immutable reference, consider using [`read_scope_locked`](Self::read_scope_locked) with [`StagedChanges::any_staged`]. - #[inline] - pub fn any_staged(&mut self) -> bool { - self.staged - .get_mut() - .unwrap_or_else(PoisonError::into_inner) - .any_staged() - } - - /// Constructs a [`Stager`] that will stage changes. - #[inline] - pub fn stage(&mut self) -> Stager<'_, T> { - Stager { - cold: &mut self.cold, - staged: self - .staged - .get_mut() - .unwrap_or_else(PoisonError::into_inner), - } - } - - /// Constructs a [`StagerLocked`], locking internally. - /// - /// # Deadlocks - /// - /// This deadlocks if there are any other lock guards on this thread for this value. - #[inline] - pub fn stage_lock(&self) -> StagerLocked<'_, Self> { - StagerLocked { - inner: self, - cold: &self.cold, - staged: self.staged.write().unwrap_or_else(PoisonError::into_inner), - } - } - - /// Constructs a [`Stager`] that will stage changes. - #[inline] - pub fn read(&mut self) -> StagedRef<'_, T> { - StagedRef { - cold: &self.cold, - staged: self - .staged - .get_mut() - .unwrap_or_else(PoisonError::into_inner), - } - } - - /// Constructs a [`StagedRefLocked`], locking internally. 
- /// - /// # Deadlocks - /// - /// This deadlocks if there are any write lock guards on this thread for this value. - #[inline] - pub fn read_lock(&self) -> StagedRefLocked<'_, Self> { - StagedRefLocked { - inner: self, - cold: &self.cold, - staged: self.staged.read().unwrap_or_else(PoisonError::into_inner), - } - } - - /// Runs different logic depending on if additional changes are already staged. - /// This can be faster than greedily applying staged changes if there are already staged changes. - pub fn maybe_stage( - &mut self, - for_full: impl FnOnce(&mut T::Cold) -> C, - for_staged: impl FnOnce(&mut Stager) -> S, - ) -> MaybeStaged { - let staged = self - .staged - .get_mut() - .unwrap_or_else(PoisonError::into_inner); - let cold = &mut self.cold; - if staged.any_staged() { - MaybeStaged::Staged(for_staged(&mut Stager { cold, staged })) - } else { - MaybeStaged::Cold(for_full(cold)) - } - } - - /// Easily run a stager function to stage changes. - /// - /// # Deadlocks - /// - /// This deadlocks if there are any other lock guards on this thread for this value. - #[inline] - pub fn stage_scope_locked(&self, f: impl FnOnce(&mut Stager) -> R) -> R { - f(&mut self.stage_lock().as_stager()) - } - - /// Easily run a [`StagedRef`] function. - /// - /// # Deadlocks - /// - /// This deadlocks if there are any write lock guards on this thread for this value. - #[inline] - pub fn read_scope_locked(&self, f: impl FnOnce(&StagedRef) -> R) -> R { - f(&self.read_lock().as_staged_ref()) - } } #[cfg(feature = "alloc")] -impl AtomicStageOnWrite { - /// Constructs a new [`AtomicStageOnWrite`] with the given value and no staged changes. +impl AtomicStageOnWriteInner { + /// Constructs a new [`AtomicStageOnWriteInner`] with the given value and no staged changes. pub fn new(value: T::Cold) -> Self { - Self(Arc::new(AtomicStageOnWriteInner { + Self { cold: RwLock::new(value), staged: RwLock::default(), - })) - } - - /// Gets the inner cold data if there are no staged changes. 
- /// If [`any_staged`](Self::any_staged) is known to be false, this can be safely unwrapped. - /// - /// Note that this **Blocks**, so generally prefer [`full_non_blocking`](Self::full_non_blocking). - #[inline] - pub fn full_locked(&mut self) -> Option> { - if self - .0 - .staged - .read() - .unwrap_or_else(PoisonError::into_inner) - .any_staged() - { - None - } else { - Some(self.0.cold.write().unwrap_or_else(PoisonError::into_inner)) } } - /// Applies any staged changes before returning the full value with all changes applied. - /// Immediately after this, [`any_staged`](Self::any_staged) will be false. - /// - /// Note that this **Blocks**, so generally prefer [`apply_staged_for_full_non_blocking`](Self::apply_staged_for_full_non_blocking). - #[inline] - pub fn apply_staged_for_full_locked(&mut self) -> RwLockWriteGuard<'_, T::Cold> { - let mut staged = self - .0 - .staged - .write() - .unwrap_or_else(PoisonError::into_inner); - let mut cold = self.0.cold.write().unwrap_or_else(PoisonError::into_inner); - if staged.any_staged() { - staged.apply_staged(&mut cold); - } - cold - } - /// Gets the inner cold data if there are no staged changes and nobody is reading from the cold data. #[inline] - pub fn full_non_blocking(&mut self) -> Option> { - let staged = self.0.staged.try_read().ok()?; + pub fn full_non_blocking(&self) -> Option> { + let staged = self.staged.try_read().ok()?; if staged.any_staged() { None } else { - self.0.cold.try_write().ok() + self.cold.try_write().ok() } } /// Applies any staged changes before returning the full value with all changes applied. 
#[inline] - pub fn apply_staged_for_full_non_blocking(&mut self) -> Option> { - let mut cold = self.0.cold.try_write().ok()?; - match self.0.staged.try_write() { + pub fn apply_staged_for_full_non_blocking(&self) -> Option> { + let mut cold = self.cold.try_write().ok()?; + match self.staged.try_write() { Ok(mut staged) => { if staged.any_staged() { staged.apply_staged(&mut cold); @@ -749,7 +735,7 @@ impl AtomicStageOnWrite { Some(cold) } Err(_) => { - let staged = self.0.staged.read().unwrap_or_else(PoisonError::into_inner); + let staged = self.staged.read().unwrap_or_else(PoisonError::into_inner); if staged.any_staged() { None } else { @@ -762,12 +748,12 @@ impl AtomicStageOnWrite { /// If possible applies any staged changes. /// Returns true if it can guarantee there are no more staged changes. #[inline] - pub fn apply_staged_non_blocking(&mut self) -> bool { - let Ok(mut staged) = self.0.staged.try_write() else { + pub fn apply_staged_non_blocking(&self) -> bool { + let Ok(mut staged) = self.staged.try_write() else { return false; }; if staged.any_staged() { - let Ok(mut cold) = self.0.cold.try_write() else { + let Ok(mut cold) = self.cold.try_write() else { return false; }; staged.apply_staged(&mut cold); @@ -777,62 +763,24 @@ impl AtomicStageOnWrite { } } - /// Returns true if and only if there are staged changes that could be applied. - #[inline] - pub fn any_staged(&self) -> bool { - self.0 - .staged - .read() - .unwrap_or_else(PoisonError::into_inner) - .any_staged() - } - - /// Constructs a [`StagerLocked`], locking internally. - #[inline] - pub fn stage_lock(&mut self) -> StagerLocked<'_, Self> { - StagerLocked { - inner: self, - cold: self.0.cold.read().unwrap_or_else(PoisonError::into_inner), - staged: self - .0 - .staged - .write() - .unwrap_or_else(PoisonError::into_inner), - } - } - - /// Constructs a [`StagedRefLocked`], locking internally. 
- #[inline] - pub fn read_lock(&self) -> StagedRefLocked<'_, Self> { - StagedRefLocked { - inner: self, - cold: self.0.cold.read().unwrap_or_else(PoisonError::into_inner), - staged: self.0.staged.read().unwrap_or_else(PoisonError::into_inner), - } - } - /// Runs different logic depending on if additional changes are already staged and if using cold directly would block. /// This *can* be faster than greedily applying staged changes if there are no staged changes and no reads from cold. pub fn maybe_stage( - &mut self, + &self, for_full: impl FnOnce(&mut T::Cold) -> C, for_staged: impl FnOnce(&mut Stager) -> S, ) -> MaybeStaged { - let mut staged = self - .0 - .staged - .write() - .unwrap_or_else(PoisonError::into_inner); + let mut staged = self.staged.write().unwrap_or_else(PoisonError::into_inner); if staged.any_staged() { - let cold = self.0.cold.read().unwrap_or_else(PoisonError::into_inner); + let cold = self.cold.read().unwrap_or_else(PoisonError::into_inner); MaybeStaged::Staged(for_staged(&mut Stager { cold: &cold, staged: &mut staged, })) - } else if let Ok(mut cold) = self.0.cold.try_write() { + } else if let Ok(mut cold) = self.cold.try_write() { MaybeStaged::Cold(for_full(&mut cold)) } else { - let cold = self.0.cold.read().unwrap_or_else(PoisonError::into_inner); + let cold = self.cold.read().unwrap_or_else(PoisonError::into_inner); MaybeStaged::Staged(for_staged(&mut Stager { cold: &cold, staged: &mut staged, @@ -840,12 +788,6 @@ impl AtomicStageOnWrite { } } - /// Easily run a stager function to stage changes. - #[inline] - pub fn stage_scope_locked(&mut self, f: impl FnOnce(&mut Stager) -> R) -> R { - f(&mut self.stage_lock().as_stager()) - } - /// Easily run a stager function to stage changes. /// Then, tries to apply those changes if doing so wouldn't lock. #[inline] @@ -854,15 +796,9 @@ impl AtomicStageOnWrite { self.apply_staged_non_blocking(); result } - - /// Easily run a [`StagedRef`] function. 
- #[inline] - pub fn read_scope_locked(&self, f: impl FnOnce(&StagedRef) -> R) -> R { - f(&self.read_lock().as_staged_ref()) - } } -impl<'a, T: StagableWrites> StagerLocked<'a, T> { +impl<'a, T: StagableWritesCore> StagerLocked<'a, T> { /// Allows a user to view this as a [`Stager`]. #[inline] pub fn as_stager(&mut self) -> Stager<'_, T::Staging> { @@ -888,7 +824,7 @@ impl<'a, T: StagableWrites> StagerLocked<'a, T> { } } -impl<'a, T: StagableWrites> StagedRefLocked<'a, T> { +impl<'a, T: StagableWritesCore> StagedRefLocked<'a, T> { /// Allows a user to view this as a [`StagedRef`]. #[inline] pub fn as_staged_ref(&self) -> StagedRef<'_, T::Staging> { @@ -926,7 +862,7 @@ impl<'a, T: StagedChanges> Clone for StagedRef<'a, T> { } } -impl<'a, T: StagableWrites> Clone for StagedRefLocked<'a, T> { +impl<'a, T: StagableWritesCore> Clone for StagedRefLocked<'a, T> { fn clone(&self) -> Self { Self { inner: self.inner, @@ -936,16 +872,6 @@ impl<'a, T: StagableWrites> Clone for StagedRefLocked<'a, T> { } } -#[cfg(feature = "alloc")] -impl Default for AtomicStageOnWrite -where - T::Cold: Default, -{ - fn default() -> Self { - Self::new(T::Cold::default()) - } -} - impl> Deref for MaybeStaged { type Target = C::Target; From 34160b47851717968e386dbafd8be811d5b3ada1 Mon Sep 17 00:00:00 2001 From: Elliott Pierce Date: Thu, 6 Feb 2025 17:03:39 -0500 Subject: [PATCH 36/45] Alert for deadlocks with unsafe --- crates/bevy_utils/src/staging.rs | 79 +++++++++++++++++++++----------- 1 file changed, 51 insertions(+), 28 deletions(-) diff --git a/crates/bevy_utils/src/staging.rs b/crates/bevy_utils/src/staging.rs index 71c5b199f4a8b..9e88dea6c177e 100644 --- a/crates/bevy_utils/src/staging.rs +++ b/crates/bevy_utils/src/staging.rs @@ -1,3 +1,7 @@ +#![expect( + unsafe_code, + reason = "This module marks some items as unsafe to alert users to deadlock potential." +)] //! Provides an abstracted system for staging modifications to data structures that rarely change. //! 
See [`StageOnWrite`] as a starting point. //! @@ -373,11 +377,11 @@ pub trait StagableWritesCore: StagableWritesTypes { /// Constructs a [`StagerLocked`], locking internally. /// - /// # Deadlocks + /// # Safety /// - /// This deadlocks if there are any other lock guards on this thread for this value. + /// There must not be any other lock guards on this thread for this value. Otherwise it deadlocks. #[inline] - fn stage_lock(&self) -> StagerLocked<'_, Self> { + unsafe fn stage_lock_unsafe(&self) -> StagerLocked<'_, Self> { StagerLocked { inner: self, cold: self.raw_read_cold(), @@ -397,11 +401,11 @@ pub trait StagableWritesCore: StagableWritesTypes { /// Constructs a [`StagedRefLocked`], locking internally. /// - /// # Deadlocks + /// # Safety /// - /// This deadlocks if there are any write lock guards on this thread for this value. + /// There must not be any write lock guards on this thread for this value. Otherwise it deadlocks. #[inline] - fn read_lock(&self) -> StagedRefLocked<'_, Self> { + unsafe fn read_lock_unsafe(&self) -> StagedRefLocked<'_, Self> { StagedRefLocked { inner: self, cold: self.raw_read_cold(), @@ -423,31 +427,46 @@ pub trait StagableWritesCore: StagableWritesTypes { MaybeStaged::Cold(for_full(cold)) } } +} + +/// This trait generallizes the stage on write concept. +pub trait StagableWrites: Deref +where + Self::Target: StagableWritesCore, +{ + /// Exactly the same as [`StagableWritesCore::stage_lock_usafe`] + #[inline] + fn stage_lock(&mut self) -> StagerLocked<'_, Self::Target> { + // Safety: Because we have exclusive, mutable access, this is safe. + unsafe { self.stage_lock_unsafe() } + } + + /// Exactly the same as [`StagableWritesCore::read_lock_unsafe`] + #[inline] + fn read_lock(&self) -> StagedRefLocked<'_, Self::Target> { + // Safety: Because we have exclusive, mutable access, this is safe. + unsafe { self.read_lock_unsafe() } + } /// Easily run a stager function to stage changes. 
- /// - /// # Deadlocks - /// - /// This deadlocks if there are any other lock guards on this thread for this value. #[inline] - fn stage_scope_locked(&self, f: impl FnOnce(&mut Stager) -> R) -> R { + fn stage_scope_locked( + &mut self, + f: impl FnOnce(&mut Stager<::Staging>) -> R, + ) -> R { f(&mut self.stage_lock().as_stager()) } /// Easily run a [`StagedRef`] function. - /// - /// # Deadlocks - /// - /// This deadlocks if there are any write lock guards on this thread for this value. #[inline] - fn read_scope_locked(&self, f: impl FnOnce(&StagedRef) -> R) -> R { + fn read_scope_locked( + &mut self, + f: impl FnOnce(&StagedRef<::Staging>) -> R, + ) -> R { f(&self.read_lock().as_staged_ref()) } } -/// This trait generallizes the stage on write concept. -pub trait StagableWrites: StagableWritesTypes {} - /// A struct that allows staging changes while reading from cold storage. /// Generally, staging changes should be implemented on this type. #[derive(Debug)] @@ -765,7 +784,11 @@ impl AtomicStageOnWriteInner { /// Runs different logic depending on if additional changes are already staged and if using cold directly would block. /// This *can* be faster than greedily applying staged changes if there are no staged changes and no reads from cold. - pub fn maybe_stage( + /// + /// # Safety + /// + /// There must not be any other lock guards for this value on this thread. Otherwise, this will deadlock. + pub unsafe fn maybe_stage_unsafe( &self, for_full: impl FnOnce(&mut T::Cold) -> C, for_staged: impl FnOnce(&mut Stager) -> S, @@ -788,14 +811,14 @@ impl AtomicStageOnWriteInner { } } - /// Easily run a stager function to stage changes. - /// Then, tries to apply those changes if doing so wouldn't lock. - #[inline] - pub fn stage_scope_locked_eager(&mut self, f: impl FnOnce(&mut Stager) -> R) -> R { - let result = self.stage_scope_locked(f); - self.apply_staged_non_blocking(); - result - } + // /// Easily run a stager function to stage changes. 
+ // /// Then, tries to apply those changes if doing so wouldn't lock. + // #[inline] + // pub fn stage_scope_locked_eager(&self, f: impl FnOnce(&mut Stager) -> R) -> R { + // let result = self.stage_scope_locked(f); + // self.apply_staged_non_blocking(); + // result + // } } impl<'a, T: StagableWritesCore> StagerLocked<'a, T> { From 472b1c2cb16fe9c81eefa73accef1d6e699bfc2c Mon Sep 17 00:00:00 2001 From: Elliott Pierce Date: Thu, 6 Feb 2025 17:31:12 -0500 Subject: [PATCH 37/45] improved deadlock prevention --- crates/bevy_utils/src/staging.rs | 148 +++++++++++++++++++------------ 1 file changed, 89 insertions(+), 59 deletions(-) diff --git a/crates/bevy_utils/src/staging.rs b/crates/bevy_utils/src/staging.rs index 9e88dea6c177e..e8ece32b0a12a 100644 --- a/crates/bevy_utils/src/staging.rs +++ b/crates/bevy_utils/src/staging.rs @@ -255,6 +255,9 @@ use bevy_platform_support::sync::{ }; use core::ops::{Deref, DerefMut}; +#[cfg(feature = "alloc")] +use bevy_platform_support::sync::Arc; + /// Signifies that this type represents staged changes to [`Cold`](Self::Cold). pub trait StagedChanges: Default { /// The more compact data structure that these changes compact into. @@ -400,12 +403,8 @@ pub trait StagableWritesCore: StagableWritesTypes { } /// Constructs a [`StagedRefLocked`], locking internally. - /// - /// # Safety - /// - /// There must not be any write lock guards on this thread for this value. Otherwise it deadlocks. #[inline] - unsafe fn read_lock_unsafe(&self) -> StagedRefLocked<'_, Self> { + fn read_lock(&self) -> StagedRefLocked<'_, Self> { StagedRefLocked { inner: self, cold: self.raw_read_cold(), @@ -415,6 +414,7 @@ pub trait StagableWritesCore: StagableWritesTypes { /// Runs different logic depending on if additional changes are already staged. /// This can be faster than greedily applying staged changes if there are already staged changes. 
+ #[inline] fn maybe_stage( &mut self, for_full: impl FnOnce(&mut as Deref>::Target) -> C, @@ -427,43 +427,49 @@ pub trait StagableWritesCore: StagableWritesTypes { MaybeStaged::Cold(for_full(cold)) } } -} -/// This trait generallizes the stage on write concept. -pub trait StagableWrites: Deref -where - Self::Target: StagableWritesCore, -{ - /// Exactly the same as [`StagableWritesCore::stage_lock_usafe`] + /// Easily run a [`StagedRef`] function. #[inline] - fn stage_lock(&mut self) -> StagerLocked<'_, Self::Target> { - // Safety: Because we have exclusive, mutable access, this is safe. - unsafe { self.stage_lock_unsafe() } + fn read_scope_locked( + &self, + f: impl FnOnce(&StagedRef<::Staging>) -> R, + ) -> R { + f(&self.read_lock().as_staged_ref()) } +} + +/// This trait provides some conviniencies around [`StagableWritesCore`]. +/// +/// For example, mutable references are used to enforce safety for some functions. +pub trait StagableWrites { + /// This is the inner [`StagableWritesCore`] type responsible for the bulk of the implementation. + type Core: StagableWritesCore; - /// Exactly the same as [`StagableWritesCore::read_lock_unsafe`] + /// Gets the inner core. + fn get_core(&self) -> &Self::Core; + + /// Exactly the same as [`StagableWritesCore::stage_lock_usafe`] #[inline] - fn read_lock(&self) -> StagedRefLocked<'_, Self::Target> { + fn stage_lock(&mut self) -> StagerLocked<'_, Self::Core> { // Safety: Because we have exclusive, mutable access, this is safe. - unsafe { self.read_lock_unsafe() } + unsafe { self.get_core().stage_lock_unsafe() } } /// Easily run a stager function to stage changes. #[inline] fn stage_scope_locked( &mut self, - f: impl FnOnce(&mut Stager<::Staging>) -> R, + f: impl FnOnce(&mut Stager<::Staging>) -> R, ) -> R { f(&mut self.stage_lock().as_stager()) } +} - /// Easily run a [`StagedRef`] function. 
- #[inline] - fn read_scope_locked( - &mut self, - f: impl FnOnce(&StagedRef<::Staging>) -> R, - ) -> R { - f(&self.read_lock().as_staged_ref()) +impl StagableWrites for T { + type Core = T; + + fn get_core(&self) -> &Self::Core { + self } } @@ -538,7 +544,7 @@ pub struct StageOnWrite { /// See [`StageOnWrite`] for details. #[cfg(feature = "alloc")] #[derive(Default, Debug)] -pub struct AtomicStageOnWriteInner { +pub struct AtomicStageOnWrite { /// Cold data is read optimized. /// This lives behind a [`RwLock`], but it is only written to for applying changes in a non-blocking way. /// This will only block if a thread tries to read from it while it is having changes applied, but that is extremely rare. @@ -548,26 +554,15 @@ pub struct AtomicStageOnWriteInner { staged: RwLock, } -// /// A version of [`StageOnWrite`] designed for atomic use. -// /// See [`StageOnWrite`] for details. -// /// -// /// This type includes a baked in [`Arc`], so it can be shared across threads. -// /// -// /// Many of it's methods take `&mut self` to ensure access is exclusive, preventing possible deadlocks. -// /// This doesn not guarantee there are no deadlocks when working with multiple clones of this on the same thread. -// /// Here's an example: -// /// -// /// ```compile_fail -// /// use ... -// /// let mut stage_on_write = AtomicStageOnWrite::::default(); -// /// let reading = stage_on_write.read_lock(); -// /// stage_on_write.apply_staged_non_blocking(); -// /// ``` -// /// -// /// Remember to use [`apply_staged_non_blocking`](Self::apply_staged_non_blocking) or similar methods periodically as a best practice. -// #[cfg(feature = "alloc")] -// #[derive(Clone)] -// pub struct AtomicStageOnWrite(Arc>); +/// A version of [`StageOnWrite`] designed for atomic use. +/// See [`StageOnWrite`] for details. +/// +/// This type includes a baked in [`Arc`], so it can be shared across threads. 
+/// +/// Remember to use [`apply_staged_non_blocking`](Self::apply_staged_non_blocking) or similar methods periodically as a best practice. +#[cfg(feature = "alloc")] +#[derive(Clone)] +pub struct ArcStageOnWrite(pub Arc>); impl StagableWritesTypes for StageOnWrite { type Staging = T; @@ -583,7 +578,7 @@ impl StagableWritesTypes for StageOnWrite { ::Cold: 'a; } -impl StagableWritesTypes for AtomicStageOnWriteInner { +impl StagableWritesTypes for AtomicStageOnWrite { type Staging = T; type ColdRef<'a> @@ -652,7 +647,7 @@ impl StagableWritesCore for StageOnWrite { } } -impl StagableWritesCore for AtomicStageOnWriteInner { +impl StagableWritesCore for AtomicStageOnWrite { #[inline] fn raw_read_cold(&self) -> Self::ColdRef<'_> { self.cold.read().unwrap_or_else(PoisonError::into_inner) @@ -721,8 +716,7 @@ impl StageOnWrite { } } -#[cfg(feature = "alloc")] -impl AtomicStageOnWriteInner { +impl AtomicStageOnWrite { /// Constructs a new [`AtomicStageOnWriteInner`] with the given value and no staged changes. pub fn new(value: T::Cold) -> Self { Self { @@ -810,15 +804,51 @@ impl AtomicStageOnWriteInner { })) } } +} + +#[cfg(feature = "alloc")] +impl ArcStageOnWrite { + /// Constructs a new [`ArcStageOnWrite`] with the given value and no staged changes. + pub fn new(value: T::Cold) -> Self { + Self(Arc::new(AtomicStageOnWrite::new(value))) + } - // /// Easily run a stager function to stage changes. - // /// Then, tries to apply those changes if doing so wouldn't lock. - // #[inline] - // pub fn stage_scope_locked_eager(&self, f: impl FnOnce(&mut Stager) -> R) -> R { - // let result = self.stage_scope_locked(f); - // self.apply_staged_non_blocking(); - // result - // } + /// Exactly the same as [`AtomicStageOnWrite::maybe_stage`], but uses `&mut` to maintain safety. + pub fn maybe_stage( + &mut self, + for_full: impl FnOnce(&mut T::Cold) -> C, + for_staged: impl FnOnce(&mut Stager) -> S, + ) -> MaybeStaged { + // Safety: Safe since we have an exclusive reference to self. 
+ unsafe { self.maybe_stage_unsafe(for_full, for_staged) } + } + + /// Easily run a stager function to stage changes. + /// Then, tries to apply those changes if doing so wouldn't lock. + /// + /// # Deadlocks + /// + /// This can still deadlock if this [`Arc`] has been cloned around on the same thread and is still being locked on. + /// But that is very unlikely. + #[inline] + pub fn stage_scope_locked_eager(&mut self, f: impl FnOnce(&mut Stager) -> R) -> R { + // Safety: Since this has mutible access to self, we can be reasonably sure this is safe. + // The only way this isn't safe is if the arc has been cloned on the same thread instead of passed by ref. + // But that is documented above + let mut lock = unsafe { self.stage_lock_unsafe() }; + let result = f(&mut lock.as_stager()); + self.apply_staged_non_blocking(); + result + } +} + +#[cfg(feature = "alloc")] +impl Deref for ArcStageOnWrite { + type Target = Arc>; + + fn deref(&self) -> &Self::Target { + &self.0 + } } impl<'a, T: StagableWritesCore> StagerLocked<'a, T> { From 2e50c5fecd0af3f26cd2b36a5a4bcdc990f11ae7 Mon Sep 17 00:00:00 2001 From: Elliott Pierce Date: Thu, 6 Feb 2025 17:39:50 -0500 Subject: [PATCH 38/45] impl StagableWrites --- crates/bevy_utils/src/staging.rs | 36 ++++++++++++++++++++++++++++++++ 1 file changed, 36 insertions(+) diff --git a/crates/bevy_utils/src/staging.rs b/crates/bevy_utils/src/staging.rs index e8ece32b0a12a..5fe89ac913447 100644 --- a/crates/bevy_utils/src/staging.rs +++ b/crates/bevy_utils/src/staging.rs @@ -564,6 +564,11 @@ pub struct AtomicStageOnWrite { #[derive(Clone)] pub struct ArcStageOnWrite(pub Arc>); +/// Although it is is often enough to pass around references to a [`StagableWritesCore`], it is sometimes desierable to encapsulate that reference here. 
+/// That enables utilities like [`StagableWrites`] +#[derive(Copy)] +pub struct RefStageOnWrite<'a, T: StagableWritesCore>(pub &'a T); + impl StagableWritesTypes for StageOnWrite { type Staging = T; @@ -851,6 +856,31 @@ impl Deref for ArcStageOnWrite { } } +impl Deref for RefStageOnWrite<'_, T> { + type Target = T; + + fn deref(&self) -> &Self::Target { + self.0 + } +} + +impl StagableWrites for RefStageOnWrite<'_, T> { + type Core = T; + + fn get_core(&self) -> &Self::Core { + self.0 + } +} + +#[cfg(feature = "alloc")] +impl StagableWrites for ArcStageOnWrite { + type Core = AtomicStageOnWrite; + + fn get_core(&self) -> &Self::Core { + &self.0 + } +} + impl<'a, T: StagableWritesCore> StagerLocked<'a, T> { /// Allows a user to view this as a [`Stager`]. #[inline] @@ -915,6 +945,12 @@ impl<'a, T: StagedChanges> Clone for StagedRef<'a, T> { } } +impl<'a, T: StagableWritesCore> Clone for RefStageOnWrite<'a, T> { + fn clone(&self) -> Self { + Self(self.0) + } +} + impl<'a, T: StagableWritesCore> Clone for StagedRefLocked<'a, T> { fn clone(&self) -> Self { Self { From 10c1857db638fda63deebde5b3367b7e8ba8b6a1 Mon Sep 17 00:00:00 2001 From: Elliott Pierce Date: Thu, 6 Feb 2025 17:46:23 -0500 Subject: [PATCH 39/45] use RefStageOnWrite internally --- crates/bevy_utils/src/staging.rs | 31 ++++++++++++++++--------------- 1 file changed, 16 insertions(+), 15 deletions(-) diff --git a/crates/bevy_utils/src/staging.rs b/crates/bevy_utils/src/staging.rs index 5fe89ac913447..22ea3817f634a 100644 --- a/crates/bevy_utils/src/staging.rs +++ b/crates/bevy_utils/src/staging.rs @@ -386,7 +386,7 @@ pub trait StagableWritesCore: StagableWritesTypes { #[inline] unsafe fn stage_lock_unsafe(&self) -> StagerLocked<'_, Self> { StagerLocked { - inner: self, + inner: RefStageOnWrite(self), cold: self.raw_read_cold(), staged: self.raw_write_staged(), } @@ -406,7 +406,7 @@ pub trait StagableWritesCore: StagableWritesTypes { #[inline] fn read_lock(&self) -> StagedRefLocked<'_, Self> { 
StagedRefLocked { - inner: self, + inner: RefStageOnWrite(self), cold: self.raw_read_cold(), staged: self.raw_read_staged(), } @@ -485,7 +485,7 @@ pub struct Stager<'a, T: StagedChanges> { /// A struct that allows accessing changes while reading from cold storage. /// Generally, reading data should be implemented on this type. -#[derive(Copy, Debug)] +#[derive(Debug)] pub struct StagedRef<'a, T: StagedChanges> { /// The storage that is read optimized. pub cold: &'a T::Cold, @@ -497,7 +497,7 @@ pub struct StagedRef<'a, T: StagedChanges> { /// Use this to hold a lock guard while using [`StagerLocked::as_stager`] or similar. #[derive(Debug)] pub struct StagerLocked<'a, T: StagableWritesCore> { - inner: &'a T, + inner: RefStageOnWrite<'a, T>, /// The storage that is read optimized. pub cold: T::ColdRef<'a>, /// The staged changes. @@ -508,7 +508,7 @@ pub struct StagerLocked<'a, T: StagableWritesCore> { /// Use this to hold a lock guard while using [`StagerLocked::as_staged_ref`]. #[derive(Debug)] pub struct StagedRefLocked<'a, T: StagableWritesCore> { - inner: &'a T, + inner: RefStageOnWrite<'a, T>, /// The storage that is read optimized. pub cold: T::ColdRef<'a>, /// The staged changes. @@ -566,7 +566,7 @@ pub struct ArcStageOnWrite(pub Arc>); /// Although it is is often enough to pass around references to a [`StagableWritesCore`], it is sometimes desierable to encapsulate that reference here. /// That enables utilities like [`StagableWrites`] -#[derive(Copy)] +#[derive(Debug)] pub struct RefStageOnWrite<'a, T: StagableWritesCore>(pub &'a T); impl StagableWritesTypes for StageOnWrite { @@ -902,7 +902,7 @@ impl<'a, T: StagableWritesCore> StagerLocked<'a, T> { /// Releases the lock, returning the underlying [`StagableWrites`] structure. 
#[inline] - pub fn release(self) -> &'a T { + pub fn release(self) -> RefStageOnWrite<'a, T> { self.inner } } @@ -919,35 +919,36 @@ impl<'a, T: StagableWritesCore> StagedRefLocked<'a, T> { /// Releases the lock, returning the underlying [`StagableWrites`] structure. #[inline] - pub fn release(self) -> &'a T { + pub fn release(self) -> RefStageOnWrite<'a, T> { self.inner } /// Allows returning a reference to the locked staged data without releasing its lock. #[inline] pub fn get_staged_guard(&self) -> RwLockReadGuard<'a, T::Staging> { - self.inner.raw_read_staged() + self.inner.0.raw_read_staged() } /// Allows returning a reference to the cold data without releasing its lock (it it has one). #[inline] pub fn get_cold_guard(&self) -> T::ColdRef<'a> { - self.inner.raw_read_cold() + self.inner.0.raw_read_cold() } } impl<'a, T: StagedChanges> Clone for StagedRef<'a, T> { fn clone(&self) -> Self { - Self { - staged: self.staged, - cold: self.cold, - } + *self } } +impl<'a, T: StagedChanges> Copy for StagedRef<'a, T> {} + +impl<'a, T: StagableWritesCore> Copy for RefStageOnWrite<'a, T> {} + impl<'a, T: StagableWritesCore> Clone for RefStageOnWrite<'a, T> { fn clone(&self) -> Self { - Self(self.0) + *self } } From 0576c695e6355c07193ebcd9c886f21b051153e9 Mon Sep 17 00:00:00 2001 From: Elliott Pierce Date: Thu, 6 Feb 2025 18:01:05 -0500 Subject: [PATCH 40/45] fixed docs --- crates/bevy_utils/src/staging.rs | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/crates/bevy_utils/src/staging.rs b/crates/bevy_utils/src/staging.rs index 22ea3817f634a..9caeb8b5b2f1b 100644 --- a/crates/bevy_utils/src/staging.rs +++ b/crates/bevy_utils/src/staging.rs @@ -322,7 +322,7 @@ pub trait StagableWritesCore: StagableWritesTypes { &mut ::Cold, ); - /// Same as [`raw_write_cold`](StagableWritesCore::raw_write_cold), but never locks. + /// Gets the cold data mutably without locking. 
#[inline] fn raw_write_cold_mut(&mut self) -> &mut ::Cold { self.raw_write_both_mut().1 @@ -448,7 +448,7 @@ pub trait StagableWrites { /// Gets the inner core. fn get_core(&self) -> &Self::Core; - /// Exactly the same as [`StagableWritesCore::stage_lock_usafe`] + /// Exactly the same as [`StagableWritesCore::stage_lock_unsafe`] #[inline] fn stage_lock(&mut self) -> StagerLocked<'_, Self::Core> { // Safety: Because we have exclusive, mutable access, this is safe. @@ -542,7 +542,6 @@ pub struct StageOnWrite { /// A version of [`StageOnWrite`] designed for atomic use. /// It functions fully without needing `&mut self`. /// See [`StageOnWrite`] for details. -#[cfg(feature = "alloc")] #[derive(Default, Debug)] pub struct AtomicStageOnWrite { /// Cold data is read optimized. @@ -559,7 +558,7 @@ pub struct AtomicStageOnWrite { /// /// This type includes a baked in [`Arc`], so it can be shared across threads. /// -/// Remember to use [`apply_staged_non_blocking`](Self::apply_staged_non_blocking) or similar methods periodically as a best practice. +/// Remember to use [`apply_staged_non_blocking`](AtomicStageOnWrite::apply_staged_non_blocking) or similar methods periodically as a best practice. #[cfg(feature = "alloc")] #[derive(Clone)] pub struct ArcStageOnWrite(pub Arc>); @@ -722,7 +721,7 @@ impl StageOnWrite { } impl AtomicStageOnWrite { - /// Constructs a new [`AtomicStageOnWriteInner`] with the given value and no staged changes. + /// Constructs a new [`AtomicStageOnWrite`] with the given value and no staged changes. 
pub fn new(value: T::Cold) -> Self { Self { cold: RwLock::new(value), From 69b7ca8ac27fa7657e2f11b699b88e3413de93d3 Mon Sep 17 00:00:00 2001 From: Elliott Pierce Date: Thu, 6 Feb 2025 22:45:33 -0500 Subject: [PATCH 41/45] docs pass --- crates/bevy_utils/src/staging.rs | 25 ++++++++++++++----------- 1 file changed, 14 insertions(+), 11 deletions(-) diff --git a/crates/bevy_utils/src/staging.rs b/crates/bevy_utils/src/staging.rs index 9caeb8b5b2f1b..e22cec27f53ea 100644 --- a/crates/bevy_utils/src/staging.rs +++ b/crates/bevy_utils/src/staging.rs @@ -5,20 +5,21 @@ //! Provides an abstracted system for staging modifications to data structures that rarely change. //! See [`StageOnWrite`] as a starting point. //! -//! Here's an example of this utility in action for registering players. This is a bit contrived, but it should communicate the idea. +//! +//! +//! # Example +//! +//! Here's an example of this utility in action for registering players. +//! This is a bit contrived, but it should communicate the idea. //! //! ``` //! use core::mem::take; //! use std::sync::RwLockReadGuard; //! use core::ops::{Deref, DerefMut}; //! -//! use crate as bevy_utils; //! use bevy_platform_support::collections::HashMap; //! use bevy_platform_support::prelude::String; -//! use bevy_utils::staging::{ -//! MaybeStaged, StagableWrites, StageOnWrite, StagedChanges, StagedRef, StagedRefLocked, -//! Stager, StagerLocked, -//! }; +//! use self::bevy_utils::staging::*; //! //! /// Stores some arbitrary player data. //! #[derive(Debug, Clone)] @@ -173,8 +174,8 @@ //! id: PlayerId, //! } //! -//! struct LockedNameColdRef<'a, T: StagableWrites + 'a> { -//! cold: T::ColdStorage<'a>, +//! struct LockedNameColdRef<'a, T: StagableWritesCore + 'a> { +//! cold: T::ColdRef<'a>, //! // must be valid //! id: PlayerId, //! } @@ -192,7 +193,7 @@ //! } //! } //! -//! impl<'a, T: StagableWrites + 'a> Deref for LockedNameColdRef<'a, T> { +//! 
impl<'a, T: StagableWritesCore + 'a> Deref for LockedNameColdRef<'a, T> {
//! type Target = str;
//!
//! fn deref(&self) -> &Self::Target {
@@ -208,7 +209,7 @@
//! impl PlayerRegistry {
//! /// Runs relatively rarely
//! pub fn player_joined(&self, name: String) -> PlayerId {
-//! self.players.stage_scope_locked(|stager| stager.add(name))
+//! self.bulk_write().as_stager().add(name)
//! }
//!
//! /// Runs very often
@@ -245,10 +246,12 @@
//!
//! /// Allows writing in bulk without extra locking.
//! pub fn bulk_write(&self) -> StagerLocked<'_, StageOnWrite> {
-//! self.players.stage_lock()
+//! // SAFETY: unsafe is used to take responsibility for deadlocks.
+//! unsafe { self.players.stage_lock_unsafe() }
//! }
//! }
//! ```
+//!

use bevy_platform_support::sync::{
 PoisonError, RwLock, RwLockReadGuard, RwLockWriteGuard, TryLockError,

From fd858d59869e25e6b60419d96a2d9861822d4d98 Mon Sep 17 00:00:00 2001
From: Elliott Pierce
Date: Thu, 6 Feb 2025 23:40:01 -0500
Subject: [PATCH 42/45] module level docs

---
 crates/bevy_utils/src/staging.rs | 76 ++++++++++++++++++++++++++++++++
 1 file changed, 76 insertions(+)

diff --git a/crates/bevy_utils/src/staging.rs b/crates/bevy_utils/src/staging.rs
index e22cec27f53ea..04902d0995043 100644
--- a/crates/bevy_utils/src/staging.rs
+++ b/crates/bevy_utils/src/staging.rs
@@ -5,7 +5,83 @@
 //! Provides an abstracted system for staging modifications to data structures that rarely change.
 //! See [`StageOnWrite`] as a starting point.
 //!
+//! # Rationale
 //!
+//! Let's say you want to have a collection of items that is read from often but rarely written to.
+//! This comes up a lot in registries, like components, bundles, reflection, assets, etc.
+//! Let's say you also want to share this collection between threads freely.
+//! There are lots of ways to do this, so what does this module do differently?
+//!
+//! The standard solution is to use a [`RwLock`] with the collection inside.
+//! 
Then you can pass around the lock between threads freely, even putting it in an `Arc` if desired. +//! However, writing to the collection blocks all reads from it. +//! In performance critical code, this can shut down processes that are reading data from the collection. +//! Worse, if the lifetime of the data being read is long, it can block the writing thread for a significant time. +//! In some cases, this is even prone to dead locking. +//! If any of these are concerns, [`RwLock`] is not enough. +//! +//! There are plenty of third part crates that offer relevant functionality. +//! For example, [left_right](https://docs.rs/left-right/latest/left_right/) is similar to this crate, but it only supports one writer at a time, allows readers to desync from the writer, and lacks `no_std` support. +//! Other crates exist but come with similar downsides, taking double memory, not having `no_std` support, etc. +//! +//! So this module is its own solution to the problem. +//! Use this if and only if: +//! - The collection you are storing has very few writes. +//! - The collection has many concurrent reads. +//! - The collection needs to be able to be written to from anywhere. +//! - The collection needs to be updated everywhere immediately when written to. +//! - The collection needs to be minimally blocking but tolerates locking. +//! +//! # How it works +//! +//! The general approach here is called "Stage on Write", similar to "Copy on Write" from std. +//! +//! Data that has not been changed in a while lives in a compact, read-optimized data structure. +//! We call this read-optimized, old (not changed recently) data "cold" data. +//! When a change is made, instead of locking the cold data and applying it directly, we lock a temporary storage data structure and queue the change. +//! These queued changes we call "staged" data, and its type implements [`StagedChanges`]. +//! Then, at user defined points, we drain these staged changes into the cold data. +//! 
This requires locking both data structures, but since these points can be much less frequent than the already rare writes, this doesn't matter. +//! In principal, the staged data never needs to be drained into the cold data, but the staged data will never benefit from the faster read access of cold data. +//! The traits [`StagableWrites`] and company represent types that corrdinat this behavior. +//! +//! A few other types help with this. +//! If a lock is held or there is mutably access to the underlying stage on write type, [`Stager`] and [`StagedRef`] can be obtained. +//! Use [`StagedRef`] when you need to access data with standard references. +//! Since the there might be a lock involved, this should be used only when needed. +//! Use [`Stager`] when you need to write data, since this gives read access to cold data, and write access to staged data. +//! However, these types are typically obtained by locking, and returning a reference from them requires all of its locks be kept. +//! As a result, there are better types to use for reading. +//! +//! [`StagedRefLocked`] fills the same role as [`StagedRef`], but gives underlying access to the locks. +//! So, if the requested data lives in cold data, you can return the lock guard for cold, and drop the lock guard for staged, freeing it up for writes. +//! The [`MaybeStaged`] enum helps with this, and this will be even easier once [RwLockGuard mapping](https://doc.rust-lang.org/std/sync/struct.MappedRwLockReadGuard.html) is stabilized. +//! [`StagerLocked`] offers effectively the same powers as [`Stager`], but also gives access to the underlying locks. +//! So, if you want to return a mutable reference to something in staged data, you can use this to do that while allowing the cold lock to be dropped. +//! This is less useful, but still has its place. +//! +//! In general: +//! - Implement general purpose reads on [`StagedRef`], +//! - High performance reads on [`StagedRefLocked`], +//! - Writes on [`Stager`], +//! 
- and niche/advanced uses on [`StagerLocked`]. +//! +//! In addition, this module offers two implementations of the stage on write concept: [`StageOnWrite`] and [`AtomicStageOnWrite`]. +//! [`StageOnWrite`] is the simpler implementation, storing cold data directly and staged data in an [`RwLock`] to synchronize writes. +//! Because it stores cold data directly, the only way to clean the data (drain staged data in cold) is to have mutable access to it. +//! This means it can't be put in an `Arc` or similar and still be able to be cleaned. +//! [`AtomicStagedOnWrite`] comes to the resque here. It stores cold data in another [`RwLock`], allowing the data to be cleaned with immutable access. +//! Although blocking methods for this exist, [`AtomicStagedOnWrite`] also offers non-blocking methods for cleaning. +//! Hence, in normal use, [`AtomicStagedOnWrite`] will almost never block to read from cold data. +//! Additionally, because it can see when cold is being read or not, it can apply staged changes as needed without needing specific calls from the user. +//! +//! Finally, [`StagableWrites`] offers some utilities to prevent deadlocking. +//! It is implemented by [`RefStageOnWrite`] and `ArcStageOnWrite` for utility. +//! The idea behind this type is that the most common way to accidentally deadlock is to maintain an immutable borrow while making a muttable borrow via synchronization. +//! This trait makes muttable borrows requie mutable access to self, preventing this kind of deadlock. +//! Because these types only wrap cloneable references to to the [`StagableWritesCore`] type, this can still be shared between thread safely. +//! It also means that this does not fully prevent deadlock, especially if the same thread is maintaining a lock guard on one copy of the stage on write structure while another copy is being used. +//! Still this protection is better than none. //! //! # Example //! 
From 23f2f71f651e3d9009df1715bebd5821bb7eda46 Mon Sep 17 00:00:00 2001 From: Elliott Pierce Date: Thu, 6 Feb 2025 23:42:52 -0500 Subject: [PATCH 43/45] default for arc --- crates/bevy_utils/src/staging.rs | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/crates/bevy_utils/src/staging.rs b/crates/bevy_utils/src/staging.rs index 04902d0995043..a57488341c6cf 100644 --- a/crates/bevy_utils/src/staging.rs +++ b/crates/bevy_utils/src/staging.rs @@ -1060,6 +1060,15 @@ impl> DerefMut for MaybeStaged Default for ArcStageOnWrite +where + T::Cold: Default, +{ + fn default() -> Self { + Self::new(T::Cold::default()) + } +} + #[cfg(test)] mod tests { use bevy_platform_support::{collections::HashMap, prelude::Vec}; From acba828a4203c00cfaf1198a6cee0b7a2d2acd5e Mon Sep 17 00:00:00 2001 From: Elliott Pierce Date: Thu, 6 Feb 2025 23:55:59 -0500 Subject: [PATCH 44/45] clean up and polish --- crates/bevy_utils/src/staging.rs | 24 +++++++++++++----------- 1 file changed, 13 insertions(+), 11 deletions(-) diff --git a/crates/bevy_utils/src/staging.rs b/crates/bevy_utils/src/staging.rs index a57488341c6cf..baf17cb66e315 100644 --- a/crates/bevy_utils/src/staging.rs +++ b/crates/bevy_utils/src/staging.rs @@ -17,11 +17,11 @@ //! However, writing to the collection blocks all reads from it. //! In performance critical code, this can shut down processes that are reading data from the collection. //! Worse, if the lifetime of the data being read is long, it can block the writing thread for a significant time. -//! In some cases, this is even prone to dead locking. +//! In some cases, this is even prone to deadlocking. //! If any of these are concerns, [`RwLock`] is not enough. //! -//! There are plenty of third part crates that offer relevant functionality. -//! For example, [left_right](https://docs.rs/left-right/latest/left_right/) is similar to this crate, but it only supports one writer at a time, allows readers to desync from the writer, and lacks `no_std` support. +//! 
There are plenty of third party crates that offer relevant functionality.
+//! For example, [left_right](https://docs.rs/left-right/latest/left_right/) is similar to this module, but it only supports one writer at a time, allows readers to desync from the writer, and lacks `no_std` support.
 //! Other crates exist but come with similar downsides, taking double memory, not having `no_std` support, etc.
 //!
 //! So this module is its own solution to the problem.
@@ -30,7 +30,7 @@
 //! - The collection has many concurrent reads.
 //! - The collection needs to be able to be written to from anywhere.
 //! - The collection needs to be updated everywhere immediately when written to.
-//! - The collection needs to be minimally blocking but tolerates locking.
+//! - The collection can't let writes interrupt/block reading other data.
 //!
 //! # How it works
 //!
@@ -42,15 +42,16 @@
 //! These queued changes we call "staged" data, and its type implements [`StagedChanges`].
 //! Then, at user defined points, we drain these staged changes into the cold data.
 //! This requires locking both data structures, but since these points can be much less frequent than the already rare writes, this doesn't matter.
-//! In principal, the staged data never needs to be drained into the cold data, but the staged data will never benefit from the faster read access of cold data.
-//! The traits [`StagableWrites`] and company represent types that corrdinat this behavior.
+//! In principle, the staged data never needs to be drained into the cold data, but the data in staged data will not benefit from the faster read access of cold data.
+//! The traits [`StagableWrites`] and company represent types that coordinate this behavior.
 //!
 //! A few other types help with this.
-//! 
If a lock is held or there is mutable access to the underlying stage on write type, [`Stager`] and [`StagedRef`] can be obtained. //! Use [`StagedRef`] when you need to access data with standard references. //! Since the there might be a lock involved, this should be used only when needed. //! Use [`Stager`] when you need to write data, since this gives read access to cold data, and write access to staged data. //! However, these types are typically obtained by locking, and returning a reference from them requires all of its locks be kept. +//! (Both cold *and* staged are locked even if the reference only points to one of them). //! As a result, there are better types to use for reading. //! //! [`StagedRefLocked`] fills the same role as [`StagedRef`], but gives underlying access to the locks. @@ -62,13 +63,13 @@ //! //! In general: //! - Implement general purpose reads on [`StagedRef`], -//! - High performance reads on [`StagedRefLocked`], -//! - Writes on [`Stager`], +//! - high performance reads on [`StagedRefLocked`], +//! - writes on [`Stager`], //! - and niche/advanced uses on [`StagerLocked`]. //! //! In addition, this module offers two implementations of the stage on write concept: [`StageOnWrite`] and [`AtomicStageOnWrite`]. //! [`StageOnWrite`] is the simpler implementation, storing cold data directly and staged data in an [`RwLock`] to synchronize writes. -//! Because it stores cold data directly, the only way to clean the data (drain staged data in cold) is to have mutable access to it. +//! Because it stores cold data directly, the only way to clean the data (drain staged data into cold) is to have mutable access to it. //! This means it can't be put in an `Arc` or similar and still be able to be cleaned. //! [`AtomicStagedOnWrite`] comes to the resque here. It stores cold data in another [`RwLock`], allowing the data to be cleaned with immutable access. //! 
Although blocking methods for this exist, [`AtomicStagedOnWrite`] also offers non-blocking methods for cleaning.
@@ -78,7 +79,7 @@
 //! Finally, [`StagableWrites`] offers some utilities to prevent deadlocking.
 //! It is implemented by [`RefStageOnWrite`] and `ArcStageOnWrite` for utility.
 //! The idea behind this type is that the most common way to accidentally deadlock is to maintain an immutable borrow while making a muttable borrow via synchronization.
-//! This trait makes muttable borrows requie mutable access to self, preventing this kind of deadlock.
+//! This trait makes mutable borrows require mutable access to self, preventing this kind of deadlock.
 //! Because these types only wrap cloneable references to to the [`StagableWritesCore`] type, this can still be shared between thread safely.
 //! It also means that this does not fully prevent deadlock, especially if the same thread is maintaining a lock guard on one copy of the stage on write structure while another copy is being used.
 //! Still this protection is better than none.
@@ -1060,6 +1061,7 @@ impl> DerefMut for MaybeStaged
 
+#[cfg(feature = "alloc")]
 impl Default for ArcStageOnWrite
 where
     T::Cold: Default,

From 944bafee8980f0831da8b954966dee586a6c9a2e Mon Sep 17 00:00:00 2001
From: Elliott Pierce
Date: Fri, 7 Feb 2025 00:08:06 -0500
Subject: [PATCH 45/45] fixed docs

---
 crates/bevy_utils/src/staging.rs | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/crates/bevy_utils/src/staging.rs b/crates/bevy_utils/src/staging.rs
index baf17cb66e315..ce6420f3df766 100644
--- a/crates/bevy_utils/src/staging.rs
+++ b/crates/bevy_utils/src/staging.rs
@@ -71,9 +71,9 @@
 //! [`StageOnWrite`] is the simpler implementation, storing cold data directly and staged data in an [`RwLock`] to synchronize writes.
 //! Because it stores cold data directly, the only way to clean the data (drain staged data into cold) is to have mutable access to it.
 //! This means it can't be put in an `Arc` or similar and still be able to be cleaned.
-//! 
[`AtomicStagedOnWrite`] comes to the resque here. It stores cold data in another [`RwLock`], allowing the data to be cleaned with immutable access.
-//! Although blocking methods for this exist, [`AtomicStagedOnWrite`] also offers non-blocking methods for cleaning.
-//! Hence, in normal use, [`AtomicStagedOnWrite`] will almost never block to read from cold data.
+//! [`AtomicStageOnWrite`] comes to the rescue here. It stores cold data in another [`RwLock`], allowing the data to be cleaned with immutable access.
+//! Although blocking methods for this exist, [`AtomicStageOnWrite`] also offers non-blocking methods for cleaning.
+//! Hence, in normal use, [`AtomicStageOnWrite`] will almost never block to read from cold data.
 //! Additionally, because it can see when cold is being read or not, it can apply staged changes as needed without needing specific calls from the user.
 //!
 //! Finally, [`StagableWrites`] offers some utilities to prevent deadlocking.