crates/sel4-externally-shared: Improve API
Signed-off-by: Nick Spinale <[email protected]>
nspin committed Oct 20, 2023
1 parent 111c982 commit d140258
Showing 5 changed files with 34 additions and 35 deletions.
28 changes: 14 additions & 14 deletions crates/sel4-externally-shared/src/atomics/ops.rs
@@ -16,30 +16,30 @@ impl<'a, T, A> AtomicPtr<'a, T, A> {
 
 impl<'a, T: Atomic, A: Readable> AtomicPtr<'a, T, A> {
     #[inline]
-    pub fn load(&self, order: Ordering) -> T {
+    pub fn load(self, order: Ordering) -> T {
         // SAFETY: data races are prevented by atomic intrinsics.
         unsafe { generic::atomic_load(self.as_const_ptr(), order.into()) }
     }
 }
 
 impl<'a, T: Atomic, A: Readable + Writable> AtomicPtr<'a, T, A> {
     #[inline]
-    pub fn store(&self, val: T, order: Ordering) {
+    pub fn store(self, val: T, order: Ordering) {
         // SAFETY: data races are prevented by atomic intrinsics.
         unsafe {
             generic::atomic_store(self.as_mut_ptr(), val, order.into());
         }
     }
 
     #[inline]
-    pub fn swap(&self, val: T, order: Ordering) -> T {
+    pub fn swap(self, val: T, order: Ordering) -> T {
         // SAFETY: data races are prevented by atomic intrinsics.
         unsafe { generic::atomic_swap(self.as_mut_ptr(), val, order.into()) }
     }
 
     #[inline]
     pub fn compare_exchange(
-        &self,
+        self,
         current: T,
         new: T,
         success: Ordering,
@@ -59,7 +59,7 @@ impl<'a, T: Atomic, A: Readable + Writable> AtomicPtr<'a, T, A> {
 
     #[inline]
     pub fn compare_exchange_weak(
-        &self,
+        self,
         current: T,
         new: T,
         success: Ordering,
@@ -78,44 +78,44 @@ impl<'a, T: Atomic, A: Readable + Writable> AtomicPtr<'a, T, A> {
     }
 
     #[inline]
-    pub fn fetch_add(&self, val: T, order: Ordering) -> T {
+    pub fn fetch_add(self, val: T, order: Ordering) -> T {
         // SAFETY: data races are prevented by atomic intrinsics.
         unsafe { generic::atomic_add(self.as_mut_ptr(), val, order.into()) }
     }
 
     #[inline]
-    pub fn fetch_sub(&self, val: T, order: Ordering) -> T {
+    pub fn fetch_sub(self, val: T, order: Ordering) -> T {
         // SAFETY: data races are prevented by atomic intrinsics.
         unsafe { generic::atomic_sub(self.as_mut_ptr(), val, order.into()) }
     }
 
     #[inline]
-    pub fn fetch_and(&self, val: T, order: Ordering) -> T {
+    pub fn fetch_and(self, val: T, order: Ordering) -> T {
         // SAFETY: data races are prevented by atomic intrinsics.
         unsafe { generic::atomic_and(self.as_mut_ptr(), val, order.into()) }
     }
 
     #[inline]
-    pub fn fetch_nand(&self, val: T, order: Ordering) -> T {
+    pub fn fetch_nand(self, val: T, order: Ordering) -> T {
         // SAFETY: data races are prevented by atomic intrinsics.
         unsafe { generic::atomic_nand(self.as_mut_ptr(), val, order.into()) }
     }
 
     #[inline]
-    pub fn fetch_or(&self, val: T, order: Ordering) -> T {
+    pub fn fetch_or(self, val: T, order: Ordering) -> T {
         // SAFETY: data races are prevented by atomic intrinsics.
         unsafe { generic::atomic_or(self.as_mut_ptr(), val, order.into()) }
     }
 
     #[inline]
-    pub fn fetch_xor(&self, val: T, order: Ordering) -> T {
+    pub fn fetch_xor(self, val: T, order: Ordering) -> T {
         // SAFETY: data races are prevented by atomic intrinsics.
         unsafe { generic::atomic_xor(self.as_mut_ptr(), val, order.into()) }
     }
 
     #[inline]
     pub fn fetch_update<F>(
-        &self,
+        self,
         set_order: Ordering,
         fetch_order: Ordering,
         mut f: F,
@@ -134,7 +134,7 @@ impl<'a, T: Atomic, A: Readable + Writable> AtomicPtr<'a, T, A> {
     }
 
     #[inline]
-    pub fn fetch_max(&self, val: T, order: Ordering) -> T {
+    pub fn fetch_max(self, val: T, order: Ordering) -> T {
         // SAFETY: data races are prevented by atomic intrinsics.
         unsafe {
             if T::IS_SIGNED {
@@ -146,7 +146,7 @@ impl<'a, T: Atomic, A: Readable + Writable> AtomicPtr<'a, T, A> {
     }
 
     #[inline]
-    pub fn fetch_min(&self, val: T, order: Ordering) -> T {
+    pub fn fetch_min(self, val: T, order: Ordering) -> T {
         // SAFETY: data races are prevented by atomic intrinsics.
         unsafe {
             if T::IS_SIGNED {
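Note: each accessor above now takes self by value rather than &self, which suits a small Copy handle in the style of VolatilePtr. A minimal usage sketch, not part of the commit: it assumes Atomic and AtomicPtr are re-exported at the crate root (as the lib.rs diff below shows), that access::ReadWrite satisfies the Readable + Writable bounds, and that Ordering here is core::sync::atomic::Ordering; the helper bump is illustrative only.

use core::sync::atomic::Ordering;

use sel4_externally_shared::access::ReadWrite;
use sel4_externally_shared::{Atomic, AtomicPtr};

// The by-value receiver consumes `counter` here; a caller holding its own
// copy of the (assumed `Copy`) handle can keep using it afterwards.
fn bump<T: Atomic>(counter: AtomicPtr<'_, T, ReadWrite>, delta: T) -> T {
    // Returns the previous value, like `fetch_add` in the diff above.
    counter.fetch_add(delta, Ordering::SeqCst)
}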
17 changes: 7 additions & 10 deletions crates/sel4-externally-shared/src/lib.rs
@@ -5,23 +5,20 @@
 
 use core::ptr::NonNull;
 
-use volatile::{
-    access::{Access, ReadWrite},
-    VolatilePtr, VolatileRef,
-};
+use volatile::access::{Access, ReadWrite};
 
-pub use volatile::{access, map_field};
+pub use volatile::{access, map_field, VolatilePtr, VolatileRef};
 
 mod atomics;
-mod ops;
+
+pub mod ops;
 
 pub use atomics::{Atomic, AtomicPtr};
-pub use ops::{ByteWiseOps, DistrustfulOps, NormalOps, UnorderedAtomicOps};
 
 // TODO
-pub type ExternallySharedOps = DistrustfulOps<NormalOps>;
-// pub type ExternallySharedOps = DistrustfulOps<volatile::ops::VolatileOps>;
-// pub type ExternallySharedOps = DistrustfulOps<ByteWiseOps<UnorderedAtomicOps>>;
+pub type ExternallySharedOps = ops::ZerocopyOps<ops::NormalOps>;
+// pub type ExternallySharedOps = ops::ZerocopyOps<ops::VolatileOps>;
+// pub type ExternallySharedOps = ops::ZerocopyOps<ops::BytewiseOps<ops::UnorderedAtomicOps>>;
 
 pub type ExternallySharedRef<'a, T, A = ReadWrite> = VolatileRef<'a, T, A, ExternallySharedOps>;
 
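With ops now a public module and VolatileRef re-exported, downstream code could assemble its own ops stack in the style of the aliases above. A sketch, not part of the commit, using only items visible in this diff; the alias names MyOps and MySharedRef are illustrative.

use sel4_externally_shared::access::ReadWrite;
use sel4_externally_shared::{ops, VolatileRef};

// The same stack as the commented-out alternative in lib.rs: zerocopy-checked
// ops layered over bytewise, unordered-atomic accesses.
type MyOps = ops::ZerocopyOps<ops::BytewiseOps<ops::UnorderedAtomicOps>>;

// Mirrors `ExternallySharedRef`, but with the custom ops stack.
type MySharedRef<'a, T, A = ReadWrite> = VolatileRef<'a, T, A, MyOps>;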
8 changes: 4 additions & 4 deletions crates/sel4-externally-shared/src/ops/bytewise_ops.rs
@@ -9,12 +9,12 @@ use zerocopy::{AsBytes, FromBytes};
 use volatile::ops::BulkOps;
 
 #[derive(Debug, Default, Copy, Clone)]
-pub struct ByteWiseOps<O>(O);
+pub struct BytewiseOps<O>(O);
 
-impl<O: Ops> Ops for ByteWiseOps<O> {}
+impl<O: Ops> Ops for BytewiseOps<O> {}
 
 #[cfg(feature = "unstable")]
-impl<O: BulkOps<u8>, T: FromBytes + AsBytes> UnitaryOps<T> for ByteWiseOps<O> {
+impl<O: BulkOps<u8>, T: FromBytes + AsBytes> UnitaryOps<T> for BytewiseOps<O> {
     unsafe fn read(src: *const T) -> T {
         let mut val = T::new_zeroed();
         let view = val.as_bytes_mut();
@@ -29,7 +29,7 @@ impl<O: BulkOps<u8>, T: FromBytes + AsBytes> UnitaryOps<T> for ByteWiseOps<O> {
 }
 
 #[cfg(feature = "unstable")]
-impl<O: BulkOps<u8>, T: FromBytes + AsBytes> BulkOps<T> for ByteWiseOps<O> {
+impl<O: BulkOps<u8>, T: FromBytes + AsBytes> BulkOps<T> for BytewiseOps<O> {
     unsafe fn memmove(dst: *mut T, src: *const T, count: usize) {
         unsafe { O::memmove(dst.cast(), src.cast(), count * mem::size_of::<T>()) }
     }
8 changes: 5 additions & 3 deletions crates/sel4-externally-shared/src/ops/mod.rs
@@ -1,9 +1,11 @@
+pub use volatile::ops::*;
+
 mod bytewise_ops;
-mod distrustful_ops;
 mod normal_ops;
 mod unordered_atomic_ops;
+mod zerocopy_ops;
 
-pub use bytewise_ops::ByteWiseOps;
-pub use distrustful_ops::DistrustfulOps;
+pub use bytewise_ops::BytewiseOps;
 pub use normal_ops::NormalOps;
 pub use unordered_atomic_ops::UnorderedAtomicOps;
+pub use zerocopy_ops::ZerocopyOps;
8 changes: 4 additions & 4 deletions crates/sel4-externally-shared/src/ops/{distrustful_ops.rs → zerocopy_ops.rs}
@@ -5,11 +5,11 @@ use zerocopy::{AsBytes, FromBytes};
 use volatile::ops::BulkOps;
 
 #[derive(Debug, Default, Copy, Clone)]
-pub struct DistrustfulOps<O>(O);
+pub struct ZerocopyOps<O>(O);
 
-impl<O: Ops> Ops for DistrustfulOps<O> {}
+impl<O: Ops> Ops for ZerocopyOps<O> {}
 
-impl<O: UnitaryOps<T>, T: FromBytes + AsBytes> UnitaryOps<T> for DistrustfulOps<O> {
+impl<O: UnitaryOps<T>, T: FromBytes + AsBytes> UnitaryOps<T> for ZerocopyOps<O> {
     unsafe fn read(src: *const T) -> T {
         unsafe { O::read(src) }
     }
@@ -20,7 +20,7 @@ impl<O: UnitaryOps<T>, T: FromBytes + AsBytes> UnitaryOps<T> for DistrustfulOps<
 }
 
 #[cfg(feature = "unstable")]
-impl<O: BulkOps<T>, T: FromBytes + AsBytes> BulkOps<T> for DistrustfulOps<O> {
+impl<O: BulkOps<T>, T: FromBytes + AsBytes> BulkOps<T> for ZerocopyOps<O> {
     unsafe fn memmove(dst: *mut T, src: *const T, count: usize) {
         unsafe { O::memmove(dst, src, count) }
     }