From d1402581aa42457be1f7942d61ccdae3534b4802 Mon Sep 17 00:00:00 2001
From: Nick Spinale
Date: Fri, 20 Oct 2023 13:06:53 +0000
Subject: [PATCH] crates/sel4-externally-shared: Improve API

Signed-off-by: Nick Spinale
---
 .../sel4-externally-shared/src/atomics/ops.rs | 28 +++++++++----------
 crates/sel4-externally-shared/src/lib.rs      | 17 +++------
 .../src/ops/bytewise_ops.rs                   |  8 +++---
 crates/sel4-externally-shared/src/ops/mod.rs  |  8 ++++--
 .../{distrustful_ops.rs => zerocopy_ops.rs}   |  8 +++---
 5 files changed, 34 insertions(+), 35 deletions(-)
 rename crates/sel4-externally-shared/src/ops/{distrustful_ops.rs => zerocopy_ops.rs} (81%)

diff --git a/crates/sel4-externally-shared/src/atomics/ops.rs b/crates/sel4-externally-shared/src/atomics/ops.rs
index 233a0f56a..08d04ec5f 100644
--- a/crates/sel4-externally-shared/src/atomics/ops.rs
+++ b/crates/sel4-externally-shared/src/atomics/ops.rs
@@ -16,7 +16,7 @@ impl<'a, T, A> AtomicPtr<'a, T, A> {
 
 impl<'a, T: Atomic, A: Readable> AtomicPtr<'a, T, A> {
     #[inline]
-    pub fn load(&self, order: Ordering) -> T {
+    pub fn load(self, order: Ordering) -> T {
         // SAFETY: data races are prevented by atomic intrinsics.
         unsafe { generic::atomic_load(self.as_const_ptr(), order.into()) }
     }
@@ -24,7 +24,7 @@ impl<'a, T: Atomic, A: Readable> AtomicPtr<'a, T, A> {
 
 impl<'a, T: Atomic, A: Readable + Writable> AtomicPtr<'a, T, A> {
     #[inline]
-    pub fn store(&self, val: T, order: Ordering) {
+    pub fn store(self, val: T, order: Ordering) {
         // SAFETY: data races are prevented by atomic intrinsics.
         unsafe {
             generic::atomic_store(self.as_mut_ptr(), val, order.into());
@@ -32,14 +32,14 @@ impl<'a, T: Atomic, A: Readable + Writable> AtomicPtr<'a, T, A> {
     }
 
     #[inline]
-    pub fn swap(&self, val: T, order: Ordering) -> T {
+    pub fn swap(self, val: T, order: Ordering) -> T {
         // SAFETY: data races are prevented by atomic intrinsics.
         unsafe { generic::atomic_swap(self.as_mut_ptr(), val, order.into()) }
     }
 
     #[inline]
     pub fn compare_exchange(
-        &self,
+        self,
         current: T,
         new: T,
         success: Ordering,
@@ -59,7 +59,7 @@ impl<'a, T: Atomic, A: Readable + Writable> AtomicPtr<'a, T, A> {
 
     #[inline]
     pub fn compare_exchange_weak(
-        &self,
+        self,
         current: T,
         new: T,
         success: Ordering,
@@ -78,44 +78,44 @@ impl<'a, T: Atomic, A: Readable + Writable> AtomicPtr<'a, T, A> {
     }
 
     #[inline]
-    pub fn fetch_add(&self, val: T, order: Ordering) -> T {
+    pub fn fetch_add(self, val: T, order: Ordering) -> T {
         // SAFETY: data races are prevented by atomic intrinsics.
         unsafe { generic::atomic_add(self.as_mut_ptr(), val, order.into()) }
     }
 
     #[inline]
-    pub fn fetch_sub(&self, val: T, order: Ordering) -> T {
+    pub fn fetch_sub(self, val: T, order: Ordering) -> T {
         // SAFETY: data races are prevented by atomic intrinsics.
         unsafe { generic::atomic_sub(self.as_mut_ptr(), val, order.into()) }
     }
 
     #[inline]
-    pub fn fetch_and(&self, val: T, order: Ordering) -> T {
+    pub fn fetch_and(self, val: T, order: Ordering) -> T {
         // SAFETY: data races are prevented by atomic intrinsics.
         unsafe { generic::atomic_and(self.as_mut_ptr(), val, order.into()) }
     }
 
     #[inline]
-    pub fn fetch_nand(&self, val: T, order: Ordering) -> T {
+    pub fn fetch_nand(self, val: T, order: Ordering) -> T {
         // SAFETY: data races are prevented by atomic intrinsics.
         unsafe { generic::atomic_nand(self.as_mut_ptr(), val, order.into()) }
     }
 
     #[inline]
-    pub fn fetch_or(&self, val: T, order: Ordering) -> T {
+    pub fn fetch_or(self, val: T, order: Ordering) -> T {
         // SAFETY: data races are prevented by atomic intrinsics.
         unsafe { generic::atomic_or(self.as_mut_ptr(), val, order.into()) }
     }
 
     #[inline]
-    pub fn fetch_xor(&self, val: T, order: Ordering) -> T {
+    pub fn fetch_xor(self, val: T, order: Ordering) -> T {
         // SAFETY: data races are prevented by atomic intrinsics.
         unsafe { generic::atomic_xor(self.as_mut_ptr(), val, order.into()) }
     }
 
     #[inline]
     pub fn fetch_update<F>(
-        &self,
+        self,
         set_order: Ordering,
         fetch_order: Ordering,
         mut f: F,
@@ -134,7 +134,7 @@ impl<'a, T: Atomic, A: Readable + Writable> AtomicPtr<'a, T, A> {
     }
 
     #[inline]
-    pub fn fetch_max(&self, val: T, order: Ordering) -> T {
+    pub fn fetch_max(self, val: T, order: Ordering) -> T {
         // SAFETY: data races are prevented by atomic intrinsics.
         unsafe {
             if T::IS_SIGNED {
@@ -146,7 +146,7 @@ impl<'a, T: Atomic, A: Readable + Writable> AtomicPtr<'a, T, A> {
     }
 
     #[inline]
-    pub fn fetch_min(&self, val: T, order: Ordering) -> T {
+    pub fn fetch_min(self, val: T, order: Ordering) -> T {
         // SAFETY: data races are prevented by atomic intrinsics.
         unsafe {
             if T::IS_SIGNED {
diff --git a/crates/sel4-externally-shared/src/lib.rs b/crates/sel4-externally-shared/src/lib.rs
index 63c61c995..d21e61b21 100644
--- a/crates/sel4-externally-shared/src/lib.rs
+++ b/crates/sel4-externally-shared/src/lib.rs
@@ -5,23 +5,20 @@
 
 use core::ptr::NonNull;
 
-use volatile::{
-    access::{Access, ReadWrite},
-    VolatilePtr, VolatileRef,
-};
+use volatile::access::{Access, ReadWrite};
 
-pub use volatile::{access, map_field};
+pub use volatile::{access, map_field, VolatilePtr, VolatileRef};
 
 mod atomics;
-mod ops;
+
+pub mod ops;
 
 pub use atomics::{Atomic, AtomicPtr};
-pub use ops::{ByteWiseOps, DistrustfulOps, NormalOps, UnorderedAtomicOps};
 
 // TODO
-pub type ExternallySharedOps = DistrustfulOps<NormalOps>;
-// pub type ExternallySharedOps = DistrustfulOps<UnorderedAtomicOps>;
-// pub type ExternallySharedOps = DistrustfulOps<ByteWiseOps<UnorderedAtomicOps>>;
+pub type ExternallySharedOps = ops::ZerocopyOps<ops::NormalOps>;
+// pub type ExternallySharedOps = ops::ZerocopyOps<ops::UnorderedAtomicOps>;
+// pub type ExternallySharedOps = ops::ZerocopyOps<ops::BytewiseOps<ops::UnorderedAtomicOps>>;
 
 pub type ExternallySharedRef<'a, T, A = ReadWrite> = VolatileRef<'a, T, A, ExternallySharedOps>;
 
diff --git a/crates/sel4-externally-shared/src/ops/bytewise_ops.rs b/crates/sel4-externally-shared/src/ops/bytewise_ops.rs
index aba2f85b5..3fd3122cf 100644
--- a/crates/sel4-externally-shared/src/ops/bytewise_ops.rs
+++ b/crates/sel4-externally-shared/src/ops/bytewise_ops.rs
@@ -9,12 +9,12 @@ use zerocopy::{AsBytes, FromBytes};
 use volatile::ops::BulkOps;
 
 #[derive(Debug, Default, Copy, Clone)]
-pub struct ByteWiseOps<O>(O);
+pub struct BytewiseOps<O>(O);
 
-impl<O: Ops> Ops for ByteWiseOps<O> {}
+impl<O: Ops> Ops for BytewiseOps<O> {}
 
 #[cfg(feature = "unstable")]
-impl<O: BulkOps<u8>, T: FromBytes + AsBytes> UnitaryOps<T> for ByteWiseOps<O> {
+impl<O: BulkOps<u8>, T: FromBytes + AsBytes> UnitaryOps<T> for BytewiseOps<O> {
     unsafe fn read(src: *const T) -> T {
         let mut val = T::new_zeroed();
         let view = val.as_bytes_mut();
@@ -29,7 +29,7 @@ impl<O: BulkOps<u8>, T: FromBytes + AsBytes> UnitaryOps<T> for ByteWiseOps<O> {
 }
 
 #[cfg(feature = "unstable")]
-impl<O: BulkOps<u8>, T: FromBytes + AsBytes> BulkOps<T> for ByteWiseOps<O> {
+impl<O: BulkOps<u8>, T: FromBytes + AsBytes> BulkOps<T> for BytewiseOps<O> {
     unsafe fn memmove(dst: *mut T, src: *const T, count: usize) {
         unsafe { O::memmove(dst.cast(), src.cast(), count * mem::size_of::<T>()) }
     }
diff --git a/crates/sel4-externally-shared/src/ops/mod.rs b/crates/sel4-externally-shared/src/ops/mod.rs
index b94d82e8c..7ecca856f 100644
--- a/crates/sel4-externally-shared/src/ops/mod.rs
+++ b/crates/sel4-externally-shared/src/ops/mod.rs
@@ -1,9 +1,11 @@
+pub use volatile::ops::*;
+
 mod bytewise_ops;
-mod distrustful_ops;
 mod normal_ops;
 mod unordered_atomic_ops;
+mod zerocopy_ops;
 
-pub use bytewise_ops::ByteWiseOps;
-pub use distrustful_ops::DistrustfulOps;
+pub use bytewise_ops::BytewiseOps;
 pub use normal_ops::NormalOps;
 pub use unordered_atomic_ops::UnorderedAtomicOps;
+pub use zerocopy_ops::ZerocopyOps;
diff --git a/crates/sel4-externally-shared/src/ops/distrustful_ops.rs b/crates/sel4-externally-shared/src/ops/zerocopy_ops.rs
similarity index 81%
rename from crates/sel4-externally-shared/src/ops/distrustful_ops.rs
rename to crates/sel4-externally-shared/src/ops/zerocopy_ops.rs
index d1e980fb8..59ca7252c 100644
--- a/crates/sel4-externally-shared/src/ops/distrustful_ops.rs
+++ b/crates/sel4-externally-shared/src/ops/zerocopy_ops.rs
@@ -5,11 +5,11 @@ use zerocopy::{AsBytes, FromBytes};
 use volatile::ops::BulkOps;
 
 #[derive(Debug, Default, Copy, Clone)]
-pub struct DistrustfulOps<O>(O);
+pub struct ZerocopyOps<O>(O);
 
-impl<O: Ops> Ops for DistrustfulOps<O> {}
+impl<O: Ops> Ops for ZerocopyOps<O> {}
 
-impl<O: UnitaryOps<T>, T: FromBytes + AsBytes> UnitaryOps<T> for DistrustfulOps<O> {
+impl<O: UnitaryOps<T>, T: FromBytes + AsBytes> UnitaryOps<T> for ZerocopyOps<O> {
     unsafe fn read(src: *const T) -> T {
         unsafe { O::read(src) }
     }
@@ -20,7 +20,7 @@ impl<O: UnitaryOps<T>, T: FromBytes + AsBytes> UnitaryOps<T> for DistrustfulOps<
 }
 
 #[cfg(feature = "unstable")]
-impl<O: BulkOps<T>, T: FromBytes + AsBytes> BulkOps<T> for DistrustfulOps<O> {
+impl<O: BulkOps<T>, T: FromBytes + AsBytes> BulkOps<T> for ZerocopyOps<O> {
     unsafe fn memmove(dst: *mut T, src: *const T, count: usize) {
         unsafe { O::memmove(dst, src, count) }
     }
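
For illustration (not part of the patch itself): the renamed ZerocopyOps follows a simple delegating pattern, forwarding each access to an inner ops type but only for types bounded by zerocopy's FromBytes + AsBytes. The standalone sketch below shows that pattern with simplified stand-in traits; the UnitaryOps and NormalOps definitions here are assumptions for the sake of the example, not the actual definitions from the vendored volatile crate, and only the FromBytes/AsBytes bounds and the forwarding bodies come from the diff above.

use zerocopy::{AsBytes, FromBytes};

// Simplified stand-in for the unitary ops trait; the real trait lives in the
// vendored `volatile` crate and may have a different shape.
trait UnitaryOps<T> {
    unsafe fn read(src: *const T) -> T;
    unsafe fn write(dst: *mut T, src: T);
}

// Plain pointer accesses, playing the role of `NormalOps` in the patch.
struct NormalOps;

impl<T> UnitaryOps<T> for NormalOps {
    unsafe fn read(src: *const T) -> T {
        // SAFETY: the caller upholds the usual raw-pointer read preconditions.
        unsafe { src.read() }
    }

    unsafe fn write(dst: *mut T, src: T) {
        // SAFETY: the caller upholds the usual raw-pointer write preconditions.
        unsafe { dst.write(src) }
    }
}

// Wrapper that forwards to `O`, but only for types where every bit pattern is
// a valid value (`FromBytes`) and that can be viewed as plain bytes (`AsBytes`).
struct ZerocopyOps<O>(O);

impl<O: UnitaryOps<T>, T: FromBytes + AsBytes> UnitaryOps<T> for ZerocopyOps<O> {
    unsafe fn read(src: *const T) -> T {
        // Forward to the inner ops type; the bounds on T do the real work.
        unsafe { O::read(src) }
    }

    unsafe fn write(dst: *mut T, src: T) {
        unsafe { O::write(dst, src) }
    }
}

The bounds add no runtime behaviour; they only restrict which T the forwarding impl exists for, so a value read out of externally shared (and therefore untrusted) memory can never be an invalid Rust value, such as a bad bool or enum discriminant.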