diff --git a/Cargo.lock b/Cargo.lock
index 12190b0..62e3f53 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -19,11 +19,11 @@ dependencies = [
 
 [[package]]
 name = "heapless"
-version = "0.8.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0bfb9eb618601c89945a70e254898da93b13be0388091d42117462b265bb3fad"
+version = "0.9.0"
+source = "git+https://github.com/zeenix/heapless?rev=c23f5d60#c23f5d6038269ec9ae747ab44720c6ca4dd5f09c"
 dependencies = [
  "hash32",
+ "portable-atomic",
  "serde",
  "stable_deref_trait",
 ]
@@ -39,6 +39,8 @@ name = "mayheap"
 version = "0.2.0"
 dependencies = [
  "heapless",
+ "paste",
+ "portable-atomic",
  "serde",
  "serde_json",
 ]
@@ -49,6 +51,18 @@ version = "2.7.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3"
 
+[[package]]
+name = "paste"
+version = "1.0.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a"
+
+[[package]]
+name = "portable-atomic"
+version = "1.11.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f84267b20a16ea918e43c6a88433c2d54fa145c92a811b5b047ccbe153674483"
+
 [[package]]
 name = "proc-macro2"
 version = "1.0.94"
diff --git a/Cargo.toml b/Cargo.toml
index 83a5769..7e6df62 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -12,12 +12,15 @@ default = ["alloc"]
 alloc = ["serde?/alloc"]
 heapless = ["dep:heapless"]
 serde = ["dep:serde", "heapless?/serde"]
+portable-atomic = ["dep:portable-atomic", "heapless?/portable-atomic"]
 
 [dependencies]
-heapless = { version = "0.8", optional = true }
+heapless = { git = "https://github.com/zeenix/heapless", rev = "c23f5d60", optional = true }
 serde = { version = "1", default-features = false, features = [
     "derive",
 ], optional = true }
+portable-atomic = { version = "1", optional = true }
+paste = "1.0"
 
 [dev-dependencies]
 serde_json = "1"
diff --git a/src/boxed.rs b/src/boxed.rs
new file mode 100644
index 0000000..0566d3d
--- /dev/null
+++ b/src/boxed.rs
@@ -0,0 +1,200 @@
+//! Abstraction over `heapless::pool::boxed` and `alloc::boxed`.
+//!
+//! The API is modeled after `heapless::pool::boxed` but simpler. This module is only available
+//! when either:
+//!
+//! - `alloc` feature is enabled, or
+//! - `heapless` and `portable-atomic` features are enabled.
+//!
+//! # Usage
+//!
+//! ```
+//! use mayheap::{box_pool, boxed::{BoxPool, Box}};
+//!
+//! // Create a pool for the u32 type with a capacity of 2.
+//! box_pool!(MyBoxPool: u32, 2);
+//!
+//! // Allocate a new boxed value from the pool.
+//! let mut boxed = MyBoxPool.alloc(42).unwrap();
+//! assert_eq!(*boxed, 42);
+//!
+//! // Let's mutate the boxed value.
+//! *boxed = 100;
+//! assert_eq!(*boxed, 100);
+//!
+//! // Let's allocate more.
+//! let _boxed = MyBoxPool.alloc(43).unwrap();
+//!
+//! #[cfg(feature = "alloc")]
+//! {
+//!     // This will work fine since capacity (which is 2 here) is irrelevant when using alloc.
+//!     let boxed = MyBoxPool.alloc(44).unwrap();
+//!     assert_eq!(*boxed, 44);
+//! }
+//! #[cfg(feature = "heapless")]
+//! {
+//!     // This will not: the pool's capacity of 2 is already exhausted.
+//!     let res = MyBoxPool.alloc(45);
+//!     assert_eq!(res, Err(45));
+//! }
+//! ```
+
+use core::ops::{Deref, DerefMut};
+
+/// A singleton that manages `pool::boxed::Box` values.
+///
+/// Don't implement this trait directly. Use [`crate::box_pool`] to create an implementation.
+pub trait BoxPool {
+    /// The data type managed by the memory pool.
+    type Data;
+    /// The implementation-specific type of the boxed value.
+    type BoxedValue: DerefMut<Target = Self::Data>;
+
+    /// Allocates a new boxed value from the pool.
+    fn alloc(&self, value: Self::Data) -> Result<Box<Self>, Self::Data>
+    where
+        Self: Sized;
+}
+
+/// A boxed value managed by a [`BoxPool`].
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub struct Box<P: BoxPool>(P::BoxedValue);
+
+impl<P: BoxPool> Box<P> {
+    /// Creates a new `Box` from an implementation-specific boxed value.
+    pub fn new(value: P::BoxedValue) -> Self {
+        Self(value)
+    }
+}
+
+impl<P: BoxPool> Deref for Box<P> {
+    type Target = P::Data;
+
+    fn deref(&self) -> &Self::Target {
+        self.0.deref()
+    }
+}
+
+impl<P: BoxPool> DerefMut for Box<P> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        self.0.deref_mut()
+    }
+}
+
+/// Creates a new `BoxPool` singleton with the given `$name` that manages values of the specified `$ty`.
+#[cfg(feature = "alloc")]
+#[macro_export]
+macro_rules! box_pool {
+    ($name:ident: $ty:ty, $capacity:expr) => {
+        #[derive(Debug, Clone, PartialEq, Eq)]
+        pub struct $name;
+
+        impl $crate::boxed::BoxPool for $name {
+            type Data = $ty;
+            type BoxedValue = $crate::reexports::alloc::boxed::Box<$ty>;
+
+            fn alloc(&self, value: Self::Data) -> Result<$crate::boxed::Box<Self>, Self::Data> {
+                Ok($crate::boxed::Box::new(
+                    $crate::reexports::alloc::boxed::Box::new(value),
+                ))
+            }
+        }
+
+        $crate::reexports::paste::paste! {
+            // Let's use the $capacity variable so callers don't get "unused const" warnings.
+            #[allow(non_upper_case_globals, dead_code)]
+            const [<__dummy__ $name>]: () = {
+                let _ = $capacity;
+            };
+        }
+    };
+}
+
+/// Creates a new `BoxPool` singleton with the given `$name` that manages values of the specified `$ty`.
+#[cfg(not(feature = "alloc"))]
+#[macro_export]
+macro_rules! box_pool {
+    ($name:ident: $ty:ty, $capacity:expr) => {
+        $crate::reexports::paste::paste! {
+            heapless::box_pool!([<$name Pool>]: $ty);
+
+            #[derive(Debug, Clone, PartialEq, Eq)]
+            pub struct $name;
+
+            impl $crate::boxed::BoxPool for $name {
+                type Data = $ty;
+                type BoxedValue = heapless::pool::boxed::Box<[<$name Pool>]>;
+
+                fn alloc(&self, value: Self::Data) -> Result<$crate::boxed::Box<Self>, $ty> {
+                    $name.init();
+
+                    [<$name Pool>].alloc(value).map($crate::boxed::Box::new)
+                }
+            }
+
+            impl $name {
+                fn init(&self) {
+                    use portable_atomic::{AtomicU8, Ordering};
+                    use heapless::pool::boxed::BoxBlock;
+
+                    static STATE: AtomicU8 = AtomicU8::new(InitState::Uninitialized as u8);
+
+                    match STATE
+                        .compare_exchange(
+                            InitState::Uninitialized as u8,
+                            InitState::Initializing as u8,
+                            Ordering::AcqRel,
+                            Ordering::Acquire,
+                        )
+                        .map(|state| state.into())
+                        .map_err(|state| state.into())
+                    {
+                        Ok(InitState::Uninitialized) => {
+                            // We won the race, initialize.
+                            let blocks: &'static mut [BoxBlock<$ty>] = {
+                                #[allow(clippy::declare_interior_mutable_const)]
+                                const BLOCK: BoxBlock<$ty> = BoxBlock::new();
+                                static mut BLOCKS: [BoxBlock<$ty>; $capacity] = [BLOCK; $capacity];
+                                unsafe { core::ptr::addr_of_mut!(BLOCKS).as_mut().unwrap() }
+                            };
+                            for block in blocks {
+                                [<$name Pool>].manage(block);
+                            }
+                            STATE.store(InitState::Initialized as u8, Ordering::Release);
+                        }
+                        Err(InitState::Initializing) => {
+                            // Someone else is initializing, wait.
+                            while STATE.load(Ordering::Acquire) == InitState::Initializing as u8 {
+                                core::hint::spin_loop();
+                            }
+                        }
+                        Err(InitState::Initialized) => {
+                            // Already initialized.
+                        }
+                        // All other states should never happen.
+                        _ => unreachable!(),
+                    }
+
+                    #[repr(u8)]
+                    #[derive(PartialEq)]
+                    enum InitState {
+                        Uninitialized = 0,
+                        Initializing = 1,
+                        Initialized = 2,
+                    }
+
+                    impl From<u8> for InitState {
+                        fn from(value: u8) -> Self {
+                            match value {
+                                0 => InitState::Uninitialized,
+                                1 => InitState::Initializing,
+                                2 => InitState::Initialized,
+                                _ => unreachable!(),
+                            }
+                        }
+                    }
+                }
+            }
+        }
+    };
+}
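The new `boxed` module is the core of this change, so two short sketches of how it is meant to be used. First, because `BoxPool` is a trait, downstream code can stay generic over which pool it allocates from. The pool name, capacity, and helper function below are made up for illustration, and the snippet assumes the default `alloc` backend so it runs as a plain program:

```rust
use mayheap::{
    box_pool,
    boxed::{Box, BoxPool},
};

// An arbitrary pool: u32 values, room for four of them under the heapless backend.
box_pool!(ScratchPool: u32, 4);

// Generic over any pool that hands out u32s, regardless of the backend behind it.
fn alloc_doubled<P: BoxPool<Data = u32>>(pool: &P, value: u32) -> Option<Box<P>> {
    pool.alloc(value * 2).ok()
}

fn main() {
    let boxed = alloc_doubled(&ScratchPool, 21).unwrap();
    assert_eq!(*boxed, 42);
}
```

Second, the `init()` method generated by the `heapless` variant of `box_pool!` is a lock-free, run-once state machine. The same pattern, pulled out of the macro and written against `core` atomics for readability (the generated code uses `portable_atomic::AtomicU8` instead, so it also builds on targets without native atomics), looks roughly like this:

```rust
use core::sync::atomic::{AtomicU8, Ordering};

const UNINITIALIZED: u8 = 0;
const INITIALIZING: u8 = 1;
const INITIALIZED: u8 = 2;

static STATE: AtomicU8 = AtomicU8::new(UNINITIALIZED);

/// Runs `do_init` at most once, no matter how many threads race into this function.
fn init_once(do_init: impl FnOnce()) {
    match STATE.compare_exchange(
        UNINITIALIZED,
        INITIALIZING,
        Ordering::AcqRel,
        Ordering::Acquire,
    ) {
        // We won the race: run the initializer, then publish completion.
        Ok(_) => {
            do_init();
            STATE.store(INITIALIZED, Ordering::Release);
        }
        // Someone else is initializing: spin until they publish completion.
        Err(INITIALIZING) => {
            while STATE.load(Ordering::Acquire) == INITIALIZING {
                core::hint::spin_loop();
            }
        }
        // Already initialized: nothing to do.
        Err(_) => {}
    }
}
```

The macro version additionally maps the raw `u8` back into its `InitState` enum before matching, which is why it carries the `From<u8>` impl and the `unreachable!()` arms.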
diff --git a/src/lib.rs b/src/lib.rs
index fff1325..9210a29 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -12,8 +12,13 @@
 #[cfg(all(not(feature = "alloc"), not(feature = "heapless")))]
 compile_error!("Either the `alloc` or `heapless` feature must be enabled");
 
-#[cfg(feature = "alloc")]
-extern crate alloc;
+// Re-exports for the macros.
+#[doc(hidden)]
+pub mod reexports {
+    #[cfg(feature = "alloc")]
+    pub extern crate alloc;
+    pub use paste;
+}
 
 pub mod vec;
 pub use vec::Vec;
@@ -24,6 +29,12 @@ pub use string::String;
 mod error;
 pub use error::{Error, Result};
 
+#[cfg(any(
+    all(feature = "portable-atomic", feature = "heapless"),
+    feature = "alloc"
+))]
+pub mod boxed;
+
 #[cfg(test)]
 mod tests {
     #[cfg(feature = "serde")]
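The `reexports` module added in `src/lib.rs` exists because `box_pool!` expands inside the caller's crate: paths like `$crate::reexports::alloc::boxed::Box` and `$crate::reexports::paste::paste!` resolve back through mayheap itself, so callers don't have to declare `paste` or `extern crate alloc` on their own. A hypothetical downstream crate (names invented, default `alloc` feature assumed) therefore only needs mayheap in scope:

```rust
// Hypothetical caller: no `extern crate alloc;` and no `paste` dependency here; the
// macro expansion reaches both through `mayheap::reexports`.
use mayheap::boxed::BoxPool;

mayheap::box_pool!(MessagePool: [u8; 64], 8);

fn store(payload: [u8; 64]) {
    // `alloc()` comes from the `BoxPool` impl that the macro generated above.
    let boxed = MessagePool.alloc(payload).unwrap();
    assert_eq!(*boxed, payload);
}
```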
diff --git a/src/string.rs b/src/string.rs
index 6f3f78e..3dc2a36 100644
--- a/src/string.rs
+++ b/src/string.rs
@@ -5,7 +5,7 @@ use core::{cmp::Ordering, fmt, hash, iter, ops, str};
 use crate::Vec;
 
 #[cfg(feature = "alloc")]
-type Inner<const N: usize> = alloc::string::String;
+type Inner<const N: usize> = crate::reexports::alloc::string::String;
 #[cfg(not(feature = "alloc"))]
 type Inner<const N: usize> = heapless::String<N>;
 
@@ -357,7 +357,9 @@ macro_rules! impl_try_from_num {
         fn try_from(s: $num) -> Result<Self, Self::Error> {
             #[cfg(feature = "alloc")]
             {
-                Ok(Self(alloc::string::ToString::to_string(&s)))
+                Ok(Self(crate::reexports::alloc::string::ToString::to_string(
+                    &s,
+                )))
             }
             #[cfg(not(feature = "alloc"))]
             {
diff --git a/src/vec.rs b/src/vec.rs
index 8dd0dd3..1415e01 100644
--- a/src/vec.rs
+++ b/src/vec.rs
@@ -5,7 +5,7 @@
 use core::{cmp::Ordering, fmt, hash, iter::FromIterator, ops, slice};
 
 #[cfg(feature = "alloc")]
-pub(crate) type Inner<T, const N: usize> = alloc::vec::Vec<T>;
+pub(crate) type Inner<T, const N: usize> = crate::reexports::alloc::vec::Vec<T>;
 #[cfg(not(feature = "alloc"))]
 pub(crate) type Inner<T, const N: usize> = heapless::Vec<T, N>;
 
@@ -437,37 +437,18 @@ impl<T, const N: usize> FromIterator<T> for Vec<T, N> {
 #[derive(Clone, Debug)]
 pub struct IntoIter<T, const N: usize> {
     #[cfg(feature = "alloc")]
-    iter: alloc::vec::IntoIter<T>,
+    iter: crate::reexports::alloc::vec::IntoIter<T>,
     // FIXME: Once the fix for https://github.com/rust-embedded/heapless/issues/530 is released. We
     // can turn this into a wrapper around `heapless::vec::IntoIter`.
     #[cfg(not(feature = "alloc"))]
-    vec: heapless::Vec<T, N>,
-    #[cfg(not(feature = "alloc"))]
-    next: usize,
+    iter: heapless::vec::IntoIter<T, N>,
 }
 
 impl<T, const N: usize> Iterator for IntoIter<T, N> {
     type Item = T;
     #[inline]
     fn next(&mut self) -> Option<Self::Item> {
-        #[cfg(feature = "alloc")]
-        {
-            self.iter.next()
-        }
-        #[cfg(not(feature = "alloc"))]
-        {
-            if self.next < self.vec.len() {
-                // SAFETY:
-                // * `next` is always less than `len`.
-                // * `<*const T>::add` takes `size_of::<T>()` into account so the pointer returned
-                //   by it will be aligned correctly (which is assumed by `ptr::read`).
-                let item = unsafe { (self.vec.as_ptr().add(self.next)).read() };
-                self.next += 1;
-                Some(item)
-            } else {
-                None
-            }
-        }
+        self.iter.next()
     }
 }
 
@@ -478,24 +459,7 @@ impl<T, const N: usize> IntoIterator for Vec<T, N> {
     #[inline]
     fn into_iter(self) -> Self::IntoIter {
         IntoIter {
-            #[cfg(feature = "alloc")]
             iter: self.0.into_iter(),
-            #[cfg(not(feature = "alloc"))]
-            vec: self.0,
-            #[cfg(not(feature = "alloc"))]
-            next: 0,
-        }
-    }
-}
-
-#[cfg(not(feature = "alloc"))]
-impl<T, const N: usize> Drop for IntoIter<T, N> {
-    fn drop(&mut self) {
-        unsafe {
-            // Drop all the elements that have not been moved out of vec
-            core::ptr::drop_in_place(&mut self.vec.as_mut_slice()[self.next..]);
-            // Prevent dropping of other elements
-            self.vec.set_len(0);
         }
     }
 }
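With the `vec.rs` change, `IntoIter` is now a thin wrapper over the backend's own iterator in both configurations, instead of hand-rolled unsafe indexing plus a manual `Drop` for the heapless case. A small sanity check of the behaviour that matters, written only against the `FromIterator`/`IntoIterator` impls touched here (element type and capacity are arbitrary):

```rust
use mayheap::Vec;

fn main() {
    // Collected through the `FromIterator` impl; capacity 4 easily holds three items.
    let v: Vec<u32, 4> = (1u32..=3).collect();

    // Consumed through the new `IntoIter` wrapper.
    let mut it = v.into_iter();
    assert_eq!(it.next(), Some(1));
    assert_eq!(it.next(), Some(2));

    // Dropping a partially consumed iterator must still drop the remaining element;
    // that duty now falls to the inner `alloc`/`heapless` iterator's own `Drop`
    // rather than the hand-written impl this diff removes.
    drop(it);
}
```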