Skip to content

✨ Add boxed module #6

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Draft
wants to merge 5 commits into
base: main
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
20 changes: 17 additions & 3 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

5 changes: 4 additions & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -12,12 +12,15 @@ default = ["alloc"]
alloc = ["serde?/alloc"]
heapless = ["dep:heapless"]
serde = ["dep:serde", "heapless?/serde"]
portable-atomic = ["dep:portable-atomic", "heapless?/portable-atomic"]

[dependencies]
heapless = { version = "0.8", optional = true }
heapless = { git = "https://github.com/zeenix/heapless", rev = "c23f5d60", optional = true }
serde = { version = "1", default-features = false, features = [
"derive",
], optional = true }
portable-atomic = { version = "1", optional = true }
paste = "1.0"

[dev-dependencies]
serde_json = "1"
200 changes: 200 additions & 0 deletions src/boxed.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,200 @@
//! Abstraction over `heapless::pool::boxed` and `alloc::boxed`.
//!
//! The API is modeled after `heapless::pool::boxed` but simpler. This module is only available
//! when either:
//!
//! - `alloc` feature is enabled, or
//! - `heapless` and `portable-atomic` features are enabled.
//!
//! # Usage
//!
//! ```
//! use mayheap::{box_pool, boxed::{BoxPool, Box}};
//!
//! // Create a pool for u32 type with a capacity of 2.
//! box_pool!(MyBoxPool: u32, 2);
//!
//! // Allocate a new boxed value from the pool.
//! let mut boxed = MyBoxPool.alloc(42).unwrap();
//! assert_eq!(*boxed, 42);
//!
//! // Let's mutate the boxed value.
//! *boxed = 100;
//! assert_eq!(*boxed, 100);
//!
//! // Let's allocate more.
//! let _boxed = MyBoxPool.alloc(43).unwrap();
//!
//! #[cfg(feature = "alloc")]
//! {
//! // This will work fine since capacity (which is 2 here) is irrelevant when using alloc.
//! let boxed = MyBoxPool.alloc(44).unwrap();
//! assert_eq!(*boxed, 44);
//! }
//! #[cfg(feature = "heapless")]
//! {
//! // This will not.
//! let res = MyBoxPool.alloc(45);
//! assert_eq!(res, Err(45));
//! }
//! ```

use core::ops::{Deref, DerefMut};

/// A singleton that manages pool::boxed::Box-es.
///
/// Don't implement this trait directly. Use [`crate::box_pool`] to create an implementation.
pub trait BoxPool {
    /// The data type managed by the memory pool.
    type Data;
    /// The implementation-specific type of the boxed value.
    ///
    /// Depending on the enabled features, this is either `alloc::boxed::Box<Data>` or a
    /// `heapless::pool::boxed::Box`. Either way it must dereference to [`Self::Data`].
    type BoxedValue: DerefMut<Target = Self::Data>;

    /// Allocates a new boxed value from the pool.
    ///
    /// On failure (the `heapless` backend's pool is exhausted), the input `value` is handed
    /// back in the `Err` variant so the caller can recover it.
    fn alloc(&self, value: Self::Data) -> Result<Box<Self>, Self::Data>
    where
        Self: Sized;
}

/// A boxed value managed by a [`BoxPool`].
///
/// A thin newtype over the backend-specific box ([`BoxPool::BoxedValue`]) that dereferences
/// to [`BoxPool::Data`].
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct Box<P: BoxPool>(P::BoxedValue);

impl<P: BoxPool> Box<P> {
    /// Wraps an already-allocated backend box value.
    ///
    /// Note: despite appearances this does *not* allocate — the backend value passed in is
    /// already allocated. Use [`BoxPool::alloc`] to allocate; this constructor exists for the
    /// [`crate::box_pool`] macro expansions to wrap the backend's box.
    pub fn new(value: P::BoxedValue) -> Self {
        Self(value)
    }
}

impl<P: BoxPool> Deref for Box<P> {
    type Target = P::Data;

    /// Borrows the managed data through the backend box.
    fn deref(&self) -> &Self::Target {
        &*self.0
    }
}

impl<P: BoxPool> DerefMut for Box<P> {
    /// Mutably borrows the managed data through the backend box.
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut *self.0
    }
}

/// Creates a new [`crate::boxed::BoxPool`] singleton named `$name` that manages boxed values
/// of type `$ty`.
///
/// With the `alloc` backend, allocation is heap-backed and effectively unbounded: `$capacity`
/// is accepted (for call-site parity with the `heapless` backend) but otherwise ignored.
#[cfg(feature = "alloc")]
#[macro_export]
macro_rules! box_pool {
    ($name:ident: $ty:ty, $capacity:expr) => {
        #[derive(Debug, Clone, PartialEq, Eq)]
        pub struct $name;

        impl $crate::boxed::BoxPool for $name {
            type Data = $ty;
            type BoxedValue = $crate::reexports::alloc::boxed::Box<$ty>;

            /// Infallible with the `alloc` backend; the heap is the only limit.
            fn alloc(&self, value: Self::Data) -> Result<$crate::boxed::Box<Self>, Self::Data> {
                Ok($crate::boxed::Box::new(
                    $crate::reexports::alloc::boxed::Box::new(value),
                ))
            }
        }

        // Evaluate `$capacity` so callers don't get "unused" warnings for it. An unnamed
        // constant needs no generated identifier (so no `paste`), cannot collide with any
        // caller name, and compiles away to nothing.
        const _: () = {
            let _ = $capacity;
        };
    };
}

/// Creates a new BoxPool singleton with the given $name that manages the specified $data_type
///
/// `heapless` backend: boxes come from a fixed-size, lazily-initialized static pool of
/// `$capacity` blocks. `alloc` returns `Err(value)` once the pool is exhausted.
#[cfg(not(feature = "alloc"))]
#[macro_export]
macro_rules! box_pool {
    ($name:ident: $ty:ty, $capacity:expr) => {
        $crate::reexports::paste::paste! {
            // The underlying heapless pool singleton, named `<$name>Pool`.
            // NOTE(review): `heapless` and `portable_atomic` are referenced by bare crate
            // name here rather than via `$crate::reexports` (unlike the `alloc` variant of
            // this macro), so the *calling* crate must depend on both directly — TODO
            // confirm this is intended, or re-export them like `alloc`/`paste`.
            heapless::box_pool!([<$name Pool>]: $ty);

            #[derive(Debug, Clone, PartialEq, Eq)]
            pub struct $name;

            impl $crate::boxed::BoxPool for $name {
                type Data = $ty;
                type BoxedValue = heapless::pool::boxed::Box<[<$name Pool>]>;

                /// Allocates from the pool, lazily seeding it with `$capacity` blocks first.
                fn alloc(&self, value: Self::Data) -> Result<$crate::boxed::Box<Self>, $ty> {
                    // Ensure the pool's backing storage is donated before first use.
                    $name.init();

                    [<$name Pool>].alloc(value).map($crate::boxed::Box::new)
                }
            }

            impl $name {
                /// One-time, thread-safe initialization of the pool's backing storage.
                ///
                /// Drives a three-state machine (Uninitialized -> Initializing ->
                /// Initialized) through an atomic CAS: exactly one caller wins and donates
                /// the blocks; callers that observe `Initializing` spin until the winner
                /// stores `Initialized`, so nobody can allocate from a half-seeded pool.
                fn init(&self) {
                    use portable_atomic::{AtomicU8, Ordering};
                    use heapless::pool::boxed::BoxBlock;

                    static STATE: AtomicU8 = AtomicU8::new(InitState::Uninitialized as u8);

                    match STATE
                        .compare_exchange(
                            InitState::Uninitialized as u8,
                            InitState::Initializing as u8,
                            Ordering::AcqRel,
                            Ordering::Acquire,
                        )
                        .map(|state| state.into())
                        .map_err(|state| state.into())
                    {
                        Ok(InitState::Uninitialized) => {
                            // We won the race, initialize.
                            let blocks: &'static mut [BoxBlock<$ty>] = {
                                #[allow(clippy::declare_interior_mutable_const)]
                                const BLOCK: BoxBlock<$ty> = BoxBlock::new();
                                static mut BLOCKS: [BoxBlock<$ty>; $capacity] = [BLOCK; $capacity];
                                // SAFETY: the CAS above guarantees exactly one caller ever
                                // reaches this branch, so this is the only mutable
                                // reference to `BLOCKS` that is ever created.
                                unsafe { core::ptr::addr_of_mut!(BLOCKS).as_mut().unwrap() }
                            };
                            for block in blocks {
                                [<$name Pool>].manage(block);
                            }
                            STATE.store(InitState::Initialized as u8, Ordering::Release);
                        }
                        Err(InitState::Initializing) => {
                            // Someone else is initializing, wait.
                            while STATE.load(Ordering::Acquire) == InitState::Initializing as u8 {
                                core::hint::spin_loop();
                            }
                        }
                        Err(InitState::Initialized) => {
                            // Already initialized.
                        }
                        // All other states should never happen.
                        _ => unreachable!(),
                    }

                    // Function-local items: scoping these to `init` gives each macro
                    // expansion its own private state-machine types.
                    #[repr(u8)]
                    #[derive(PartialEq)]
                    enum InitState {
                        Uninitialized = 0,
                        Initializing = 1,
                        Initialized = 2,
                    }

                    impl From<u8> for InitState {
                        fn from(value: u8) -> Self {
                            match value {
                                0 => InitState::Uninitialized,
                                1 => InitState::Initializing,
                                2 => InitState::Initialized,
                                _ => unreachable!(),
                            }
                        }
                    }
                }
            }
        }
    };
}
15 changes: 13 additions & 2 deletions src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,8 +12,13 @@
#[cfg(all(not(feature = "alloc"), not(feature = "heapless")))]
compile_error!("Either the `alloc` or `heapless` feature must be enabled");

#[cfg(feature = "alloc")]
extern crate alloc;
// Re-exports for the macros.
#[doc(hidden)]
pub mod reexports {
#[cfg(feature = "alloc")]
pub extern crate alloc;
pub use paste;
}

pub mod vec;
pub use vec::Vec;
Expand All @@ -24,6 +29,12 @@ pub use string::String;
mod error;
pub use error::{Error, Result};

#[cfg(any(
all(feature = "portable-atomic", feature = "heapless"),
feature = "alloc"
))]
pub mod boxed;

#[cfg(test)]
mod tests {
#[cfg(feature = "serde")]
Expand Down
6 changes: 4 additions & 2 deletions src/string.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ use core::{cmp::Ordering, fmt, hash, iter, ops, str};
use crate::Vec;

#[cfg(feature = "alloc")]
type Inner<const N: usize> = alloc::string::String;
type Inner<const N: usize> = crate::reexports::alloc::string::String;
#[cfg(not(feature = "alloc"))]
type Inner<const N: usize> = heapless::String<N>;

Expand Down Expand Up @@ -357,7 +357,9 @@ macro_rules! impl_try_from_num {
fn try_from(s: $num) -> Result<Self, Self::Error> {
#[cfg(feature = "alloc")]
{
Ok(Self(alloc::string::ToString::to_string(&s)))
Ok(Self(crate::reexports::alloc::string::ToString::to_string(
&s,
)))
}
#[cfg(not(feature = "alloc"))]
{
Expand Down
44 changes: 4 additions & 40 deletions src/vec.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
use core::{cmp::Ordering, fmt, hash, iter::FromIterator, ops, slice};

#[cfg(feature = "alloc")]
pub(crate) type Inner<T, const N: usize> = alloc::vec::Vec<T>;
pub(crate) type Inner<T, const N: usize> = crate::reexports::alloc::vec::Vec<T>;
#[cfg(not(feature = "alloc"))]
pub(crate) type Inner<T, const N: usize> = heapless::Vec<T, N>;

Expand Down Expand Up @@ -437,37 +437,18 @@ impl<T, const N: usize> FromIterator<T> for Vec<T, N> {
#[derive(Clone, Debug)]
pub struct IntoIter<T, const N: usize> {
#[cfg(feature = "alloc")]
iter: alloc::vec::IntoIter<T>,
iter: crate::reexports::alloc::vec::IntoIter<T>,
// FIXME: Once the fix for https://github.com/rust-embedded/heapless/issues/530 is released. We
// can turn this into a wrapper around `heapless::vec::IntoIter`.
#[cfg(not(feature = "alloc"))]
vec: heapless::Vec<T, N>,
#[cfg(not(feature = "alloc"))]
next: usize,
iter: heapless::vec::IntoIter<T, N, usize>,
}

impl<T, const N: usize> Iterator for IntoIter<T, N> {
type Item = T;
#[inline]
fn next(&mut self) -> Option<Self::Item> {
#[cfg(feature = "alloc")]
{
self.iter.next()
}
#[cfg(not(feature = "alloc"))]
{
if self.next < self.vec.len() {
// SAFETY:
// * `next` is always less than `len`.
// * `<*const T>::add` takes `size_of::<T>()` into account so the pointer returned
// by it will be aligned correctly (which is assumed by `ptr::read`).
let item = unsafe { (self.vec.as_ptr().add(self.next)).read() };
self.next += 1;
Some(item)
} else {
None
}
}
self.iter.next()
}
}

Expand All @@ -478,24 +459,7 @@ impl<T, const N: usize> IntoIterator for Vec<T, N> {
#[inline]
fn into_iter(self) -> Self::IntoIter {
IntoIter {
#[cfg(feature = "alloc")]
iter: self.0.into_iter(),
#[cfg(not(feature = "alloc"))]
vec: self.0,
#[cfg(not(feature = "alloc"))]
next: 0,
}
}
}

#[cfg(not(feature = "alloc"))]
impl<T, const N: usize> Drop for IntoIter<T, N> {
fn drop(&mut self) {
unsafe {
// Drop all the elements that have not been moved out of vec
core::ptr::drop_in_place(&mut self.vec.as_mut_slice()[self.next..]);
// Prevent dropping of other elements
self.vec.set_len(0);
}
}
}
Expand Down
Loading