Skip to content

Commit fa57e53

Browse files
committed
Remove the 'object format' API
Within `zerogc-simple`, commit to internal ABI stability. This is necessary for JIT-compiled code and FFI code.
1 parent 348cff7 commit fa57e53

File tree

14 files changed

+468
-826
lines changed

14 files changed

+468
-826
lines changed

libs/context/src/collector.rs

Lines changed: 2 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -12,17 +12,14 @@ use zerogc::{Gc, GcSafe, GcSystem, Trace, GcSimpleAlloc, NullTrace, TraceImmutab
1212

1313
use crate::{CollectorContext};
1414
use crate::state::{CollectionManager, RawContext};
15-
use std::ffi::c_void;
16-
use zerogc::format::{ObjectFormat, GcLayoutInternals};
1715

1816
/// A specific implementation of a collector
19-
pub unsafe trait RawCollectorImpl: GcLayoutInternals + 'static + Sized {
17+
pub unsafe trait RawCollectorImpl: 'static + Sized {
2018
/// A dynamic pointer to a `Trace` root
2119
///
2220
/// The simple collector implements this as
2321
/// a trait object pointer.
2422
type DynTracePtr: Copy + Debug + 'static;
25-
type Fmt: ObjectFormat<Self>;
2623

2724
/// A pointer to this collector
2825
///
@@ -48,7 +45,7 @@ pub unsafe trait RawCollectorImpl: GcLayoutInternals + 'static + Sized {
4845
where 'gc: 'a, T: GcSafe + ?Sized + 'gc;
4946

5047
/// Convert the specified value into a dyn pointer
51-
unsafe fn create_dyn_pointer<T: Trace>(&self, t: *mut T) -> Self::DynTracePtr;
48+
unsafe fn as_dyn_trace_pointer<T: Trace>(t: *mut T) -> Self::DynTracePtr;
5249

5350
/// Initialize an instance of the collector
5451
///

libs/context/src/handle.rs

Lines changed: 15 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -11,18 +11,15 @@ use alloc::vec::Vec;
1111
use zerogc::{Trace, GcSafe, GcErase, GcRebrand, GcVisitor, NullTrace, TraceImmutable, GcHandleSystem, GcBindHandle};
1212
use crate::{Gc, WeakCollectorRef, CollectorId, CollectorContext, CollectorRef, CollectionManager};
1313
use crate::collector::RawCollectorImpl;
14-
use std::ffi::c_void;
15-
use crate::utils::AtomicCell;
16-
use zerogc::format::ObjectFormat;
1714

1815
const INITIAL_HANDLE_CAPACITY: usize = 64;
1916

2017
/// A [RawCollectorImpl] that supports handles
2118
pub unsafe trait RawHandleImpl: RawCollectorImpl {
2219
/// Type information
23-
type TypeInfo: Sized + Copy;
20+
type TypeInfo: Sized;
2421

25-
fn type_info_for_val<T: GcSafe>(&self, val: &T) -> Self::TypeInfo;
22+
fn type_info_of<T: GcSafe>() -> &'static Self::TypeInfo;
2623

2724
fn handle_list(&self) -> &GcHandleList<Self>;
2825
}
@@ -235,7 +232,7 @@ impl<C: RawHandleImpl> GcHandleList<C> {
235232
/// Now that's behind a layer of abstraction,
236233
/// the unsafety has technically been moved to the caller.
237234
pub unsafe fn trace<F, E>(&mut self, mut visitor: F) -> Result<(), E>
238-
where F: FnMut(<C::Fmt as ObjectFormat<C>>::DynObject, &C::TypeInfo) -> Result<(), E> {
235+
where F: FnMut(*mut (), &C::TypeInfo) -> Result<(), E> {
239236
/*
240237
* TODO: This fence seems unnecessary since we should
241238
* already have exclusive access.....
@@ -357,14 +354,15 @@ pub struct GcRawHandle<C: RawHandleImpl> {
357354
/// Refers to the underlying value of this handle.
358355
///
359356
/// If it's null, it's invalid, and is actually
360-
/// a [FreedHandle].
357+
/// a freed handle
361358
///
362359
/// The underlying value can only be safely accessed
363360
/// if there isn't a collection in progress
364361
value: AtomicPtr<()>,
365362
/// I think this should be protected by the other atomic
366363
/// accesses. Regardless, I'll put it in an AtomicPtr anyways.
367-
pub(crate) type_info: AtomicCell<C::TypeInfo>,
364+
// TODO: Encapsulate
365+
pub(crate) type_info: AtomicPtr<C::TypeInfo>,
368366
/// The reference count to the handle
369367
///
370368
/// If this is zero the value can be freed
@@ -381,13 +379,9 @@ impl<C: RawHandleImpl> GcRawHandle<C> {
381379
/// - It is assumed that the appropriate atomic fences (if any)
382380
/// have already been applied (TODO: Don't we have exclusive access?)
383381
unsafe fn trace_inner<F, E>(&self, trace: &mut F) -> Result<(), E>
384-
where F: FnMut(<C::Fmt as ObjectFormat<C>>::DynObject, &C::TypeInfo) -> Result<(), E> {
385-
let raw_value = self.value.load(Ordering::Relaxed);
386-
assert_eq!(
387-
std::mem::size_of::<*mut ()>(),
388-
std::mem::size_of::<C::DynTracePtr>(),
389-
);
390-
if raw_value.is_null() {
382+
where F: FnMut(*mut (), &C::TypeInfo) -> Result<(), E> {
383+
let value = self.value.load(Ordering::Relaxed);
384+
if value.is_null() {
391385
debug_assert_eq!(
392386
self.refcnt.load(Ordering::Relaxed),
393387
0
@@ -398,9 +392,8 @@ impl<C: RawHandleImpl> GcRawHandle<C> {
398392
self.refcnt.load(Ordering::Relaxed),
399393
0
400394
);
401-
let value = C::Fmt::untyped_object_from_raw(raw_value as *mut c_void);
402-
let type_info = self.type_info.load();
403-
trace(value, &type_info)
395+
let type_info = &*self.type_info.load(Ordering::Relaxed);
396+
trace(value, type_info)
404397
}
405398
}
406399
pub struct GcHandle<T: GcSafe, C: RawHandleImpl> {
@@ -623,7 +616,10 @@ unsafe impl<'gc, 'a, T, C> GcHandleSystem<'gc, 'a, T> for CollectorRef<C>
623616
* the handle!!!
624617
*/
625618
raw.type_info.store(
626-
collector.as_ref().type_info_for_val::<T>(gc.value())
619+
C::type_info_of::<T>()
620+
as *const C::TypeInfo
621+
as *mut C::TypeInfo,
622+
Ordering::Release
627623
);
628624
raw.refcnt.store(1, Ordering::Release);
629625
let weak_collector = collector.weak_ref();

libs/context/src/lib.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@
44
const_fn_trait_bound, // So generics + const fn are unstable, huh?
55
)]
66
#![cfg_attr(not(feature = "std"), no_std)]
7-
//! The implementation of [::zerogc::CollectorContext] that is
7+
//! The implementation of (GcContext)[`::zerogc::GcContext`] that is
88
//! shared among both thread-safe and thread-unsafe code.
99
1010
/*
@@ -134,7 +134,7 @@ impl<C: RawCollectorImpl> CollectorContext<C> {
134134
) -> R {
135135
let old_link = (*(*self.raw).shadow_stack_ptr()).last;
136136
let new_link = ShadowStackLink {
137-
element: (*self.raw).collector().create_dyn_pointer(value),
137+
element: C::as_dyn_trace_pointer(value),
138138
prev: old_link
139139
};
140140
(*(*self.raw).shadow_stack_ptr()).last = &new_link;

libs/context/src/state/nosync.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
//! A simpler implementation of [::zerogc::CollectorContext]
1+
//! A simpler implementation of (GcContext)[`::zerogc::GcContext`]
22
//! that doesn't support multiple threads/contexts.
33
//!
44
//! In exchange, there is no locking :)

libs/context/src/utils.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@ use core::cell::Cell;
99
#[macro_export]
1010
macro_rules! field_offset {
1111
($target:ty, $($field:ident).+) => {
12-
(core::ptr::addr_of!((*(std::ptr::null() as *mut $target))$(.$field)*) as usize)
12+
unsafe { (core::ptr::addr_of!((*(std::ptr::null_mut::<$target>()))$(.$field)*) as usize) }
1313
};
1414
}
1515

libs/simple/examples/binary_trees.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -69,7 +69,7 @@ fn main() {
6969

7070
let long_lived_tree = bottom_up_tree(&gc, max_depth);
7171

72-
let (long_lived_tree, ()) = safepoint_recurse!(gc, long_lived_tree, |gc, long_lived_tree| {
72+
let (long_lived_tree, ()) = safepoint_recurse!(gc, long_lived_tree, |gc, _long_lived_tree| {
7373
(min_depth / 2..max_depth / 2 + 1).into_iter().for_each(|half_depth| {
7474
let depth = half_depth * 2;
7575
let iterations = 1 << ((max_depth - depth + min_depth) as u32);

libs/simple/src/alloc.rs

Lines changed: 37 additions & 34 deletions
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,6 @@ use parking_lot::Mutex;
1414
use std::cell::RefCell;
1515

1616
use zerogc_context::utils::AtomicCell;
17-
use zerogc::format::{ObjectFormat, OpenAllocObjectFormat};
1817

1918
/// The minimum size of supported memory (in words)
2019
///
@@ -30,22 +29,25 @@ pub const ARENA_ELEMENT_ALIGN: usize = ARENA_HEADER_LAYOUT.align();
3029
/// The size of headers in the arena
3130
///
3231
/// This is the same regardless of the underlying object format
33-
const ARENA_HEADER_LAYOUT: Layout = Layout::new::<DummyGcHeader>();
32+
const ARENA_HEADER_LAYOUT: Layout = Layout::new::<GcHeader>();
3433

35-
use super::DummyGcHeader;
36-
use crate::{RawSimpleCollector, RawObjectFormat};
34+
use crate::layout::{GcHeader};
3735

3836
#[inline]
39-
pub const fn small_object_size<T>() -> usize {
37+
pub const fn small_object_size(layout: Layout) -> usize {
4038
let header_layout = ARENA_HEADER_LAYOUT;
4139
header_layout.size() + header_layout
42-
.padding_needed_for(std::mem::align_of::<T>())
43-
+ mem::size_of::<T>()
40+
.padding_needed_for(layout.align())
41+
+ layout.size()
42+
}
43+
#[inline]
44+
pub const fn fits_small_object(layout: Layout) -> bool {
45+
small_object_size(layout) <= MAXIMUM_SMALL_WORDS * std::mem::size_of::<usize>()
46+
&& layout.align() <= ARENA_ELEMENT_ALIGN
4447
}
4548
#[inline]
4649
pub const fn is_small_object<T>() -> bool {
47-
small_object_size::<T>() <= MAXIMUM_SMALL_WORDS * 8
48-
&& mem::align_of::<T>() <= ARENA_ELEMENT_ALIGN
50+
fits_small_object(Layout::new::<T>())
4951
}
5052

5153
pub(crate) struct Chunk {
@@ -121,7 +123,7 @@ pub struct FreeSlot {
121123
#[repr(C)]
122124
pub(crate) union MaybeFreeSlot {
123125
pub free: FreeSlot,
124-
pub header: DummyGcHeader,
126+
pub header: GcHeader,
125127
}
126128

127129
impl MaybeFreeSlot {
@@ -199,7 +201,7 @@ impl ArenaState {
199201
self.current_chunk.store(ptr);
200202
}
201203
#[inline]
202-
fn alloc(&self, element_size: usize) -> NonNull<DummyGcHeader> {
204+
fn alloc(&self, element_size: usize) -> NonNull<GcHeader> {
203205
unsafe {
204206
let chunk = &*self.current_chunk().as_ptr();
205207
match chunk.try_alloc(element_size) {
@@ -211,7 +213,7 @@ impl ArenaState {
211213

212214
#[cold]
213215
#[inline(never)]
214-
fn alloc_fallback(&self, element_size: usize) -> NonNull<DummyGcHeader> {
216+
fn alloc_fallback(&self, element_size: usize) -> NonNull<GcHeader> {
215217
let mut chunks = self.lock_chunks();
216218
// Now that we hold the lock, check the current chunk again
217219
unsafe {
@@ -227,7 +229,7 @@ impl ArenaState {
227229
self.force_current_chunk(NonNull::from(&**chunks.last().unwrap()));
228230
self.current_chunk().as_ref()
229231
.try_alloc(element_size).unwrap()
230-
.cast::<DummyGcHeader>()
232+
.cast::<GcHeader>()
231233
}
232234
}
233235
}
@@ -249,7 +251,7 @@ impl FreeList {
249251
self.next.store(next)
250252
}
251253
#[inline]
252-
fn take_free(&self) -> Option<NonNull<DummyGcHeader>> {
254+
fn take_free(&self) -> Option<NonNull<GcHeader>> {
253255
loop {
254256
let next_free = match self.next.load() {
255257
Some(free) => free,
@@ -271,27 +273,25 @@ impl FreeList {
271273
}
272274
}
273275

274-
pub struct SmallArena<Fmt: RawObjectFormat> {
276+
pub struct SmallArena {
275277
pub(crate) element_size: usize,
276278
state: ArenaState,
277-
pub(crate) free: FreeList,
278-
format: &'static Fmt
279+
pub(crate) free: FreeList
279280
}
280-
impl<Fmt: RawObjectFormat> SmallArena<Fmt> {
281+
impl SmallArena {
281282
#[cold] // Initialization is the slow path
282-
fn with_words(format: &'static Fmt, num_words: usize) -> SmallArena<Fmt> {
283+
fn with_words(num_words: usize) -> SmallArena {
283284
assert!(num_words >= MINIMUM_WORDS);
284285
let element_size = num_words * mem::size_of::<usize>();
285286
assert!(INITIAL_SIZE >= element_size * 2);
286287
let chunks = vec![Chunk::alloc(INITIAL_SIZE)];
287288
SmallArena {
288289
state: ArenaState::new(chunks),
289290
element_size, free: Default::default(),
290-
format
291291
}
292292
}
293293
#[inline]
294-
pub(crate) fn alloc(&self) -> NonNull<DummyGcHeader> {
294+
pub(crate) fn alloc(&self) -> NonNull<GcHeader> {
295295
// Check the free list
296296
if let Some(free) = self.free.take_free() {
297297
free
@@ -312,11 +312,11 @@ impl<Fmt: RawObjectFormat> SmallArena<Fmt> {
312312
}
313313
}
314314
macro_rules! arena_match {
315-
($format:expr, $arenas:expr, $target:ident, max = $max:expr; $($size:pat => $num_words:literal @ $idx:expr),*) => {
315+
($arenas:expr, $target:ident, max = $max:expr; $($size:pat => $num_words:literal @ $idx:expr),*) => {
316316
Some(match $target {
317317
$($size => $arenas[$idx].get_or_init(|| {
318318
assert_eq!(SMALL_ARENA_SIZES[$idx], $num_words);
319-
SmallArena::with_words($format, $num_words)
319+
SmallArena::with_words($num_words)
320320
}),)*
321321
_ => {
322322
assert!($target > $max);
@@ -330,16 +330,16 @@ const SMALL_ARENA_SIZES: [usize; NUM_SMALL_ARENAS] = [
330330
10, 12, 14, 16,
331331
20, 24, 28, 32
332332
];
333-
pub struct SmallArenaList<Fmt: RawObjectFormat> {
333+
pub struct SmallArenaList {
334334
// NOTE: Internally boxed to avoid bloating main struct
335-
arenas: Box<[OnceCell<SmallArena<Fmt>>; NUM_SMALL_ARENAS]>
335+
arenas: Box<[OnceCell<SmallArena>; NUM_SMALL_ARENAS]>
336336
}
337-
impl<Fmt: RawObjectFormat> SmallArenaList<Fmt> {
337+
impl SmallArenaList {
338338
pub fn new() -> Self {
339339
// NOTE: Why does writing arrays have to be so difficult:?
340340
unsafe {
341341
let mut arenas: Box<[
342-
MaybeUninit<OnceCell<SmallArena<Fmt>>>;
342+
MaybeUninit<OnceCell<SmallArena>>;
343343
NUM_SMALL_ARENAS
344344
]> = Box::new_uninit().assume_init();
345345
for i in 0..NUM_SMALL_ARENAS {
@@ -348,29 +348,32 @@ impl<Fmt: RawObjectFormat> SmallArenaList<Fmt> {
348348
SmallArenaList {
349349
// NOTE: This is done because I want to explicitly specify types
350350
arenas: mem::transmute::<
351-
Box<[MaybeUninit<OnceCell<SmallArena<Fmt>>>; NUM_SMALL_ARENAS]>,
352-
Box<[OnceCell<SmallArena<Fmt>>; NUM_SMALL_ARENAS]>
351+
Box<[MaybeUninit<OnceCell<SmallArena>>; NUM_SMALL_ARENAS]>,
352+
Box<[OnceCell<SmallArena>; NUM_SMALL_ARENAS]>
353353
>(arenas)
354354
}
355355
}
356356
}
357-
pub fn iter(&self) -> impl Iterator<Item=&SmallArena<Fmt>> + '_ {
357+
pub fn iter(&self) -> impl Iterator<Item=&SmallArena> + '_ {
358358
self.arenas.iter().filter_map(OnceCell::get)
359359
}
360360
#[inline] // This should be constant folded away (size/align is const)
361-
pub fn find<T>(&self) -> Option<&SmallArena<Fmt>> {
361+
pub fn find<T>(&self) -> Option<&SmallArena> {
362362
if std::mem::align_of::<T>() > ARENA_ELEMENT_ALIGN {
363363
return None
364364
}
365+
if !is_small_object::<T>() {
366+
return None
367+
}
365368
// Divide round up
366369
let word_size = mem::size_of::<usize>();
367-
let num_words = (small_object_size::<T>() + (word_size - 1))
370+
let num_words = (small_object_size(Layout::new::<T>()) + (word_size - 1))
368371
/ word_size;
369372
self.find_raw(num_words)
370373
}
371374
#[inline] // We want this constant-folded away......
372-
fn find_raw(&self, num_words: usize) -> Option<&SmallArena<Fmt>> {
373-
arena_match!(self.format,
375+
fn find_raw(&self, num_words: usize) -> Option<&SmallArena> {
376+
arena_match!(
374377
self.arenas, num_words, max = 32;
375378
0..=2 => 2 @ 0,
376379
3 => 3 @ 1,

0 commit comments

Comments
 (0)