@@ -15,6 +15,100 @@ use std::cell::RefCell;
 
 use zerogc_context::utils::AtomicCell;
 
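+/// Whether to enable extra debugging of the internal allocator.
+/// Controlled by the custom `zerogc_simple_debug_alloc` cfg flag,
+/// typically set via `RUSTFLAGS="--cfg zerogc_simple_debug_alloc"`.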
+const DEBUG_INTERNAL_ALLOCATOR: bool = cfg!(zerogc_simple_debug_alloc);
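+/// Helpers for detecting memory corruption in the internal allocator.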
+mod debug {
+    pub const PADDING: u32 = 0xDEADBEAF;
+    pub const UNINIT: u32 = 0xCAFEBABE;
+    pub const PADDING_TIMES: usize = 16;
+    pub const PADDING_BYTES: usize = PADDING_TIMES * 4;
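+    /// Write the `PADDING` sentinel into the guard regions immediately
+    /// before and after the block at `ptr`, so that writes which
+    /// overrun or underrun the block can be detected later.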
+    pub unsafe fn pad_memory_block(ptr: *mut u8, size: usize) {
+        assert!(super::DEBUG_INTERNAL_ALLOCATOR);
+        let start = ptr.sub(PADDING_BYTES);
+        for i in 0..PADDING_TIMES {
+            (start as *mut u32).add(i).write(PADDING);
+        }
+        let end = ptr.add(size);
+        for i in 0..PADDING_TIMES {
+            (end as *mut u32).add(i).write(PADDING);
+        }
+    }
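+    /// Fill the block with the `UNINIT` sentinel (using `0xF0` for any
+    /// trailing bytes that don't fill a whole `u32`), making reads of
+    /// freed or uninitialized memory easier to spot.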
+    pub unsafe fn mark_memory_uninit(ptr: *mut u8, size: usize) {
+        assert!(super::DEBUG_INTERNAL_ALLOCATOR);
+        let (blocks, leftover) = (size / 4, size % 4);
+        for i in 0..blocks {
+            (ptr as *mut u32).add(i).write(UNINIT);
+        }
+        let leftover_ptr = ptr.add(blocks * 4);
+        debug_assert_eq!(leftover_ptr.wrapping_add(leftover), ptr.add(size));
+        for i in 0..leftover {
+            leftover_ptr.add(i).write(0xF0);
+        }
+    }
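+    /// Verify that the sentinel padding around the block is intact,
+    /// dumping the surrounding memory region on failure.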
+    pub unsafe fn assert_padded(ptr: *mut u8, size: usize) {
+        assert!(super::DEBUG_INTERNAL_ALLOCATOR);
+        let start = ptr.sub(PADDING_BYTES);
+        let end = ptr.add(size);
+        let start_padding = std::slice::from_raw_parts(
+            start as *const u8 as *const u32,
+            PADDING_TIMES
+        );
+        let region = std::slice::from_raw_parts(
+            ptr as *const u8,
+            size
+        );
+        let end_padding = std::slice::from_raw_parts(
+            end as *const u8 as *const u32,
+            PADDING_TIMES
+        );
+        let print_memory_region = || {
+            use std::fmt::Write;
+            let mut res = String::new();
+            for &val in start_padding {
+                write!(&mut res, "{:X}", val).unwrap();
+            }
+            res.push_str("||");
+            for &b in region {
+                write!(&mut res, "{:X}", b).unwrap();
+            }
+            res.push_str("||");
+            for &val in end_padding {
+                write!(&mut res, "{:X}", val).unwrap();
+            }
+            res
+        };
+        // Check the start padding, from closest to farthest
+        for (idx, &block) in start_padding.iter().rev().enumerate() {
+            if block == PADDING { continue }
+            assert_eq!(
+                block, PADDING,
+                "Unexpected start padding (offset -{}) w/ {}",
+                idx * 4,
+                print_memory_region()
+            );
+        }
+        for (idx, &block) in end_padding.iter().enumerate() {
+            if block == PADDING { continue }
+            assert_eq!(
+                block, PADDING,
+                "Unexpected end padding (offset {}) w/ {}",
+                idx * 4,
+                print_memory_region()
+            );
+        }
+    }
+}
 /// The minimum size of supported memory (in words)
 ///
 /// Since the header takes at least one word,
@@ -73,10 +167,6 @@ impl Chunk {
         }
     }
     #[inline]
-    fn current(&self) -> *mut u8 {
-        self.current.load()
-    }
-    #[inline]
     fn capacity(&self) -> usize {
         self.end as usize - self.start as usize
     }
@@ -200,7 +290,13 @@ pub(crate) struct FreeList {
     next: AtomicCell<Option<NonNull<FreeSlot>>>
 }
 impl FreeList {
-    pub(crate) unsafe fn add_free(&self, free: *mut UnknownHeader) {
+    unsafe fn add_free(&self, free: *mut UnknownHeader, size: usize) {
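+        // In debug mode, verify the padding is still intact and then
+        // poison the freed block before threading it onto the free list.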
+        if DEBUG_INTERNAL_ALLOCATOR {
+            debug::assert_padded(free as *mut u8, size);
+            debug::mark_memory_uninit(free as *mut u8, size);
+        }
         let new_slot = free as *mut FreeSlot;
         let mut next = self.next.load();
         loop {
@@ -214,14 +310,6 @@ impl FreeList {
         }
     }
     #[inline]
-    pub(crate) fn next_free(&self) -> Option<NonNull<FreeSlot>> {
-        self.next.load()
-    }
-    #[inline]
-    pub(crate) unsafe fn set_next_free(&self, next: Option<NonNull<FreeSlot>>) {
-        self.next.store(next)
-    }
-    #[inline]
     fn take_free(&self) -> Option<NonNull<u8>> {
         loop {
             let next_free = match self.next.load() {
@@ -248,7 +336,7 @@ pub struct SmallArena {
 
 impl SmallArena {
     pub(crate) unsafe fn add_free(&self, obj: *mut UnknownHeader) {
-        self.free.add_free(obj)
+        self.free.add_free(obj, self.element_size)
     }
     #[cold] // Initialization is the slow path
     fn with_words(num_words: usize) -> SmallArena {
@@ -266,6 +354,17 @@ impl SmallArena {
         // Check the free list
         if let Some(free) = self.free.take_free() {
            free.cast()
+        } else if DEBUG_INTERNAL_ALLOCATOR {
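+            // Reserve room for the sentinel padding on both sides of
+            // the object and only hand out the middle of the block.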
+            let mem = self.state.alloc(self.element_size + debug::PADDING_BYTES * 2)
+                .as_ptr() as *mut u8;
+            unsafe {
+                let mem = mem.add(debug::PADDING_BYTES);
+                debug::pad_memory_block(mem, self.element_size);
+                debug::mark_memory_uninit(mem, self.element_size);
+                NonNull::new_unchecked(mem).cast()
+            }
         } else {
             self.state.alloc(self.element_size)
         }
@@ -314,9 +413,6 @@ impl SmallArenaList {
             }
         }
     }
-    pub fn iter(&self) -> impl Iterator<Item=&SmallArena> + '_ {
-        self.arenas.iter().filter_map(OnceCell::get)
-    }
     #[inline] // This should hopefully be constant folded away (layout is const)
     pub fn find(&self, layout: Layout) -> Option<&SmallArena> {
         if !fits_small_object(layout) {