@@ -90,6 +90,14 @@ pub enum RefKind {
     Raw,
 }
 
+/// What kind of access is being performed?
+#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
+pub enum AccessKind {
+    Read,
+    Write,
+    Dealloc,
+}
+
 /// Extra global state in the memory, available to the memory access hooks
 #[derive(Debug)]
 pub struct BarrierTracking {
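For context, a minimal standalone sketch (not part of the diff; the `describe` helper is hypothetical) of why these derives matter: `Copy` lets `AccessKind` be passed by value, and `PartialEq`/`Eq` enable the `kind == AccessKind::Read` comparisons used in the hunks below.

```rust
/// Stand-in for the enum added above; `describe` is a hypothetical helper.
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
pub enum AccessKind {
    Read,
    Write,
    Dealloc,
}

fn describe(kind: AccessKind) -> &'static str {
    match kind {
        AccessKind::Read => "read",
        AccessKind::Write => "write",
        AccessKind::Dealloc => "deallocation",
    }
}

fn main() {
    // `Copy` allows passing `kind` by value without a clone.
    for &kind in &[AccessKind::Read, AccessKind::Write, AccessKind::Dealloc] {
        // `PartialEq`/`Eq` allow direct comparisons like this one.
        let is_read = kind == AccessKind::Read;
        println!("{:?}: a {} access (read: {})", kind, describe(kind), is_read);
    }
}
```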
@@ -221,13 +229,13 @@ impl<'tcx> Stack {
     fn access(
         &mut self,
         bor: Borrow,
-        is_write: bool,
+        kind: AccessKind,
         barrier_tracking: &BarrierTracking,
     ) -> EvalResult<'tcx> {
         // Check if we can match the frozen "item".
         // Not possible on writes!
         if self.is_frozen() {
-            if !is_write {
+            if kind == AccessKind::Read {
                 // When we are frozen, we just accept all reads. No harm in this.
                 // The deref already checked that `Uniq` items are in the stack, and that
                 // the location is frozen if it should be.
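A reduced sketch of this frozen fast path, using simplified stand-in types rather than Miri's real `Stack`: reads of a frozen location are accepted outright, while writes and deallocations unfreeze and fall through to the stack walk.

```rust
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum AccessKind { Read, Write, Dealloc }

/// Simplified stand-in for Miri's per-location stack.
struct Stack {
    frozen_since: Option<u64>, // "timestamp" at which the location was frozen
}

impl Stack {
    fn is_frozen(&self) -> bool {
        self.frozen_since.is_some()
    }

    fn access(&mut self, kind: AccessKind) {
        if self.is_frozen() && kind == AccessKind::Read {
            // Frozen locations accept all reads; nothing gets popped.
            return;
        }
        // Writes and deallocations unfreeze (this ensures F2)...
        self.frozen_since = None;
        // ...and would then walk the borrow stack, as in the real `access`.
    }
}

fn main() {
    let mut stack = Stack { frozen_since: Some(23) };
    stack.access(AccessKind::Read);
    assert!(stack.is_frozen()); // a read leaves the freeze intact
    stack.access(AccessKind::Write);
    assert!(!stack.is_frozen()); // a write unfreezes
}
```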
@@ -247,26 +255,41 @@ impl<'tcx> Stack {
                     )))
                 }
                 (BorStackItem::Uniq(itm_t), Borrow::Uniq(bor_t)) if itm_t == bor_t => {
-                    // Found matching unique item.
-                    return Ok(())
+                    // Found matching unique item. Continue after the match.
                 }
-                (BorStackItem::Shr, _) if !is_write => {
+                (BorStackItem::Shr, _) if kind == AccessKind::Read => {
                     // When reading, everything can use a shared item!
                     // We do not want to do this when writing: Writing to an `&mut`
                     // should reaffirm its exclusivity (i.e., make sure it is
-                    // on top of the stack).
-                    return Ok(())
+                    // on top of the stack). Continue after the match.
                 }
                 (BorStackItem::Shr, Borrow::Shr(_)) => {
-                    // Found matching shared item.
-                    return Ok(())
+                    // Found matching shared item. Continue after the match.
                 }
                 _ => {
-                    // Pop this. This ensures U2.
+                    // Pop this, go on. This ensures U2.
                     let itm = self.borrows.pop().unwrap();
                     trace!("access: Popping {:?}", itm);
+                    continue
+                }
+            }
+            // If we got here, we found a matching item. Congratulations!
+            // However, we are not done yet: If this access is deallocating, we must make sure
+            // there are no active barriers remaining on the stack.
+            if kind == AccessKind::Dealloc {
+                for &itm in self.borrows.iter().rev() {
+                    match itm {
+                        BorStackItem::FnBarrier(call) if barrier_tracking.is_active(call) => {
+                            return err!(MachineError(format!(
+                                "Deallocating with active barrier ({})", call
+                            )))
+                        }
+                        _ => {},
+                    }
                 }
             }
+            // NOW we are done.
+            return Ok(())
         }
         // If we got here, we did not find our item.
         err!(MachineError(format!(
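The new deallocation check in isolation, as a self-contained sketch with simplified stand-ins (`Item`, `is_active`) for Miri's `BorStackItem` and `BarrierTracking`: the whole stack is scanned top-down, and the deallocation is rejected if any active function barrier remains.

```rust
/// Simplified stand-in for `BorStackItem`; only barriers matter here.
#[derive(Copy, Clone, Debug)]
enum Item {
    FnBarrier(u64), // call id
    Other,
}

/// Scan the whole stack top-down; an active barrier forbids deallocation.
fn check_dealloc(stack: &[Item], is_active: impl Fn(u64) -> bool) -> Result<(), String> {
    for &itm in stack.iter().rev() {
        if let Item::FnBarrier(call) = itm {
            if is_active(call) {
                return Err(format!("Deallocating with active barrier ({})", call));
            }
        }
    }
    Ok(())
}

fn main() {
    let stack = [Item::Other, Item::FnBarrier(1), Item::Other];
    // While call 1 is still active, deallocation must fail...
    assert!(check_dealloc(&stack, |call| call == 1).is_err());
    // ...and once no barrier is active anymore, it succeeds.
    assert!(check_dealloc(&stack, |_| false).is_ok());
}
```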
@@ -352,18 +375,16 @@ impl<'tcx> Stacks {
         &self,
         ptr: Pointer<Borrow>,
         size: Size,
-        is_write: bool,
-        barrier_tracking: &BarrierTracking,
+        kind: AccessKind,
     ) -> EvalResult<'tcx> {
-        trace!("{} access of tag {:?}: {:?}, size {}",
-            if is_write { "read" } else { "write" },
-            ptr.tag, ptr, size.bytes());
+        trace!("{:?} access of tag {:?}: {:?}, size {}", kind, ptr.tag, ptr, size.bytes());
         // Even reads can have a side-effect, by invalidating other references.
         // This is fundamentally necessary since `&mut` asserts that there
         // are no accesses through other references, not even reads.
+        let barrier_tracking = self.barrier_tracking.borrow();
         let mut stacks = self.stacks.borrow_mut();
         for stack in stacks.iter_mut(ptr.offset, size) {
-            stack.access(ptr.tag, kind, &*barrier_tracking)?;
+            stack.access(ptr.tag, kind, &*barrier_tracking)?;
         }
         Ok(())
     }
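The `let barrier_tracking = self.barrier_tracking.borrow();` line uses a common `RefCell` pattern; here is a minimal sketch (simplified names, not Miri's types) of holding the `Ref` guard across the loop and reborrowing it as a plain `&T` via `&*`:

```rust
use std::cell::RefCell;

struct Tracking(Vec<u64>); // stand-in for `BarrierTracking`

impl Tracking {
    fn is_active(&self, call: u64) -> bool {
        self.0.contains(&call)
    }
}

struct Stacks {
    barrier_tracking: RefCell<Tracking>,
}

/// Stand-in for `Stack::access`, which wants an ordinary `&Tracking`.
fn check(tracking: &Tracking, call: u64) {
    println!("call {} active: {}", call, tracking.is_active(call));
}

impl Stacks {
    fn access_all(&self, calls: &[u64]) {
        // Borrow once, keep the `Ref` guard alive for the whole loop...
        let barrier_tracking = self.barrier_tracking.borrow();
        for &call in calls {
            // ...and reborrow it as a plain `&Tracking` with `&*`.
            check(&*barrier_tracking, call);
        }
        // The guard drops here, releasing the dynamic borrow.
    }
}

fn main() {
    let stacks = Stacks { barrier_tracking: RefCell::new(Tracking(vec![1])) };
    stacks.access_all(&[1, 2]);
}
```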
@@ -377,16 +398,24 @@ impl<'tcx> Stacks {
         mut barrier: Option<CallId>,
         new_bor: Borrow,
         new_kind: RefKind,
-        barrier_tracking: &BarrierTracking,
     ) -> EvalResult<'tcx> {
         assert_eq!(new_bor.is_unique(), new_kind == RefKind::Unique);
         trace!("reborrow for tag {:?} to {:?} as {:?}: {:?}, size {}",
             ptr.tag, new_bor, new_kind, ptr, size.bytes());
         if new_kind == RefKind::Raw {
             // No barrier for raw, including `&UnsafeCell`. They can rightfully
             // alias with `&mut`.
+            // FIXME: This means that the `dereferencable` attribute on non-frozen shared
+            // references is incorrect! They are dereferencable when the function is
+            // called, but might become non-dereferencable during the course of execution.
+            // Also see [1], [2].
+            //
+            // [1]: <https://internals.rust-lang.org/t/
+            //      is-it-possible-to-be-memory-safe-with-deallocated-self/8457/8>,
+            // [2]: <https://lists.llvm.org/pipermail/llvm-dev/2018-July/124555.html>
             barrier = None;
         }
+        let barrier_tracking = self.barrier_tracking.borrow();
         let mut stacks = self.stacks.borrow_mut();
         for stack in stacks.iter_mut(ptr.offset, size) {
             // Access source `ptr`, create new ref.
@@ -410,7 +439,12 @@ impl<'tcx> Stacks {
                 continue;
             }
             // We need to do some actual work.
-            stack.access(ptr.tag, new_kind == RefKind::Unique, barrier_tracking)?;
+            let access_kind = if new_kind == RefKind::Unique {
+                AccessKind::Write
+            } else {
+                AccessKind::Read
+            };
+            stack.access(ptr.tag, access_kind, &*barrier_tracking)?;
             if let Some(call) = barrier {
                 stack.barrier(call);
             }
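The `RefKind`-to-`AccessKind` mapping above in compile-checked form, with simplified local enums standing in for Miri's: creating a unique reference asserts exclusivity and so counts as a write, while every other reborrow counts only as a read.

```rust
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum RefKind { Unique, Frozen, Raw }

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum AccessKind { Read, Write }

/// A unique reborrow behaves like a write; frozen and raw reborrows
/// only assert readability.
fn access_kind_for(new_kind: RefKind) -> AccessKind {
    if new_kind == RefKind::Unique {
        AccessKind::Write
    } else {
        AccessKind::Read
    }
}

fn main() {
    assert_eq!(access_kind_for(RefKind::Unique), AccessKind::Write);
    assert_eq!(access_kind_for(RefKind::Frozen), AccessKind::Read);
    assert_eq!(access_kind_for(RefKind::Raw), AccessKind::Read);
}
```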
@@ -440,7 +474,7 @@ impl AllocationExtra<Borrow, MemoryState> for Stacks {
         ptr: Pointer<Borrow>,
         size: Size,
     ) -> EvalResult<'tcx> {
-        alloc.extra.access(ptr, size, /*is_write*/ false, &*alloc.extra.barrier_tracking.borrow())
+        alloc.extra.access(ptr, size, AccessKind::Read)
     }

     #[inline(always)]
@@ -449,7 +483,7 @@ impl AllocationExtra<Borrow, MemoryState> for Stacks {
         ptr: Pointer<Borrow>,
         size: Size,
     ) -> EvalResult<'tcx> {
-        alloc.extra.access(ptr, size, /*is_write*/ true, &*alloc.extra.barrier_tracking.borrow())
+        alloc.extra.access(ptr, size, AccessKind::Write)
     }

     #[inline(always)]
@@ -458,9 +492,7 @@ impl AllocationExtra<Borrow, MemoryState> for Stacks {
         ptr: Pointer<Borrow>,
         size: Size,
     ) -> EvalResult<'tcx> {
-        // This is like mutating
-        alloc.extra.access(ptr, size, /*is_write*/ true, &*alloc.extra.barrier_tracking.borrow())
-        // FIXME: Error out of there are any barriers?
+        alloc.extra.access(ptr, size, AccessKind::Dealloc)
     }
 }

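Taken together, the three hooks now differ only in the `AccessKind` they pass, so the deallocation-specific barrier check lives in one place (`Stack::access`). A schematic sketch of that shape (free functions with simplified signatures, not the real `AllocationExtra` trait):

```rust
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum AccessKind { Read, Write, Dealloc }

// One shared entry point; kind-specific checks (like the barrier scan on
// `Dealloc`) can live here instead of in each hook.
fn access(kind: AccessKind) -> Result<(), String> {
    println!("checking a {:?} access", kind);
    Ok(())
}

fn memory_read() -> Result<(), String> { access(AccessKind::Read) }
fn memory_written() -> Result<(), String> { access(AccessKind::Write) }
fn memory_deallocated() -> Result<(), String> { access(AccessKind::Dealloc) }

fn main() {
    memory_read().unwrap();
    memory_written().unwrap();
    memory_deallocated().unwrap();
}
```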
@@ -627,12 +659,12 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for MiriEvalContext<'a, 'mir, 'tcx> {
             // Reference that cares about freezing. We need a frozen-sensitive reborrow.
             self.visit_freeze_sensitive(place, size, |cur_ptr, size, frozen| {
                 let kind = if frozen { RefKind::Frozen } else { RefKind::Raw };
-                alloc.extra.reborrow(cur_ptr, size, barrier, new_bor, kind, &*self.memory().extra.borrow())
+                alloc.extra.reborrow(cur_ptr, size, barrier, new_bor, kind)
             })?;
         } else {
             // Just treat this as one big chunk.
             let kind = if new_bor.is_unique() { RefKind::Unique } else { RefKind::Raw };
-            alloc.extra.reborrow(ptr, size, barrier, new_bor, kind, &*self.memory().extra.borrow())?;
+            alloc.extra.reborrow(ptr, size, barrier, new_bor, kind)?;
         }
         Ok(new_ptr)
     }