@@ -481,16 +481,14 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: MiriEvalContextExt<'mir, 'tcx> {
         // the *value* (including the associated provenance if this is an AtomicPtr) at this location.
         // Only metadata on the location itself is used.
         let scalar = this.allow_data_races_ref(move |this| this.read_scalar(&place.into()))?;
-        if atomic == AtomicReadOp::SeqCst {
-            if let Some(global) = &this.memory.extra.data_race {
-                global.sc_read();
-            }
-        }

         if let Some(global) = &this.memory.extra.data_race {
             let (alloc_id, base_offset, ..) = this.memory.ptr_get_alloc(place.ptr)?;
             if let Some(alloc_buffers) = this.memory.get_alloc_extra(alloc_id)?.weak_memory.as_ref()
             {
+                if atomic == AtomicReadOp::SeqCst {
+                    global.sc_read();
+                }
                 let mut rng = this.memory.extra.rng.borrow_mut();
                 let loaded = alloc_buffers.buffered_read(
                     alloc_range(base_offset, place.layout.size),
@@ -518,19 +516,16 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: MiriEvalContextExt<'mir, 'tcx> {
         let this = self.eval_context_mut();
         this.allow_data_races_mut(move |this| this.write_scalar(val, &(*dest).into()))?;

-        if atomic == AtomicWriteOp::SeqCst {
-            if let Some(global) = this.memory.extra.data_race.as_ref() {
-                global.sc_write();
-            }
-        }
-
         this.validate_atomic_store(dest, atomic)?;
         let (alloc_id, base_offset, ..) = this.memory.ptr_get_alloc(dest.ptr)?;
         if let (
             crate::AllocExtra { weak_memory: Some(alloc_buffers), .. },
             crate::MemoryExtra { data_race: Some(global), .. },
         ) = this.memory.get_alloc_extra_mut(alloc_id)?
         {
+            if atomic == AtomicWriteOp::SeqCst {
+                global.sc_write();
+            }
             let size = dest.layout.size;
             alloc_buffers.buffered_write(
                 val,
@@ -561,12 +556,6 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: MiriEvalContextExt<'mir, 'tcx> {
         let val = if neg { this.unary_op(mir::UnOp::Not, &val)? } else { val };
         this.allow_data_races_mut(|this| this.write_immediate(*val, &(*place).into()))?;

-        if atomic == AtomicRwOp::SeqCst {
-            if let Some(global) = &this.memory.extra.data_race {
-                global.sc_read();
-                global.sc_write();
-            }
-        }
         this.validate_atomic_rmw(place, atomic)?;

         this.buffered_atomic_rmw(val.to_scalar_or_uninit(), place, atomic)?;
@@ -585,12 +574,7 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: MiriEvalContextExt<'mir, 'tcx> {

         let old = this.allow_data_races_mut(|this| this.read_scalar(&place.into()))?;
         this.allow_data_races_mut(|this| this.write_scalar(new, &(*place).into()))?;
-        if atomic == AtomicRwOp::SeqCst {
-            if let Some(global) = &this.memory.extra.data_race {
-                global.sc_read();
-                global.sc_write();
-            }
-        }
+
         this.validate_atomic_rmw(place, atomic)?;

         this.buffered_atomic_rmw(new, place, atomic)?;
@@ -619,12 +603,6 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: MiriEvalContextExt<'mir, 'tcx> {

         this.allow_data_races_mut(|this| this.write_immediate(**new_val, &(*place).into()))?;

-        if atomic == AtomicRwOp::SeqCst {
-            if let Some(global) = &this.memory.extra.data_race {
-                global.sc_read();
-                global.sc_write();
-            }
-        }
         this.validate_atomic_rmw(&place, atomic)?;

         this.buffered_atomic_rmw(new_val.to_scalar_or_uninit(), place, atomic)?;
@@ -673,27 +651,18 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: MiriEvalContextExt<'mir, 'tcx> {
         // otherwise treat this as an atomic load with the fail ordering.
         if cmpxchg_success {
             this.allow_data_races_mut(|this| this.write_scalar(new, &(*place).into()))?;
-            if success == AtomicRwOp::SeqCst {
-                if let Some(global) = &this.memory.extra.data_race {
-                    global.sc_read();
-                    global.sc_write();
-                }
-            }
             this.validate_atomic_rmw(place, success)?;
             this.buffered_atomic_rmw(new, place, success)?;
         } else {
-            if fail == AtomicReadOp::SeqCst {
-                if let Some(global) = &this.memory.extra.data_race {
-                    global.sc_read();
-                }
-            }
-
             this.validate_atomic_load(place, fail)?;
             // A failed compare exchange is equivalent to a load, reading from the latest store
             // in the modification order.
             // Since `old` is only a value and not the store element, we need to separately
             // find it in our store buffer and perform load_impl on it.
             if let Some(global) = &this.memory.extra.data_race {
+                if fail == AtomicReadOp::SeqCst {
+                    global.sc_read();
+                }
                 let size = place.layout.size;
                 let (alloc_id, base_offset, ..) = this.memory.ptr_get_alloc(place.ptr)?;
                 if let Some(alloc_buffers) =
@@ -723,6 +692,10 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: MiriEvalContextExt<'mir, 'tcx> {
             crate::MemoryExtra { data_race: Some(global), .. },
         ) = this.memory.get_alloc_extra_mut(alloc_id)?
         {
+            if atomic == AtomicRwOp::SeqCst {
+                global.sc_read();
+                global.sc_write();
+            }
             let size = place.layout.size;
             let range = alloc_range(base_offset, size);
             alloc_buffers.read_from_last_store(range, global);