@@ -75,7 +75,7 @@ use rustc_target::abi::Size;
 
 use crate::{
     ImmTy, Immediate, InterpResult, MPlaceTy, MemPlaceMeta, MiriEvalContext, MiriEvalContextExt,
-    OpTy, Pointer, RangeMap, ScalarMaybeUninit, Tag, ThreadId, VClock, VTimestamp,
+    OpTy, Pointer, RangeMap, Scalar, ScalarMaybeUninit, Tag, ThreadId, VClock, VTimestamp,
     VectorIdx, MemoryKind, MiriMemoryKind
 };
 
@@ -544,31 +544,42 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: MiriEvalContextExt<'mir, 'tcx> {
 
     /// Perform an atomic compare and exchange at a given memory location.
     /// On success an atomic RMW operation is performed and on failure
-    /// only an atomic read occurs.
+    /// only an atomic read occurs. If `can_fail_spuriously` is true,
+    /// then we treat it as a "compare_exchange_weak" operation, and
+    /// some portion of the time fail even when the values are actually
+    /// identical.
     fn atomic_compare_exchange_scalar(
         &mut self,
         place: MPlaceTy<'tcx, Tag>,
         expect_old: ImmTy<'tcx, Tag>,
         new: ScalarMaybeUninit<Tag>,
         success: AtomicRwOp,
         fail: AtomicReadOp,
+        can_fail_spuriously: bool,
     ) -> InterpResult<'tcx, Immediate<Tag>> {
+        use rand::Rng as _;
         let this = self.eval_context_mut();
 
         // Failure ordering cannot be stronger than success ordering, therefore first attempt
         // to read with the failure ordering and if successful then try again with the success
         // read ordering and write in the success case.
         // Read as immediate for the sake of `binary_op()`
        let old = this.allow_data_races_mut(|this| this.read_immediate(place.into()))?;
-
         // `binary_op` will bail if either of them is not a scalar.
         let eq = this.overflowing_binary_op(mir::BinOp::Eq, old, expect_old)?.0;
-        let res = Immediate::ScalarPair(old.to_scalar_or_uninit(), eq.into());
+        // If the operation would succeed, but is "weak", fail 50% of the time.
+        // FIXME: this is quite arbitrary.
+        let cmpxchg_success = eq.to_bool()?
+            && (!can_fail_spuriously || this.memory.extra.rng.borrow_mut().gen_range(0, 2) == 0);
+        let res = Immediate::ScalarPair(
+            old.to_scalar_or_uninit(),
+            Scalar::from_bool(cmpxchg_success).into(),
+        );
 
         // Update ptr depending on comparison.
         // if successful, perform a full rw-atomic validation
         // otherwise treat this as an atomic load with the fail ordering.
-        if eq.to_bool()? {
+        if cmpxchg_success {
            this.allow_data_races_mut(|this| this.write_scalar(new, place.into()))?;
            this.validate_atomic_rmw(place, success)?;
        } else {
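For context (not part of the patch): a minimal sketch of the caller-side pattern this change exercises. Since Miri now fails a random half of otherwise-successful weak exchanges, a `compare_exchange_weak` must sit inside a retry loop. The function name `increment` and the orderings chosen here are illustrative assumptions, not taken from the diff.

use std::sync::atomic::{AtomicUsize, Ordering};

// Increment `counter` with a CAS retry loop. `compare_exchange_weak` may fail
// spuriously (the behavior modeled above), so on failure we retry with the
// freshly observed value returned in the `Err` variant.
fn increment(counter: &AtomicUsize) {
    let mut current = counter.load(Ordering::Relaxed);
    loop {
        match counter.compare_exchange_weak(
            current,
            current + 1,
            Ordering::AcqRel,
            Ordering::Relaxed,
        ) {
            Ok(_) => return,
            // `Err` carries the value currently stored, whether the failure
            // was a real mismatch or a spurious one.
            Err(observed) => current = observed,
        }
    }
}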