@@ -101,6 +101,8 @@ pub struct Allocation<Prov: Provenance = CtfeProvenance, Extra = (), Bytes = Box
     /// at the given offset.
     provenance: ProvenanceMap<Prov>,
     /// Denotes which part of this allocation is initialized.
+    ///
+    /// Invariant: the uninitialized parts are 0 (in `bytes`) and have no provenance.
     init_mask: InitMask,
     /// The alignment of the allocation to detect unaligned reads.
     /// (`Align` guarantees that this is a power of two.)
@@ -792,28 +794,25 @@ impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes>
     /// Write "uninit" to the given memory range.
     pub fn write_uninit(&mut self, cx: &impl HasDataLayout, range: AllocRange) -> AllocResult {
         self.mark_init(range, false);
+        // Restore the invariant that the uninit parts are 0 and have no provenance.
+        self.bytes[range.start.bytes_usize()..range.end().bytes_usize()].fill(0);
         self.provenance.clear(range, cx)?;
         Ok(())
     }
 
-    /// Initialize all previously uninitialized bytes in the entire allocation, and set
-    /// provenance of everything to `Wildcard`. Before calling this, make sure all
-    /// provenance in this allocation is exposed!
-    pub fn prepare_for_native_access(&mut self) {
-        let full_range = AllocRange { start: Size::ZERO, size: Size::from_bytes(self.len()) };
-        // Overwrite uninitialized bytes with 0, to ensure we don't leak whatever their value happens to be.
-        for chunk in self.init_mask.range_as_init_chunks(full_range) {
-            if !chunk.is_init() {
-                let uninit_bytes = &mut self.bytes
-                    [chunk.range().start.bytes_usize()..chunk.range().end.bytes_usize()];
-                uninit_bytes.fill(0);
-            }
-        }
-        // Mark everything as initialized now.
-        self.mark_init(full_range, true);
-
-        // Set provenance of all bytes to wildcard.
-        self.provenance.write_wildcards(self.len());
+    /// Mark all bytes in the given range as initialised and reset the provenance
+    /// to wildcards. This entirely breaks the normal mechanisms for tracking
+    /// initialisation and is only provided for Miri operating in native-lib
+    /// mode. UB will be missed if the underlying bytes were not actually written to.
+    ///
+    /// If `range` is `None`, defaults to performing this on the whole allocation.
+    pub fn process_native_write(&mut self, cx: &impl HasDataLayout, range: Option<AllocRange>) {
+        let range = range.unwrap_or_else(|| AllocRange {
+            start: Size::ZERO,
+            size: Size::from_bytes(self.len()),
+        });
+        self.mark_init(range, true);
+        self.provenance.write_wildcards(cx, range);
     }
 
     /// Remove all provenance in the given memory range.
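
Note (not part of the patch): the hunks above document the invariant that uninitialized bytes are 0 and carry no provenance, and replace `prepare_for_native_access` with the range-based `process_native_write`. As a rough, standalone illustration of that behaviour — using a hypothetical `ToyAllocation` with plain `Vec`s as stand-ins for rustc's `InitMask` and `ProvenanceMap`, not the real types — a minimal sketch might look like this:

// Toy model of the invariant: uninit bytes are 0 and have no provenance;
// a native write marks a range initialized with "wildcard" provenance.
struct ToyAllocation {
    bytes: Vec<u8>,
    init: Vec<bool>,              // stand-in for `InitMask`
    provenance: Vec<Option<u64>>, // stand-in for `ProvenanceMap`
}

impl ToyAllocation {
    fn write_uninit(&mut self, range: std::ops::Range<usize>) {
        for i in range {
            self.init[i] = false;
            // Restore the invariant: uninit bytes are 0 and carry no provenance.
            self.bytes[i] = 0;
            self.provenance[i] = None;
        }
    }

    // Analogue of `process_native_write`: after a native library may have written
    // arbitrary data, mark the range initialized with wildcard provenance.
    fn process_native_write(&mut self, range: Option<std::ops::Range<usize>>) {
        let range = range.unwrap_or(0..self.bytes.len());
        for i in range {
            self.init[i] = true;
            self.provenance[i] = Some(u64::MAX); // wildcard stand-in
        }
    }
}

fn main() {
    let mut alloc = ToyAllocation {
        bytes: vec![0xAB; 8],
        init: vec![true; 8],
        provenance: vec![None; 8],
    };
    alloc.write_uninit(2..6);
    assert!(alloc.bytes[2..6].iter().all(|&b| b == 0));
    alloc.process_native_write(None);
    assert!(alloc.init.iter().all(|&b| b));
}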