Skip to content

Commit 567249d

Browse files
committed
Auto merge of #104591 - Manishearth:rollup-b3ser4e, r=Manishearth
Rollup of 8 pull requests

Successful merges:
- #102977 (remove HRTB from `[T]::is_sorted_by{,_key}`)
- #103378 (Fix mod_inv termination for the last iteration)
- #103456 (`unchecked_{shl|shr}` should use `u32` as the RHS)
- #103701 (Simplify some pointer method implementations)
- #104047 (Diagnostics `icu4x` based list formatting.)
- #104338 (Enforce that `dyn*` coercions are actually pointer-sized)
- #104498 (Edit docs for `rustc_errors::Handler::stash_diagnostic`)
- #104556 (rustdoc: use `code-header` class to format enum variants)

Failed merges: r? `@ghost` `@rustbot` modify labels: rollup
2 parents db5d177 + 6c314a2 commit 567249d

File tree

10 files changed

+96
-87
lines changed

10 files changed

+96
-87
lines changed

core/src/lib.rs

Lines changed: 2 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -131,6 +131,7 @@
131131
#![feature(const_pin)]
132132
#![feature(const_ptr_sub_ptr)]
133133
#![feature(const_replace)]
134+
#![feature(const_result_drop)]
134135
#![feature(const_ptr_as_ref)]
135136
#![feature(const_ptr_is_null)]
136137
#![feature(const_ptr_read)]
@@ -155,6 +156,7 @@
155156
#![feature(maybe_uninit_uninit_array)]
156157
#![feature(ptr_alignment_type)]
157158
#![feature(ptr_metadata)]
159+
#![feature(set_ptr_value)]
158160
#![feature(slice_ptr_get)]
159161
#![feature(slice_split_at_unchecked)]
160162
#![feature(str_internals)]

core/src/marker.rs

Lines changed: 9 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -809,6 +809,15 @@ pub trait Destruct {}
809809
#[cfg_attr(not(bootstrap), rustc_deny_explicit_impl)]
810810
pub trait Tuple {}
811811

812+
/// A marker for things
813+
#[unstable(feature = "pointer_sized_trait", issue = "none")]
814+
#[cfg_attr(not(bootstrap), lang = "pointer_sized")]
815+
#[rustc_on_unimplemented(
816+
message = "`{Self}` needs to be a pointer-sized type",
817+
label = "`{Self}` needs to be a pointer-sized type"
818+
)]
819+
pub trait PointerSized {}
820+
812821
/// Implementations of `Copy` for primitive types.
813822
///
814823
/// Implementations that cannot be described in Rust

core/src/num/int_macros.rs

Lines changed: 10 additions & 6 deletions
Original file line number | Diff line number | Diff line change
@@ -761,10 +761,11 @@ macro_rules! int_impl {
761761
#[rustc_const_unstable(feature = "const_inherent_unchecked_arith", issue = "85122")]
762762
#[inline(always)]
763763
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
764-
pub const unsafe fn unchecked_shl(self, rhs: Self) -> Self {
764+
pub const unsafe fn unchecked_shl(self, rhs: u32) -> Self {
765765
// SAFETY: the caller must uphold the safety contract for
766766
// `unchecked_shl`.
767-
unsafe { intrinsics::unchecked_shl(self, rhs) }
767+
// Any legal shift amount is losslessly representable in the self type.
768+
unsafe { intrinsics::unchecked_shl(self, rhs.try_into().ok().unwrap_unchecked()) }
768769
}
769770

770771
/// Checked shift right. Computes `self >> rhs`, returning `None` if `rhs` is
@@ -808,10 +809,11 @@ macro_rules! int_impl {
808809
#[rustc_const_unstable(feature = "const_inherent_unchecked_arith", issue = "85122")]
809810
#[inline(always)]
810811
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
811-
pub const unsafe fn unchecked_shr(self, rhs: Self) -> Self {
812+
pub const unsafe fn unchecked_shr(self, rhs: u32) -> Self {
812813
// SAFETY: the caller must uphold the safety contract for
813814
// `unchecked_shr`.
814-
unsafe { intrinsics::unchecked_shr(self, rhs) }
815+
// Any legal shift amount is losslessly representable in the self type.
816+
unsafe { intrinsics::unchecked_shr(self, rhs.try_into().ok().unwrap_unchecked()) }
815817
}
816818

817819
/// Checked absolute value. Computes `self.abs()`, returning `None` if
@@ -1358,11 +1360,12 @@ macro_rules! int_impl {
13581360
#[must_use = "this returns the result of the operation, \
13591361
without modifying the original"]
13601362
#[inline(always)]
1363+
#[rustc_allow_const_fn_unstable(const_inherent_unchecked_arith)]
13611364
pub const fn wrapping_shl(self, rhs: u32) -> Self {
13621365
// SAFETY: the masking by the bitsize of the type ensures that we do not shift
13631366
// out of bounds
13641367
unsafe {
1365-
intrinsics::unchecked_shl(self, (rhs & ($BITS - 1)) as $SelfT)
1368+
self.unchecked_shl(rhs & ($BITS - 1))
13661369
}
13671370
}
13681371

@@ -1387,11 +1390,12 @@ macro_rules! int_impl {
13871390
#[must_use = "this returns the result of the operation, \
13881391
without modifying the original"]
13891392
#[inline(always)]
1393+
#[rustc_allow_const_fn_unstable(const_inherent_unchecked_arith)]
13901394
pub const fn wrapping_shr(self, rhs: u32) -> Self {
13911395
// SAFETY: the masking by the bitsize of the type ensures that we do not shift
13921396
// out of bounds
13931397
unsafe {
1394-
intrinsics::unchecked_shr(self, (rhs & ($BITS - 1)) as $SelfT)
1398+
self.unchecked_shr(rhs & ($BITS - 1))
13951399
}
13961400
}
13971401

core/src/num/mod.rs

Lines changed: 1 addition & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -3,6 +3,7 @@
33
#![stable(feature = "rust1", since = "1.0.0")]
44

55
use crate::ascii;
6+
use crate::convert::TryInto;
67
use crate::error::Error;
78
use crate::intrinsics;
89
use crate::mem;

core/src/num/uint_macros.rs

Lines changed: 10 additions & 6 deletions
Original file line number | Diff line number | Diff line change
@@ -908,10 +908,11 @@ macro_rules! uint_impl {
908908
#[rustc_const_unstable(feature = "const_inherent_unchecked_arith", issue = "85122")]
909909
#[inline(always)]
910910
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
911-
pub const unsafe fn unchecked_shl(self, rhs: Self) -> Self {
911+
pub const unsafe fn unchecked_shl(self, rhs: u32) -> Self {
912912
// SAFETY: the caller must uphold the safety contract for
913913
// `unchecked_shl`.
914-
unsafe { intrinsics::unchecked_shl(self, rhs) }
914+
// Any legal shift amount is losslessly representable in the self type.
915+
unsafe { intrinsics::unchecked_shl(self, rhs.try_into().ok().unwrap_unchecked()) }
915916
}
916917

917918
/// Checked shift right. Computes `self >> rhs`, returning `None`
@@ -955,10 +956,11 @@ macro_rules! uint_impl {
955956
#[rustc_const_unstable(feature = "const_inherent_unchecked_arith", issue = "85122")]
956957
#[inline(always)]
957958
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
958-
pub const unsafe fn unchecked_shr(self, rhs: Self) -> Self {
959+
pub const unsafe fn unchecked_shr(self, rhs: u32) -> Self {
959960
// SAFETY: the caller must uphold the safety contract for
960961
// `unchecked_shr`.
961-
unsafe { intrinsics::unchecked_shr(self, rhs) }
962+
// Any legal shift amount is losslessly representable in the self type.
963+
unsafe { intrinsics::unchecked_shr(self, rhs.try_into().ok().unwrap_unchecked()) }
962964
}
963965

964966
/// Checked exponentiation. Computes `self.pow(exp)`, returning `None` if
@@ -1374,11 +1376,12 @@ macro_rules! uint_impl {
13741376
#[must_use = "this returns the result of the operation, \
13751377
without modifying the original"]
13761378
#[inline(always)]
1379+
#[rustc_allow_const_fn_unstable(const_inherent_unchecked_arith)]
13771380
pub const fn wrapping_shl(self, rhs: u32) -> Self {
13781381
// SAFETY: the masking by the bitsize of the type ensures that we do not shift
13791382
// out of bounds
13801383
unsafe {
1381-
intrinsics::unchecked_shl(self, (rhs & ($BITS - 1)) as $SelfT)
1384+
self.unchecked_shl(rhs & ($BITS - 1))
13821385
}
13831386
}
13841387

@@ -1406,11 +1409,12 @@ macro_rules! uint_impl {
14061409
#[must_use = "this returns the result of the operation, \
14071410
without modifying the original"]
14081411
#[inline(always)]
1412+
#[rustc_allow_const_fn_unstable(const_inherent_unchecked_arith)]
14091413
pub const fn wrapping_shr(self, rhs: u32) -> Self {
14101414
// SAFETY: the masking by the bitsize of the type ensures that we do not shift
14111415
// out of bounds
14121416
unsafe {
1413-
intrinsics::unchecked_shr(self, (rhs & ($BITS - 1)) as $SelfT)
1417+
self.unchecked_shr(rhs & ($BITS - 1))
14141418
}
14151419
}
14161420

core/src/ptr/const_ptr.rs

Lines changed: 10 additions & 19 deletions
Original file line number | Diff line number | Diff line change
@@ -79,19 +79,14 @@ impl<T: ?Sized> *const T {
7979
/// }
8080
/// ```
8181
#[unstable(feature = "set_ptr_value", issue = "75091")]
82+
#[rustc_const_unstable(feature = "set_ptr_value", issue = "75091")]
8283
#[must_use = "returns a new pointer rather than modifying its argument"]
8384
#[inline]
84-
pub fn with_metadata_of<U>(self, mut val: *const U) -> *const U
85+
pub const fn with_metadata_of<U>(self, meta: *const U) -> *const U
8586
where
8687
U: ?Sized,
8788
{
88-
let target = &mut val as *mut *const U as *mut *const u8;
89-
// SAFETY: In case of a thin pointer, this operations is identical
90-
// to a simple assignment. In case of a fat pointer, with the current
91-
// fat pointer layout implementation, the first field of such a
92-
// pointer is always the data pointer, which is likewise assigned.
93-
unsafe { *target = self as *const u8 };
94-
val
89+
from_raw_parts::<U>(self as *const (), metadata(meta))
9590
}
9691

9792
/// Changes constness without changing the type.
@@ -478,8 +473,7 @@ impl<T: ?Sized> *const T {
478473
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
479474
pub const unsafe fn byte_offset(self, count: isize) -> Self {
480475
// SAFETY: the caller must uphold the safety contract for `offset`.
481-
let this = unsafe { self.cast::<u8>().offset(count).cast::<()>() };
482-
from_raw_parts::<T>(this, metadata(self))
476+
unsafe { self.cast::<u8>().offset(count).with_metadata_of(self) }
483477
}
484478

485479
/// Calculates the offset from a pointer using wrapping arithmetic.
@@ -559,7 +553,7 @@ impl<T: ?Sized> *const T {
559553
#[unstable(feature = "pointer_byte_offsets", issue = "96283")]
560554
#[rustc_const_unstable(feature = "const_pointer_byte_offsets", issue = "96283")]
561555
pub const fn wrapping_byte_offset(self, count: isize) -> Self {
562-
from_raw_parts::<T>(self.cast::<u8>().wrapping_offset(count).cast::<()>(), metadata(self))
556+
self.cast::<u8>().wrapping_offset(count).with_metadata_of(self)
563557
}
564558

565559
/// Masks out bits of the pointer according to a mask.
@@ -597,8 +591,7 @@ impl<T: ?Sized> *const T {
597591
#[must_use = "returns a new pointer rather than modifying its argument"]
598592
#[inline(always)]
599593
pub fn mask(self, mask: usize) -> *const T {
600-
let this = intrinsics::ptr_mask(self.cast::<()>(), mask);
601-
from_raw_parts::<T>(this, metadata(self))
594+
intrinsics::ptr_mask(self.cast::<()>(), mask).with_metadata_of(self)
602595
}
603596

604597
/// Calculates the distance between two pointers. The returned value is in
@@ -939,8 +932,7 @@ impl<T: ?Sized> *const T {
939932
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
940933
pub const unsafe fn byte_add(self, count: usize) -> Self {
941934
// SAFETY: the caller must uphold the safety contract for `add`.
942-
let this = unsafe { self.cast::<u8>().add(count).cast::<()>() };
943-
from_raw_parts::<T>(this, metadata(self))
935+
unsafe { self.cast::<u8>().add(count).with_metadata_of(self) }
944936
}
945937

946938
/// Calculates the offset from a pointer (convenience for
@@ -1026,8 +1018,7 @@ impl<T: ?Sized> *const T {
10261018
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
10271019
pub const unsafe fn byte_sub(self, count: usize) -> Self {
10281020
// SAFETY: the caller must uphold the safety contract for `sub`.
1029-
let this = unsafe { self.cast::<u8>().sub(count).cast::<()>() };
1030-
from_raw_parts::<T>(this, metadata(self))
1021+
unsafe { self.cast::<u8>().sub(count).with_metadata_of(self) }
10311022
}
10321023

10331024
/// Calculates the offset from a pointer using wrapping arithmetic.
@@ -1107,7 +1098,7 @@ impl<T: ?Sized> *const T {
11071098
#[unstable(feature = "pointer_byte_offsets", issue = "96283")]
11081099
#[rustc_const_unstable(feature = "const_pointer_byte_offsets", issue = "96283")]
11091100
pub const fn wrapping_byte_add(self, count: usize) -> Self {
1110-
from_raw_parts::<T>(self.cast::<u8>().wrapping_add(count).cast::<()>(), metadata(self))
1101+
self.cast::<u8>().wrapping_add(count).with_metadata_of(self)
11111102
}
11121103

11131104
/// Calculates the offset from a pointer using wrapping arithmetic.
@@ -1187,7 +1178,7 @@ impl<T: ?Sized> *const T {
11871178
#[unstable(feature = "pointer_byte_offsets", issue = "96283")]
11881179
#[rustc_const_unstable(feature = "const_pointer_byte_offsets", issue = "96283")]
11891180
pub const fn wrapping_byte_sub(self, count: usize) -> Self {
1190-
from_raw_parts::<T>(self.cast::<u8>().wrapping_sub(count).cast::<()>(), metadata(self))
1181+
self.cast::<u8>().wrapping_sub(count).with_metadata_of(self)
11911182
}
11921183

11931184
/// Reads the value from `self` without moving it. This leaves the

core/src/ptr/mod.rs

Lines changed: 28 additions & 26 deletions
Original file line number | Diff line number | Diff line change
@@ -1591,8 +1591,8 @@ pub(crate) unsafe fn align_offset<T: Sized>(p: *const T, a: usize) -> usize {
15911591
// FIXME(#75598): Direct use of these intrinsics improves codegen significantly at opt-level <=
15921592
// 1, where the method versions of these operations are not inlined.
15931593
use intrinsics::{
1594-
cttz_nonzero, exact_div, unchecked_rem, unchecked_shl, unchecked_shr, unchecked_sub,
1595-
wrapping_add, wrapping_mul, wrapping_sub,
1594+
cttz_nonzero, exact_div, mul_with_overflow, unchecked_rem, unchecked_shl, unchecked_shr,
1595+
unchecked_sub, wrapping_add, wrapping_mul, wrapping_sub,
15961596
};
15971597

15981598
/// Calculate multiplicative modular inverse of `x` modulo `m`.
@@ -1612,36 +1612,38 @@ pub(crate) unsafe fn align_offset<T: Sized>(p: *const T, a: usize) -> usize {
16121612
const INV_TABLE_MOD_16: [u8; 8] = [1, 11, 13, 7, 9, 3, 5, 15];
16131613
/// Modulo for which the `INV_TABLE_MOD_16` is intended.
16141614
const INV_TABLE_MOD: usize = 16;
1615-
/// INV_TABLE_MOD²
1616-
const INV_TABLE_MOD_SQUARED: usize = INV_TABLE_MOD * INV_TABLE_MOD;
16171615

1618-
let table_inverse = INV_TABLE_MOD_16[(x & (INV_TABLE_MOD - 1)) >> 1] as usize;
16191616
// SAFETY: `m` is required to be a power-of-two, hence non-zero.
16201617
let m_minus_one = unsafe { unchecked_sub(m, 1) };
1621-
if m <= INV_TABLE_MOD {
1622-
table_inverse & m_minus_one
1623-
} else {
1624-
// We iterate "up" using the following formula:
1625-
//
1626-
// $$ xy ≡ 1 (mod 2ⁿ) → xy (2 - xy) ≡ 1 (mod 2²ⁿ) $$
1618+
let mut inverse = INV_TABLE_MOD_16[(x & (INV_TABLE_MOD - 1)) >> 1] as usize;
1619+
let mut mod_gate = INV_TABLE_MOD;
1620+
// We iterate "up" using the following formula:
1621+
//
1622+
// $$ xy ≡ 1 (mod 2ⁿ) → xy (2 - xy) ≡ 1 (mod 2²ⁿ) $$
1623+
//
1624+
// This application needs to be applied at least until `2²ⁿ ≥ m`, at which point we can
1625+
// finally reduce the computation to our desired `m` by taking `inverse mod m`.
1626+
//
1627+
// This computation is `O(log log m)`, which is to say, that on 64-bit machines this loop
1628+
// will always finish in at most 4 iterations.
1629+
loop {
1630+
// y = y * (2 - xy) mod n
16271631
//
1628-
// until 2²ⁿ ≥ m. Then we can reduce to our desired `m` by taking the result `mod m`.
1629-
let mut inverse = table_inverse;
1630-
let mut going_mod = INV_TABLE_MOD_SQUARED;
1631-
loop {
1632-
// y = y * (2 - xy) mod n
1633-
//
1634-
// Note, that we use wrapping operations here intentionally – the original formula
1635-
// uses e.g., subtraction `mod n`. It is entirely fine to do them `mod
1636-
// usize::MAX` instead, because we take the result `mod n` at the end
1637-
// anyway.
1638-
inverse = wrapping_mul(inverse, wrapping_sub(2usize, wrapping_mul(x, inverse)));
1639-
if going_mod >= m {
1640-
return inverse & m_minus_one;
1641-
}
1642-
going_mod = wrapping_mul(going_mod, going_mod);
1632+
// Note, that we use wrapping operations here intentionally – the original formula
1633+
// uses e.g., subtraction `mod n`. It is entirely fine to do them `mod
1634+
// usize::MAX` instead, because we take the result `mod n` at the end
1635+
// anyway.
1636+
if mod_gate >= m {
1637+
break;
1638+
}
1639+
inverse = wrapping_mul(inverse, wrapping_sub(2usize, wrapping_mul(x, inverse)));
1640+
let (new_gate, overflow) = mul_with_overflow(mod_gate, mod_gate);
1641+
if overflow {
1642+
break;
16431643
}
1644+
mod_gate = new_gate;
16441645
}
1646+
inverse & m_minus_one
16451647
}
16461648

16471649
let addr = p.addr();

0 commit comments

Comments (0)