Importing rustc-1.61.0
Test: ./build.py --lto=thin
Change-Id: I5c65158a0532cb09120797ae48d6b353492c1011
diff --git a/library/core/src/intrinsics.rs b/library/core/src/intrinsics.rs
index b522839..0744e9c 100644
--- a/library/core/src/intrinsics.rs
+++ b/library/core/src/intrinsics.rs
@@ -54,7 +54,7 @@
)]
#![allow(missing_docs)]
-use crate::marker::DiscriminantKind;
+use crate::marker::{Destruct, DiscriminantKind};
use crate::mem;
// These imports are used for simplifying intra-doc links
@@ -1168,7 +1168,7 @@
///
/// The stabilized version of this intrinsic is [`pointer::offset`].
#[must_use = "returns a new pointer rather than modifying its argument"]
- #[rustc_const_unstable(feature = "const_ptr_offset", issue = "71499")]
+ #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
pub fn offset<T>(dst: *const T, offset: isize) -> *const T;
/// Calculates the offset from a pointer, potentially wrapping.
@@ -1185,7 +1185,7 @@
///
/// The stabilized version of this intrinsic is [`pointer::wrapping_offset`].
#[must_use = "returns a new pointer rather than modifying its argument"]
- #[rustc_const_unstable(feature = "const_ptr_offset", issue = "71499")]
+ #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
pub fn arith_offset<T>(dst: *const T, offset: isize) -> *const T;
/// Equivalent to the appropriate `llvm.memcpy.p0i8.0i8.*` intrinsic, with
@@ -1638,7 +1638,7 @@
/// let num_trailing = unsafe { cttz_nonzero(x) };
/// assert_eq!(num_trailing, 3);
/// ```
- #[rustc_const_stable(feature = "const_cttz", since = "1.53.0")]
+ #[rustc_const_stable(feature = "const_cttz_nonzero", since = "1.53.0")]
pub fn cttz_nonzero<T: Copy>(x: T) -> T;
/// Reverses the bytes in an integer type `T`.
@@ -1718,7 +1718,7 @@
/// Safe wrappers for this intrinsic are available on the integer
/// primitives via the `checked_div` method. For example,
/// [`u32::checked_div`]
- #[rustc_const_stable(feature = "const_int_unchecked_arith", since = "1.52.0")]
+ #[rustc_const_stable(feature = "const_int_unchecked_div", since = "1.52.0")]
pub fn unchecked_div<T: Copy>(x: T, y: T) -> T;
/// Returns the remainder of an unchecked division, resulting in
/// undefined behavior when `y == 0` or `x == T::MIN && y == -1`
@@ -1726,7 +1726,7 @@
/// Safe wrappers for this intrinsic are available on the integer
/// primitives via the `checked_rem` method. For example,
/// [`u32::checked_rem`]
- #[rustc_const_stable(feature = "const_int_unchecked_arith", since = "1.52.0")]
+ #[rustc_const_stable(feature = "const_int_unchecked_rem", since = "1.52.0")]
pub fn unchecked_rem<T: Copy>(x: T, y: T) -> T;
/// Performs an unchecked left shift, resulting in undefined behavior when
@@ -1936,7 +1936,6 @@
/// - If the `ptr` is created in another const, this intrinsic doesn't deallocate it.
/// - If the `ptr` is pointing to a local variable, this intrinsic doesn't deallocate it.
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
- #[cfg(not(bootstrap))]
pub fn const_deallocate(ptr: *mut u8, size: usize, align: usize);
/// Determines whether the raw bytes of the two values are equal.
@@ -1970,18 +1969,51 @@
// (`transmute` also falls into this category, but it cannot be wrapped due to the
// check that `T` and `U` have the same size.)
+/// Check that the preconditions of an unsafe function are followed, if debug_assertions are on,
+/// and only at runtime.
+///
+/// # Safety
+///
+/// Invoking this macro is only sound if the following code is already UB when the passed
+/// expression evaluates to false.
+///
+/// This macro expands to a check at runtime if debug_assertions is set. It has no effect at
+/// compile time, but the semantics of the contained `const_eval_select` must be the same at
+/// runtime and at compile time. Thus if the expression evaluates to false, this macro produces
+/// different behavior at compile time and at runtime, and invoking it is incorrect.
+///
+/// So in a sense it is UB if this macro is useful, but we expect callers of `unsafe fn` to make
+/// the occasional mistake, and this check should help them figure things out.
+#[allow_internal_unstable(const_eval_select)] // permit this to be called in stably-const fn
+macro_rules! assert_unsafe_precondition {
+ ($e:expr) => {
+ if cfg!(debug_assertions) {
+ // Use a closure so that we can capture arbitrary expressions from the invocation
+ let runtime = || {
+ if !$e {
+ // abort instead of panicking to reduce impact on code size
+ ::core::intrinsics::abort();
+ }
+ };
+ const fn comptime() {}
+
+ ::core::intrinsics::const_eval_select((), comptime, runtime);
+ }
+ };
+}
+pub(crate) use assert_unsafe_precondition;
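
For illustration, a minimal sketch of how a libcore-internal wrapper might use the new macro. The macro is pub(crate), so the helper name and signature below are hypothetical, not part of this change:

// Hypothetical core-internal helper; `assert_unsafe_precondition!` is only
// visible inside `core`, so this is illustrative rather than user-facing code.
pub(crate) unsafe fn copy_one_byte(src: *const u8, dst: *mut u8) {
    // The check runs only when debug_assertions are enabled, and only at
    // runtime; during const evaluation the empty `comptime` branch is chosen.
    assert_unsafe_precondition!(is_aligned_and_not_null(src) && is_aligned_and_not_null(dst));
    // SAFETY: the caller must uphold the preconditions checked above.
    unsafe { *dst = *src };
}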
+
/// Checks whether `ptr` is properly aligned with respect to
/// `align_of::<T>()`.
pub(crate) fn is_aligned_and_not_null<T>(ptr: *const T) -> bool {
- !ptr.is_null() && ptr as usize % mem::align_of::<T>() == 0
+ !ptr.is_null() && ptr.addr() % mem::align_of::<T>() == 0
}
/// Checks whether the regions of memory starting at `src` and `dst` of size
/// `count * size_of::<T>()` do *not* overlap.
-#[cfg(debug_assertions)]
pub(crate) fn is_nonoverlapping<T>(src: *const T, dst: *const T, count: usize) -> bool {
- let src_usize = src as usize;
- let dst_usize = dst as usize;
+ let src_usize = src.addr();
+ let dst_usize = dst.addr();
let size = mem::size_of::<T>().checked_mul(count).unwrap();
let diff = if src_usize > dst_usize { src_usize - dst_usize } else { dst_usize - src_usize };
// If the absolute distance between the ptrs is at least as big as the size of the buffer,
// they do not overlap.
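
The overlap test reduces to plain address arithmetic: the regions are disjoint exactly when the distance between the start addresses is at least the byte size of the copied range. A small standalone sketch of that reasoning (the helper below is made up for illustration, not the private function from this file):

// Two regions of `bytes` bytes overlap iff their start addresses are closer
// than `bytes` apart.
fn regions_overlap(a: usize, b: usize, bytes: usize) -> bool {
    let diff = if a > b { a - b } else { b - a };
    diff < bytes
}

fn main() {
    // 4 x u32 = 16 bytes per region.
    assert!(!regions_overlap(0x1000, 0x1010, 16)); // exactly adjacent: disjoint
    assert!(regions_overlap(0x1000, 0x1008, 16)); // 8 bytes apart: overlapping
}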
@@ -2080,28 +2112,16 @@
pub fn copy_nonoverlapping<T>(src: *const T, dst: *mut T, count: usize);
}
- #[cfg(debug_assertions)]
- fn runtime_check<T>(src: *const T, dst: *mut T, count: usize) {
- if !is_aligned_and_not_null(src)
- || !is_aligned_and_not_null(dst)
- || !is_nonoverlapping(src, dst, count)
- {
- // Not panicking to keep codegen impact smaller.
- abort();
- }
- }
- #[cfg(debug_assertions)]
- const fn compiletime_check<T>(_src: *const T, _dst: *mut T, _count: usize) {}
- #[cfg(debug_assertions)]
- // SAFETY: As per our safety precondition, we may assume that the `abort` above is never reached.
- // Therefore, compiletime_check and runtime_check are observably equivalent.
- unsafe {
- const_eval_select((src, dst, count), compiletime_check, runtime_check);
- }
-
// SAFETY: the safety contract for `copy_nonoverlapping` must be
// upheld by the caller.
- unsafe { copy_nonoverlapping(src, dst, count) }
+ unsafe {
+ assert_unsafe_precondition!(
+ is_aligned_and_not_null(src)
+ && is_aligned_and_not_null(dst)
+ && is_nonoverlapping(src, dst, count)
+ );
+ copy_nonoverlapping(src, dst, count)
+ }
}
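
A minimal usage sketch of this wrapper as re-exported through `core::ptr::copy_nonoverlapping`; with debug assertions enabled, the new macro aborts at runtime on a null, misaligned, or overlapping pair instead of proceeding into undefined behavior:

fn main() {
    let src = [1u32, 2, 3, 4];
    let mut dst = [0u32; 4];
    // SAFETY: both buffers are valid for 4 elements, properly aligned,
    // and do not overlap.
    unsafe { core::ptr::copy_nonoverlapping(src.as_ptr(), dst.as_mut_ptr(), 4) };
    assert_eq!(dst, [1, 2, 3, 4]);
}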
/// Copies `count * size_of::<T>()` bytes from `src` to `dst`. The source
@@ -2174,24 +2194,11 @@
fn copy<T>(src: *const T, dst: *mut T, count: usize);
}
- #[cfg(debug_assertions)]
- fn runtime_check<T>(src: *const T, dst: *mut T) {
- if !is_aligned_and_not_null(src) || !is_aligned_and_not_null(dst) {
- // Not panicking to keep codegen impact smaller.
- abort();
- }
- }
- #[cfg(debug_assertions)]
- const fn compiletime_check<T>(_src: *const T, _dst: *mut T) {}
- #[cfg(debug_assertions)]
- // SAFETY: As per our safety precondition, we may assume that the `abort` above is never reached.
- // Therefore, compiletime_check and runtime_check are observably equivalent.
- unsafe {
- const_eval_select((src, dst), compiletime_check, runtime_check);
- }
-
// SAFETY: the safety contract for `copy` must be upheld by the caller.
- unsafe { copy(src, dst, count) }
+ unsafe {
+ assert_unsafe_precondition!(is_aligned_and_not_null(src) && is_aligned_and_not_null(dst));
+ copy(src, dst, count)
+ }
}
/// Sets `count * size_of::<T>()` bytes of memory starting at `dst` to
@@ -2275,24 +2282,11 @@
fn write_bytes<T>(dst: *mut T, val: u8, count: usize);
}
- #[cfg(debug_assertions)]
- fn runtime_check<T>(ptr: *mut T) {
- debug_assert!(
- is_aligned_and_not_null(ptr),
- "attempt to write to unaligned or null pointer"
- );
- }
- #[cfg(debug_assertions)]
- const fn compiletime_check<T>(_ptr: *mut T) {}
- #[cfg(debug_assertions)]
- // SAFETY: runtime debug-assertions are a best-effort basis; it's fine to
- // not do them during compile time
- unsafe {
- const_eval_select((dst,), compiletime_check, runtime_check);
- }
-
// SAFETY: the safety contract for `write_bytes` must be upheld by the caller.
- unsafe { write_bytes(dst, val, count) }
+ unsafe {
+ assert_unsafe_precondition!(is_aligned_and_not_null(dst));
+ write_bytes(dst, val, count)
+ }
}
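
The same pattern applies to `write_bytes`, re-exported as `core::ptr::write_bytes`; a minimal usage sketch:

fn main() {
    let mut buf = [0u32; 4];
    // SAFETY: `buf` is valid and aligned for 4 `u32`s; filling every byte
    // with 0xFF yields `u32::MAX` in each element.
    unsafe { core::ptr::write_bytes(buf.as_mut_ptr(), 0xFF, 4) };
    assert_eq!(buf, [u32::MAX; 4]);
}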
/// Selects which function to call depending on the context.
@@ -2354,6 +2348,7 @@
#[rustc_const_unstable(feature = "const_eval_select", issue = "none")]
#[lang = "const_eval_select"]
#[rustc_do_not_const_check]
+#[cfg_attr(not(bootstrap), allow(drop_bounds))] // FIXME remove `~const Drop` and this attr when bumping
pub const unsafe fn const_eval_select<ARG, F, G, RET>(
arg: ARG,
_called_in_const: F,
@@ -2361,7 +2356,7 @@
) -> RET
where
F: ~const FnOnce<ARG, Output = RET>,
- G: FnOnce<ARG, Output = RET> + ~const Drop,
+ G: FnOnce<ARG, Output = RET> + ~const Drop + ~const Destruct,
{
called_at_rt.call_once(arg)
}
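
A hedged sketch of how `const_eval_select` is meant to be used (nightly-only; the feature gates and the example function are assumptions for illustration, not part of this change). Soundness requires that both branches are observably equivalent for every input:

#![feature(core_intrinsics, const_eval_select)]
use core::intrinsics::const_eval_select;

const fn strlen_ct(s: &str) -> usize { s.len() }
fn strlen_rt(s: &str) -> usize { s.len() }

const fn strlen(s: &str) -> usize {
    // SAFETY: both functions compute the same result for all inputs,
    // so compile-time and runtime behavior cannot diverge.
    unsafe { const_eval_select((s,), strlen_ct, strlen_rt) }
}

const AT_COMPILE_TIME: usize = strlen("abc"); // evaluated via `strlen_ct`
fn main() {
    assert_eq!(strlen("abc"), AT_COMPILE_TIME); // at runtime, `strlen_rt` runs
}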
@@ -2373,6 +2368,7 @@
)]
#[rustc_const_unstable(feature = "const_eval_select", issue = "none")]
#[lang = "const_eval_select_ct"]
+#[cfg_attr(not(bootstrap), allow(drop_bounds))] // FIXME remove `~const Drop` and this attr when bumping
pub const unsafe fn const_eval_select_ct<ARG, F, G, RET>(
arg: ARG,
called_in_const: F,
@@ -2380,7 +2376,7 @@
) -> RET
where
F: ~const FnOnce<ARG, Output = RET>,
- G: FnOnce<ARG, Output = RET> + ~const Drop,
+ G: FnOnce<ARG, Output = RET> + ~const Drop + ~const Destruct,
{
called_in_const.call_once(arg)
}