Commit 13f9aa19 authored by bors

Auto merge of #74664 - pnadon:Miri-rename-undef-uninit, r=RalfJung

Miri rename undef uninit

Renamed parts of code within the `librustc_middle/mir/interpret/` directory.

Related issue [#71193](https://github.com/rust-lang/rust/issues/71193)
parents 461707c5 ef9c4f5c
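
For readers skimming the diff, the main renames it performs (all visible in the hunks below) are:

- `ScalarMaybeUninit::not_undef` → `ScalarMaybeUninit::check_init`
- `Allocation::undef` → `Allocation::uninit`
- `Allocation::mark_definedness` → `Allocation::mark_init`
- `Allocation::is_defined` / `check_defined` → `is_init` / `check_init`
- `AllocationDefinedness` → `InitMaskCompressed` (with `all_bytes_undef` → `no_bytes_init`)
- `mark_compressed_undef_range` → `mark_compressed_init_range`
- `Immediate::to_scalar_or_undef` → `Immediate::to_scalar_or_uninit`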
......@@ -883,7 +883,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
let ptr = Pointer::new(AllocId(0), offset);
alloc
.read_scalar(&bx, ptr, size)
.and_then(|s| s.not_undef())
.and_then(|s| s.check_init())
.unwrap_or_else(|e| {
bx.tcx().sess.span_err(
span,
......
......@@ -105,7 +105,7 @@ impl<Tag> Allocation<Tag> {
Allocation::from_bytes(slice, Align::from_bytes(1).unwrap())
}
pub fn undef(size: Size, align: Align) -> Self {
pub fn uninit(size: Size, align: Align) -> Self {
Allocation {
bytes: vec![0; size.bytes_usize()],
relocations: Relocations::new(),
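
To make the hunk above concrete, here is a toy model (plain `Vec`s and a hypothetical `Alloc` type, not the real `Allocation`/`InitMask`) of what `Allocation::uninit` sets up: a zero-filled byte buffer plus a separate per-byte mask recording that nothing is initialized yet.

```rust
// Toy stand-in for Allocation::uninit: bytes are zeroed, but a separate
// mask records that none of them count as initialized.
struct Alloc {
    bytes: Vec<u8>,
    init_mask: Vec<bool>, // simplified stand-in for the bit-level InitMask
}

impl Alloc {
    fn uninit(size: usize) -> Self {
        Alloc { bytes: vec![0; size], init_mask: vec![false; size] }
    }
}

fn main() {
    let a = Alloc::uninit(4);
    // The bytes have a concrete value (zero), but reads must consult the mask first.
    assert!(a.bytes.iter().all(|&b| b == 0));
    assert!(a.init_mask.iter().all(|&init| !init));
}
```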
......@@ -153,7 +153,7 @@ impl<Tag, Extra> Allocation<Tag, Extra> {
self.size.bytes_usize()
}
/// Looks at a slice which may describe undefined bytes or describe a relocation. This differs
/// Looks at a slice which may describe uninitialized bytes or describe a relocation. This differs
/// from `get_bytes_with_undef_and_ptr` in that it does no relocation checks (even on the
/// edges) at all. It further ignores `AllocationExtra` callbacks.
/// This must not be used for reads affecting the interpreter execution.
......@@ -192,7 +192,7 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
offset.bytes_usize()..end
}
/// The last argument controls whether we error out when there are undefined
/// The last argument controls whether we error out when there are uninitialized
/// or pointer bytes. You should never call this, call `get_bytes` or
/// `get_bytes_with_undef_and_ptr` instead,
///
......@@ -206,12 +206,12 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
cx: &impl HasDataLayout,
ptr: Pointer<Tag>,
size: Size,
check_defined_and_ptr: bool,
check_init_and_ptr: bool,
) -> InterpResult<'tcx, &[u8]> {
let range = self.check_bounds(ptr.offset, size);
if check_defined_and_ptr {
self.check_defined(ptr, size)?;
if check_init_and_ptr {
self.check_init(ptr, size)?;
self.check_relocations(cx, ptr, size)?;
} else {
// We still don't want relocations on the *edges*.
......@@ -239,7 +239,7 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
self.get_bytes_internal(cx, ptr, size, true)
}
/// It is the caller's responsibility to handle undefined and pointer bytes.
/// It is the caller's responsibility to handle uninitialized and pointer bytes.
/// However, this still checks that there are no relocations on the *edges*.
///
/// It is the caller's responsibility to check bounds and alignment beforehand.
......@@ -267,7 +267,7 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
) -> InterpResult<'tcx, &mut [u8]> {
let range = self.check_bounds(ptr.offset, size);
self.mark_definedness(ptr, size, true);
self.mark_init(ptr, size, true);
self.clear_relocations(cx, ptr, size)?;
AllocationExtra::memory_written(self, ptr, size)?;
......@@ -303,7 +303,7 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
/// Validates that `ptr.offset` and `ptr.offset + size` do not point to the middle of a
/// relocation. If `allow_ptr_and_undef` is `false`, also enforces that the memory in the
/// given range contains neither relocations nor undef bytes.
/// given range contains neither relocations nor uninitialized bytes.
pub fn check_bytes(
&self,
cx: &impl HasDataLayout,
......@@ -313,9 +313,9 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
) -> InterpResult<'tcx> {
// Check bounds and relocations on the edges.
self.get_bytes_with_undef_and_ptr(cx, ptr, size)?;
// Check undef and ptr.
// Check uninit and ptr.
if !allow_ptr_and_undef {
self.check_defined(ptr, size)?;
self.check_init(ptr, size)?;
self.check_relocations(cx, ptr, size)?;
}
Ok(())
......@@ -364,7 +364,7 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
let bytes = self.get_bytes_with_undef_and_ptr(cx, ptr, size)?;
// Uninit check happens *after* we established that the alignment is correct.
// We must not return `Ok()` for unaligned pointers!
if self.is_defined(ptr, size).is_err() {
if self.is_init(ptr, size).is_err() {
// This inflates uninitialized bytes to the entire scalar, even if only a few
// bytes are uninitialized.
return Ok(ScalarMaybeUninit::Uninit);
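
A simplified sketch of the behaviour described in the comment above (stand-in types and a hypothetical `read_scalar`, not the rustc API): if any byte of the scalar's range is uninitialized, the whole read yields `Uninit`.

```rust
#[derive(Debug, PartialEq)]
enum ScalarMaybeUninit {
    Scalar(u64),
    Uninit,
}

// `init` is a per-byte initialization mask for the 8 bytes being read.
fn read_scalar(bytes: &[u8; 8], init: &[bool; 8]) -> ScalarMaybeUninit {
    if init.iter().any(|&b| !b) {
        // A single uninitialized byte "inflates" to the entire scalar.
        return ScalarMaybeUninit::Uninit;
    }
    ScalarMaybeUninit::Scalar(u64::from_le_bytes(*bytes))
}

fn main() {
    let bytes = [1, 0, 0, 0, 0, 0, 0, 0];
    let mut init = [true; 8];
    assert_eq!(read_scalar(&bytes, &init), ScalarMaybeUninit::Scalar(1));
    init[3] = false; // one uninitialized byte is enough
    assert_eq!(read_scalar(&bytes, &init), ScalarMaybeUninit::Uninit);
}
```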
......@@ -416,7 +416,7 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
let val = match val {
ScalarMaybeUninit::Scalar(scalar) => scalar,
ScalarMaybeUninit::Uninit => {
self.mark_definedness(ptr, type_size, false);
self.mark_init(ptr, type_size, false);
return Ok(());
}
};
......@@ -512,7 +512,7 @@ impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
let start = ptr.offset;
let end = start + size; // `Size` addition
// Mark parts of the outermost relocations as undefined if they partially fall outside the
// Mark parts of the outermost relocations as uninitialized if they partially fall outside the
// given range.
if first < start {
self.init_mask.set_range(first, start, false);
......@@ -542,20 +542,20 @@ impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
}
}
/// Undefined bytes.
/// Uninitialized bytes.
impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
/// Checks whether the given range is entirely defined.
/// Checks whether the given range is entirely initialized.
///
/// Returns `Ok(())` if it's defined. Otherwise returns the range of byte
/// indexes of the first contiguous undefined access.
fn is_defined(&self, ptr: Pointer<Tag>, size: Size) -> Result<(), Range<Size>> {
/// Returns `Ok(())` if it's initialized. Otherwise returns the range of byte
/// indexes of the first contiguous uninitialized access.
fn is_init(&self, ptr: Pointer<Tag>, size: Size) -> Result<(), Range<Size>> {
self.init_mask.is_range_initialized(ptr.offset, ptr.offset + size) // `Size` addition
}
/// Checks that a range of bytes is defined. If not, returns the `InvalidUndefBytes`
/// error which will report the first range of bytes which is undefined.
fn check_defined(&self, ptr: Pointer<Tag>, size: Size) -> InterpResult<'tcx> {
self.is_defined(ptr, size).or_else(|idx_range| {
/// Checks that a range of bytes is initialized. If not, returns the `InvalidUninitBytes`
/// error which will report the first range of bytes which is uninitialized.
fn check_init(&self, ptr: Pointer<Tag>, size: Size) -> InterpResult<'tcx> {
self.is_init(ptr, size).or_else(|idx_range| {
throw_ub!(InvalidUninitBytes(Some(Box::new(UninitBytesAccess {
access_ptr: ptr.erase_tag(),
access_size: size,
......@@ -565,44 +565,44 @@ impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
})
}
pub fn mark_definedness(&mut self, ptr: Pointer<Tag>, size: Size, new_state: bool) {
pub fn mark_init(&mut self, ptr: Pointer<Tag>, size: Size, is_init: bool) {
if size.bytes() == 0 {
return;
}
self.init_mask.set_range(ptr.offset, ptr.offset + size, new_state);
self.init_mask.set_range(ptr.offset, ptr.offset + size, is_init);
}
}
/// Run-length encoding of the undef mask.
/// Run-length encoding of the uninit mask.
/// Used to copy parts of a mask multiple times to another allocation.
pub struct AllocationDefinedness {
/// The definedness of the first range.
pub struct InitMaskCompressed {
/// Whether the first range is initialized.
initial: bool,
/// The lengths of ranges that are run-length encoded.
/// The definedness of the ranges alternate starting with `initial`.
/// The initialization state of the ranges alternate starting with `initial`.
ranges: smallvec::SmallVec<[u64; 1]>,
}
impl AllocationDefinedness {
pub fn all_bytes_undef(&self) -> bool {
// The `ranges` are run-length encoded and of alternating definedness.
// So if `ranges.len() > 1` then the second block is a range of defined.
impl InitMaskCompressed {
pub fn no_bytes_init(&self) -> bool {
// The `ranges` are run-length encoded and of alternating initialization state.
// So if `ranges.len() > 1` then the second block is an initialized range.
!self.initial && self.ranges.len() == 1
}
}
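
A quick illustration of the `no_bytes_init` invariant (hypothetical values, plain Rust): because the runs alternate in state, "nothing initialized" is exactly one run that starts uninitialized.

```rust
fn no_bytes_init(initial: bool, ranges: &[u64]) -> bool {
    !initial && ranges.len() == 1
}

fn main() {
    assert!(no_bytes_init(false, &[16]));        // 16 uninitialized bytes
    assert!(!no_bytes_init(false, &[5, 1, 10])); // an initialized byte in the middle
    assert!(!no_bytes_init(true, &[16]));        // fully initialized
}
```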
/// Transferring the definedness mask to other allocations.
/// Transferring the initialization mask to other allocations.
impl<Tag, Extra> Allocation<Tag, Extra> {
/// Creates a run-length encoding of the undef mask.
pub fn compress_undef_range(&self, src: Pointer<Tag>, size: Size) -> AllocationDefinedness {
/// Creates a run-length encoding of the initialization mask.
pub fn compress_undef_range(&self, src: Pointer<Tag>, size: Size) -> InitMaskCompressed {
// Since we are copying `size` bytes from `src` to `dest + i * size` (`for i in 0..repeat`),
// a naive undef mask copying algorithm would repeatedly have to read the undef mask from
// a naive initialization mask copying algorithm would repeatedly have to read the initialization mask from
// the source and write it to the destination. Even if we optimized the memory accesses,
// we'd be doing all of this `repeat` times.
// Therefore we precompute a compressed version of the undef mask of the source value and
// Therefore we precompute a compressed version of the initialization mask of the source value and
// then write it back `repeat` times without computing any more information from the source.
// A precomputed cache for ranges of defined/undefined bits
// A precomputed cache for ranges of initialized / uninitialized bits
// 0000010010001110 will become
// `[5, 1, 2, 1, 3, 3, 1]`,
// where each element toggles the state.
......@@ -613,7 +613,7 @@ impl<Tag, Extra> Allocation<Tag, Extra> {
let mut cur = initial;
for i in 1..size.bytes() {
// FIXME: optimize to bitshift the current undef block's bits and read the top bit.
// FIXME: optimize to bitshift the current uninitialized block's bits and read the top bit.
if self.init_mask.get(src.offset + Size::from_bytes(i)) == cur {
cur_len += 1;
} else {
......@@ -625,13 +625,13 @@ impl<Tag, Extra> Allocation<Tag, Extra> {
ranges.push(cur_len);
AllocationDefinedness { ranges, initial }
InitMaskCompressed { ranges, initial }
}
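
A minimal, self-contained sketch of the run-length encoding described above, using a plain `Vec<bool>` mask instead of the interpreter's `InitMask`/`Size` types; it reproduces the `0000010010001110` example from the comment.

```rust
fn compress(mask: &[bool]) -> (bool, Vec<u64>) {
    let initial = mask[0];
    let mut ranges = Vec::new();
    let (mut cur, mut cur_len) = (initial, 1u64);
    for &bit in &mask[1..] {
        if bit == cur {
            cur_len += 1;
        } else {
            // State toggled: close the current run and start a new one.
            ranges.push(cur_len);
            cur = bit;
            cur_len = 1;
        }
    }
    ranges.push(cur_len);
    (initial, ranges)
}

fn main() {
    let mask: Vec<bool> = "0000010010001110".chars().map(|c| c == '1').collect();
    let (initial, ranges) = compress(&mask);
    assert!(!initial); // the first run is uninitialized
    assert_eq!(ranges, vec![5, 1, 2, 1, 3, 3, 1]); // each element toggles the state
}
```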
/// Applies multiple instances of the run-length encoding to the undef mask.
pub fn mark_compressed_undef_range(
/// Applies multiple instances of the run-length encoding to the initialization mask.
pub fn mark_compressed_init_range(
&mut self,
defined: &AllocationDefinedness,
defined: &InitMaskCompressed,
dest: Pointer<Tag>,
size: Size,
repeat: u64,
......@@ -740,7 +740,7 @@ impl<Tag: Copy, Extra> Allocation<Tag, Extra> {
}
////////////////////////////////////////////////////////////////////////////////
// Undefined byte tracking
// Uninitialized byte tracking
////////////////////////////////////////////////////////////////////////////////
type Block = u64;
......@@ -778,11 +778,11 @@ impl InitMask {
match idx {
Some(idx) => {
let undef_end = (idx.bytes()..end.bytes())
let uninit_end = (idx.bytes()..end.bytes())
.map(Size::from_bytes)
.find(|&i| self.get(i))
.unwrap_or(end);
Err(idx..undef_end)
Err(idx..uninit_end)
}
None => Ok(()),
}
......
......@@ -606,7 +606,7 @@ impl<'tcx, Tag> ScalarMaybeUninit<Tag> {
}
#[inline]
pub fn not_undef(self) -> InterpResult<'static, Scalar<Tag>> {
pub fn check_init(self) -> InterpResult<'static, Scalar<Tag>> {
match self {
ScalarMaybeUninit::Scalar(scalar) => Ok(scalar),
ScalarMaybeUninit::Uninit => throw_ub!(InvalidUninitBytes(None)),
......@@ -615,72 +615,72 @@ impl<'tcx, Tag> ScalarMaybeUninit<Tag> {
#[inline(always)]
pub fn to_bool(self) -> InterpResult<'tcx, bool> {
self.not_undef()?.to_bool()
self.check_init()?.to_bool()
}
#[inline(always)]
pub fn to_char(self) -> InterpResult<'tcx, char> {
self.not_undef()?.to_char()
self.check_init()?.to_char()
}
#[inline(always)]
pub fn to_f32(self) -> InterpResult<'tcx, Single> {
self.not_undef()?.to_f32()
self.check_init()?.to_f32()
}
#[inline(always)]
pub fn to_f64(self) -> InterpResult<'tcx, Double> {
self.not_undef()?.to_f64()
self.check_init()?.to_f64()
}
#[inline(always)]
pub fn to_u8(self) -> InterpResult<'tcx, u8> {
self.not_undef()?.to_u8()
self.check_init()?.to_u8()
}
#[inline(always)]
pub fn to_u16(self) -> InterpResult<'tcx, u16> {
self.not_undef()?.to_u16()
self.check_init()?.to_u16()
}
#[inline(always)]
pub fn to_u32(self) -> InterpResult<'tcx, u32> {
self.not_undef()?.to_u32()
self.check_init()?.to_u32()
}
#[inline(always)]
pub fn to_u64(self) -> InterpResult<'tcx, u64> {
self.not_undef()?.to_u64()
self.check_init()?.to_u64()
}
#[inline(always)]
pub fn to_machine_usize(self, cx: &impl HasDataLayout) -> InterpResult<'tcx, u64> {
self.not_undef()?.to_machine_usize(cx)
self.check_init()?.to_machine_usize(cx)
}
#[inline(always)]
pub fn to_i8(self) -> InterpResult<'tcx, i8> {
self.not_undef()?.to_i8()
self.check_init()?.to_i8()
}
#[inline(always)]
pub fn to_i16(self) -> InterpResult<'tcx, i16> {
self.not_undef()?.to_i16()
self.check_init()?.to_i16()
}
#[inline(always)]
pub fn to_i32(self) -> InterpResult<'tcx, i32> {
self.not_undef()?.to_i32()
self.check_init()?.to_i32()
}
#[inline(always)]
pub fn to_i64(self) -> InterpResult<'tcx, i64> {
self.not_undef()?.to_i64()
self.check_init()?.to_i64()
}
#[inline(always)]
pub fn to_machine_isize(self, cx: &impl HasDataLayout) -> InterpResult<'tcx, i64> {
self.not_undef()?.to_machine_isize(cx)
self.check_init()?.to_machine_isize(cx)
}
}
......
......@@ -154,7 +154,7 @@ pub(super) fn op_to_const<'tcx>(
ScalarMaybeUninit::Uninit => to_const_value(op.assert_mem_place(ecx)),
},
Immediate::ScalarPair(a, b) => {
let (data, start) = match a.not_undef().unwrap() {
let (data, start) = match a.check_init().unwrap() {
Scalar::Ptr(ptr) => {
(ecx.tcx.global_alloc(ptr.alloc_id).unwrap_memory(), ptr.offset.bytes())
}
......
......@@ -150,7 +150,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
| sym::bitreverse => {
let ty = substs.type_at(0);
let layout_of = self.layout_of(ty)?;
let val = self.read_scalar(args[0])?.not_undef()?;
let val = self.read_scalar(args[0])?.check_init()?;
let bits = self.force_bits(val, layout_of.size)?;
let kind = match layout_of.abi {
Abi::Scalar(ref scalar) => scalar.value,
......@@ -281,9 +281,9 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
// rotate_left: (X << (S % BW)) | (X >> ((BW - S) % BW))
// rotate_right: (X << ((BW - S) % BW)) | (X >> (S % BW))
let layout = self.layout_of(substs.type_at(0))?;
let val = self.read_scalar(args[0])?.not_undef()?;
let val = self.read_scalar(args[0])?.check_init()?;
let val_bits = self.force_bits(val, layout.size)?;
let raw_shift = self.read_scalar(args[1])?.not_undef()?;
let raw_shift = self.read_scalar(args[1])?.check_init()?;
let raw_shift_bits = self.force_bits(raw_shift, layout.size)?;
let width_bits = u128::from(layout.size.bits());
let shift_bits = raw_shift_bits % width_bits;
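
The rotate comment above is the usual masked-shift identity; here is a quick standalone check in plain `u32` arithmetic (independent of the interpreter's `Scalar` machinery) that the formula matches Rust's built-in `rotate_left`.

```rust
fn rotate_left_formula(x: u32, s: u32) -> u32 {
    const BW: u32 = 32; // bit width
    // rotate_left: (X << (S % BW)) | (X >> ((BW - S) % BW))
    (x << (s % BW)) | (x >> ((BW - s) % BW))
}

fn main() {
    for &(x, s) in &[(0x1234_5678u32, 8), (0xdead_beef, 0), (0x8000_0001, 1), (1, 31)] {
        assert_eq!(rotate_left_formula(x, s), x.rotate_left(s));
    }
}
```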
......@@ -298,7 +298,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
self.write_scalar(result, dest)?;
}
sym::offset => {
let ptr = self.read_scalar(args[0])?.not_undef()?;
let ptr = self.read_scalar(args[0])?.check_init()?;
let offset_count = self.read_scalar(args[1])?.to_machine_isize(self)?;
let pointee_ty = substs.type_at(0);
......@@ -306,7 +306,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
self.write_scalar(offset_ptr, dest)?;
}
sym::arith_offset => {
let ptr = self.read_scalar(args[0])?.not_undef()?;
let ptr = self.read_scalar(args[0])?.check_init()?;
let offset_count = self.read_scalar(args[1])?.to_machine_isize(self)?;
let pointee_ty = substs.type_at(0);
......
......@@ -171,7 +171,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
align: Align,
kind: MemoryKind<M::MemoryKind>,
) -> Pointer<M::PointerTag> {
let alloc = Allocation::undef(size, align);
let alloc = Allocation::uninit(size, align);
self.allocate_with(alloc, kind)
}
......@@ -907,18 +907,18 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
let dest_bytes = dest_bytes.as_mut_ptr();
// Prepare a copy of the undef mask.
// Prepare a copy of the initialization mask.
let compressed = self.get_raw(src.alloc_id)?.compress_undef_range(src, size);
if compressed.all_bytes_undef() {
// Fast path: If all bytes are `undef` then there is nothing to copy. The target range
// is marked as undef but we otherwise omit changing the byte representation which may
// be arbitrary for undef bytes.
if compressed.no_bytes_init() {
// Fast path: If all bytes are `uninit` then there is nothing to copy. The target range
is marked as uninitialized but we otherwise omit changing the byte representation which may
// be arbitrary for uninitialized bytes.
// This also avoids writing to the target bytes so that the backing allocation is never
// touched if the bytes stay undef for the whole interpreter execution. On contemporary
// touched if the bytes stay uninitialized for the whole interpreter execution. On contemporary
operating systems this can avoid physically allocating the page.
let dest_alloc = self.get_raw_mut(dest.alloc_id)?;
dest_alloc.mark_definedness(dest, size * length, false); // `Size` multiplication
dest_alloc.mark_init(dest, size * length, false); // `Size` multiplication
dest_alloc.mark_relocation_range(relocations);
return Ok(());
}
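
A hedged sketch of the fast path described in the comments above, with simplified stand-in types (not the rustc API): when the compressed source mask reports that no byte is initialized, only the destination's init mask is updated and its bytes are never written.

```rust
struct Compressed {
    initial: bool,
    ranges: Vec<u64>,
}

impl Compressed {
    fn no_bytes_init(&self) -> bool {
        !self.initial && self.ranges.len() == 1
    }
}

struct Alloc {
    bytes: Vec<u8>,
    init: Vec<bool>, // stand-in for the bit-level InitMask
}

fn copy_init(compressed: &Compressed, dest: &mut Alloc, start: usize, len: usize) {
    if compressed.no_bytes_init() {
        // Fast path: mark the target range uninitialized and leave `dest.bytes`
        // alone, so the backing pages may never need to be physically allocated.
        dest.init[start..start + len].iter_mut().for_each(|b| *b = false);
        return;
    }
    // Slow path (omitted here): expand the run-length encoding into `dest.init`
    // and copy the source bytes into `dest.bytes`.
    unimplemented!("slow path not sketched");
}

fn main() {
    let all_uninit = Compressed { initial: false, ranges: vec![4] };
    let mut dest = Alloc { bytes: vec![0xAA; 8], init: vec![true; 8] };
    copy_init(&all_uninit, &mut dest, 2, 4);
    assert_eq!(dest.bytes, vec![0xAA; 8]); // destination bytes untouched
    assert_eq!(dest.init, [true, true, false, false, false, false, true, true]);
}
```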
......@@ -958,7 +958,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
}
// now fill in all the data
self.get_raw_mut(dest.alloc_id)?.mark_compressed_undef_range(
self.get_raw_mut(dest.alloc_id)?.mark_compressed_init_range(
&compressed,
dest,
size,
......
......@@ -63,7 +63,7 @@ impl<'tcx, Tag> Immediate<Tag> {
}
#[inline]
pub fn to_scalar_or_undef(self) -> ScalarMaybeUninit<Tag> {
pub fn to_scalar_or_uninit(self) -> ScalarMaybeUninit<Tag> {
match self {
Immediate::Scalar(val) => val,
Immediate::ScalarPair(..) => bug!("Got a wide pointer where a scalar was expected"),
......@@ -72,14 +72,14 @@ impl<'tcx, Tag> Immediate<Tag> {
#[inline]
pub fn to_scalar(self) -> InterpResult<'tcx, Scalar<Tag>> {
self.to_scalar_or_undef().not_undef()
self.to_scalar_or_uninit().check_init()
}
#[inline]
pub fn to_scalar_pair(self) -> InterpResult<'tcx, (Scalar<Tag>, Scalar<Tag>)> {
match self {
Immediate::Scalar(..) => bug!("Got a thin pointer where a scalar pair was expected"),
Immediate::ScalarPair(a, b) => Ok((a.not_undef()?, b.not_undef()?)),
Immediate::ScalarPair(a, b) => Ok((a.check_init()?, b.check_init()?)),
}
}
}
......@@ -333,7 +333,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
&self,
op: OpTy<'tcx, M::PointerTag>,
) -> InterpResult<'tcx, ScalarMaybeUninit<M::PointerTag>> {
Ok(self.read_immediate(op)?.to_scalar_or_undef())
Ok(self.read_immediate(op)?.to_scalar_or_uninit())
}
// Turn the wide MPlace into a string (must already be dereferenced!)
......
......@@ -292,9 +292,9 @@ where
val.layout.ty.builtin_deref(true).expect("`ref_to_mplace` called on non-ptr type").ty;
let layout = self.layout_of(pointee_type)?;
let (ptr, meta) = match *val {
Immediate::Scalar(ptr) => (ptr.not_undef()?, MemPlaceMeta::None),
Immediate::Scalar(ptr) => (ptr.check_init()?, MemPlaceMeta::None),
Immediate::ScalarPair(ptr, meta) => {
(ptr.not_undef()?, MemPlaceMeta::Meta(meta.not_undef()?))
(ptr.check_init()?, MemPlaceMeta::Meta(meta.check_init()?))
}
};
......@@ -541,7 +541,7 @@ where
let n = self.access_local(self.frame(), local, Some(layout))?;
let n = self.read_scalar(n)?;
let n = u64::try_from(
self.force_bits(n.not_undef()?, self.tcx.data_layout.pointer_size)?,
self.force_bits(n.check_init()?, self.tcx.data_layout.pointer_size)?,
)
.unwrap();
self.mplace_index(base, n)?
......
......@@ -58,7 +58,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
let (fn_val, abi) = match func.layout.ty.kind {
ty::FnPtr(sig) => {
let caller_abi = sig.abi();
let fn_ptr = self.read_scalar(func)?.not_undef()?;
let fn_ptr = self.read_scalar(func)?.check_init()?;
let fn_val = self.memory.get_fn(fn_ptr)?;
(fn_val, caller_abi)
}
......
......@@ -118,7 +118,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
.memory
.get_raw(vtable_slot.alloc_id)?
.read_ptr_sized(self, vtable_slot)?
.not_undef()?;
.check_init()?;
Ok(self.memory.get_fn(fn_ptr)?)
}
......@@ -137,7 +137,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
)?
.expect("cannot be a ZST");
let drop_fn =
self.memory.get_raw(vtable.alloc_id)?.read_ptr_sized(self, vtable)?.not_undef()?;
self.memory.get_raw(vtable.alloc_id)?.read_ptr_sized(self, vtable)?.check_init()?;
// We *need* an instance here, no other kind of function value, to be able
// to determine the type.
let drop_instance = self.memory.get_fn(drop_fn)?.as_instance()?;
......@@ -165,10 +165,10 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
.check_ptr_access(vtable, 3 * pointer_size, self.tcx.data_layout.pointer_align.abi)?
.expect("cannot be a ZST");
let alloc = self.memory.get_raw(vtable.alloc_id)?;
let size = alloc.read_ptr_sized(self, vtable.offset(pointer_size, self)?)?.not_undef()?;
let size = alloc.read_ptr_sized(self, vtable.offset(pointer_size, self)?)?.check_init()?;
let size = u64::try_from(self.force_bits(size, pointer_size)?).unwrap();
let align =
alloc.read_ptr_sized(self, vtable.offset(pointer_size * 2, self)?)?.not_undef()?;
alloc.read_ptr_sized(self, vtable.offset(pointer_size * 2, self)?)?.check_init()?;
let align = u64::try_from(self.force_bits(align, pointer_size)?).unwrap();
if size >= self.tcx.data_layout.obj_size_bound() {
......
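
For orientation, the three reads above follow the vtable header layout visible in this diff: the drop-in-place fn pointer at offset 0, size at `pointer_size`, and align at `2 * pointer_size` (hence the `3 * pointer_size` access check). A rough, purely illustrative model with a hypothetical struct:

```rust
#[repr(C)]
struct VtableHeader {
    drop_in_place: usize, // stand-in for the drop fn pointer slot
    size: usize,
    align: usize,
}

fn main() {
    let ptr_size = std::mem::size_of::<usize>();
    // Matches the `3 * pointer_size` range the interpreter checks before reading.
    assert_eq!(std::mem::size_of::<VtableHeader>(), 3 * ptr_size);
    let h = VtableHeader { drop_in_place: 0, size: 24, align: 8 };
    println!("drop slot = {:#x}, size = {}, align = {}", h.drop_in_place, h.size, h.align);
}
```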
......@@ -500,7 +500,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
// types below!
if self.ref_tracking_for_consts.is_some() {
// Integers/floats in CTFE: Must be scalar bits, pointers are dangerous
let is_bits = value.not_undef().map_or(false, |v| v.is_bits());
let is_bits = value.check_init().map_or(false, |v| v.is_bits());
if !is_bits {
throw_validation_failure!(self.path,
{ "{}", value } expected { "initialized plain (non-pointer) bytes" }
......@@ -537,7 +537,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
ty::FnPtr(_sig) => {
let value = self.ecx.read_scalar(value)?;
let _fn = try_validation!(
value.not_undef().and_then(|ptr| self.ecx.memory.get_fn(ptr)),
value.check_init().and_then(|ptr| self.ecx.memory.get_fn(ptr)),
self.path,
err_ub!(DanglingIntPointer(..)) |
err_ub!(InvalidFunctionPointer(..)) |
......@@ -596,7 +596,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
}
// At least one value is excluded. Get the bits.
let value = try_validation!(
value.not_undef(),
value.check_init(),
self.path,
err_ub!(InvalidUninitBytes(None)) => { "{}", value }
expected { "something {}", wrapping_range_format(valid_range, max_hi) },
......
......@@ -2614,7 +2614,7 @@ fn specialize_one_pattern<'p, 'tcx>(
let pats = cx.pattern_arena.alloc_from_iter((0..n).filter_map(|i| {
let ptr = ptr.offset(layout.size * i, &cx.tcx).ok()?;
let scalar = alloc.read_scalar(&cx.tcx, ptr, layout.size).ok()?;
let scalar = scalar.not_undef().ok()?;
let scalar = scalar.check_init().ok()?;
let value = ty::Const::from_scalar(cx.tcx, scalar, ty);
let pattern = Pat { ty, span: pat.span, kind: box PatKind::Constant { value } };
Some(pattern)
......