Commit 56cbf2f2 authored by Tim Diekmann's avatar Tim Diekmann

Overhaul of the `AllocRef` trait to match allocator-wg's latest consensus

parent 2fbb0752
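For orientation, here is a rough sketch (not part of the diff) of how the reworked `AllocRef` surface is driven after this change: `alloc` takes an `AllocInit`, and `realloc` is split into `grow` and `shrink`, each taking a `ReallocPlacement`. This is nightly-only behind `#![feature(allocator_api)]`; the re-export of `ReallocPlacement` from `std::alloc` and the exact signatures are assumptions read off the hunks below, and the allocator-wg design kept evolving after this commit.

#![feature(allocator_api)]

use std::alloc::{AllocErr, AllocInit, AllocRef, Global, Layout, ReallocPlacement};

fn demo() -> Result<(), AllocErr> {
    // 16 bytes, zero-initialized: `AllocInit::Zeroed` replaces the old
    // `alloc_zeroed` trait method.
    let layout = Layout::new::<[u8; 16]>();
    unsafe {
        let (ptr, _) = Global.alloc(layout, AllocInit::Zeroed)?;

        // Growing may move the block; the newly added tail is left uninitialized.
        let (ptr, size) =
            Global.grow(ptr, layout, 32, ReallocPlacement::MayMove, AllocInit::Uninitialized)?;

        // Shrink back down, then deallocate with the layout actually in use.
        let grown = Layout::from_size_align_unchecked(size, layout.align());
        let (ptr, size) = Global.shrink(ptr, grown, 16, ReallocPlacement::MayMove)?;
        Global.dealloc(ptr, Layout::from_size_align_unchecked(size, layout.align()));
    }
    Ok(())
}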
......@@ -2,7 +2,7 @@
#![stable(feature = "alloc_module", since = "1.28.0")]
use core::intrinsics::{min_align_of_val, size_of_val};
use core::intrinsics::{self, min_align_of_val, size_of_val};
use core::ptr::{NonNull, Unique};
use core::usize;
......@@ -165,11 +165,19 @@ pub unsafe fn alloc_zeroed(layout: Layout) -> *mut u8 {
#[unstable(feature = "allocator_api", issue = "32838")]
unsafe impl AllocRef for Global {
#[inline]
fn alloc(&mut self, layout: Layout) -> Result<(NonNull<u8>, usize), AllocErr> {
if layout.size() == 0 {
fn alloc(&mut self, layout: Layout, init: AllocInit) -> Result<(NonNull<u8>, usize), AllocErr> {
let new_size = layout.size();
if new_size == 0 {
Ok((layout.dangling(), 0))
} else {
unsafe { NonNull::new(alloc(layout)).ok_or(AllocErr).map(|p| (p, layout.size())) }
unsafe {
let raw_ptr = match init {
AllocInit::Uninitialized => alloc(layout),
AllocInit::Zeroed => alloc_zeroed(layout),
};
let ptr = NonNull::new(raw_ptr).ok_or(AllocErr)?;
Ok((ptr, new_size))
}
}
}
......@@ -181,33 +189,77 @@ unsafe impl AllocRef for Global {
}
#[inline]
unsafe fn realloc(
unsafe fn grow(
&mut self,
ptr: NonNull<u8>,
layout: Layout,
new_size: usize,
placement: ReallocPlacement,
init: AllocInit,
) -> Result<(NonNull<u8>, usize), AllocErr> {
match (layout.size(), new_size) {
(0, 0) => Ok((layout.dangling(), 0)),
(0, _) => self.alloc(Layout::from_size_align_unchecked(new_size, layout.align())),
(_, 0) => {
self.dealloc(ptr, layout);
Ok((layout.dangling(), 0))
let old_size = layout.size();
debug_assert!(
new_size >= old_size,
"`new_size` must be greater than or equal to `layout.size()`"
);
if old_size == new_size {
return Ok((ptr, new_size));
}
match placement {
ReallocPlacement::MayMove => {
if old_size == 0 {
self.alloc(Layout::from_size_align_unchecked(new_size, layout.align()), init)
} else {
// `realloc` probably checks for `new_size > old_size` or something similar.
// `new_size` must be greater than or equal to `old_size` due to the safety constraint,
// and `new_size` == `old_size` was caught before
intrinsics::assume(new_size > old_size);
let ptr =
NonNull::new(realloc(ptr.as_ptr(), layout, new_size)).ok_or(AllocErr)?;
let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
init.initialize_offset(ptr, new_layout, old_size);
Ok((ptr, new_size))
}
}
(_, _) => NonNull::new(realloc(ptr.as_ptr(), layout, new_size))
.ok_or(AllocErr)
.map(|p| (p, new_size)),
ReallocPlacement::InPlace => Err(AllocErr),
}
}
#[inline]
fn alloc_zeroed(&mut self, layout: Layout) -> Result<(NonNull<u8>, usize), AllocErr> {
if layout.size() == 0 {
Ok((layout.dangling(), 0))
} else {
unsafe {
NonNull::new(alloc_zeroed(layout)).ok_or(AllocErr).map(|p| (p, layout.size()))
unsafe fn shrink(
&mut self,
ptr: NonNull<u8>,
layout: Layout,
new_size: usize,
placement: ReallocPlacement,
) -> Result<(NonNull<u8>, usize), AllocErr> {
let old_size = layout.size();
debug_assert!(
new_size <= old_size,
"`new_size` must be smaller than or equal to `layout.size()`"
);
if old_size == new_size {
return Ok((ptr, new_size));
}
match placement {
ReallocPlacement::MayMove => {
let ptr = if new_size == 0 {
self.dealloc(ptr, layout);
layout.dangling()
} else {
// `realloc` probably checks for `new_size < old_size` or something similar.
// `new_size` must be smaller than or equal to `old_size` due to the safety constraint,
// and `new_size` == `old_size` was caught before
intrinsics::assume(new_size < old_size);
NonNull::new(realloc(ptr.as_ptr(), layout, new_size)).ok_or(AllocErr)?
};
Ok((ptr, new_size))
}
ReallocPlacement::InPlace => Err(AllocErr),
}
}
}
......@@ -218,14 +270,10 @@ unsafe impl AllocRef for Global {
#[lang = "exchange_malloc"]
#[inline]
unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
if size == 0 {
align as *mut u8
} else {
let layout = Layout::from_size_align_unchecked(size, align);
match Global.alloc(layout) {
Ok((ptr, _)) => ptr.as_ptr(),
Err(_) => handle_alloc_error(layout),
}
let layout = Layout::from_size_align_unchecked(size, align);
match Global.alloc(layout, AllocInit::Uninitialized) {
Ok((ptr, _)) => ptr.as_ptr(),
Err(_) => handle_alloc_error(layout),
}
}
......@@ -239,11 +287,8 @@ unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
pub(crate) unsafe fn box_free<T: ?Sized>(ptr: Unique<T>) {
let size = size_of_val(ptr.as_ref());
let align = min_align_of_val(ptr.as_ref());
// We do not allocate for Box<T> when T is ZST, so deallocation is also not necessary.
if size != 0 {
let layout = Layout::from_size_align_unchecked(size, align);
Global.dealloc(ptr.cast().into(), layout);
}
let layout = Layout::from_size_align_unchecked(size, align);
Global.dealloc(ptr.cast().into(), layout)
}
/// Abort on memory allocation error or failure.
......
......@@ -8,8 +8,9 @@ use test::Bencher;
fn allocate_zeroed() {
unsafe {
let layout = Layout::from_size_align(1024, 1).unwrap();
let (ptr, _) =
Global.alloc_zeroed(layout.clone()).unwrap_or_else(|_| handle_alloc_error(layout));
let (ptr, _) = Global
.alloc(layout.clone(), AllocInit::Zeroed)
.unwrap_or_else(|_| handle_alloc_error(layout));
let mut i = ptr.cast::<u8>().as_ptr();
let end = i.add(layout.size());
......
......@@ -146,7 +146,7 @@ use core::ptr::{self, NonNull, Unique};
use core::slice;
use core::task::{Context, Poll};
use crate::alloc::{self, AllocRef, Global};
use crate::alloc::{self, AllocInit, AllocRef, Global};
use crate::raw_vec::RawVec;
use crate::str::from_boxed_utf8_unchecked;
use crate::vec::Vec;
......@@ -196,14 +196,12 @@ impl<T> Box<T> {
#[unstable(feature = "new_uninit", issue = "63291")]
pub fn new_uninit() -> Box<mem::MaybeUninit<T>> {
let layout = alloc::Layout::new::<mem::MaybeUninit<T>>();
unsafe {
let ptr = if layout.size() == 0 {
NonNull::dangling()
} else {
Global.alloc(layout).unwrap_or_else(|_| alloc::handle_alloc_error(layout)).0.cast()
};
Box::from_raw(ptr.as_ptr())
}
let ptr = Global
.alloc(layout, AllocInit::Uninitialized)
.unwrap_or_else(|_| alloc::handle_alloc_error(layout))
.0
.cast();
unsafe { Box::from_raw(ptr.as_ptr()) }
}
/// Constructs a new `Box` with uninitialized contents, with the memory
......@@ -226,11 +224,13 @@ impl<T> Box<T> {
/// [zeroed]: ../../std/mem/union.MaybeUninit.html#method.zeroed
#[unstable(feature = "new_uninit", issue = "63291")]
pub fn new_zeroed() -> Box<mem::MaybeUninit<T>> {
unsafe {
let mut uninit = Self::new_uninit();
ptr::write_bytes::<T>(uninit.as_mut_ptr(), 0, 1);
uninit
}
let layout = alloc::Layout::new::<mem::MaybeUninit<T>>();
let ptr = Global
.alloc(layout, AllocInit::Zeroed)
.unwrap_or_else(|_| alloc::handle_alloc_error(layout))
.0
.cast();
unsafe { Box::from_raw(ptr.as_ptr()) }
}
/// Constructs a new `Pin<Box<T>>`. If `T` does not implement `Unpin`, then
......@@ -266,14 +266,12 @@ impl<T> Box<[T]> {
#[unstable(feature = "new_uninit", issue = "63291")]
pub fn new_uninit_slice(len: usize) -> Box<[mem::MaybeUninit<T>]> {
let layout = alloc::Layout::array::<mem::MaybeUninit<T>>(len).unwrap();
unsafe {
let ptr = if layout.size() == 0 {
NonNull::dangling()
} else {
Global.alloc(layout).unwrap_or_else(|_| alloc::handle_alloc_error(layout)).0.cast()
};
Box::from_raw(slice::from_raw_parts_mut(ptr.as_ptr(), len))
}
let ptr = Global
.alloc(layout, AllocInit::Uninitialized)
.unwrap_or_else(|_| alloc::handle_alloc_error(layout))
.0
.cast();
unsafe { Box::from_raw(slice::from_raw_parts_mut(ptr.as_ptr(), len)) }
}
}
......@@ -778,7 +776,7 @@ impl<T: Copy> From<&[T]> for Box<[T]> {
let buf = RawVec::with_capacity(len);
unsafe {
ptr::copy_nonoverlapping(slice.as_ptr(), buf.ptr(), len);
buf.into_box()
buf.into_box().assume_init()
}
}
}
......
use super::*;
use core::ptr::NonNull;
#[test]
fn allocator_param() {
......@@ -20,12 +21,16 @@ fn allocator_param() {
fuel: usize,
}
unsafe impl AllocRef for BoundedAlloc {
fn alloc(&mut self, layout: Layout) -> Result<(NonNull<u8>, usize), AllocErr> {
fn alloc(
&mut self,
layout: Layout,
init: AllocInit,
) -> Result<(NonNull<u8>, usize), AllocErr> {
let size = layout.size();
if size > self.fuel {
return Err(AllocErr);
}
match Global.alloc(layout) {
match Global.alloc(layout, init) {
ok @ Ok(_) => {
self.fuel -= size;
ok
......
......@@ -252,7 +252,7 @@ use core::ptr::{self, NonNull};
use core::slice::{self, from_raw_parts_mut};
use core::usize;
use crate::alloc::{box_free, handle_alloc_error, AllocRef, Global, Layout};
use crate::alloc::{box_free, handle_alloc_error, AllocInit, AllocRef, Global, Layout};
use crate::string::String;
use crate::vec::Vec;
......@@ -936,7 +936,9 @@ impl<T: ?Sized> Rc<T> {
let layout = Layout::new::<RcBox<()>>().extend(value_layout).unwrap().0.pad_to_align();
// Allocate for the layout.
let (mem, _) = Global.alloc(layout).unwrap_or_else(|_| handle_alloc_error(layout));
let (mem, _) = Global
.alloc(layout, AllocInit::Uninitialized)
.unwrap_or_else(|_| handle_alloc_error(layout));
// Initialize the RcBox
let inner = mem_to_rcbox(mem.as_ptr());
......
......@@ -25,7 +25,7 @@ use core::sync::atomic;
use core::sync::atomic::Ordering::{Acquire, Relaxed, Release, SeqCst};
use core::{isize, usize};
use crate::alloc::{box_free, handle_alloc_error, AllocRef, Global, Layout};
use crate::alloc::{box_free, handle_alloc_error, AllocInit, AllocRef, Global, Layout};
use crate::boxed::Box;
use crate::rc::is_dangling;
use crate::string::String;
......@@ -814,7 +814,9 @@ impl<T: ?Sized> Arc<T> {
// reference (see #54908).
let layout = Layout::new::<ArcInner<()>>().extend(value_layout).unwrap().0.pad_to_align();
let (mem, _) = Global.alloc(layout).unwrap_or_else(|_| handle_alloc_error(layout));
let (mem, _) = Global
.alloc(layout, AllocInit::Uninitialized)
.unwrap_or_else(|_| handle_alloc_error(layout));
// Initialize the ArcInner
let inner = mem_to_arcinner(mem.as_ptr());
......
use std::alloc::{AllocRef, Global, Layout, System};
use std::alloc::{AllocInit, AllocRef, Global, Layout, System};
/// Issues #45955 and #62251.
#[test]
......@@ -20,7 +20,13 @@ fn check_overalign_requests<T: AllocRef>(mut allocator: T) {
unsafe {
let pointers: Vec<_> = (0..iterations)
.map(|_| {
allocator.alloc(Layout::from_size_align(size, align).unwrap()).unwrap().0
allocator
.alloc(
Layout::from_size_align(size, align).unwrap(),
AllocInit::Uninitialized,
)
.unwrap()
.0
})
.collect();
for &ptr in &pointers {
......
......@@ -679,7 +679,7 @@ impl<T> Vec<T> {
self.shrink_to_fit();
let buf = ptr::read(&self.buf);
mem::forget(self);
buf.into_box()
buf.into_box().assume_init()
}
}
......
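The two `buf.into_box().assume_init()` call sites above (in `From<&[T]> for Box<[T]>` and in `Vec::into_boxed_slice`) suggest that `RawVec::into_box` now returns `Box<[mem::MaybeUninit<T>]>` after this commit, so the caller asserts initialization explicitly; the raw_vec.rs changes are not shown in full here, so treat this as an inference. A minimal sketch of that pattern, independent of `RawVec`:

#![feature(new_uninit)]

use std::mem::MaybeUninit;

fn filled(len: usize) -> Box<[u32]> {
    // Allocate uninitialized storage, write every element, then assert that
    // everything was initialized. `assume_init` is unsafe because that promise
    // rests on the caller.
    let mut buf: Box<[MaybeUninit<u32>]> = Box::new_uninit_slice(len);
    for (i, slot) in buf.iter_mut().enumerate() {
        *slot = MaybeUninit::new(i as u32);
    }
    unsafe { buf.assume_init() }
}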
use crate::alloc::Layout;
use crate::cmp;
use crate::ptr;
/// A memory allocator that can be registered as the standard library’s default
/// through the `#[global_allocator]` attribute.
///
/// Some of the methods require that a memory block be *currently
/// allocated* via an allocator. This means that:
///
/// * the starting address for that memory block was
/// returned by a previous call to an allocation method
/// such as `alloc`, and
///
/// * the memory block has not been subsequently deallocated, where
/// blocks are deallocated either by being passed to a deallocation
/// method such as `dealloc` or by being
/// passed to a reallocation method that returns a non-null pointer.
///
///
/// # Example
///
/// ```no_run
/// use std::alloc::{GlobalAlloc, Layout, alloc};
/// use std::ptr::null_mut;
///
/// struct MyAllocator;
///
/// unsafe impl GlobalAlloc for MyAllocator {
/// unsafe fn alloc(&self, _layout: Layout) -> *mut u8 { null_mut() }
/// unsafe fn dealloc(&self, _ptr: *mut u8, _layout: Layout) {}
/// }
///
/// #[global_allocator]
/// static A: MyAllocator = MyAllocator;
///
/// fn main() {
/// unsafe {
/// assert!(alloc(Layout::new::<u32>()).is_null())
/// }
/// }
/// ```
///
/// # Safety
///
/// The `GlobalAlloc` trait is an `unsafe` trait for a number of reasons, and
/// implementors must ensure that they adhere to these contracts:
///
/// * It's undefined behavior if global allocators unwind. This restriction may
/// be lifted in the future, but currently a panic from any of these
/// functions may lead to memory unsafety.
///
/// * `Layout` queries and calculations in general must be correct. Callers of
/// this trait are allowed to rely on the contracts defined on each method,
/// and implementors must ensure such contracts remain true.
#[stable(feature = "global_alloc", since = "1.28.0")]
pub unsafe trait GlobalAlloc {
/// Allocate memory as described by the given `layout`.
///
/// Returns a pointer to newly-allocated memory,
/// or null to indicate allocation failure.
///
/// # Safety
///
/// This function is unsafe because undefined behavior can result
/// if the caller does not ensure that `layout` has non-zero size.
///
/// (Extension subtraits might provide more specific bounds on
/// behavior, e.g., guarantee a sentinel address or a null pointer
/// in response to a zero-size allocation request.)
///
/// The allocated block of memory may or may not be initialized.
///
/// # Errors
///
/// Returning a null pointer indicates that either memory is exhausted
/// or `layout` does not meet this allocator's size or alignment constraints.
///
/// Implementations are encouraged to return null on memory
/// exhaustion rather than aborting, but this is not
/// a strict requirement. (Specifically: it is *legal* to
/// implement this trait atop an underlying native allocation
/// library that aborts on memory exhaustion.)
///
/// Clients wishing to abort computation in response to an
/// allocation error are encouraged to call the [`handle_alloc_error`] function,
/// rather than directly invoking `panic!` or similar.
///
/// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
#[stable(feature = "global_alloc", since = "1.28.0")]
unsafe fn alloc(&self, layout: Layout) -> *mut u8;
/// Deallocate the block of memory at the given `ptr` pointer with the given `layout`.
///
/// # Safety
///
/// This function is unsafe because undefined behavior can result
/// if the caller does not ensure all of the following:
///
/// * `ptr` must denote a block of memory currently allocated via
/// this allocator,
///
/// * `layout` must be the same layout that was used
/// to allocate that block of memory,
#[stable(feature = "global_alloc", since = "1.28.0")]
unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout);
/// Behaves like `alloc`, but also ensures that the contents
/// are set to zero before being returned.
///
/// # Safety
///
/// This function is unsafe for the same reasons that `alloc` is.
/// However the allocated block of memory is guaranteed to be initialized.
///
/// # Errors
///
/// Returning a null pointer indicates that either memory is exhausted
/// or `layout` does not meet allocator's size or alignment constraints,
/// just as in `alloc`.
///
/// Clients wishing to abort computation in response to an
/// allocation error are encouraged to call the [`handle_alloc_error`] function,
/// rather than directly invoking `panic!` or similar.
///
/// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
#[stable(feature = "global_alloc", since = "1.28.0")]
unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut u8 {
let size = layout.size();
let ptr = self.alloc(layout);
if !ptr.is_null() {
ptr::write_bytes(ptr, 0, size);
}
ptr
}
/// Shrink or grow a block of memory to the given `new_size`.
/// The block is described by the given `ptr` pointer and `layout`.
///
/// If this returns a non-null pointer, then ownership of the memory block
/// referenced by `ptr` has been transferred to this allocator.
/// The memory may or may not have been deallocated,
/// and should be considered unusable (unless of course it was
/// transferred back to the caller again via the return value of
/// this method). The new memory block is allocated with `layout`, but
/// with the `size` updated to `new_size`.
///
/// If this method returns null, then ownership of the memory
/// block has not been transferred to this allocator, and the
/// contents of the memory block are unaltered.
///
/// # Safety
///
/// This function is unsafe because undefined behavior can result
/// if the caller does not ensure all of the following:
///
/// * `ptr` must be currently allocated via this allocator,
///
/// * `layout` must be the same layout that was used
/// to allocate that block of memory,
///
/// * `new_size` must be greater than zero.
///
/// * `new_size`, when rounded up to the nearest multiple of `layout.align()`,
/// must not overflow (i.e., the rounded value must be less than `usize::MAX`).
///
/// (Extension subtraits might provide more specific bounds on
/// behavior, e.g., guarantee a sentinel address or a null pointer
/// in response to a zero-size allocation request.)
///
/// # Errors
///
/// Returns null if the new layout does not meet the size
/// and alignment constraints of the allocator, or if reallocation
/// otherwise fails.
///
/// Implementations are encouraged to return null on memory
/// exhaustion rather than panicking or aborting, but this is not
/// a strict requirement. (Specifically: it is *legal* to
/// implement this trait atop an underlying native allocation
/// library that aborts on memory exhaustion.)
///
/// Clients wishing to abort computation in response to a
/// reallocation error are encouraged to call the [`handle_alloc_error`] function,
/// rather than directly invoking `panic!` or similar.
///
/// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
#[stable(feature = "global_alloc", since = "1.28.0")]
unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
let new_ptr = self.alloc(new_layout);
if !new_ptr.is_null() {
ptr::copy_nonoverlapping(ptr, new_ptr, cmp::min(layout.size(), new_size));
self.dealloc(ptr, layout);
}
new_ptr
}
}
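The `GlobalAlloc` trait and its documentation above appear to be moved into their own module by this commit rather than changed. Purely for orientation, a minimal implementor that forwards to the system allocator and leans on the default `alloc_zeroed`/`realloc` bodies shown above might look like the sketch below; nothing in it is part of the diff.

use std::alloc::{GlobalAlloc, Layout, System};

struct Forwarding;

unsafe impl GlobalAlloc for Forwarding {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        System.alloc(layout)
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        System.dealloc(ptr, layout)
    }

    // `alloc_zeroed` and `realloc` fall back to the default bodies documented
    // above: zeroing via `write_bytes`, and reallocation via
    // alloc + copy_nonoverlapping + dealloc.
}

#[global_allocator]
static GLOBAL: Forwarding = Forwarding;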
//! Memory allocation APIs
#![stable(feature = "alloc_module", since = "1.28.0")]
mod global;
mod layout;
#[stable(feature = "global_alloc", since = "1.28.0")]
pub use self::global::GlobalAlloc;
#[stable(feature = "alloc_layout", since = "1.28.0")]
pub use self::layout::{Layout, LayoutErr};
use crate::fmt;
use crate::ptr::{self, NonNull};
/// The `AllocErr` error indicates an allocation failure
/// that may be due to resource exhaustion or to
/// an invalid combination of the given input arguments
/// with this allocator.
#[unstable(feature = "allocator_api", issue = "32838")]
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct AllocErr;
// (we need this for downstream impl of trait Error)
#[unstable(feature = "allocator_api", issue = "32838")]
impl fmt::Display for AllocErr {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str("memory allocation failed")
}
}
/// A desired initial state for allocated memory.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[unstable(feature = "allocator_api", issue = "32838")]
pub enum AllocInit {
/// The contents of the new memory are undefined.
///
/// Reading uninitialized memory is Undefined Behavior; it must be initialized before use.
Uninitialized,
/// The new memory is guaranteed to be zeroed.
Zeroed,
}
impl AllocInit {
/// Initialize the memory block referenced by `ptr` and specified by `layout`.
///
/// This behaves like calling [`AllocInit::initialize_offset(ptr, layout, 0)`][off].
///
/// [off]: AllocInit::initialize_offset
///
/// # Safety
///
/// * `layout` must [*fit*] the block of memory referenced by `ptr`
///
/// [*fit*]: trait.AllocRef.html#memory-fitting
#[inline]
#[unstable(feature = "allocator_api", issue = "32838")]
pub unsafe fn initialize(self, ptr: NonNull<u8>, layout: Layout) {
self.initialize_offset(ptr, layout, 0)
}
/// Initialize the memory block referenced by `ptr` and specified by `layout` at the specified
/// `offset`.
///
/// This is a no-op for [`AllocInit::Uninitialized`] and writes zeroes for [`AllocInit::Zeroed`]