Unverified commit cd8d627e, authored by Ian Douglas Scott
Browse files

Use new allocator API, and otherwise update for new Rust

- New allocator API
- Remove the "allocator" feature, which should be unnecessary due to how
the new allocator API works
- NonZero no longer implements Deref (https://github.com/rust-lang/rust/pull/41064)
- NonZero::new() returns an Option; use NonZero::new_unchecked()
- Thread locals are no longer 'static (https://github.com/rust-lang/rust/pull/43746)
- Changes to feature flags
- Use unsafe to access extern static (https://github.com/rust-lang/rust/issues/36247)
parent 56a91d85
......@@ -29,10 +29,9 @@ debug-assertions = false
codegen-units = 1
[features]
default = ["allocator", "tls"]
default = ["tls"]
# ---
alloc_id = []
allocator = []
debugger = []
log = ["write", "alloc_id"]
no_log_lock = ["log"]
......
......@@ -22,10 +22,10 @@ pub mod arch {
/// A thread destructor.
type Dtor = unsafe extern fn(dtor: unsafe extern fn(*mut u8), arg: *mut u8, dso_handle: *mut u8) -> i32;
// Make sure the symbols exist.
assert!(!__cxa_thread_atexit_impl.is_null());
unsafe {
// Make sure the symbols exist.
assert!(!__cxa_thread_atexit_impl.is_null());
mem::transmute::<*const u8, Dtor>(__cxa_thread_atexit_impl)
(dtor, t, &__dso_handle as *const _ as *mut _)
};
......
......@@ -300,7 +300,7 @@ impl Allocator for LocalAllocator {
pub fn alloc(size: usize, align: usize) -> *mut u8 {
log!(CALL, "Allocating buffer of size {} (align {}).", size, align);
get_allocator!(|alloc| *Pointer::from(alloc.alloc(size, align)))
get_allocator!(|alloc| Pointer::from(alloc.alloc(size, align)).get())
}
/// Free a buffer.
......@@ -353,11 +353,11 @@ pub unsafe fn realloc(ptr: *mut u8, old_size: usize, size: usize, align: usize)
log!(CALL, "Reallocating buffer of size {} to new size {}.", old_size, size);
get_allocator!(|alloc| {
*Pointer::from(alloc.realloc(
Pointer::from(alloc.realloc(
Block::from_raw_parts(Pointer::new(ptr), old_size),
size,
align
))
)).get()
})
}
......
......@@ -110,7 +110,7 @@ impl Block {
/// Is this block aligned to `align`?
#[inline]
pub fn aligned_to(&self, align: usize) -> bool {
*self.ptr as usize % align == 0
self.ptr.get() as usize % align == 0
}
/// memcpy the block to another pointer.
......@@ -129,7 +129,7 @@ impl Block {
// LAST AUDIT: 2016-08-21 (Ticki).
// From the invariants of `Block`, this copy is well-defined.
ptr::copy_nonoverlapping(*self.ptr, *block.ptr, self.size);
ptr::copy_nonoverlapping(self.ptr.get(), block.ptr.get(), self.size);
}
}
......@@ -145,7 +145,7 @@ impl Block {
// Since the memory of the block is inaccessible (read-wise), zeroing it is fully
// safe.
intrinsics::volatile_set_memory(*self.ptr, 0, self.size);
intrinsics::volatile_set_memory(self.ptr.get(), 0, self.size);
}
}
}
......@@ -162,7 +162,7 @@ impl Block {
#[inline]
pub fn left_to(&self, to: &Block) -> bool {
// This won't overflow due to the end being bounded by the address space.
self.size + *self.ptr as usize == *to.ptr as usize
self.size + self.ptr.get() as usize == to.ptr.get() as usize
}
/// Split the block at some position.
......@@ -207,7 +207,7 @@ impl Block {
// Calculate the aligner, which defines the smallest size required as precursor to align
// the block to `align`.
let aligner = (align - *self.ptr as usize % align) % align;
let aligner = (align - self.ptr.get() as usize % align) % align;
// ^^^^^^^^
// To avoid wasting space on the case where the block is already aligned, we calculate it
// modulo `align`.
......@@ -275,7 +275,7 @@ impl From<Block> for Pointer<u8> {
impl PartialOrd for Block {
#[inline]
fn partial_cmp(&self, other: &Block) -> Option<cmp::Ordering> {
self.ptr.partial_cmp(&other.ptr)
self.ptr.get().partial_cmp(&other.ptr.get())
}
}
......@@ -283,14 +283,14 @@ impl PartialOrd for Block {
impl Ord for Block {
#[inline]
fn cmp(&self, other: &Block) -> cmp::Ordering {
self.ptr.cmp(&other.ptr)
self.ptr.get().cmp(&other.ptr.get())
}
}
impl cmp::PartialEq for Block {
#[inline]
fn eq(&self, other: &Block) -> bool {
*self.ptr == *other.ptr
self.ptr.get() == other.ptr.get()
}
}
......@@ -298,7 +298,7 @@ impl cmp::Eq for Block {}
impl fmt::Debug for Block {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "0x{:x}[{}]", *self.ptr as usize, self.size)
write!(f, "0x{:x}[{}]", self.ptr.get() as usize, self.size)
}
}
......
......@@ -46,7 +46,7 @@ impl BrkLock {
let expected_brk = self.current_brk().offset(size);
// Break it to me, babe!
let old_brk = Pointer::new(syscalls::brk(*expected_brk as *const u8) as *mut u8);
let old_brk = Pointer::new(syscalls::brk(expected_brk.get() as *const u8) as *mut u8);
/// AAAARGH WAY TOO MUCH LOGGING
///
......@@ -180,7 +180,7 @@ pub fn lock() -> BrkLock {
///
/// On failure the maximum pointer (`!0 as *mut u8`) is returned.
pub unsafe extern fn sbrk(size: isize) -> *mut u8 {
*lock().sbrk(size).unwrap_or_else(|()| Pointer::new(!0 as *mut u8))
lock().sbrk(size).unwrap_or_else(|()| Pointer::new(!0 as *mut u8)).get()
}
/// Get the current program break.
......
......@@ -9,20 +9,21 @@
//! relatively strong condition, which means that you are forced to rewrite primitives and make
//! sure no allocation ever happens.
#![cfg_attr(feature = "allocator", allocator)]
#![cfg_attr(feature = "clippy", feature(plugin))]
#![cfg_attr(feature = "clippy", plugin(clippy))]
#![no_std]
#![feature(allocator, const_fn, core_intrinsics, stmt_expr_attributes, drop_types_in_const,
#![feature(alloc, allocator_api, const_fn, core_intrinsics, stmt_expr_attributes, drop_types_in_const,
nonzero, optin_builtin_traits, type_ascription, thread_local, linkage,
try_from)]
try_from, const_unsafe_cell_new, const_atomic_bool_new, const_nonzero_new,
const_atomic_ptr_new)]
#![warn(missing_docs, cast_precision_loss, cast_sign_loss, cast_possible_wrap,
cast_possible_truncation, filter_map, if_not_else, items_after_statements,
invalid_upcast_comparisons, mutex_integer, nonminimal_bool, shadow_same, shadow_unrelated,
single_match_else, string_add, string_add_assign, wrong_pub_self_convention)]
extern crate alloc;
extern crate ralloc_shim as shim;
#[macro_use]
......@@ -30,8 +31,6 @@ mod log;
#[macro_use]
#[cfg(feature = "tls")]
mod tls;
#[cfg(feature = "allocator")]
mod symbols;
#[macro_use]
mod unborrow;
......@@ -49,8 +48,47 @@ mod ptr;
mod sync;
mod vec;
use alloc::heap::{Alloc, AllocErr, Layout, CannotReallocInPlace};
pub use allocator::{alloc, free, realloc, realloc_inplace};
pub use brk::sbrk;
pub use fail::set_oom_handler;
#[cfg(feature = "tls")]
pub use fail::set_thread_oom_handler;
/// Global allocator handle for `ralloc`.
///
/// Zero-sized marker type; all allocator state lives in the `allocator`
/// module, so any number of `Allocator` values may coexist.
pub struct Allocator;

/// Implementation of the (unstable, `allocator_api`-era) `Alloc` trait from
/// `alloc::heap`, delegating every operation to the free functions in the
/// `allocator` module.
///
/// Implemented for `&'a Allocator` so a shared reference to the zero-sized
/// handle satisfies the trait's `&mut self` receivers.
unsafe impl<'a> Alloc for &'a Allocator {
    /// Allocate `layout.size()` bytes aligned to `layout.align()`.
    ///
    /// NOTE(review): this impl never returns `Err`; presumably
    /// `allocator::alloc` reports failure via the OOM handler — confirm.
    unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
        Ok(allocator::alloc(layout.size(), layout.align()))
    }

    /// Free a block previously obtained through this allocator with `layout`.
    unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
        allocator::free(ptr, layout.size());
    }

    /// Reallocate `ptr` from `layout` to `new_layout`, moving the data if
    /// the block cannot be resized in place.
    unsafe fn realloc(&mut self, ptr: *mut u8, layout: Layout, new_layout: Layout) -> Result<*mut u8, AllocErr> {
        Ok(allocator::realloc(ptr, layout.size(), new_layout.size(), new_layout.align()))
    }

    /// Try to grow the allocation in place; `Err(CannotReallocInPlace)` when
    /// the underlying `realloc_inplace` cannot extend the block.
    unsafe fn grow_in_place(&mut self, ptr: *mut u8, layout: Layout, new_layout: Layout) -> Result<(), CannotReallocInPlace> {
        if allocator::realloc_inplace(ptr, layout.size(), new_layout.size()).is_ok() {
            Ok(())
        } else {
            Err(CannotReallocInPlace)
        }
    }

    /// Try to shrink the allocation in place; delegates to the same
    /// `realloc_inplace` call as `grow_in_place` (it handles both directions).
    unsafe fn shrink_in_place(&mut self, ptr: *mut u8, layout: Layout, new_layout: Layout) -> Result<(), CannotReallocInPlace> {
        if allocator::realloc_inplace(ptr, layout.size(), new_layout.size()).is_ok() {
            Ok(())
        } else {
            Err(CannotReallocInPlace)
        }
    }

    /// Usable size bounds for `layout`: exactly `layout.size()` both ways.
    fn usable_size(&self, layout: &Layout) -> (usize, usize) {
        // Yay! It matches exactly.
        (layout.size(), layout.size())
    }
}
......@@ -30,7 +30,7 @@ impl<T> Pointer<T> {
debug_assert!(!ptr.is_null(), "Null pointer!");
Pointer {
ptr: NonZero::new(ptr),
ptr: NonZero::new_unchecked(ptr),
_phantom: marker::PhantomData,
}
}
......@@ -45,7 +45,7 @@ impl<T> Pointer<T> {
// LAST AUDIT: 2016-08-21 (Ticki).
// 0x1 is non-zero.
NonZero::new(0x1 as *mut T)
NonZero::new_unchecked(0x1 as *mut T)
},
_phantom: marker::PhantomData,
}
......@@ -61,7 +61,7 @@ impl<T> Pointer<T> {
// LAST AUDIT: 2016-08-21 (Ticki).
// Casting the pointer will preserve its nullable state.
NonZero::new(*self as *mut U)
NonZero::new_unchecked(self.get() as *mut U)
},
_phantom: marker::PhantomData,
}
......@@ -76,7 +76,11 @@ impl<T> Pointer<T> {
/// This is unsafe, due to OOB offsets being undefined behavior.
#[inline]
pub unsafe fn offset(self, diff: isize) -> Pointer<T> {
Pointer::new(self.ptr.offset(diff))
Pointer::new(self.ptr.get().offset(diff))
}
pub fn get(&self) -> *mut T {
self.ptr.get()
}
}
......@@ -89,15 +93,6 @@ impl<T> Default for Pointer<T> {
unsafe impl<T: Send> Send for Pointer<T> {}
unsafe impl<T: Sync> Sync for Pointer<T> {}
impl<T> ops::Deref for Pointer<T> {
type Target = *mut T;
#[inline]
fn deref(&self) -> &*mut T {
&self.ptr
}
}
#[cfg(test)]
mod test {
use super::*;
......
//! Rust allocation symbols.
// TODO: Remove this, this is a false positive.
#![allow(private_no_mangle_fns)]
use allocator;
/// Rust allocation symbol.
///
/// Legacy `__rust_allocate` entry point consumed by the compiler's
/// `allocator` integration (removed by this commit together with the
/// `allocator` feature). Returns `size` bytes aligned to `align`,
/// delegating to `allocator::alloc`.
#[linkage = "external"]
#[no_mangle]
#[inline]
pub extern fn __rust_allocate(size: usize, align: usize) -> *mut u8 {
    allocator::alloc(size, align)
}
/// Rust deallocation symbol.
///
/// Frees a block of `size` bytes previously returned by `__rust_allocate`
/// or `__rust_reallocate`. Alignment is ignored — `allocator::free` only
/// needs the pointer and size.
#[linkage = "external"]
#[no_mangle]
#[inline]
pub unsafe extern fn __rust_deallocate(ptr: *mut u8, size: usize, _align: usize) {
    allocator::free(ptr, size);
}
/// Rust reallocation symbol.
///
/// Resizes the block at `ptr` from `old_size` to `size` bytes with the
/// given alignment, moving the data if necessary; delegates to
/// `allocator::realloc`.
#[linkage = "external"]
#[no_mangle]
#[inline]
pub unsafe extern fn __rust_reallocate(ptr: *mut u8, old_size: usize, size: usize, align: usize) -> *mut u8 {
    allocator::realloc(ptr, old_size, size, align)
}
/// Rust in-place reallocation symbol.
///
/// Attempts to resize the allocation at `ptr` from `old_size` to `size`
/// bytes without moving it. Returns the size the allocation ends up with:
/// `size` when the in-place resize succeeded, `old_size` when it did not.
#[linkage = "external"]
#[no_mangle]
#[inline]
pub unsafe extern fn __rust_reallocate_inplace(ptr: *mut u8, old_size: usize, size: usize, _align: usize) -> usize {
    match allocator::realloc_inplace(ptr, old_size, size) {
        Ok(_) => size,
        Err(_) => old_size,
    }
}
/// Get the usable size of an allocation of `size` bytes.
///
/// `ralloc` hands out exactly the requested number of bytes, so the usable
/// size equals the requested size; alignment plays no part in it.
#[linkage = "external"]
#[no_mangle]
#[inline]
pub extern fn __rust_usable_size(size: usize, _align: usize) -> usize {
    size
}
......@@ -29,7 +29,7 @@ impl<T: 'static> Key<T> {
/// Having a reference newtype would be unsound, due to the ability to leak a reference to
/// another thread.
#[inline]
pub fn with<F, R>(&'static self, f: F) -> R
pub fn with<F, R>(&self, f: F) -> R
where F: FnOnce(&T) -> R {
// Logging.
log!(INTERNAL, "Accessing TLS variable.");
......@@ -42,7 +42,7 @@ impl<T: 'static> Key<T> {
/// Note that this has to be registered for every thread, it is needed for.
// TODO: Make this automatic on `Drop`.
#[inline]
pub fn register_thread_destructor(&'static self, dtor: extern fn(&T)) {
pub fn register_thread_destructor(&self, dtor: extern fn(&T)) {
// Logging.
log!(INTERNAL, "Registering thread destructor.");
......
......@@ -68,7 +68,7 @@ impl<T: Leak> Vec<T> {
// Due to the invariants of `Block`, this copy is safe (the pointer is valid and
// unaliased).
ptr::copy_nonoverlapping(*old.ptr, *self.ptr, old.len);
ptr::copy_nonoverlapping(old.ptr.get(), self.ptr.get(), old.len);
}
Block::from(old)
......@@ -95,7 +95,7 @@ impl<T: Leak> Vec<T> {
// By the invariants of this type (the size is bounded by the address space), this
// conversion isn't overflowing.
ptr::write((*self.ptr).offset(self.len as isize), elem);
ptr::write((self.ptr.get()).offset(self.len as isize), elem);
}
// Increment the length.
......@@ -193,7 +193,7 @@ impl<T: Leak> ops::Deref for Vec<T> {
// LAST AUDIT: 2016-08-21 (Ticki).
// The invariants maintains safety.
slice::from_raw_parts(*self.ptr as *const T, self.len)
slice::from_raw_parts(self.ptr.get() as *const T, self.len)
}
}
}
......@@ -205,7 +205,7 @@ impl<T: Leak> ops::DerefMut for Vec<T> {
// LAST AUDIT: 2016-08-21 (Ticki).
// The invariants maintains safety.
slice::from_raw_parts_mut(*self.ptr as *mut T, self.len)
slice::from_raw_parts_mut(self.ptr.get() as *mut T, self.len)
}
}
}
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment