This repository was archived by the owner on Nov 27, 2020. It is now read-only.

Make trait functions immutable #11

Closed · wants to merge 3 commits
44 changes: 20 additions & 24 deletions src/alloc/mod.rs
@@ -148,33 +148,29 @@ pub trait BuildAllocRef: Sized {
     /// * `layout` must *fit* that block of memory
     /// * the alignment of the `layout` must match the alignment used to allocate that block of
     ///   memory
-    unsafe fn build_alloc_ref(
-        &mut self,
-        ptr: NonNull<u8>,
-        layout: Option<NonZeroLayout>,
-    ) -> Self::Ref;
+    unsafe fn build_alloc_ref(&self, ptr: NonNull<u8>, layout: Option<NonZeroLayout>) -> Self::Ref;
 }

 pub trait DeallocRef: Sized {
     type BuildAlloc: BuildAllocRef<Ref = Self>;

-    fn get_build_alloc(&mut self) -> Self::BuildAlloc;
+    fn get_build_alloc(&self) -> Self::BuildAlloc;

     /// # Safety
     ///
     /// * `ptr` must denote a block of memory currently allocated via this allocator
     /// * `layout` must *fit* that block of memory
     /// * the alignment of the `layout` must match the alignment used to allocate that block of
     ///   memory
-    unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: NonZeroLayout);
+    unsafe fn dealloc(&self, ptr: NonNull<u8>, layout: NonZeroLayout);
 }

 pub trait AllocRef: DeallocRef {
     type Error;

-    fn alloc(&mut self, layout: NonZeroLayout) -> Result<NonNull<u8>, Self::Error>;
+    fn alloc(&self, layout: NonZeroLayout) -> Result<NonNull<u8>, Self::Error>;

-    fn alloc_zeroed(&mut self, layout: NonZeroLayout) -> Result<NonNull<u8>, Self::Error> {
+    fn alloc_zeroed(&self, layout: NonZeroLayout) -> Result<NonNull<u8>, Self::Error> {
         let size = layout.size();
         let p = self.alloc(layout)?;
         unsafe {
@@ -193,7 +189,7 @@ pub trait AllocRef: DeallocRef {
     /// * `layout` must *fit* the `ptr` (see above); note the `new_size` argument need not fit it
     /// * `new_size` must not be less than `layout.size()`
     unsafe fn grow_in_place(
-        &mut self,
+        &self,
         ptr: NonNull<u8>,
         layout: NonZeroLayout,
         new_size: NonZeroUsize,
@@ -212,7 +208,7 @@ pub trait AllocRef: DeallocRef {
     /// * `layout` must *fit* the `ptr` (see above); note the `new_size` argument need not fit it
     /// * `new_size` must not be greater than `layout.size()` (and must be greater than zero)
     unsafe fn shrink_in_place(
-        &mut self,
+        &self,
         ptr: NonNull<u8>,
         layout: NonZeroLayout,
         new_size: NonZeroUsize,
@@ -251,7 +247,7 @@ pub trait ReallocRef: AllocRef {
     /// implement this trait atop an underlying native allocation
     /// library that aborts on memory exhaustion.)
     unsafe fn realloc(
-        &mut self,
+        &self,
         ptr: NonNull<u8>,
         old_layout: NonZeroLayout,
         new_layout: NonZeroLayout,
@@ -297,7 +293,7 @@ macro_rules! impl_buildalloc_alloc_zst {
             type Ref = Self;

             unsafe fn build_alloc_ref(
-                &mut self,
+                &self,
                 _ptr: NonNull<u8>,
                 _layout: Option<NonZeroLayout>,
             ) -> Self::Ref {
@@ -314,11 +310,11 @@ impl_buildalloc_alloc_zst!(System);
 impl DeallocRef for Global {
     type BuildAlloc = Self;

-    fn get_build_alloc(&mut self) -> Self::BuildAlloc {
+    fn get_build_alloc(&self) -> Self::BuildAlloc {
         Self
     }

-    unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: NonZeroLayout) {
+    unsafe fn dealloc(&self, ptr: NonNull<u8>, layout: NonZeroLayout) {
         #[allow(deprecated)]
         dealloc(ptr.as_ptr(), layout.into())
     }
@@ -327,14 +323,14 @@ impl DeallocRef for Global {
 impl AllocRef for Global {
     type Error = AllocErr;

-    fn alloc(&mut self, layout: NonZeroLayout) -> Result<NonNull<u8>, Self::Error> {
+    fn alloc(&self, layout: NonZeroLayout) -> Result<NonNull<u8>, Self::Error> {
         #[allow(deprecated)]
         unsafe {
             NonNull::new(alloc(layout.into())).ok_or(AllocErr)
         }
     }

-    fn alloc_zeroed(&mut self, layout: NonZeroLayout) -> Result<NonNull<u8>, Self::Error> {
+    fn alloc_zeroed(&self, layout: NonZeroLayout) -> Result<NonNull<u8>, Self::Error> {
         #[allow(deprecated)]
         unsafe {
             NonNull::new(alloc_zeroed(layout.into())).ok_or(AllocErr)
@@ -345,7 +341,7 @@ impl AllocRef for Global {
 impl ReallocRef for Global {
     // FIXME: Remove `else` branch. This is needed, as std provides old method.
     unsafe fn realloc(
-        &mut self,
+        &self,
         ptr: NonNull<u8>,
         old_layout: NonZeroLayout,
         new_layout: NonZeroLayout,
@@ -369,11 +365,11 @@ impl ReallocRef for Global {
 impl DeallocRef for System {
     type BuildAlloc = Self;

-    fn get_build_alloc(&mut self) -> Self::BuildAlloc {
+    fn get_build_alloc(&self) -> Self::BuildAlloc {
         Self
     }

-    unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: NonZeroLayout) {
+    unsafe fn dealloc(&self, ptr: NonNull<u8>, layout: NonZeroLayout) {
         GlobalAlloc::dealloc(self, ptr.as_ptr(), layout.into())
     }
 }
@@ -382,11 +378,11 @@ impl DeallocRef for System {
 impl AllocRef for System {
     type Error = AllocErr;

-    fn alloc(&mut self, layout: NonZeroLayout) -> Result<NonNull<u8>, Self::Error> {
+    fn alloc(&self, layout: NonZeroLayout) -> Result<NonNull<u8>, Self::Error> {
         unsafe { NonNull::new(GlobalAlloc::alloc(self, layout.into())).ok_or(AllocErr) }
     }

-    fn alloc_zeroed(&mut self, layout: NonZeroLayout) -> Result<NonNull<u8>, Self::Error> {
+    fn alloc_zeroed(&self, layout: NonZeroLayout) -> Result<NonNull<u8>, Self::Error> {
         unsafe { NonNull::new(GlobalAlloc::alloc_zeroed(self, layout.into())).ok_or(AllocErr) }
     }
 }
@@ -395,7 +391,7 @@ impl AllocRef for System {
 impl ReallocRef for System {
     // FIXME: Remove `else` branch. This is needed, as std provides old method.
     unsafe fn realloc(
-        &mut self,
+        &self,
         ptr: NonNull<u8>,
         old_layout: NonZeroLayout,
         new_layout: NonZeroLayout,
@@ -417,7 +413,7 @@ impl ReallocRef for System {

 #[inline]
 unsafe fn alloc_copy_dealloc<A: ReallocRef>(
-    alloc: &mut A,
+    alloc: &A,
     ptr: NonNull<u8>,
     old_layout: NonZeroLayout,
     new_layout: NonZeroLayout,
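The net effect of this file's changes is that allocating, deallocating, and reallocating no longer require exclusive access to the allocator handle. Below is a minimal sketch of what that enables, assuming the crate paths used in the doc examples in this PR (`alloc_wg::alloc::{AllocErr, AllocRef, DeallocRef, Global, NonZeroLayout}`); it is an illustration, not part of the diff:

```rust
use alloc_wg::alloc::{AllocErr, AllocRef, DeallocRef, Global, NonZeroLayout};

// With `&self` receivers, one allocator handle can be used through a
// shared reference, so it can be shared between call sites freely.
fn round_trip(a: &Global) -> Result<(), AllocErr> {
    // `NonZeroLayout::new::<T>()` fails only for zero-sized types,
    // so unwrapping for `u64` is fine.
    let layout = NonZeroLayout::new::<u64>().unwrap();
    let ptr = a.alloc(layout)?;
    unsafe { a.dealloc(ptr, layout) };
    Ok(())
}

fn main() -> Result<(), AllocErr> {
    let a = Global; // no `mut` binding required anymore
    round_trip(&a)?;
    round_trip(&a)
}
```

Under the old `&mut self` signatures, `round_trip` would have needed `&mut Global`, which rules out sharing the handle.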
24 changes: 14 additions & 10 deletions src/boxed.rs
@@ -182,7 +182,7 @@ impl<T, A: AllocRef> Box<T, A> {
     /// # #[allow(unused_variables)]
     /// let five = Box::new_in(5, Global);
     /// ```
-    #[allow(clippy::inline_always)]
+    #[allow(clippy::inline_always, clippy::needless_pass_by_value)]
     #[inline(always)]
     pub fn new_in(x: T, a: A) -> Self {
         unsafe { Self::try_new_in(x, a).unwrap_unchecked() }
@@ -201,7 +201,8 @@ impl<T, A: AllocRef> Box<T, A> {
     /// let five = Box::try_new_in(5, Global)?;
     /// # Ok::<_, alloc_wg::alloc::AllocErr>(())
     /// ```
-    pub fn try_new_in(x: T, mut a: A) -> Result<Self, A::Error> {
+    #[allow(clippy::needless_pass_by_value)]
+    pub fn try_new_in(x: T, a: A) -> Result<Self, A::Error> {
         let ptr = if let Ok(layout) = NonZeroLayout::new::<T>() {
             let ptr = a.alloc(layout)?.cast::<T>();
             unsafe {
@@ -232,7 +233,7 @@ impl<T, A: AllocRef> Box<T, A> {
     ///
     /// assert_eq!(*five, 5)
     /// ```
-    #[allow(clippy::inline_always)]
+    #[allow(clippy::inline_always, clippy::needless_pass_by_value)]
     #[inline(always)]
     pub fn new_uninit_in(a: A) -> Box<mem::MaybeUninit<T>, A> {
         unsafe { Self::try_new_uninit_in(a).unwrap_unchecked() }
@@ -257,7 +258,8 @@ impl<T, A: AllocRef> Box<T, A> {
     /// assert_eq!(*five, 5);
     /// # Ok::<_, alloc_wg::alloc::AllocErr>(())
     /// ```
-    pub fn try_new_uninit_in(mut a: A) -> Result<Box<mem::MaybeUninit<T>, A>, A::Error> {
+    #[allow(clippy::needless_pass_by_value)]
+    pub fn try_new_uninit_in(a: A) -> Result<Box<mem::MaybeUninit<T>, A>, A::Error> {
         let ptr = if let Ok(layout) = NonZeroLayout::new::<T>() {
             let ptr: NonNull<mem::MaybeUninit<T>> = a.alloc(layout)?.cast();
             ptr
@@ -269,14 +271,15 @@ impl<T, A: AllocRef> Box<T, A> {

     /// Constructs a new `Pin<Box<T, A>>` with the specified allocator. If `T` does not implement
     /// `Unpin`, then `x` will be pinned in memory and unable to be moved.
-    #[allow(clippy::inline_always)]
+    #[allow(clippy::inline_always, clippy::needless_pass_by_value)]
     #[inline(always)]
     pub fn pin_in(x: T, a: A) -> Pin<Self> {
         unsafe { Self::try_pin_in(x, a).unwrap_unchecked() }
     }

     /// Constructs a new `Pin<Box<T, A>>` with the specified allocator. If `T` does not implement
     /// `Unpin`, then `x` will be pinned in memory and unable to be moved.
+    #[allow(clippy::needless_pass_by_value)]
     #[inline]
     pub fn try_pin_in(x: T, a: A) -> Result<Pin<Self>, A::Error> {
         Self::try_new_in(x, a).map(Pin::from)
@@ -335,7 +338,7 @@ impl<T, A: AllocRef> Box<[T], A> {
     ///
     /// assert_eq!(*values, [1, 2, 3]);
     /// ```
-    #[allow(clippy::inline_always)]
+    #[allow(clippy::inline_always, clippy::needless_pass_by_value)]
     #[inline(always)]
     pub fn new_uninit_slice_in(len: usize, a: A) -> Box<[mem::MaybeUninit<T>], A> {
         unsafe { Self::try_new_uninit_slice_in(len, a).unwrap_unchecked() }
@@ -363,9 +366,10 @@ impl<T, A: AllocRef> Box<[T], A> {
     /// assert_eq!(*values, [1, 2, 3]);
     /// # Ok::<_, alloc_wg::collections::CollectionAllocErr<Global>>(())
     /// ```
+    #[allow(clippy::needless_pass_by_value)]
     pub fn try_new_uninit_slice_in(
         len: usize,
-        mut a: A,
+        a: A,
     ) -> Result<Box<[mem::MaybeUninit<T>], A>, CollectionAllocErr<A>> {
         let ptr = if mem::size_of::<T>() == 0 || len == 0 {
             NonNull::dangling()
@@ -732,7 +736,7 @@ fn drop_box<T: ?Sized, A: DeallocRef>(boxed: &mut Box<T, A>) {
     unsafe {
         let ptr = boxed.ptr;
         ptr::drop_in_place(ptr.as_ptr());
-        if let (mut alloc, Some(layout)) = boxed.alloc_ref() {
+        if let (alloc, Some(layout)) = boxed.alloc_ref() {
             alloc.dealloc(ptr.cast().into(), layout)
         }
     }
@@ -807,7 +811,7 @@ where
     /// ```
     #[inline]
     fn clone(&self) -> Self {
-        let mut b = self.build_alloc().clone();
+        let b = self.build_alloc().clone();
         let old_ptr = self.ptr.cast();
         let old_layout = NonZeroLayout::for_value(self.as_ref());

@@ -1276,7 +1280,7 @@ where
     A::BuildAlloc: Clone,
 {
     fn clone(&self) -> Self {
-        let mut b = self.build_alloc().clone();
+        let b = self.build_alloc().clone();
         let old_ptr = self.ptr.cast();
         let old_layout = NonZeroLayout::for_value(self.as_ref());
         let a = unsafe { b.build_alloc_ref(old_ptr.into(), old_layout) };
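The `boxed.rs` side is mostly lint housekeeping: the constructors keep taking the allocator by value, so once the `mut` bindings are gone clippy flags them with `needless_pass_by_value`, hence the added `allow`s. A usage sketch built from the doc examples above (the `alloc_wg::boxed::Box` path is an assumption based on the file name):

```rust
use alloc_wg::{alloc::Global, boxed::Box};

fn main() -> Result<(), alloc_wg::alloc::AllocErr> {
    // The allocator is still passed by value, but `try_new_in` no
    // longer needs to rebind it as `mut` internally.
    let five = Box::try_new_in(5, Global)?;
    assert_eq!(*five, 5);
    Ok(())
}
```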
24 changes: 13 additions & 11 deletions src/raw_vec.rs
@@ -144,7 +144,8 @@ impl<T> RawVec<T> {

 impl<T, A: DeallocRef> RawVec<T, A> {
     /// Like `new` but parameterized over the choice of allocator for the returned `RawVec`.
-    pub fn new_in(mut a: A) -> Self {
+    #[allow(clippy::needless_pass_by_value)]
+    pub fn new_in(a: A) -> Self {
         let capacity = if mem::size_of::<T>() == 0 { !0 } else { 0 };
         Self {
             ptr: Unique::empty(),
@@ -161,6 +162,7 @@ impl<T, A: DeallocRef> RawVec<T, A> {
     ///
     /// * if the requested capacity exceeds `usize::MAX` bytes.
     /// * on 32-bit platforms if the requested capacity exceeds `isize::MAX` bytes.
+    #[allow(clippy::needless_pass_by_value)]
     pub fn with_capacity_in(capacity: usize, a: A) -> Self
     where
         A: AllocRef,
@@ -181,6 +183,7 @@ impl<T, A: DeallocRef> RawVec<T, A> {
     /// * `CapacityOverflow` if the requested capacity exceeds `usize::MAX` bytes.
     /// * `CapacityOverflow` on 32-bit platforms if the requested capacity exceeds `isize::MAX` bytes.
     /// * `AllocError` on OOM
+    #[allow(clippy::needless_pass_by_value)]
     pub fn try_with_capacity_in(capacity: usize, a: A) -> Result<Self, CollectionAllocErr<A>>
     where
         A: AllocRef,
@@ -196,6 +199,7 @@ impl<T, A: DeallocRef> RawVec<T, A> {
     ///
     /// * if the requested capacity exceeds `usize::MAX` bytes.
     /// * on 32-bit platforms if the requested capacity exceeds `isize::MAX` bytes.
+    #[allow(clippy::needless_pass_by_value)]
     pub fn with_capacity_zeroed_in(capacity: usize, a: A) -> Self
     where
         A: AllocRef,
@@ -216,18 +220,16 @@ impl<T, A: DeallocRef> RawVec<T, A> {
     /// * `CapacityOverflow` if the requested capacity exceeds `usize::MAX` bytes.
     /// * `CapacityOverflow` on 32-bit platforms if the requested capacity exceeds `isize::MAX` bytes.
     /// * `AllocError` on OOM
+    #[allow(clippy::needless_pass_by_value)]
     pub fn try_with_capacity_zeroed_in(capacity: usize, a: A) -> Result<Self, CollectionAllocErr<A>>
     where
         A: AllocRef,
     {
         Self::allocate_in(capacity, true, a)
     }

-    fn allocate_in(
-        capacity: usize,
-        zeroed: bool,
-        mut alloc: A,
-    ) -> Result<Self, CollectionAllocErr<A>>
+    #[allow(clippy::needless_pass_by_value)]
+    fn allocate_in(capacity: usize, zeroed: bool, alloc: A) -> Result<Self, CollectionAllocErr<A>>
     where
         A: AllocRef,
     {
@@ -443,7 +445,7 @@ impl<T, A: DeallocRef> RawVec<T, A> {
             return Err(CollectionAllocErr::CapacityOverflow);
         }

-        let (mut alloc, old_layout) = self.alloc_ref();
+        let (alloc, old_layout) = self.alloc_ref();
         let (new_cap, ptr) = if let Some(old_layout) = old_layout {
             // Since we guarantee that we never allocate more than
             // `isize::MAX` bytes, `elem_size * self.cap <= isize::MAX` as
@@ -524,7 +526,7 @@ impl<T, A: DeallocRef> RawVec<T, A> {
             return Err(CapacityOverflow);
         }

-        let (mut alloc, old_layout) = if let (alloc, Some(layout)) = self.alloc_ref() {
+        let (alloc, old_layout) = if let (alloc, Some(layout)) = self.alloc_ref() {
             (alloc, layout)
         } else {
             return Ok(false); // nothing to double
@@ -701,7 +703,7 @@ impl<T, A: DeallocRef> RawVec<T, A> {
             return Ok(false);
         }

-        let (mut alloc, old_layout) = if let (alloc, Some(layout)) = self.alloc_ref() {
+        let (alloc, old_layout) = if let (alloc, Some(layout)) = self.alloc_ref() {
             (alloc, layout)
         } else {
             return Ok(false); // nothing to double
@@ -846,7 +848,7 @@ impl<T, A: DeallocRef> RawVec<T, A> {

         let _ = alloc_guard(new_layout.size().get(), new_layout.align().get())?;

-        let (mut alloc, old_layout) = self.alloc_ref();
+        let (alloc, old_layout) = self.alloc_ref();
         let result = if let Some(layout) = old_layout {
             unsafe { alloc.realloc(self.ptr.cast().into(), layout, new_layout) }
         } else {
@@ -888,7 +890,7 @@ enum ReserveStrategy {
 impl<T, A: DeallocRef> RawVec<T, A> {
     /// Frees the memory owned by the `RawVec` *without* trying to Drop its contents.
     pub fn dealloc_buffer(&mut self) {
-        if let (mut alloc, Some(layout)) = self.alloc_ref() {
+        if let (alloc, Some(layout)) = self.alloc_ref() {
             unsafe { alloc.dealloc(self.ptr.cast().into(), layout) }
         }
     }
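Same pattern in `raw_vec.rs`: allocator parameters lose their `mut` and gain `allow(clippy::needless_pass_by_value)`, and the tuples returned by `alloc_ref()` no longer need `mut alloc` bindings. A sketch, under the assumptions that `RawVec` is publicly exported at `alloc_wg::raw_vec` and that its backing buffer is released on drop:

```rust
use alloc_wg::{alloc::Global, raw_vec::RawVec};

fn main() {
    // No `mut` allocator binding is needed anywhere in this chain;
    // allocation now happens through `&self`.
    let _buf: RawVec<u32, Global> = RawVec::with_capacity_in(16, Global);
    // Backing memory is released when `_buf` goes out of scope
    // (cf. `dealloc_buffer` above).
}
```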