diff --git a/Cargo.toml b/Cargo.toml
index d90e58f..b5a1a64 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -13,6 +13,7 @@ readme = "README.md"
 documentation = "https://docs.rs/smallvec/"
 
 [features]
+allocator_api = []
 std = []
 specialization = []
 may_dangle = []
diff --git a/src/lib.rs b/src/lib.rs
index 42a60ef..fd6d0c4 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -59,6 +59,7 @@
 #![no_std]
 #![cfg_attr(docsrs, feature(doc_cfg))]
+#![cfg_attr(feature = "allocator_api", feature(allocator_api))]
 #![cfg_attr(feature = "specialization", allow(incomplete_features))]
 #![cfg_attr(feature = "specialization", feature(specialization, trusted_len))]
 #![cfg_attr(feature = "may_dangle", feature(dropck_eyepatch))]
@@ -76,6 +77,9 @@ use alloc::boxed::Box;
 use alloc::vec;
 use alloc::vec::Vec;
 
+#[cfg(feature = "allocator_api")]
+use alloc::alloc::{Global, Allocator};
+
 use alloc::alloc::Layout;
 use core::borrow::Borrow;
 use core::borrow::BorrowMut;
@@ -104,6 +108,39 @@ use serde::{
 #[cfg(feature = "std")]
 use std::io;
 
+/// A macro that conditionally discards the last type parameter of its input depending on the "allocator_api" feature.
+/// This is used to control the presence of the allocator parameter.
+#[cfg(not(feature = "allocator_api"))]
+macro_rules! alloc_param {
+    // While a more generic version of this macro is likely possible, it isn't needed for the purposes of this crate.
+    ($type:ident<$arg0:tt>) => {
+        $type
+    };
+    ($type:ident<$arg0:tt, $arg1:tt $(,)?>) => {
+        $type<$arg0>
+    };
+    ($type:ident<$arg0:tt, $arg1:tt, $arg2:tt $(,)?>) => {
+        $type<$arg0, $arg1>
+    };
+    ($type:ident<$arg0:tt, $arg1:tt, $arg2:tt, $arg3:tt $(,)?>) => {
+        $type<$arg0, $arg1, $arg2>
+    };
+    ($type:ident<$arg0:tt, $path:ident::$arg1:tt, $arg2:tt, $arg3:tt $(,)?>) => {
+        $type<$arg0, $path::$arg1, $arg2>
+    };
+    ($type:ident<$arg0:tt, $arg1:tt, $arg2:tt, $arg3:tt, $arg4:tt $(,)?>) => {
+        $type<$arg0, $arg1, $arg2, $arg3>
+    };
+}
+
+#[cfg(feature = "allocator_api")]
+macro_rules! alloc_param {
+    // This returns the tokens unchanged.
+    ($($args:tt)+) => {
+        $($args)*
+    };
+}
+
 /// Error type for APIs with fallible heap allocation
 #[derive(Debug)]
 pub enum CollectionAllocErr {
@@ -120,22 +157,10 @@ impl core::fmt::Display for CollectionAllocErr {
         write!(f, "Allocation error: {:?}", self)
     }
 }
-
 impl core::error::Error for CollectionAllocErr {}
 
-/// Either a stack array with `length <= N` or a heap array
-/// whose pointer and capacity are stored here.
-///
-/// We store a `NonNull<T>` instead of a `*mut T`, so that
-/// niche-optimization can be performed and the type is covariant
-/// with respect to `T`.
-#[repr(C)]
-pub union RawSmallVec<T, const N: usize> {
-    inline: ManuallyDrop<MaybeUninit<[T; N]>>,
-    heap: (NonNull<T>, usize),
-}
-
 #[inline]
+#[track_caller]
 fn infallible<T>(result: Result<T, CollectionAllocErr>) -> T {
     match result {
         Ok(x) => x,
@@ -179,22 +204,217 @@ where
     core::ops::Range { start, end }
}

-impl<T, const N: usize> RawSmallVec<T, N> {
+#[inline(always)]
+const fn is_zst<T>() -> bool {
+    size_of::<T>() == 0
+}
+
+#[inline(always)]
+const fn inline_size<T, const N: usize>() -> usize {
+    if is_zst::<T>() {
+        usize::MAX
+    } else {
+        N
+    }
+}
+
+/// Creates a [`Layout`] value for arrays of length `n`
+/// for a given type without checking preconditions.
+///
+/// # Safety
+///
+/// The caller must ensure that an array of length `n` results
+/// in a valid layout.
+#[inline(always)]
+const unsafe fn array_layout_unchecked<T>(n: usize) -> Layout {
+    // SAFETY: The caller ensures that an array of length `n` is possible,
+    // which means that the multiplication can't overflow.
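+    // (Equivalently, `size_of::<T>() * n` must not exceed `isize::MAX`, which
+    // is exactly what a successful `Layout::array::<T>(n)` guarantees.)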
+    // The value returned by `align_of` will fulfill the safety conditions for
+    // `Layout::from_size_align_unchecked`.
+    unsafe {
+        Layout::from_size_align_unchecked(size_of::<T>().unchecked_mul(n), align_of::<T>())
+    }
+}
+
+/// A ZST from which memory allocation methods are exposed.
+/// The methods correspond to the `allocator_api` Rust nightly feature.
+#[derive(Clone, Copy)]
+#[cfg(not(feature = "allocator_api"))]
+struct A;
+
+#[cfg(not(feature = "allocator_api"))]
+impl A {
     #[inline]
-    const fn is_zst() -> bool {
-        size_of::<T>() == 0
+    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, CollectionAllocErr> {
+        let err = CollectionAllocErr::AllocErr { layout };
+        if layout.size() == 0 {
+            Err(err)
+        } else {
+            // SAFETY: The size of the layout has been checked in the comparison above.
+            unsafe {
+                let ptr = alloc::alloc::alloc(layout);
+                let ptr = NonNull::new(ptr).ok_or(err)?;
+                Ok(NonNull::slice_from_raw_parts(ptr, layout.size()))
+            }
+        }
     }
 
+    #[inline(always)]
+    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
+        if layout.size() > 0 {
+            // SAFETY: Safety conditions are identical.
+            unsafe {
+                alloc::alloc::dealloc(ptr.as_ptr(), layout);
+            }
+        }
+    }
+
+    /// # Safety
+    ///
+    /// The new layout must have a size that is larger than or equal to
+    /// the size of the old layout.
+    #[inline(always)]
+    unsafe fn grow(
+        &self,
+        ptr: NonNull<u8>,
+        old_layout: Layout,
+        new_layout: Layout,
+    ) -> Result<NonNull<[u8]>, CollectionAllocErr> {
+        debug_assert!(
+            new_layout.size() >= old_layout.size(),
+            "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
+        );
+
+        let old_size = old_layout.size();
+        let old_align = old_layout.align();
+        let new_size = new_layout.size();
+        let new_align = new_layout.align();
+
+        // SAFETY: The safety condition is upheld by the caller.
+        unsafe {
+            core::hint::assert_unchecked(new_size >= old_size);
+        }
+
+        let err = CollectionAllocErr::AllocErr { layout: new_layout };
+
+        if old_size == 0 {
+            self.allocate(new_layout)
+        } else if old_align == new_align {
+            // SAFETY: The caller ensures that the layout is valid.
+            // The pointers returned by the allocation methods are valid up to `new_size`.
+            unsafe {
+                let new_ptr = alloc::alloc::realloc(ptr.as_ptr(), old_layout, new_size);
+                let new_ptr = NonNull::new(new_ptr).ok_or(err)?;
+                Ok(NonNull::slice_from_raw_parts(new_ptr, new_size))
+            }
+        } else {
+            // SAFETY: The caller ensures that the layout is valid.
+            // The pointers returned by the allocation methods are valid up to `new_size`.
+            unsafe {
+                let new_ptr = self.allocate(new_layout)?;
+                let raw = new_ptr.as_ptr().cast::<u8>();
+                copy_nonoverlapping(ptr.as_ptr(), raw, old_size);
+                Ok(new_ptr)
+            }
+        }
+    }
+
+    unsafe fn shrink(
+        &self,
+        ptr: NonNull<u8>,
+        old_layout: Layout,
+        new_layout: Layout,
+    ) -> Result<NonNull<[u8]>, CollectionAllocErr> {
+        debug_assert!(
+            new_layout.size() <= old_layout.size(),
+            "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
+        );
+
+        let old_size = old_layout.size();
+        let old_align = old_layout.align();
+        let new_size = new_layout.size();
+        let new_align = new_layout.align();
+
+        // SAFETY: The safety condition is upheld by the caller.
+        unsafe {
+            core::hint::assert_unchecked(new_size <= old_size);
+        }
+
+        let err = CollectionAllocErr::AllocErr { layout: new_layout };
+
+        if new_size == 0 {
+            // SAFETY: `ptr` was allocated by `self` with `old_layout`.
+            unsafe { self.deallocate(ptr, old_layout) };
+            // FIXME: Replace with `Layout::dangling` once the latter is stable.
+            // This version should work for now with respect to provenance.
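+            // `new_align` is a non-zero power of two, so transmuting it to a
+            // pointer yields a non-null, well-aligned dangling address, which
+            // is all that a zero-length slice requires.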
+            let data: NonNull<u8> = unsafe { core::mem::transmute(new_align) };
+            Ok(NonNull::slice_from_raw_parts(data, 0))
+        } else if old_align == new_align {
+            // SAFETY: The caller ensures that the layout is valid.
+            // The pointers returned by the allocation methods are valid up to `new_size`.
+            unsafe {
+                let new_ptr = alloc::alloc::realloc(ptr.as_ptr(), old_layout, new_size);
+                let new_ptr = NonNull::new(new_ptr).ok_or(err)?;
+                Ok(NonNull::slice_from_raw_parts(new_ptr, new_size))
+            }
+        } else {
+            // SAFETY: The caller ensures that the layout is valid.
+            // The pointers returned by the allocation methods are valid up to `new_size`.
+            unsafe {
+                let new_ptr = self.allocate(new_layout)?;
+                let raw = new_ptr.as_ptr().cast::<u8>();
+                copy_nonoverlapping(ptr.as_ptr(), raw, new_size);
+                Ok(new_ptr)
+            }
+        }
+    }
+}
+
+#[cfg(not(feature = "allocator_api"))]
+const GLOBAL: A = A;
+
+#[cfg(feature = "allocator_api")]
+const GLOBAL: Global = Global;
+
+struct DropDealloc<'a, #[cfg(feature = "allocator_api")] A: Allocator> {
+    ptr: NonNull<u8>,
+    layout: Layout,
+    allocator: &'a A,
+}
+
+impl<#[cfg(feature = "allocator_api")] A: Allocator> Drop for alloc_param!(DropDealloc<'_, A>) {
     #[inline]
-    const fn new() -> Self {
-        Self::new_inline(MaybeUninit::uninit())
+    fn drop(&mut self) {
+        // SAFETY: `ptr` points to a live allocation made by `allocator` with `layout`.
+        unsafe {
+            self.allocator.deallocate(self.ptr, self.layout);
+        }
     }
+}
+
+/// Either a stack array with `length <= N` or a heap array
+/// whose pointer and capacity are stored here.
+///
+/// We store a `NonNull<T>` instead of a `*mut T`, so that
+/// niche-optimization can be performed and the type is covariant
+/// with respect to `T`.
+#[repr(C)]
+pub union RawSmallVecUnion<T, const N: usize> {
+    inline: ManuallyDrop<MaybeUninit<[T; N]>>,
+    heap: (NonNull<T>, usize),
+}
+
+impl<T, const N: usize> RawSmallVecUnion<T, N> {
     #[inline]
     const fn new_inline(inline: MaybeUninit<[T; N]>) -> Self {
         Self {
             inline: ManuallyDrop::new(inline),
         }
     }
+
     #[inline]
     const fn new_heap(ptr: NonNull<T>, capacity: usize) -> Self {
         Self {
@@ -218,73 +438,328 @@ impl<T, const N: usize> RawSmallVec<T, N> {
     /// # Safety
     ///
-    /// The vector must be on the heap
+    /// The vector must be on the heap.
     #[inline]
     const unsafe fn as_ptr_heap(&self) -> *const T {
-        self.heap.0.as_ptr()
+        // SAFETY: Safety conditions are identical.
+        unsafe {
+            self.heap.0.as_ptr()
+        }
     }
 
     /// # Safety
     ///
-    /// The vector must be on the heap
+    /// The vector must be on the heap.
     #[inline]
     const unsafe fn as_mut_ptr_heap(&mut self) -> *mut T {
-        self.heap.0.as_ptr()
+        // SAFETY: Safety conditions are identical.
+        unsafe {
+            self.heap.0.as_ptr()
+        }
+    }
+}
+
+pub struct RawSmallVec<T, const N: usize, #[cfg(feature = "allocator_api")] A: Allocator> {
+    pub inner: RawSmallVecUnion<T, N>,
+    allocator: A,
+}
+
+impl<T, const N: usize> alloc_param!(RawSmallVec<T, N, Global>) {
+    #[inline]
+    pub const fn new() -> Self {
+        Self::new_in(GLOBAL)
+    }
+
+    #[inline]
+    pub const fn new_inline(inline: MaybeUninit<[T; N]>) -> Self {
+        Self::new_inline_in(inline, GLOBAL)
+    }
+
+    #[inline]
+    pub const fn new_heap(ptr: NonNull<T>, capacity: usize) -> Self {
+        Self::new_heap_in(ptr, capacity, GLOBAL)
+    }
+
+    #[inline]
+    #[track_caller]
+    pub fn with_capacity(capacity: usize) -> Self {
+        Self::with_capacity_in(capacity, GLOBAL)
+    }
+
+    #[inline]
+    pub fn try_with_capacity(capacity: usize) -> Result<Self, CollectionAllocErr> {
+        Self::try_with_capacity_in(capacity, GLOBAL)
+    }
+}
+
+impl<T, const N: usize, #[cfg(feature = "allocator_api")] A: Allocator> alloc_param!(RawSmallVec<T, N, A>) {
+    /// Turns a generic allocation error returned by a parametric allocator into a [`CollectionAllocErr`].
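+    /// The nightly `AllocError` carries no information of its own, so only the
+    /// failing `layout` is preserved in the converted error.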
+ #[cfg(feature = "allocator_api")] + #[inline(always)] + fn handle_alloc_error(r: Result, layout: Layout) -> Result { + r.map_err(|_| CollectionAllocErr::AllocErr { layout }) + } + + /// A fallback used whenever the "allocator_api" is disabled. All errors are already [`CollectionAllocErr`] values, + /// so nothing needs to be done. + #[cfg(not(feature = "allocator_api"))] + #[inline(always)] + fn handle_alloc_error(r: Result, _layout: Layout) -> Result { + r + } + + #[inline] + const fn new_in(allocator: A) -> Self { + Self::new_inline_in(MaybeUninit::uninit(), allocator) } + #[inline] + const fn new_inline_in(inline: MaybeUninit<[T; N]>, allocator: A) -> Self { + Self { + inner: RawSmallVecUnion::new_inline(inline), + allocator, + } + } + + #[inline] + const fn new_heap_in(ptr: NonNull, capacity: usize, allocator: A) -> Self { + Self { + inner: RawSmallVecUnion::new_heap(ptr, capacity), + allocator, + } + } + + #[inline] + #[track_caller] + fn with_capacity_in(capacity: usize, allocator: A) -> Self { + infallible(Self::try_with_capacity_in(capacity, allocator)) + } + + #[inline] + fn try_with_capacity_in(capacity: usize, allocator: A) -> Result { + if capacity <= const { inline_size::() } { + Ok(Self::new_inline_in(MaybeUninit::uninit(), allocator)) + } else { + let layout = Layout::array::(capacity).map_err(|_| CollectionAllocErr::CapacityOverflow)?; + let ptr = Self::handle_alloc_error(allocator.allocate(layout), layout)?; + let inner = RawSmallVecUnion { + heap: (ptr.cast(), capacity) + }; + Ok(Self { + inner, + allocator, + }) + } + } + + /// Gets a pointer to the contents of the vector, under the assumption + /// that the content is stored inline. + #[inline] + pub const fn as_ptr_inline(&self) -> *const T { + self.inner.as_ptr_inline() + } + + /// Gets a pointer to the contents of the vector, under the assumption + /// that the content is stored inline. + #[inline] + pub const fn as_mut_ptr_inline(&mut self) -> *mut T { + self.inner.as_mut_ptr_inline() + } + + /// Gets a pointer to the contents of the vector, under the assumption + /// that the content is stored on the heap. + /// /// # Safety /// - /// `new_capacity` must be non zero, and greater or equal to the length. - /// T must not be a ZST. - unsafe fn try_grow_raw( + /// The vector must be on the heap. + #[inline] + pub const unsafe fn as_ptr_heap(&self) -> *const T { + // SAFETY: The safety requirements are identical. + unsafe { + self.inner.as_ptr_heap() + } + } + + /// Gets a pointer to the contents of the vector, under the assumption + /// that the content is stored on the heap. + /// + /// # Safety + /// + /// The vector must be on the heap. + #[inline] + pub const unsafe fn as_mut_ptr_heap(&mut self) -> *mut T { + // SAFETY: The safety requirements are identical. + unsafe { + self.inner.as_mut_ptr_heap() + } + } + + /// Returns `true` if the elements are stored on the heap, and `false` otherwise. + /// + /// # Safety + /// + /// The way elements are stored in `self` must correspond to the tag in `len`. 
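+    /// In particular, when the tag says the data is inline, the first `len`
+    /// elements of the inline buffer must be initialized, since they may be
+    /// copied to a fresh heap allocation.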
+ unsafe fn try_reserve( &mut self, len: TaggedLen, - new_capacity: usize, - ) -> Result<(), CollectionAllocErr> { - use alloc::alloc::{alloc, realloc}; - debug_assert!(!Self::is_zst()); - debug_assert!(new_capacity > 0); - debug_assert!(new_capacity >= len.value(Self::is_zst())); - - let was_on_heap = len.on_heap(Self::is_zst()); - let ptr = if was_on_heap { - self.as_mut_ptr_heap() + additional: usize, + ) -> Result { + debug_assert!(!is_zst::()); + + let on_heap = len.on_heap(is_zst::()); + let len = len.value(is_zst::()); + + if additional == 0 { + return Ok(on_heap); + } + + let new_capacity = len.checked_add(additional).ok_or(CollectionAllocErr::CapacityOverflow)?; + + if on_heap { + // SAFETY: The caller ensures that the tag corresponds to the + // way in which data is stored. + let (old_ptr, old_capacity) = unsafe { self.inner.heap }; + + // Nothing needs to be done if the capacity is already sufficient. + if old_capacity >= new_capacity { + return Ok(true); + } + + // Ensure capacity growth is exponential. + let new_capacity = new_capacity.max(2 * old_capacity); + + // SAFETY: The stored capacity corresponds always to a valid layout. + let old_layout = unsafe { array_layout_unchecked::(old_capacity) }; + + let new_layout = Layout::array::(new_capacity).map_err(|_| CollectionAllocErr::CapacityOverflow)?; + let ptr = Self::handle_alloc_error(self.allocator.grow(old_ptr.cast(), old_layout, new_layout), new_layout)?; + + self.inner = RawSmallVecUnion::new_heap(ptr.cast(), new_capacity); + Ok(true) + } else if new_capacity > inline_size::() { + // Ensure capacity growth is exponential. + let new_capacity = (2 * N).max(new_capacity); + + let layout = Layout::array::(new_capacity).map_err(|_| CollectionAllocErr::CapacityOverflow)?; + let ptr = Self::handle_alloc_error(self.allocator.allocate(layout), layout)?; + + // SAFETY: The pointer returned by `allocate` is valid and its own memory region. + unsafe { + copy_nonoverlapping(self.as_mut_ptr_inline(), ptr.cast().as_ptr(), len); + } + + self.inner = RawSmallVecUnion::new_heap(ptr.cast(), new_capacity); + Ok(true) } else { - self.as_mut_ptr_inline() - }; - let len = len.value(Self::is_zst()); + Ok(on_heap) + } + } + + /// Returns `true` if the elements are stored on the heap, and `false` otherwise. + /// + /// # Safety + /// + /// The way elements are stored in `self` must correspond to the tag in `len`. + unsafe fn try_reserve_exact( + &mut self, + len: TaggedLen, + additional: usize, + ) -> Result { + debug_assert!(!is_zst::()); + + let on_heap = len.on_heap(is_zst::()); + let len = len.value(is_zst::()); + + if additional == 0 { + return Ok(on_heap); + } + + let new_capacity = len.checked_add(additional).ok_or(CollectionAllocErr::CapacityOverflow)?; + + if on_heap { + // SAFETY: The caller ensures that the tag corresponds to the + // way in which data is stored. + let (old_ptr, old_capacity) = unsafe { self.inner.heap }; + + // Nothing needs to be done if the capacity is already sufficient. + if old_capacity >= new_capacity { + return Ok(true); + } + + // SAFETY: The stored capacity corresponds always to a valid layout. 
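+            // (`grow` can often extend the current block in place via `realloc`
+            // when the alignment stays the same, which is cheaper than a fresh
+            // allocate-and-copy cycle.)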
+            let old_layout = unsafe { array_layout_unchecked::<T>(old_capacity) };
+
+            let new_layout = Layout::array::<T>(new_capacity).map_err(|_| CollectionAllocErr::CapacityOverflow)?;
+            let ptr = Self::handle_alloc_error(self.allocator.grow(old_ptr.cast(), old_layout, new_layout), new_layout)?;
+
+            self.inner = RawSmallVecUnion::new_heap(ptr.cast(), new_capacity);
 
-        let new_layout =
-            Layout::array::<T>(new_capacity).map_err(|_| CollectionAllocErr::CapacityOverflow)?;
-        if new_layout.size() > isize::MAX as usize {
-            return Err(CollectionAllocErr::CapacityOverflow);
+            Ok(true)
+        } else if new_capacity > inline_size::<T, N>() {
+            let layout = Layout::array::<T>(new_capacity).map_err(|_| CollectionAllocErr::CapacityOverflow)?;
+            let ptr = Self::handle_alloc_error(self.allocator.allocate(layout), layout)?;
+
+            // SAFETY: The pointer returned by `allocate` is valid and its own memory region.
+            unsafe {
+                copy_nonoverlapping(self.as_mut_ptr_inline(), ptr.cast().as_ptr(), len);
+            }
+
+            self.inner = RawSmallVecUnion::new_heap(ptr.cast(), new_capacity);
+
+            Ok(true)
+        } else {
+            Ok(on_heap)
         }
+    }
+
+    /// Returns `true` if the elements are still stored on the heap, and `false` otherwise.
+    ///
+    /// # Safety
+    ///
+    /// The way elements are stored in `self` must correspond to `on_heap`.
+    unsafe fn shrink_to_fit(
+        &mut self,
+        on_heap: bool,
+        cap: usize,
+    ) -> Result<bool, CollectionAllocErr> {
+        debug_assert!(!is_zst::<T>());
+
+        if on_heap {
+            // SAFETY: The caller ensures that the tag corresponds to the
+            // way in which data is stored.
+            let (old_ptr, old_capacity) = unsafe { self.inner.heap };
 
-        let new_ptr = if len == 0 || !was_on_heap {
-            // get a fresh allocation
-            let new_ptr = alloc(new_layout) as *mut T; // `new_layout` has nonzero size.
-            let new_ptr =
-                NonNull::new(new_ptr).ok_or(CollectionAllocErr::AllocErr { layout: new_layout })?;
-            copy_nonoverlapping(ptr, new_ptr.as_ptr(), len);
-            new_ptr
+            // SAFETY: The stored capacity corresponds always to a valid layout.
+            let layout = unsafe { array_layout_unchecked::<T>(old_capacity) };
+
+            if cap <= N {
+                self.inner = RawSmallVecUnion::new_inline(MaybeUninit::uninit());
+
+                // SAFETY: The memory regions don't overlap, because the destination
+                // is the freshly created inline storage. By taking the minimum of
+                // both capacities, the copy only touches valid memory.
+                unsafe {
+                    let count = cap.min(old_capacity);
+                    copy_nonoverlapping(old_ptr.cast().as_ptr(), self.as_mut_ptr_inline(), count);
+                }
+
+                // SAFETY: `old_ptr` was allocated by this allocator with `layout`.
+                unsafe { self.allocator.deallocate(old_ptr.cast(), layout) };
+
+                Ok(false)
+            } else if cap < old_capacity {
+                // SAFETY: The new capacity is smaller than the old capacity,
+                // and it was already possible to construct a valid layout with the old capacity.
+                let new_layout = unsafe { array_layout_unchecked::<T>(cap) };
+
+                let ptr = Self::handle_alloc_error(self.allocator.shrink(old_ptr.cast(), layout, new_layout), new_layout)?;
+                self.inner = RawSmallVecUnion::new_heap(ptr.cast(), cap);
+
+                Ok(true)
+            } else {
+                Ok(true)
+            }
         } else {
-            // use realloc
-
-            // this can't overflow since we already constructed an equivalent layout during
-            // the previous allocation
-            let old_layout =
-                Layout::from_size_align_unchecked(self.heap.1 * size_of::<T>(), align_of::<T>());
-
-            // SAFETY: ptr was allocated with this allocator
-            // old_layout is the same as the layout used to allocate the previous memory block
-            // new_layout.size() is greater than zero
-            // does not overflow when rounded up to alignment.
since it was constructed - // with Layout::array - let new_ptr = realloc(ptr as *mut u8, old_layout, new_layout.size()) as *mut T; - NonNull::new(new_ptr).ok_or(CollectionAllocErr::AllocErr { layout: new_layout })? - }; - *self = Self::new_heap(new_ptr, new_capacity); - Ok(()) + Ok(on_heap) + } } } @@ -332,14 +807,14 @@ impl TaggedLen { } #[repr(C)] -pub struct SmallVec { +pub struct SmallVec { len: TaggedLen, - raw: RawSmallVec, + raw: alloc_param!(RawSmallVec), _marker: PhantomData, } -unsafe impl Send for SmallVec {} -unsafe impl Sync for SmallVec {} +unsafe impl Send for alloc_param!(SmallVec) {} +unsafe impl Sync for alloc_param!(SmallVec) {} impl Default for SmallVec { #[inline] @@ -353,7 +828,7 @@ impl Default for SmallVec { /// Returned from [`SmallVec::drain`][1]. /// /// [1]: struct.SmallVec.html#method.drain -pub struct Drain<'a, T: 'a, const N: usize> { +pub struct Drain<'a, T: 'a, const N: usize, #[cfg(feature = "allocator_api")] A: Allocator> { // `vec` points to a valid object within its lifetime. // This is ensured by the fact that we're holding an iterator to its items. // @@ -364,10 +839,10 @@ pub struct Drain<'a, T: 'a, const N: usize> { tail_start: usize, tail_len: usize, iter: core::slice::Iter<'a, T>, - vec: core::ptr::NonNull>, + vec: core::ptr::NonNull)>, } -impl<'a, T: 'a, const N: usize> Iterator for Drain<'a, T, N> { +impl<'a, T: 'a, const N: usize, #[cfg(feature = "allocator_api")] A: Allocator> Iterator for alloc_param!(Drain<'a, T, N, A>) { type Item = T; #[inline] @@ -385,7 +860,7 @@ impl<'a, T: 'a, const N: usize> Iterator for Drain<'a, T, N> { } } -impl<'a, T: 'a, const N: usize> DoubleEndedIterator for Drain<'a, T, N> { +impl<'a, T: 'a, const N: usize, #[cfg(feature = "allocator_api")] A: Allocator> DoubleEndedIterator for alloc_param!(Drain<'a, T, N, A>) { #[inline] fn next_back(&mut self) -> Option { // SAFETY: see above @@ -395,21 +870,21 @@ impl<'a, T: 'a, const N: usize> DoubleEndedIterator for Drain<'a, T, N> { } } -impl ExactSizeIterator for Drain<'_, T, N> { +impl ExactSizeIterator for alloc_param!(Drain<'_, T, N, A>) { #[inline] fn len(&self) -> usize { self.iter.len() } } -impl core::iter::FusedIterator for Drain<'_, T, N> {} +impl core::iter::FusedIterator for alloc_param!(Drain<'_, T, N, A>) {} -impl<'a, T: 'a, const N: usize> Drop for Drain<'a, T, N> { +impl<'a, T: 'a, const N: usize, #[cfg(feature = "allocator_api")] A: Allocator> Drop for alloc_param!(Drain<'a, T, N, A>) { fn drop(&mut self) { /// Moves back the un-`Drain`ed elements to restore the original `Vec`. - struct DropGuard<'r, 'a, T, const N: usize>(&'r mut Drain<'a, T, N>); + struct DropGuard<'r, 'a, T, const N: usize, #[cfg(feature = "allocator_api")] A: Allocator>(&'r mut alloc_param!(Drain<'a, T, N, A>)); - impl<'r, 'a, T, const N: usize> Drop for DropGuard<'r, 'a, T, N> { + impl<'r, 'a, T, const N: usize, #[cfg(feature = "allocator_api")] A: Allocator> Drop for alloc_param!(DropGuard<'r, 'a, T, N, A>) { fn drop(&mut self) { if self.0.tail_len > 0 { unsafe { @@ -434,7 +909,7 @@ impl<'a, T: 'a, const N: usize> Drop for Drain<'a, T, N> { let mut vec = self.vec; - if SmallVec::::is_zst() { + if is_zst::() { // ZSTs have no identity, so we don't need to move them around, we only need to drop the correct amount. // this can be achieved by manipulating the Vec length instead of moving values out from `iter`. 
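+            // (Even for ZSTs the drop count must be exact, since each drop of a
+            // zero-sized value still runs its `Drop` impl.)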
unsafe { @@ -475,7 +950,7 @@ impl<'a, T: 'a, const N: usize> Drop for Drain<'a, T, N> { } } -impl Drain<'_, T, N> { +impl alloc_param!(Drain<'_, T, N, A>) { #[must_use] pub fn as_slice(&self) -> &[T] { self.iter.as_slice() @@ -532,11 +1007,11 @@ impl Drain<'_, T, N> { /// Returned from [`SmallVec::extract_if`][1]. /// /// [1]: struct.SmallVec.html#method.extract_if -pub struct ExtractIf<'a, T, const N: usize, F> +pub struct ExtractIf<'a, T, const N: usize, F, #[cfg(feature = "allocator_api")] A: Allocator> where F: FnMut(&mut T) -> bool, { - vec: &'a mut SmallVec, + vec: &'a mut alloc_param!(SmallVec), /// The index of the item that will be inspected by the next call to `next`. idx: usize, /// Elements at and beyond this point will be retained. Must be equal or smaller than `old_len`. @@ -550,7 +1025,7 @@ where } #[cfg(feature = "extract_if")] -impl core::fmt::Debug for ExtractIf<'_, T, N, F> +impl core::fmt::Debug for alloc_param!(ExtractIf<'_, T, N, F, A>) where F: FnMut(&mut T) -> bool, T: core::fmt::Debug, @@ -563,7 +1038,7 @@ where } #[cfg(feature = "extract_if")] -impl Iterator for ExtractIf<'_, T, N, F> +impl Iterator for alloc_param!(ExtractIf<'_, T, N, F, A>) where F: FnMut(&mut T) -> bool, { @@ -599,7 +1074,7 @@ where } #[cfg(feature = "extract_if")] -impl Drop for ExtractIf<'_, T, N, F> +impl Drop for alloc_param!(ExtractIf<'_, T, N, F, A>) where F: FnMut(&mut T) -> bool, { @@ -623,12 +1098,13 @@ where } } -pub struct Splice<'a, I: Iterator + 'a, const N: usize> { - drain: Drain<'a, I::Item, N>, +pub struct Splice<'a, I: Iterator + 'a, const N: usize, #[cfg(feature = "allocator_api")] A: Allocator> { + drain: alloc_param!(Drain<'a, I::Item, N, A>), + //drain: Drain<'a, I::Item, N>, replace_with: I, } -impl<'a, I, const N: usize> core::fmt::Debug for Splice<'a, I, N> +impl<'a, I, const N: usize, #[cfg(feature = "allocator_api")] A: Allocator> core::fmt::Debug for alloc_param!(Splice<'a, I, N, A>) where I: Debug + Iterator + 'a, ::Item: Debug, @@ -638,7 +1114,7 @@ where } } -impl Iterator for Splice<'_, I, N> { +impl Iterator for alloc_param!(Splice<'_, I, N, A>) { type Item = I::Item; fn next(&mut self) -> Option { @@ -650,15 +1126,15 @@ impl Iterator for Splice<'_, I, N> { } } -impl DoubleEndedIterator for Splice<'_, I, N> { +impl DoubleEndedIterator for alloc_param!(Splice<'_, I, N, A>) { fn next_back(&mut self) -> Option { self.drain.next_back() } } -impl ExactSizeIterator for Splice<'_, I, N> {} +impl ExactSizeIterator for alloc_param!(Splice<'_, I, N, A>) {} -impl Drop for Splice<'_, I, N> { +impl Drop for alloc_param!(Splice<'_, I, N, A>) { fn drop(&mut self) { self.drain.by_ref().for_each(drop); // At this point draining is done and the only remaining tasks are splicing @@ -708,13 +1184,13 @@ impl Drop for Splice<'_, I, N> { /// Returned from [`SmallVec::into_iter`][1]. /// /// [1]: struct.SmallVec.html#method.into_iter -pub struct IntoIter { +pub struct IntoIter { // # Safety // // `end` decides whether the data lives on the heap or not // // The members from begin..end are initialized - raw: RawSmallVec, + raw: alloc_param!(RawSmallVec), begin: usize, end: TaggedLen, _marker: PhantomData, @@ -722,10 +1198,10 @@ pub struct IntoIter { // SAFETY: IntoIter has unique ownership of its contents. Sending (or sharing) an `IntoIter` // is equivalent to sending (or sharing) a `SmallVec`. 
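+// With the allocator parameter enabled, these impls additionally need
+// `A: Send` / `A: Sync` bounds, since the allocator value travels with the
+// iterator's contents.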
-unsafe impl Send for IntoIter where T: Send {} -unsafe impl Sync for IntoIter where T: Sync {} +unsafe impl Send for alloc_param!(IntoIter) where T: Send {} +unsafe impl Sync for alloc_param!(IntoIter) where T: Sync {} -impl IntoIter { +impl alloc_param!(IntoIter) { #[inline] const fn is_zst() -> bool { size_of::() == 0 @@ -777,9 +1253,15 @@ impl IntoIter { ) } } + + /// Returns a reference to the underlying allocator. + #[cfg(feature = "allocator_api")] + pub fn allocator(&self) -> &A { + &self.raw.allocator + } } -impl Iterator for IntoIter { +impl Iterator for alloc_param!(IntoIter) { type Item = T; #[inline] @@ -804,7 +1286,7 @@ impl Iterator for IntoIter { } } -impl DoubleEndedIterator for IntoIter { +impl DoubleEndedIterator for alloc_param!(IntoIter) { #[inline] fn next_back(&mut self) -> Option { let mut end = self.end.value(Self::is_zst()); @@ -823,10 +1305,26 @@ impl DoubleEndedIterator for IntoIter { } } } -impl ExactSizeIterator for IntoIter {} -impl core::iter::FusedIterator for IntoIter {} +impl ExactSizeIterator for alloc_param!(IntoIter) {} +impl core::iter::FusedIterator for alloc_param!(IntoIter) {} impl SmallVec { + /// A stub for `Self::new_in` that can be used whenever the "allocator_api" feature is disabled. + /// This makes it possible to develop some functions independent from the feature, + /// reduceing code complexity. + #[cfg(not(feature = "allocator_api"))] + const fn new_in(_alloc: A) -> Self { + const { Self::new() } + } + + /// A stub for `Self::with_capacity_in` that can be used whenever the "allocator_api" feature is disabled. + /// This makes it possible to develop some functions independent from the feature, + /// reduceing code complexity. + #[cfg(not(feature = "allocator_api"))] + fn with_capacity_in(capacity: usize, _alloc: A) -> Self { + Self::with_capacity(capacity) + } + #[inline] pub const fn new() -> SmallVec { Self { @@ -838,11 +1336,12 @@ impl SmallVec { #[inline] pub fn with_capacity(capacity: usize) -> Self { - let mut this = Self::new(); - if capacity > Self::inline_size() { - this.grow(capacity); + let on_heap = capacity > inline_size::(); + Self { + len: TaggedLen::new(0, on_heap, Self::is_zst()), + raw: RawSmallVec::with_capacity(capacity), + _marker: PhantomData, } - this } #[inline] @@ -924,52 +1423,162 @@ impl SmallVec { _marker: PhantomData, } } + + /// Creates a `SmallVec` directly from the raw components of another `SmallVec`. + /// + /// # Safety + /// + /// This is highly unsafe, due to the number of invariants that aren’t checked: + /// + /// - `ptr` needs to have been previously allocated via `SmallVec` from its spilled storage (at least, it’s highly likely to be incorrect if it wasn’t). + /// - `ptr`’s `A::Item` type needs to be the same size and alignment that it was allocated with + /// - `length` needs to be less than or equal to `capacity`. + /// - `capacity` needs to be the capacity that the pointer was allocated with. + /// + /// Violating these may cause problems like corrupting the allocator’s internal data structures. + /// + /// Additionally, `capacity` must be greater than the amount of inline storage `A` has; that is, the new `SmallVec` must need to spill over into heap allocated storage. This condition is asserted against. + /// + /// The ownership of `ptr` is effectively transferred to the `SmallVec` which may then deallocate, reallocate or change the contents of memory pointed to by the pointer at will. Ensure that nothing else uses the pointer after calling this function. 
+ /// + /// # Examples + /// + /// ``` + /// use smallvec::{SmallVec, smallvec}; + /// + /// let mut v: SmallVec<_, 1> = smallvec![1, 2, 3]; + /// + /// // Pull out the important parts of `v`. + /// let p = v.as_mut_ptr(); + /// let len = v.len(); + /// let cap = v.capacity(); + /// let spilled = v.spilled(); + /// + /// unsafe { + /// // Forget all about `v`. The heap allocation that stored the + /// // three values won't be deallocated. + /// std::mem::forget(v); + /// + /// // Overwrite memory with [4, 5, 6]. + /// // + /// // This is only safe if `spilled` is true! Otherwise, we are + /// // writing into the old `SmallVec`'s inline storage on the + /// // stack. + /// assert!(spilled); + /// for i in 0..len { + /// std::ptr::write(p.add(i), 4 + i); + /// } + /// + /// // Put everything back together into a SmallVec with a different + /// // amount of inline storage, but which is still less than `cap`. + /// let rebuilt = SmallVec::<_, 2>::from_raw_parts(p, len, cap); + /// assert_eq!(&*rebuilt, &[4, 5, 6]); + /// } + /// ``` + #[inline] + pub unsafe fn from_raw_parts(ptr: *mut T, length: usize, capacity: usize) -> SmallVec { + assert!(!Self::is_zst()); + + // SAFETY: We require caller to provide same ptr as we alloc + // and we never alloc null pointer. + let ptr = unsafe { + debug_assert!(!ptr.is_null(), "Called `from_raw_parts` with null pointer."); + NonNull::new_unchecked(ptr) + }; + + SmallVec { + len: TaggedLen::new(length, true, is_zst::()), + raw: RawSmallVec::new_heap(ptr, capacity), + _marker: PhantomData, + } + } } -impl SmallVec { +#[cfg(feature = "allocator_api")] +impl SmallVec { + #[inline(always)] + pub const fn allocator(&self) -> &A { + &self.raw.allocator + } + #[inline] - const fn is_zst() -> bool { - size_of::() == 0 + pub const fn new_in(alloc: A) -> Self { + Self { + len: TaggedLen::new(0, false, Self::is_zst()), + raw: RawSmallVec::new_in(alloc), + _marker: PhantomData, + } } #[inline] - pub fn from_vec(vec: Vec) -> Self { - if vec.capacity() == 0 { - return Self::new(); + pub fn with_capacity_in(capacity: usize, alloc: A) -> Self { + let on_heap = capacity > inline_size::(); + Self { + len: TaggedLen::new(0, on_heap, Self::is_zst()), + raw: RawSmallVec::with_capacity_in(capacity, alloc), + _marker: PhantomData, } + } - if Self::is_zst() { - // "Move" elements to stack buffer. They're ZST so we don't actually have to do - // anything. Just make sure they're not dropped. - // We don't wrap the vector in ManuallyDrop so that when it's dropped, the memory is - // deallocated, if it needs to be. - let mut vec = vec; - let len = vec.len(); - - // SAFETY: `0` is less than the vector's capacity. - // old_len..new_len is an empty range. So there are no uninitialized elements - unsafe { vec.set_len(0) }; - Self { - len: TaggedLen::new(len, false, Self::is_zst()), - raw: RawSmallVec::new(), - _marker: PhantomData, - } - } else { - let mut vec = ManuallyDrop::new(vec); - let len = vec.len(); - let cap = vec.capacity(); - // SAFETY: vec.capacity is not `0` (checked above), so the pointer - // can not dangle and thus specifically cannot be null. 
- let ptr = unsafe { NonNull::new_unchecked(vec.as_mut_ptr()) }; + #[inline] + pub fn try_with_capacity_in( + capacity: usize, + alloc: A, + ) -> Result { + let on_heap = capacity > inline_size::(); + Ok(Self { + len: TaggedLen::new(0, on_heap, Self::is_zst()), + raw: RawSmallVec::try_with_capacity_in(capacity, alloc)?, + _marker: PhantomData, + }) + } - Self { - len: TaggedLen::new(len, true, Self::is_zst()), - raw: RawSmallVec::new_heap(ptr, cap), - _marker: PhantomData, - } + #[inline] + pub unsafe fn from_raw_parts_in( + ptr: *mut T, + length: usize, + capacity: usize, + alloc: A, + ) -> Self { + assert!(!Self::is_zst()); + + // SAFETY: We require caller to provide same ptr as we alloc + // and we never alloc null pointer. + let ptr = unsafe { + debug_assert!(!ptr.is_null(), "Called `from_raw_parts` with null pointer."); + NonNull::new_unchecked(ptr) + }; + + Self { + len: TaggedLen::new(length, true, is_zst::()), + raw: RawSmallVec::new_heap_in(ptr, capacity, alloc), + _marker: PhantomData, } } + #[inline] + pub unsafe fn from_parts_in( + ptr: NonNull, + length: usize, + capacity: usize, + alloc: A, + ) -> Self { + assert!(!Self::is_zst()); + + Self { + len: TaggedLen::new(length, true, is_zst::()), + raw: RawSmallVec::new_heap_in(ptr, capacity, alloc), + _marker: PhantomData, + } + } +} + +impl alloc_param!(SmallVec) { + #[inline] + const fn is_zst() -> bool { + size_of::() == 0 + } + /// Sets the tag to be on the heap /// /// # Safety @@ -1030,7 +1639,7 @@ impl SmallVec { pub const fn capacity(&self) -> usize { if self.len.on_heap(Self::is_zst()) { // SAFETY: raw.heap is active - unsafe { self.raw.heap.1 } + unsafe { self.raw.inner.heap.1 } } else { Self::inline_size() } @@ -1041,49 +1650,46 @@ impl SmallVec { self.len.on_heap(Self::is_zst()) } - /// Splits the collection into two at the given index. - /// - /// Returns a newly allocated vector containing the elements in the range - /// `[at, len)`. After the call, the original vector will be left containing - /// the elements `[0, at)` with its previous capacity unchanged. - /// - /// - If you want to take ownership of the entire contents and capacity of - /// the vector, see [`core::mem::take`] or [`core::mem::replace`]. - /// - If you don't need the returned vector at all, see [`SmallVec::truncate`]. - /// - If you want to take ownership of an arbitrary subslice, or you don't - /// necessarily want to store the removed items in a vector, see [`SmallVec::drain`]. - /// - /// # Panics - /// - /// Panics if `at > len`. - /// - /// # Examples - /// - /// ``` - /// let mut vec = vec![1, 2, 3]; - /// let vec2 = vec.split_off(1); - /// assert_eq!(vec, [1]); - /// assert_eq!(vec2, [2, 3]); - /// ``` #[inline] - pub fn split_off(&mut self, at: usize) -> Self { - let len = self.len(); - assert!(at <= len); - - let other_len = len - at; - let mut other = Self::with_capacity(other_len); + pub fn from_vec(vec: alloc_param!(Vec)) -> Self { + let mut vec = ManuallyDrop::new(vec); + let cap = vec.capacity(); + let len = vec.len(); + #[cfg(feature = "allocator_api")] + // SAFETY: casting a reference to a pointer results + // in a valid pointer. Since the value is wrapped inside a + // `ManuallyDrop`, the value is not dropped twice. + let alloc = unsafe { core::ptr::read(vec.allocator() as *const A) }; + #[cfg(not(feature = "allocator_api"))] + let alloc = A; - // Unsafely `set_len` and copy items to `other`. 
- unsafe { - self.set_len(at); - other.set_len(other_len); + if Self::is_zst() { + // Standard Rust ZST vecs don't allocate, so no memory management needs to be done. + Self { + len: TaggedLen::new(len, false, Self::is_zst()), + raw: RawSmallVec::new_in(alloc), + _marker: PhantomData, + } + } else if cap > 0 { + // FIXME: Replace with `Vec::as_non_null` once it is stable. + // SAFETY: The pointer of a vector is never null. + let ptr = unsafe { NonNull::new_unchecked(vec.as_mut_ptr()) }; - core::ptr::copy_nonoverlapping(self.as_ptr().add(at), other.as_mut_ptr(), other_len); + Self { + len: TaggedLen::new(len, true, Self::is_zst()), + raw: RawSmallVec::new_heap_in(ptr, cap, alloc), + _marker: PhantomData, + } + } else { + Self { + len: TaggedLen::new(0, false, Self::is_zst()), + raw: RawSmallVec::new_inline_in(MaybeUninit::uninit(), alloc), + _marker: PhantomData, + } } - other } - pub fn drain(&mut self, range: R) -> Drain<'_, T, N> + pub fn drain(&mut self, range: R) -> alloc_param!(Drain<'_, T, N, A>) where R: core::ops::RangeBounds, { @@ -1179,7 +1785,7 @@ impl SmallVec { /// assert_eq!(items, SmallVec::::from(&[0, 0, 0, 0, 0, 0, 0, 2, 2, 2])); /// assert_eq!(ones.len(), 3); /// ``` - pub fn extract_if(&mut self, range: R, filter: F) -> ExtractIf<'_, T, N, F> + pub fn extract_if(&mut self, range: R, filter: F) -> alloc_param!(ExtractIf<'_, T, N, F, A>) where F: FnMut(&mut T) -> bool, R: core::ops::RangeBounds, @@ -1202,7 +1808,7 @@ impl SmallVec { } } - pub fn splice(&mut self, range: R, replace_with: I) -> Splice<'_, I::IntoIter, N> + pub fn splice(&mut self, range: R, replace_with: I) -> alloc_param!(Splice<'_, I::IntoIter, N, A>) where R: core::ops::RangeBounds, I: IntoIterator, @@ -1246,7 +1852,7 @@ impl SmallVec { } #[inline] - pub fn append(&mut self, other: &mut SmallVec) { + pub fn append(&mut self, other: &mut alloc_param!(SmallVec)) { // can't overflow since both are smaller than isize::MAX and 2 * isize::MAX < usize::MAX let len = self.len(); let other_len = other.len(); @@ -1276,147 +1882,116 @@ impl SmallVec { } let len = self.len(); + let on_heap = self.spilled(); + let capacity = self.capacity(); assert!(new_capacity >= len); - if new_capacity > Self::inline_size() { - // SAFETY: we checked all the preconditions - let result = unsafe { self.raw.try_grow_raw(self.len, new_capacity) }; + if new_capacity <= Self::inline_size() && self.spilled() { + // If the desired capacity is smaller than the inline size, + // then store all elements inline. - if result.is_ok() { - // SAFETY: the allocation succeeded, so self.raw.heap is now active - unsafe { self.set_on_heap() }; + // SAFETY: The tag inside the length of the vector corresponds to the way + // elements are stored inside the vector. + // All elements will be stored inline because the vector will + // be shrunk to a capacity smaller than the inline size. 
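+            // `shrink_to_fit` reports where the elements ended up; the
+            // `debug_assert!` double-checks that they are now inline.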
+ unsafe { + let on_heap = self.raw.shrink_to_fit(on_heap, len)?; + debug_assert!(!on_heap); + self.set_inline(); } - result + Ok(()) + } else if new_capacity > capacity { + self.try_reserve(new_capacity - capacity) } else { - // new_capacity <= Self::inline_size() - if self.spilled() { - unsafe { - // SAFETY: heap member is active - let (ptr, old_cap) = self.raw.heap; - // inline member is now active - - // SAFETY: len <= new_capacity <= Self::inline_size() - // so the copy is within bounds of the inline member - copy_nonoverlapping(ptr.as_ptr(), self.raw.as_mut_ptr_inline(), len); - drop(DropDealloc { - ptr: ptr.cast(), - size_bytes: old_cap * size_of::(), - align: align_of::(), - }); - self.set_inline(); - } - } Ok(()) } } #[inline] pub fn reserve(&mut self, additional: usize) { - // can't overflow since len <= capacity - if additional > self.capacity() - self.len() { - let new_capacity = infallible( - self.len() - .checked_add(additional) - .and_then(usize::checked_next_power_of_two) - .ok_or(CollectionAllocErr::CapacityOverflow), - ); - self.grow(new_capacity); - } + infallible(self.try_reserve(additional)) } #[inline] pub fn try_reserve(&mut self, additional: usize) -> Result<(), CollectionAllocErr> { - if additional > self.capacity() - self.len() { - let new_capacity = self - .len() - .checked_add(additional) - .and_then(usize::checked_next_power_of_two) - .ok_or(CollectionAllocErr::CapacityOverflow)?; - self.try_grow(new_capacity) - } else { - Ok(()) + if Self::is_zst() { + return Ok(()); } + + // SAFETY: The tag inside the length of the vector corresponds to the way + // elements are stored inside the vector. The same goes for the return value + // of the function. + unsafe { + let on_heap = self.raw.try_reserve(self.len, additional)?; + if on_heap { + self.set_on_heap(); + } else { + self.set_inline(); + } + }; + + Ok(()) } #[inline] pub fn reserve_exact(&mut self, additional: usize) { - // can't overflow since len <= capacity - if additional > self.capacity() - self.len() { - let new_capacity = infallible( - self.len() - .checked_add(additional) - .ok_or(CollectionAllocErr::CapacityOverflow), - ); - self.grow(new_capacity); - } + infallible(self.try_reserve_exact(additional)) } #[inline] pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), CollectionAllocErr> { - if additional > self.capacity() - self.len() { - let new_capacity = self - .len() - .checked_add(additional) - .ok_or(CollectionAllocErr::CapacityOverflow)?; - self.try_grow(new_capacity) - } else { - Ok(()) + if is_zst::() { + return Ok(()); } + + // SAFETY: The tag inside the length of the vector corresponds to the way + // elements are stored inside the vector. The same goes for the return value + // of the function. 
+ unsafe { + let on_heap = self.raw.try_reserve_exact(self.len, additional)?; + if on_heap { + self.set_on_heap(); + } else { + self.set_inline(); + } + }; + + Ok(()) } #[inline] pub fn shrink_to_fit(&mut self) { - if !self.spilled() { + if is_zst::() { return; } + let len = self.len(); - if len <= Self::inline_size() { - // SAFETY: self.spilled() is true, so we're on the heap - unsafe { - let (ptr, capacity) = self.raw.heap; - self.raw = RawSmallVec::new_inline(MaybeUninit::uninit()); - copy_nonoverlapping(ptr.as_ptr(), self.raw.as_mut_ptr_inline(), len); - self.set_inline(); - alloc::alloc::dealloc( - ptr.cast().as_ptr(), - Layout::from_size_align_unchecked(capacity * size_of::(), align_of::()), - ); - } - } else if len < self.capacity() { - // SAFETY: len > Self::inline_size() >= 0 - // so new capacity is non zero, it is equal to the length - // T can't be a ZST because SmallVec is never spilled. - unsafe { infallible(self.raw.try_grow_raw(self.len, len)) }; - } + let on_heap = self.spilled(); + + // SAFETY: The tag inside the length of the vector corresponds to the way + // elements are stored inside the vector. + let on_heap = unsafe { + infallible(self.raw.shrink_to_fit(on_heap, len)) + }; + self.len = TaggedLen::new(len, on_heap, is_zst::()); } #[inline] pub fn shrink_to(&mut self, min_capacity: usize) { - if !self.spilled() { + if is_zst::() { return; } - if self.capacity() > min_capacity { - let len = self.len(); - let target = core::cmp::max(len, min_capacity); - if target <= Self::inline_size() { - // SAFETY: self.spilled() is true, so we're on the heap - unsafe { - let (ptr, capacity) = self.raw.heap; - self.raw = RawSmallVec::new_inline(MaybeUninit::uninit()); - copy_nonoverlapping(ptr.as_ptr(), self.raw.as_mut_ptr_inline(), len); - self.set_inline(); - alloc::alloc::dealloc( - ptr.cast().as_ptr(), - Layout::from_size_align_unchecked(capacity * size_of::(), align_of::()), - ); - } - } else if target < self.capacity() { - // SAFETY: len > Self::inline_size() >= 0 - // so new capacity is non zero, it is equal to the length - // T can't be a ZST because SmallVec is never spilled. - unsafe { infallible(self.raw.try_grow_raw(self.len, target)) }; - } - } + + let len = self.len(); + let min_capacity = len.max(min_capacity); + let on_heap = self.spilled(); + + // SAFETY: The tag inside the length of the vector corresponds to the way + // elements are stored inside the vector. + let on_heap = unsafe { + infallible(self.raw.shrink_to_fit(on_heap, min_capacity)) + }; + self.len = TaggedLen::new(len, on_heap, is_zst::()); } #[inline] @@ -1540,11 +2115,25 @@ impl SmallVec { } #[inline] - pub fn into_vec(self) -> Vec { + pub fn into_vec(self) -> alloc_param!(Vec) { let len = self.len(); - if !self.spilled() { + let on_heap = self.spilled(); + let this = ManuallyDrop::new(self); + + // SAFETY: casting a reference to a pointer results + // in a valid pointer. Since the value is wrapped inside a + // `ManuallyDrop`, the value is not dropped twice. 
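+        // (This mirrors `Vec`'s allocator hand-off: the allocator is read out
+        // by value exactly once while `ManuallyDrop` suppresses its drop.)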
+ #[cfg(feature = "allocator_api")] + let alloc = unsafe { + core::ptr::read(&this.raw.allocator as *const A) + }; + + if !on_heap { + #[cfg(not(feature = "allocator_api"))] let mut vec = Vec::with_capacity(len); - let this = ManuallyDrop::new(self); + #[cfg(feature = "allocator_api")] + let mut vec = Vec::with_capacity_in(len, alloc); + // SAFETY: we create a new vector with sufficient capacity, copy our elements into it // to transfer ownership and then set the length // we don't drop the elements we previously held @@ -1554,23 +2143,31 @@ impl SmallVec { } vec } else { - let this = ManuallyDrop::new(self); // SAFETY: - // - `ptr` was created with the global allocator + // - `ptr` was created with the appropriate allocator // - `ptr` was created with the appropriate alignment for `T` // - the allocation pointed to by ptr is exactly cap * sizeof(T) // - `len` is less than or equal to `cap` // - the first `len` entries are proper `T`-values // - the allocation is not larger than `isize::MAX` unsafe { - let (ptr, cap) = this.raw.heap; - Vec::from_raw_parts(ptr.as_ptr(), len, cap) + let (ptr, cap) = this.raw.inner.heap; + + #[cfg(not(feature = "allocator_api"))] + { + Vec::from_raw_parts(ptr.as_ptr(), len, cap) + } + + #[cfg(feature = "allocator_api")] + { + Vec::from_raw_parts_in(ptr.as_ptr(), len, cap, alloc) + } } } } #[inline] - pub fn into_boxed_slice(self) -> Box<[T]> { + pub fn into_boxed_slice(self) -> alloc_param!(Box<[T], A>) { self.into_vec().into_boxed_slice() } @@ -1701,78 +2298,85 @@ impl SmallVec { ) } } +} - /// Creates a `SmallVec` directly from the raw components of another `SmallVec`. - /// - /// # Safety - /// - /// This is highly unsafe, due to the number of invariants that aren’t checked: +impl alloc_param!(SmallVec) { + /// Splits the collection into two at the given index. /// - /// - `ptr` needs to have been previously allocated via `SmallVec` from its spilled storage (at least, it’s highly likely to be incorrect if it wasn’t). - /// - `ptr`’s `A::Item` type needs to be the same size and alignment that it was allocated with - /// - `length` needs to be less than or equal to `capacity`. - /// - `capacity` needs to be the capacity that the pointer was allocated with. + /// Returns a newly allocated vector containing the elements in the range + /// `[at, len)`. After the call, the original vector will be left containing + /// the elements `[0, at)` with its previous capacity unchanged. /// - /// Violating these may cause problems like corrupting the allocator’s internal data structures. + /// - If you want to take ownership of the entire contents and capacity of + /// the vector, see [`core::mem::take`] or [`core::mem::replace`]. + /// - If you don't need the returned vector at all, see [`SmallVec::truncate`]. + /// - If you want to take ownership of an arbitrary subslice, or you don't + /// necessarily want to store the removed items in a vector, see [`SmallVec::drain`]. /// - /// Additionally, `capacity` must be greater than the amount of inline storage `A` has; that is, the new `SmallVec` must need to spill over into heap allocated storage. This condition is asserted against. + /// # Panics /// - /// The ownership of `ptr` is effectively transferred to the `SmallVec` which may then deallocate, reallocate or change the contents of memory pointed to by the pointer at will. Ensure that nothing else uses the pointer after calling this function. + /// Panics if `at > len`. 
     ///
     /// # Examples
     ///
     /// ```
-    /// use smallvec::{SmallVec, smallvec};
-    ///
-    /// let mut v: SmallVec<_, 1> = smallvec![1, 2, 3];
-    ///
-    /// // Pull out the important parts of `v`.
-    /// let p = v.as_mut_ptr();
-    /// let len = v.len();
-    /// let cap = v.capacity();
-    /// let spilled = v.spilled();
-    ///
-    /// unsafe {
-    ///     // Forget all about `v`. The heap allocation that stored the
-    ///     // three values won't be deallocated.
-    ///     std::mem::forget(v);
-    ///
-    ///     // Overwrite memory with [4, 5, 6].
-    ///     //
-    ///     // This is only safe if `spilled` is true! Otherwise, we are
-    ///     // writing into the old `SmallVec`'s inline storage on the
-    ///     // stack.
-    ///     assert!(spilled);
-    ///     for i in 0..len {
-    ///         std::ptr::write(p.add(i), 4 + i);
-    ///     }
-    ///
-    ///     // Put everything back together into a SmallVec with a different
-    ///     // amount of inline storage, but which is still less than `cap`.
-    ///     let rebuilt = SmallVec::<_, 2>::from_raw_parts(p, len, cap);
-    ///     assert_eq!(&*rebuilt, &[4, 5, 6]);
-    /// }
+    /// let mut vec = vec![1, 2, 3];
+    /// let vec2 = vec.split_off(1);
+    /// assert_eq!(vec, [1]);
+    /// assert_eq!(vec2, [2, 3]);
     /// ```
     #[inline]
-    pub unsafe fn from_raw_parts(ptr: *mut T, length: usize, capacity: usize) -> SmallVec<T, N> {
-        assert!(!Self::is_zst());
+    pub fn split_off(&mut self, at: usize) -> Self {
+        let len = self.len();
+        assert!(at <= len);
 
-        // SAFETY: We require caller to provide same ptr as we alloc
-        // and we never alloc null pointer.
-        let ptr = unsafe {
-            debug_assert!(!ptr.is_null(), "Called `from_raw_parts` with null pointer.");
-            NonNull::new_unchecked(ptr)
-        };
+        let other_len = len - at;
 
-        SmallVec {
-            len: TaggedLen::new(length, true, Self::is_zst()),
-            raw: RawSmallVec::new_heap(ptr, capacity),
-            _marker: PhantomData,
+        let mut other = Self::with_capacity_in(other_len, self.raw.allocator.clone());
+
+        // Unsafely `set_len` and copy items to `other`.
+        unsafe {
+            self.set_len(at);
+            other.set_len(other_len);
+
+            core::ptr::copy_nonoverlapping(self.as_ptr().add(at), other.as_mut_ptr(), other_len);
         }
+        other
     }
 }
 
-impl<T: Clone, const N: usize> SmallVec<T, N> {
+impl<T: Clone, const N: usize, #[cfg(feature = "allocator_api")] A: Allocator> alloc_param!(SmallVec<T, N, A>) {
+    /// Creates a [`SmallVec`] value from the slice `slice` with the specified allocator.
+    #[cfg(feature = "allocator_api")]
+    pub fn from_slice_in(slice: &[T], alloc: A) -> Self {
+        if slice.len() > Self::inline_size() {
+            // Standard Rust vectors are already specialized.
+            Self::from_vec(slice.to_vec_in(alloc))
+        } else {
+            // SAFETY: The precondition is checked in the initial comparison above.
+            unsafe {
+                #[cfg(feature = "specialization")]
+                {
+                    <Self as spec_traits::SpecFromSlice<T, A>>::spec_from(slice, alloc)
+                }
+
+                #[cfg(not(feature = "specialization"))]
+                {
+                    Self::from_slice_fallback(slice, alloc)
+                }
+            }
+        }
+    }
+
+    /// A stub for `Self::from_slice_in` that can be used whenever the "allocator_api" feature is disabled.
+    /// This makes it possible to develop some functions independently of the feature,
+    /// reducing code complexity.
+    #[cfg(not(feature = "allocator_api"))]
+    #[inline(always)]
+    fn from_slice_in(slice: &[T], _alloc: A) -> Self {
+        Self::from(slice)
+    }
+
     #[inline]
     pub fn resize(&mut self, len: usize, value: T) {
         let old_len = self.len();
@@ -1873,12 +2477,32 @@
     }
 
+    #[cfg(feature = "allocator_api")]
     /// A function for creating [`SmallVec`] values out of slices
     /// for types with the [`Copy`] trait.
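+    ///
+    /// A usage sketch (assumes the nightly `allocator_api` feature and the
+    /// three-parameter form of `SmallVec` introduced by this patch):
+    ///
+    /// ```ignore
+    /// use std::alloc::Global;
+    /// use smallvec::SmallVec;
+    ///
+    /// // The allocator type is inferred from the `Global` argument.
+    /// let v = SmallVec::<u32, 4, _>::from_slice_copy_in(&[1, 2, 3], Global);
+    /// assert_eq!(&*v, &[1, 2, 3]);
+    /// ```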
- pub fn from_slice_copy(slice: &[T]) -> Self + pub fn from_slice_copy_in(slice: &[T], alloc: A) -> Self where T: Copy { + let src = slice.as_ptr(); + let len = slice.len(); + let mut result = Self::with_capacity_in(len, alloc); + + // SAFETY: By using `with_capacity_in`, the pointer will point to valid memory. + unsafe { + let dst = result.as_mut_ptr(); + copy_nonoverlapping(src, dst, len); + result.set_len(len); + } + + result + } +} + +impl SmallVec { + /// A function for creating [`SmallVec`] values out of slices + /// for types with the [`Copy`] trait. + pub fn from_slice_copy(slice: &[T]) -> Self { let src = slice.as_ptr(); let len = slice.len(); let mut result = Self::with_capacity(len); @@ -1907,28 +2531,8 @@ impl Drop for DropGuard { } } -struct DropDealloc { - ptr: NonNull, - size_bytes: usize, - align: usize, -} - -impl Drop for DropDealloc { - #[inline] - fn drop(&mut self) { - unsafe { - if self.size_bytes > 0 { - alloc::alloc::dealloc( - self.ptr.as_ptr(), - Layout::from_size_align_unchecked(self.size_bytes, self.align), - ); - } - } - } -} - #[cfg(feature = "may_dangle")] -unsafe impl<#[may_dangle] T, const N: usize> Drop for SmallVec { +unsafe impl<#[may_dangle] T, const N: usize, #[cfg(feature = "allocator_api")] A: Allocator> Drop for alloc_param!(SmallVec) { fn drop(&mut self) { let on_heap = self.spilled(); let len = self.len(); @@ -1940,8 +2544,8 @@ unsafe impl<#[may_dangle] T, const N: usize> Drop for SmallVec { let capacity = self.capacity(); Some(DropDealloc { ptr: NonNull::new_unchecked(ptr as *mut u8), - size_bytes: capacity * size_of::(), - align: align_of::(), + layout: array_layout_unchecked::(capacity), + allocator: &self.raw.allocator, }) } else { None @@ -1952,7 +2556,7 @@ unsafe impl<#[may_dangle] T, const N: usize> Drop for SmallVec { } #[cfg(not(feature = "may_dangle"))] -impl Drop for SmallVec { +impl Drop for alloc_param!(SmallVec) { fn drop(&mut self) { let on_heap = self.spilled(); let len = self.len(); @@ -1963,8 +2567,8 @@ impl Drop for SmallVec { let capacity = self.capacity(); Some(DropDealloc { ptr: NonNull::new_unchecked(ptr as *mut u8), - size_bytes: capacity * size_of::(), - align: align_of::(), + layout: array_layout_unchecked::(capacity), + allocator: &self.raw.allocator, }) } else { None @@ -1974,7 +2578,7 @@ impl Drop for SmallVec { } } -impl Drop for IntoIter { +impl Drop for alloc_param!(IntoIter) { fn drop(&mut self) { // SAFETY: see above unsafe { @@ -1984,11 +2588,11 @@ impl Drop for IntoIter { let end = self.end.value(is_zst); let ptr = self.as_mut_ptr(); let _drop_dealloc = if on_heap { - let capacity = self.raw.heap.1; + let capacity = self.raw.inner.heap.1; Some(DropDealloc { ptr: NonNull::new_unchecked(ptr as *mut u8), - size_bytes: capacity * size_of::(), - align: align_of::(), + layout: array_layout_unchecked::(capacity), + allocator: &self.raw.allocator, }) } else { None @@ -1998,7 +2602,7 @@ impl Drop for IntoIter { } } -impl core::ops::Deref for SmallVec { +impl core::ops::Deref for alloc_param!(SmallVec) { type Target = [T]; #[inline] @@ -2006,7 +2610,7 @@ impl core::ops::Deref for SmallVec { self.as_slice() } } -impl core::ops::DerefMut for SmallVec { +impl core::ops::DerefMut for alloc_param!(SmallVec) { #[inline] fn deref_mut(&mut self) -> &mut Self::Target { self.as_mut_slice() @@ -2025,13 +2629,44 @@ pub fn from_elem(elem: T, n: usize) -> SmallVec #[cfg(feature = "specialization")] { // SAFETY: The precondition is checked in the initial comparison above. 
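+            // (The specialized path below may only fill the inline buffer; the
+            // heap case was already delegated to `Vec` in the branch above.)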
- unsafe { as spec_traits::SpecFromElem>::spec_from_elem(elem, n) } + #[cfg(feature = "allocator_api")] + unsafe { + as spec_traits::SpecFromElem>::spec_from_elem(elem, n, GLOBAL) + } + + // SAFETY: The precondition is checked in the initial comparison above. + #[cfg(not(feature = "allocator_api"))] + unsafe { + as spec_traits::SpecFromElem>::spec_from_elem(elem, n, GLOBAL) + } + } + + #[cfg(not(feature = "specialization"))] + { + // SAFETY: The precondition is checked in the initial comparison above. + unsafe { SmallVec::::from_elem_fallback(elem, n, GLOBAL) } + } + } +} + +#[cfg(feature = "allocator_api")] +#[doc(hidden)] +#[track_caller] +pub fn from_elem_in(elem: T, n: usize, alloc: A) -> SmallVec { + if n > SmallVec::::inline_size() { + // Standard Rust vectors are already specialized. + SmallVec::::from_vec(alloc::vec::from_elem_in(elem, n, alloc)) + } else { + #[cfg(feature = "specialization")] + { + // SAFETY: The precondition is checked in the initial comparison above. + unsafe { as spec_traits::SpecFromElem>::spec_from_elem(elem, n, alloc) } } #[cfg(not(feature = "specialization"))] { // SAFETY: The precondition is checked in the initial comparison above. - unsafe { SmallVec::::from_elem_fallback(elem, n) } + unsafe { SmallVec::::from_elem_fallback(elem, n, alloc) } } } } @@ -2043,27 +2678,65 @@ mod spec_traits { /// A trait for specializing the implementation of [`from_elem`]. /// /// [`from_elem`]: crate::from_elem - pub(crate) trait SpecFromElem { + pub(crate) trait SpecFromElem { /// Creates a `Smallvec` value where `elem` is repeated `n` times. /// This will use the inline storage, not the heap. /// /// # Safety /// /// The caller must ensure that `n <= Self::inline_size()`. - unsafe fn spec_from_elem(elem: T, n: usize) -> Self; + unsafe fn spec_from_elem(elem: T, n: usize, alloc: A) -> Self; } + #[cfg(not(feature = "allocator_api"))] impl SpecFromElem for SmallVec { #[inline] - default unsafe fn spec_from_elem(elem: T, n: usize) -> Self { + default unsafe fn spec_from_elem(elem: T, n: usize, alloc: A) -> Self { // SAFETY: Safety conditions are identical. - unsafe { SmallVec::from_elem_fallback(elem, n) } + unsafe { SmallVec::from_elem_fallback(elem, n, alloc) } } } + #[cfg(feature = "allocator_api")] + impl SpecFromElem for SmallVec { + #[inline] + default unsafe fn spec_from_elem(elem: T, n: usize, alloc: A) -> Self { + // SAFETY: Safety conditions are identical. + unsafe { SmallVec::from_elem_fallback(elem, n, alloc) } + } + } + + #[cfg(not(feature = "allocator_api"))] impl SpecFromElem for SmallVec { - unsafe fn spec_from_elem(elem: T, n: usize) -> Self { - let mut result = Self::new(); + unsafe fn spec_from_elem(elem: T, n: usize, alloc: A) -> Self { + let mut result = Self::new_in(alloc); + + if n > 0 { + let ptr = result.raw.as_mut_ptr_inline(); + + // SAFETY: The caller ensures that the first `n` + // is smaller than the inline size. + unsafe { + for i in 0..n { + ptr.add(i).write(elem); + } + } + } + + // SAFETY: The first `n` elements of the vector + // have been initialized in the loop above. 
+            unsafe {
+                result.set_len(n);
+            }
+
+            result
+        }
+    }
+
+    #[cfg(feature = "allocator_api")]
+    impl<T: Copy, const N: usize, A: Allocator> SpecFromElem<T, A> for SmallVec<T, N, A> {
+        unsafe fn spec_from_elem(elem: T, n: usize, alloc: A) -> Self {
+            let mut result = Self::new_in(alloc);
 
             if n > 0 {
                 let ptr = result.raw.as_mut_ptr_inline();
@@ -2094,7 +2767,7 @@ mod spec_traits {
         fn spec_extend(&mut self, iter: I);
     }
 
-    impl<T, const N: usize, I> SpecExtend<T, I> for SmallVec<T, N>
+    impl<T, const N: usize, I, #[cfg(feature = "allocator_api")] A: Allocator> SpecExtend<T, I> for alloc_param!(SmallVec<T, N, A>)
     where
         I: Iterator<Item = T>,
     {
@@ -2104,7 +2777,7 @@ mod spec_traits {
         }
     }
 
-    impl<T, const N: usize, I> SpecExtend<T, I> for SmallVec<T, N>
+    impl<T, const N: usize, I, #[cfg(feature = "allocator_api")] A: Allocator> SpecExtend<T, I> for alloc_param!(SmallVec<T, N, A>)
     where
         I: core::iter::TrustedLen<Item = T>,
     {
@@ -2135,8 +2808,8 @@ mod spec_traits {
         }
     }
 
-    impl<T, const N: usize, const M: usize> SpecExtend<T, IntoIter<T, M>> for SmallVec<T, N> {
-        fn spec_extend(&mut self, mut iter: IntoIter<T, M>) {
+    impl<T, const N: usize, const M: usize, #[cfg(feature = "allocator_api")] A: Allocator> SpecExtend<T, alloc_param!(IntoIter<T, M, A>)> for alloc_param!(SmallVec<T, N, A>) {
+        fn spec_extend(&mut self, mut iter: alloc_param!(IntoIter<T, M, A>)) {
             let slice = iter.as_slice();
             let len = slice.len();
             let old_len = self.len();
@@ -2161,7 +2834,7 @@ mod spec_traits {
         }
     }
 
-    impl<'a, T: 'a, const N: usize, I> SpecExtend<&'a T, I> for SmallVec<T, N>
+    impl<'a, T: 'a, const N: usize, I, #[cfg(feature = "allocator_api")] A: Allocator> SpecExtend<&'a T, I> for alloc_param!(SmallVec<T, N, A>)
     where
         I: Iterator<Item = &'a T>,
         T: Clone,
     {
@@ -2172,7 +2845,7 @@ mod spec_traits {
         }
     }
 
-    impl<'a, T: 'a, const N: usize> SpecExtend<&'a T, core::slice::Iter<'a, T>> for SmallVec<T, N>
+    impl<'a, T: 'a, const N: usize, #[cfg(feature = "allocator_api")] A: Allocator> SpecExtend<&'a T, core::slice::Iter<'a, T>> for alloc_param!(SmallVec<T, N, A>)
     where
         T: Copy,
     {
@@ -2213,7 +2886,7 @@ mod spec_traits {
         unsafe fn spec_extend_from_within(&mut self, src: core::ops::Range<usize>);
     }
 
-    impl<T: Clone, const N: usize> SpecExtendFromWithin for SmallVec<T, N> {
+    impl<T: Clone, const N: usize, #[cfg(feature = "allocator_api")] A: Allocator> SpecExtendFromWithin for alloc_param!(SmallVec<T, N, A>) {
         default unsafe fn spec_extend_from_within(&mut self, src: core::ops::Range<usize>) {
             // SAFETY: Safety conditions are identical.
             unsafe {
@@ -2222,7 +2895,7 @@ mod spec_traits {
         }
     }
 
-    impl<T: Copy, const N: usize> SpecExtendFromWithin for SmallVec<T, N> {
+    impl<T: Copy, const N: usize, #[cfg(feature = "allocator_api")] A: Allocator> SpecExtendFromWithin for alloc_param!(SmallVec<T, N, A>) {
         unsafe fn spec_extend_from_within(&mut self, src: core::ops::Range<usize>) {
             let old_len = self.len();
@@ -2259,7 +2932,7 @@ mod spec_traits {
     {
         #[inline]
         default fn spec_from_iter(iter: I) -> Self {
-            Self::from_iter_fallback(iter)
+            Self::from_iter_fallback(iter, GLOBAL)
         }
     }
 
@@ -2289,14 +2962,14 @@ mod spec_traits {
         fn spec_clone_from(&mut self, source: &[T]);
     }
 
-    impl<T: Clone, const N: usize> SpecCloneFrom<T> for SmallVec<T, N> {
+    impl<T: Clone, const N: usize, #[cfg(feature = "allocator_api")] A: Allocator> SpecCloneFrom<T> for alloc_param!(SmallVec<T, N, A>) {
         #[inline]
         default fn spec_clone_from(&mut self, source: &[T]) {
             self.clone_from_fallback(source);
         }
     }
 
-    impl<T: Copy, const N: usize> SpecCloneFrom<T> for SmallVec<T, N> {
+    impl<T: Copy, const N: usize, #[cfg(feature = "allocator_api")] A: Allocator> SpecCloneFrom<T> for alloc_param!(SmallVec<T, N, A>) {
         fn spec_clone_from(&mut self, source: &[T]) {
             self.clear();
             self.extend_from_slice(source);
@@ -2305,26 +2978,60 @@ mod spec_traits {
 
     /// A trait for specializing the implementation of [`From`]
     /// with the source type being slices.
-    pub(crate) trait SpecFromSlice<T> {
+    pub(crate) trait SpecFromSlice<T, A> {
         /// Creates a `SmallVec` value based on the contents of `slice`.
         /// This will use the inline storage, not the heap.
         ///
         /// # Safety
         ///
         /// The caller must ensure that `slice.len() <= Self::inline_size()`.
-        unsafe fn spec_from(slice: &[T]) -> Self;
+        unsafe fn spec_from(slice: &[T], alloc: A) -> Self;
     }
 
+    #[cfg(not(feature = "allocator_api"))]
     impl<T: Clone, const N: usize> SpecFromSlice<T, A> for SmallVec<T, N> {
-        default unsafe fn spec_from(slice: &[T]) -> Self {
+        default unsafe fn spec_from(slice: &[T], alloc: A) -> Self {
+            // SAFETY: Safety conditions are identical.
+            unsafe { Self::from_slice_fallback(slice, alloc) }
+        }
+    }
+
+    #[cfg(feature = "allocator_api")]
+    impl<T: Clone, const N: usize, A: Allocator> SpecFromSlice<T, A> for SmallVec<T, N, A> {
+        default unsafe fn spec_from(slice: &[T], alloc: A) -> Self {
             // SAFETY: Safety conditions are identical.
-            unsafe { Self::from_slice_fallback(slice) }
+            unsafe { Self::from_slice_fallback(slice, alloc) }
         }
     }
 
+    #[cfg(not(feature = "allocator_api"))]
     impl<T: Copy, const N: usize> SpecFromSlice<T, A> for SmallVec<T, N> {
-        unsafe fn spec_from(slice: &[T]) -> Self {
-            let mut v = Self::new();
+        unsafe fn spec_from(slice: &[T], alloc: A) -> Self {
+            let mut v = Self::new_in(alloc);
+
+            let src = slice.as_ptr();
+            let len = slice.len();
+            let dst = v.as_mut_ptr();
+
+            // SAFETY: The caller ensures that the slice length is smaller
+            // than or equal to the inline size.
+            unsafe {
+                copy_nonoverlapping(src, dst, len);
+            }
+
+            // SAFETY: The elements were initialized above.
+            unsafe {
+                v.set_len(len);
+            }
+
+            v
+        }
+    }
+
+    #[cfg(feature = "allocator_api")]
+    impl<T: Copy, const N: usize, A: Allocator> SpecFromSlice<T, A> for SmallVec<T, N, A> {
+        unsafe fn spec_from(slice: &[T], alloc: A) -> Self {
+            let mut v = Self::new_in(alloc);
 
             let src = slice.as_ptr();
             let len = slice.len();
@@ -2348,18 +3055,18 @@ mod spec_traits {
 
 /// Fallback functions for various specialized methods. These are kept in
 /// a separate implementation block for easy access whenever specialization is disabled.
-impl<T, const N: usize> SmallVec<T, N> {
+impl<T, const N: usize, #[cfg(feature = "allocator_api")] A: Allocator> alloc_param!(SmallVec<T, N, A>) {
     /// Creates a `SmallVec` value where `elem` is repeated `n` times.
     /// This will use the inline storage, not the heap.
     ///
     /// # Safety
     ///
     /// The caller must ensure that `n <= Self::inline_size()`.
-    unsafe fn from_elem_fallback(elem: T, n: usize) -> Self
+    unsafe fn from_elem_fallback(elem: T, n: usize, alloc: A) -> Self
     where
         T: Clone,
     {
-        let mut result = Self::new();
+        let mut result = Self::new_in(alloc);
 
         if n > 0 {
             let ptr = result.raw.as_mut_ptr_inline();
@@ -2438,12 +3145,12 @@
         }
     }
 
-    fn from_iter_fallback<I>(iter: I) -> Self
+    fn from_iter_fallback<I>(iter: I, alloc: A) -> Self
     where
         I: Iterator<Item = T>,
     {
         let (size, _) = iter.size_hint();
-        let mut v = Self::with_capacity(size);
+        let mut v = Self::with_capacity_in(size, alloc);
         for x in iter {
             v.push(x);
         }
@@ -2474,11 +3181,11 @@
     /// # Safety
     ///
     /// The caller must ensure that `slice.len() <= Self::inline_size()`.
-    unsafe fn from_slice_fallback(slice: &[T]) -> Self
+    unsafe fn from_slice_fallback(slice: &[T], alloc: A) -> Self
     where
         T: Clone,
     {
-        let mut v = Self::new();
+        let mut v = Self::new_in(alloc);
 
         let src = slice.as_ptr();
         let len = slice.len();
@@ -2516,12 +3223,20 @@ impl<T: Clone, const N: usize> From<&[T]> for SmallVec<T, N> {
         unsafe {
             #[cfg(feature = "specialization")]
             {
-                <Self as spec_traits::SpecFromSlice<T>>::spec_from(slice)
+                #[cfg(feature = "allocator_api")]
+                {
+                    <Self as spec_traits::SpecFromSlice<T, Global>>::spec_from(slice, GLOBAL)
+                }
+
+                #[cfg(not(feature = "allocator_api"))]
+                {
+                    <Self as spec_traits::SpecFromSlice<T, A>>::spec_from(slice, GLOBAL)
+                }
             }
 
             #[cfg(not(feature = "specialization"))]
             {
-                Self::from_slice_fallback(slice)
+                Self::from_slice_fallback(slice, GLOBAL)
             }
         }
     }
@@ -2570,16 +3285,17 @@ impl<T, const N: usize, const M: usize> From<[T; M]> for SmallVec<T, N> {
     }
 }
 
-impl<T, const N: usize> From<Vec<T>> for SmallVec<T, N> {
-    fn from(array: Vec<T>) -> Self {
-        Self::from_vec(array)
+impl<T, const N: usize, #[cfg(feature = "allocator_api")] A: Allocator> From<alloc_param!(Vec<T, A>)> for alloc_param!(SmallVec<T, N, A>) {
+    fn from(v: alloc_param!(Vec<T, A>)) -> Self {
+        Self::from_vec(v)
     }
 }
 
-impl<T: Clone, const N: usize> Clone for SmallVec<T, N> {
+impl<T: Clone, const N: usize, #[cfg(feature = "allocator_api")] A: Allocator + Clone> Clone for alloc_param!(SmallVec<T, N, A>) {
     #[inline]
-    fn clone(&self) -> SmallVec<T, N> {
-        SmallVec::from(self.as_slice())
+    fn clone(&self) -> Self {
+        let alloc = self.raw.allocator.clone();
+        Self::from_slice_in(self.as_slice(), alloc)
     }
 
     #[inline]
@@ -2596,14 +3312,15 @@ impl<T: Clone, const N: usize> Clone for SmallVec<T, N> {
     }
 }
 
-impl<T: Clone, const N: usize> Clone for IntoIter<T, N> {
+impl<T: Clone, const N: usize, #[cfg(feature = "allocator_api")] A: Allocator + Clone> Clone for alloc_param!(IntoIter<T, N, A>) {
     #[inline]
-    fn clone(&self) -> IntoIter<T, N> {
-        SmallVec::from(self.as_slice()).into_iter()
+    fn clone(&self) -> Self {
+        let alloc = self.raw.allocator.clone();
+        SmallVec::from_slice_in(self.as_slice(), alloc).into_iter()
     }
 }
 
-impl<T, const N: usize> Extend<T> for SmallVec<T, N> {
+impl<T, const N: usize, #[cfg(feature = "allocator_api")] A: Allocator> Extend<T> for alloc_param!(SmallVec<T, N, A>) {
     #[inline]
     fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
         #[cfg(feature = "specialization")]
@@ -2618,7 +3335,7 @@ impl<T, const N: usize> Extend<T> for SmallVec<T, N> {
     }
 }
 
-impl<'a, T: Clone + 'a, const N: usize> Extend<&'a T> for SmallVec<T, N> {
+impl<'a, T: Clone + 'a, const N: usize, #[cfg(feature = "allocator_api")] A: Allocator> Extend<&'a T> for alloc_param!(SmallVec<T, N, A>) {
    #[inline]
     fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
         #[cfg(feature = "specialization")]
@@ -2643,7 +3360,7 @@ impl<T, const N: usize> core::iter::FromIterator<T> for SmallVec<T, N> {
 
         #[cfg(not(feature = "specialization"))]
         {
-            Self::from_iter_fallback(iter.into_iter())
+            Self::from_iter_fallback(iter.into_iter(), GLOBAL)
         }
     }
 }
@@ -2683,8 +3400,8 @@ macro_rules! smallvec_inline {
     });
 }
 
-impl<T, const N: usize> IntoIterator for SmallVec<T, N> {
-    type IntoIter = IntoIter<T, N>;
+impl<T, const N: usize, #[cfg(feature = "allocator_api")] A: Allocator> IntoIterator for alloc_param!(SmallVec<T, N, A>) {
+    type IntoIter = alloc_param!(IntoIter<T, N, A>);
     type Item = T;
     fn into_iter(self) -> Self::IntoIter {
         // SAFETY: we move out of this.raw by reading the value at its address, which is fine since
@@ -2693,7 +3410,7 @@ impl<T, const N: usize> IntoIterator for SmallVec<T, N> {
         // Set SmallVec len to zero as `IntoIter` drop handles dropping of the elements
         let this = ManuallyDrop::new(self);
         IntoIter {
-            raw: (&this.raw as *const RawSmallVec<T, N>).read(),
+            raw: (&this.raw as *const alloc_param!(RawSmallVec<T, N, A>)).read(),
             begin: 0,
             end: this.len,
             _marker: PhantomData,
@@ -2702,7 +3419,7 @@ impl<T, const N: usize> IntoIterator for SmallVec<T, N> {
     }
 }
 
-impl<'a, T, const N: usize> IntoIterator for &'a SmallVec<T, N> {
+impl<'a, T, const N: usize, #[cfg(feature = "allocator_api")] A: Allocator> IntoIterator for &'a alloc_param!(SmallVec<T, N, A>) {
     type IntoIter = core::slice::Iter<'a, T>;
     type Item = &'a T;
     fn into_iter(self) -> Self::IntoIter {
@@ -2710,7 +3427,7 @@ impl<'a, T, const N: usize> IntoIterator for &'a SmallVec<T, N> {
     }
 }
 
-impl<'a, T, const N: usize> IntoIterator for &'a mut SmallVec<T, N> {
+impl<'a, T, const N: usize, #[cfg(feature = "allocator_api")] A: Allocator> IntoIterator for &'a mut alloc_param!(SmallVec<T, N, A>) {
     type IntoIter = core::slice::IterMut<'a, T>;
     type Item = &'a mut T;
     fn into_iter(self) -> Self::IntoIter {
@@ -2718,18 +3435,21 @@ impl<'a, T, const N: usize> IntoIterator for &'a mut SmallVec<T, N> {
     }
 }
 
-impl<T, U, const N: usize, const M: usize> PartialEq<SmallVec<U, M>> for SmallVec<T, N>
+impl<T, U, const N: usize, const M: usize, #[cfg(feature = "allocator_api")] A: Allocator> PartialEq<alloc_param!(SmallVec<U, M, A>)> for alloc_param!(SmallVec<T, N, A>)
 where
     T: PartialEq<U>,
 {
     #[inline]
-    fn eq(&self, other: &SmallVec<U, M>) -> bool {
+    fn eq(&self, other: &alloc_param!(SmallVec<U, M, A>)) -> bool {
         self.as_slice().eq(other.as_slice())
     }
 }
 
-impl<T, const N: usize> Eq for SmallVec<T, N> where T: Eq {}
+impl<T, const N: usize, #[cfg(feature = "allocator_api")] A: Allocator> Eq for alloc_param!(SmallVec<T, N, A>) where T: Eq {}
 
-impl<T, U, const N: usize, const M: usize> PartialEq<[U; M]> for SmallVec<T, N>
+impl<T, U, const N: usize, const M: usize, #[cfg(feature = "allocator_api")] A: Allocator> PartialEq<[U; M]> for alloc_param!(SmallVec<T, N, A>)
 where
     T: PartialEq<U>,
 {
@@ -2739,7 +3459,7 @@ where
     }
 }
 
-impl<T, U, const N: usize, const M: usize> PartialEq<&[U; M]> for SmallVec<T, N>
+impl<T, U, const N: usize, const M: usize, #[cfg(feature = "allocator_api")] A: Allocator> PartialEq<&[U; M]> for alloc_param!(SmallVec<T, N, A>)
 where
     T: PartialEq<U>,
 {
@@ -2749,7 +3469,7 @@ where
     }
 }
 
-impl<T, U, const N: usize> PartialEq<[U]> for SmallVec<T, N>
+impl<T, U, const N: usize, #[cfg(feature = "allocator_api")] A: Allocator> PartialEq<[U]> for alloc_param!(SmallVec<T, N, A>)
 where
     T: PartialEq<U>,
 {
@@ -2759,7 +3479,7 @@ where
     }
 }
 
-impl<T, U, const N: usize> PartialEq<&[U]> for SmallVec<T, N>
+impl<T, U, const N: usize, #[cfg(feature = "allocator_api")] A: Allocator> PartialEq<&[U]> for alloc_param!(SmallVec<T, N, A>)
 where
     T: PartialEq<U>,
 {
@@ -2769,7 +3489,7 @@ where
     }
 }
 
-impl<T, U, const N: usize> PartialEq<&mut [U]> for SmallVec<T, N>
+impl<T, U, const N: usize, #[cfg(feature = "allocator_api")] A: Allocator> PartialEq<&mut [U]> for alloc_param!(SmallVec<T, N, A>)
 where
     T: PartialEq<U>,
 {
@@ -2779,73 +3499,76 @@ where
     }
 }
 
-impl<T, const N: usize, const M: usize> PartialOrd<SmallVec<T, M>> for SmallVec<T, N>
+impl<T, const N: usize, const M: usize, #[cfg(feature = "allocator_api")] A: Allocator> PartialOrd<alloc_param!(SmallVec<T, M, A>)> for alloc_param!(SmallVec<T, N, A>)
 where
     T: PartialOrd,
 {
     #[inline]
-    fn partial_cmp(&self, other: &SmallVec<T, M>) -> Option<core::cmp::Ordering> {
+    fn partial_cmp(&self, other: &alloc_param!(SmallVec<T, M, A>)) -> Option<core::cmp::Ordering> {
         self.as_slice().partial_cmp(other.as_slice())
     }
 }
 
-impl<T, const N: usize> Ord for SmallVec<T, N>
+impl<T, const N: usize, #[cfg(feature = "allocator_api")] A: Allocator> Ord for alloc_param!(SmallVec<T, N, A>)
 where
     T: Ord,
 {
     #[inline]
-    fn cmp(&self, other: &SmallVec<T, N>) -> core::cmp::Ordering {
+    fn cmp(&self, other: &Self) -> core::cmp::Ordering {
         self.as_slice().cmp(other.as_slice())
     }
 }
 
-impl<T: Hash, const N: usize> Hash for SmallVec<T, N> {
+impl<T: Hash, const N: usize, #[cfg(feature = "allocator_api")] A: Allocator> Hash for alloc_param!(SmallVec<T, N, A>) {
     fn hash<H: Hasher>(&self, state: &mut H) {
         self.as_slice().hash(state)
     }
 }
 
-impl<T, const N: usize> Borrow<[T]> for SmallVec<T, N> {
+impl<T, const N: usize, #[cfg(feature = "allocator_api")] A: Allocator> Borrow<[T]> for alloc_param!(SmallVec<T, N, A>) {
     #[inline]
     fn borrow(&self) -> &[T] {
         self.as_slice()
     }
 }
 
-impl<T, const N: usize> BorrowMut<[T]> for SmallVec<T, N> {
+impl<T, const N: usize, #[cfg(feature = "allocator_api")] A: Allocator> BorrowMut<[T]> for alloc_param!(SmallVec<T, N, A>) {
     #[inline]
     fn borrow_mut(&mut self) -> &mut [T] {
         self.as_mut_slice()
     }
 }
 
-impl<T, const N: usize> AsRef<[T]> for SmallVec<T, N> {
+impl<T, const N: usize, #[cfg(feature = "allocator_api")] A: Allocator> AsRef<[T]> for alloc_param!(SmallVec<T, N, A>) {
     #[inline]
     fn as_ref(&self) -> &[T] {
         self.as_slice()
     }
 }
 
-impl<T, const N: usize> AsMut<[T]> for SmallVec<T, N> {
+impl<T, const N: usize, #[cfg(feature = "allocator_api")] A: Allocator> AsMut<[T]> for alloc_param!(SmallVec<T, N, A>) {
     #[inline]
     fn as_mut(&mut self) -> &mut [T] {
         self.as_mut_slice()
     }
 }
 
-impl<T: Debug, const N: usize> Debug for SmallVec<T, N> {
+impl<T: Debug, const N: usize, #[cfg(feature = "allocator_api")] A: Allocator> Debug for alloc_param!(SmallVec<T, N, A>) {
     fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
         f.debug_list().entries(self.iter()).finish()
     }
 }
 
-impl<T: Debug, const N: usize> Debug for IntoIter<T, N> {
+impl<T: Debug, const N: usize, #[cfg(feature = "allocator_api")] A: Allocator> Debug for alloc_param!(IntoIter<T, N, A>) {
     fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
         f.debug_tuple("IntoIter").field(&self.as_slice()).finish()
     }
 }
 
-impl<T: Debug, const N: usize> Debug for Drain<'_, T, N> {
+impl<T: Debug, const N: usize, #[cfg(feature = "allocator_api")] A: Allocator> Debug for alloc_param!(Drain<'_, T, N, A>) {
     fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
         f.debug_tuple("Drain").field(&self.iter.as_slice()).finish()
     }
@@ -2853,7 +3576,7 @@ impl<T: Debug, const N: usize> Debug for Drain<'_, T, N> {
 
 #[cfg(feature = "serde")]
 #[cfg_attr(docsrs, doc(cfg(feature = "serde")))]
-impl<T, const N: usize> Serialize for SmallVec<T, N>
+impl<T, const N: usize, #[cfg(feature = "allocator_api")] A: Allocator> Serialize for alloc_param!(SmallVec<T, N, A>)
 where
     T: Serialize,
 {
@@ -2913,7 +3636,7 @@ where
 }
 
 #[cfg(feature = "malloc_size_of")]
-impl<T, const N: usize> MallocShallowSizeOf for SmallVec<T, N> {
+impl<T, const N: usize, #[cfg(feature = "allocator_api")] A: Allocator> MallocShallowSizeOf for alloc_param!(SmallVec<T, N, A>) {
     fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
         if self.spilled() {
             unsafe { ops.malloc_size_of(self.as_ptr()) }
@@ -2924,7 +3647,7 @@ impl<T, const N: usize> MallocShallowSizeOf for SmallVec<T, N> {
 }
 
 #[cfg(feature = "malloc_size_of")]
-impl<T: MallocSizeOf, const N: usize> MallocSizeOf for SmallVec<T, N> {
+impl<T: MallocSizeOf, const N: usize, #[cfg(feature = "allocator_api")] A: Allocator> MallocSizeOf for alloc_param!(SmallVec<T, N, A>) {
     fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
         let mut n = self.shallow_size_of(ops);
         for elem in self.iter() {
@@ -2936,7 +3659,7 @@ impl<T: MallocSizeOf, const N: usize> MallocSizeOf for SmallVec<T, N> {
 
 #[cfg(feature = "std")]
 #[cfg_attr(docsrs, doc(cfg(feature = "std")))]
-impl<const N: usize> io::Write for SmallVec<u8, N> {
+impl<const N: usize, #[cfg(feature = "allocator_api")] A: Allocator> io::Write for alloc_param!(SmallVec<u8, N, A>) {
     #[inline]
     fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
         self.extend_from_slice(buf);
@@ -2956,7 +3679,7 @@ impl<const N: usize> io::Write for SmallVec<u8, N> {
 }
 
 #[cfg(feature = "bytes")]
-unsafe impl<const N: usize> BufMut for SmallVec<u8, N> {
+unsafe impl<const N: usize, #[cfg(feature = "allocator_api")] A: Allocator> BufMut for alloc_param!(SmallVec<u8, N, A>) {
     #[inline]
     fn remaining_mut(&self) -> usize {
         // A vector can never have more than isize::MAX bytes
diff --git a/src/tests.rs b/src/tests.rs
index 2082f96..11692fb 100644
--- a/src/tests.rs
+++ b/src/tests.rs
@@ -92,6 +92,11 @@ fn test_with_capacity() {
     assert!(!v.spilled());
     assert_eq!(v.capacity(), 3);
 
+    let v: SmallVec<u8, 3> = SmallVec::with_capacity(3);
+    assert!(v.is_empty());
+    assert!(!v.spilled());
+    assert_eq!(v.capacity(), 3);
+
     let v: SmallVec<u8, 3> = SmallVec::with_capacity(10);
     assert!(v.is_empty());
     assert!(v.spilled());
@@ -662,6 +667,13 @@ fn shrink_after_from_empty_vec() {
     assert!(!v.spilled())
 }
 
+#[test]
+fn shrink_after_from_small_vec() {
+    let mut v = SmallVec::<u8, 2>::from_vec(vec![1]);
+    v.shrink_to_fit();
+    assert!(!v.spilled())
+}
+
 #[test]
 fn test_into_vec() {
     let vec = SmallVec::<u8, 2>::from_iter(0..2);
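
Illustrative usage (not part of the patch): a minimal sketch of how the allocator-parameterized API introduced above might be exercised, assuming the patched crate is built with the `allocator_api` feature on a nightly toolchain and that the type parameters are ordered `SmallVec<T, N, A>` as in the impls above. The `CountingAlloc` type below is hypothetical, written only to demonstrate the `from_slice_copy_in` entry point; it counts allocations and defers the real work to `Global`.

#![feature(allocator_api)]

use std::alloc::{AllocError, Allocator, Global, Layout};
use std::ptr::NonNull;
use std::sync::atomic::{AtomicUsize, Ordering};

use smallvec::SmallVec;

/// Hypothetical allocator that counts allocations and defers to `Global`.
#[derive(Clone, Copy)]
struct CountingAlloc<'a>(&'a AtomicUsize);

unsafe impl Allocator for CountingAlloc<'_> {
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.0.fetch_add(1, Ordering::Relaxed);
        Global.allocate(layout)
    }

    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        // Delegate to the global allocator; the safety contract is the caller's.
        unsafe { Global.deallocate(ptr, layout) }
    }
}

fn main() {
    let count = AtomicUsize::new(0);

    // Three elements fit within the inline capacity of four,
    // so the allocator is never called.
    let v: SmallVec<u8, 4, _> = SmallVec::from_slice_copy_in(&[1, 2, 3], CountingAlloc(&count));
    assert!(!v.spilled());
    assert_eq!(count.load(Ordering::Relaxed), 0);

    // Five elements exceed the inline capacity, so the vector
    // spills to the heap through the custom allocator.
    let w: SmallVec<u8, 4, _> = SmallVec::from_slice_copy_in(&[1, 2, 3, 4, 5], CountingAlloc(&count));
    assert!(w.spilled());
    assert!(count.load(Ordering::Relaxed) > 0);
}

Dropping `w` returns the heap block through the same allocator, which is why the `Drop` impls above store `&self.raw.allocator` in `DropDealloc` instead of assuming the global allocator.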