From 568b4e71264f2c636c65da0671e80d3c734489c6 Mon Sep 17 00:00:00 2001
From: Yureka <yuka@yuka.dev>
Date: Tue, 16 Jan 2024 09:58:27 +0100
Subject: [PATCH] check in new alloc for 1.75.0

---
 rust/alloc/alloc.rs            |  63 +++++-----
 rust/alloc/boxed.rs            |  74 +++++++-----
 rust/alloc/lib.rs              |  27 +++--
 rust/alloc/raw_vec.rs          |  49 +++++---
 rust/alloc/slice.rs            |   2 +-
 rust/alloc/vec/drain_filter.rs | 199 -------------------------------
 rust/alloc/vec/extract_if.rs   | 115 ++++++++++++++++++
 rust/alloc/vec/mod.rs          | 209 +++++++++++++++++++++++----------
 rust/alloc/vec/spec_extend.rs  |   8 +-
 9 files changed, 389 insertions(+), 357 deletions(-)
 delete mode 100644 rust/alloc/vec/drain_filter.rs
 create mode 100644 rust/alloc/vec/extract_if.rs

diff --git a/rust/alloc/alloc.rs b/rust/alloc/alloc.rs
index 08eafb3de807..8a6be8c98173 100644
--- a/rust/alloc/alloc.rs
+++ b/rust/alloc/alloc.rs
@@ -6,9 +6,7 @@
|
|
|
|
#[cfg(not(test))]
|
|
use core::intrinsics;
|
|
-use core::intrinsics::{min_align_of_val, size_of_val};
|
|
|
|
-use core::ptr::Unique;
|
|
#[cfg(not(test))]
|
|
use core::ptr::{self, NonNull};
|
|
|
|
@@ -40,7 +38,6 @@
|
|
#[rustc_nounwind]
|
|
fn __rust_alloc_zeroed(size: usize, align: usize) -> *mut u8;
|
|
|
|
- #[cfg(not(bootstrap))]
|
|
static __rust_no_alloc_shim_is_unstable: u8;
|
|
}
|
|
|
|
@@ -98,7 +95,6 @@ pub unsafe fn alloc(layout: Layout) -> *mut u8 {
|
|
unsafe {
|
|
// Make sure we don't accidentally allow omitting the allocator shim in
|
|
// stable code until it is actually stabilized.
|
|
- #[cfg(not(bootstrap))]
|
|
core::ptr::read_volatile(&__rust_no_alloc_shim_is_unstable);
|
|
|
|
__rust_alloc(layout.size(), layout.align())
|
|
@@ -339,23 +335,6 @@ unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
|
|
}
|
|
}
|
|
|
|
-#[cfg(not(version("1.72")))]
|
|
-#[cfg_attr(not(test), lang = "box_free")]
|
|
-#[inline]
|
|
-// This signature has to be the same as `Box`, otherwise an ICE will happen.
|
|
-// When an additional parameter to `Box` is added (like `A: Allocator`), this has to be added here as
|
|
-// well.
|
|
-// For example if `Box` is changed to `struct Box<T: ?Sized, A: Allocator>(Unique<T>, A)`,
|
|
-// this function has to be changed to `fn box_free<T: ?Sized, A: Allocator>(Unique<T>, A)` as well.
|
|
-pub(crate) unsafe fn box_free<T: ?Sized, A: Allocator>(ptr: Unique<T>, alloc: A) {
|
|
- unsafe {
|
|
- let size = size_of_val(ptr.as_ref());
|
|
- let align = min_align_of_val(ptr.as_ref());
|
|
- let layout = Layout::from_size_align_unchecked(size, align);
|
|
- alloc.deallocate(From::from(ptr.cast()), layout)
|
|
- }
|
|
-}
|
|
-
|
|
// # Allocation error handler
|
|
|
|
#[cfg(not(no_global_oom_handling))]
|
|
@@ -366,18 +345,31 @@ pub(crate) unsafe fn box_free<T: ?Sized, A: Allocator>(ptr: Unique<T>, alloc: A)
|
|
fn __rust_alloc_error_handler(size: usize, align: usize) -> !;
|
|
}
|
|
|
|
-/// Abort on memory allocation error or failure.
|
|
+/// Signal a memory allocation error.
|
|
///
|
|
-/// Callers of memory allocation APIs wishing to abort computation
|
|
+/// Callers of memory allocation APIs wishing to cease execution
|
|
/// in response to an allocation error are encouraged to call this function,
|
|
-/// rather than directly invoking `panic!` or similar.
|
|
+/// rather than directly invoking [`panic!`] or similar.
|
|
+///
|
|
+/// This function is guaranteed to diverge (not return normally with a value), but depending on
|
|
+/// global configuration, it may either panic (resulting in unwinding or aborting as per
|
|
+/// configuration for all panics), or abort the process (with no unwinding).
|
|
///
|
|
-/// The default behavior of this function is to print a message to standard error
|
|
-/// and abort the process.
|
|
-/// It can be replaced with [`set_alloc_error_hook`] and [`take_alloc_error_hook`].
|
|
+/// The default behavior is:
|
|
+///
|
|
+/// * If the binary links against `std` (typically the case), then
|
|
+/// print a message to standard error and abort the process.
|
|
+/// This behavior can be replaced with [`set_alloc_error_hook`] and [`take_alloc_error_hook`].
|
|
+/// Future versions of Rust may panic by default instead.
|
|
+///
|
|
+/// * If the binary does not link against `std` (all of its crates are marked
|
|
+/// [`#![no_std]`][no_std]), then call [`panic!`] with a message.
|
|
+/// [The panic handler] applies as to any panic.
|
|
///
|
|
/// [`set_alloc_error_hook`]: ../../std/alloc/fn.set_alloc_error_hook.html
|
|
/// [`take_alloc_error_hook`]: ../../std/alloc/fn.take_alloc_error_hook.html
|
|
+/// [The panic handler]: https://doc.rust-lang.org/reference/runtime.html#the-panic_handler-attribute
|
|
+/// [no_std]: https://doc.rust-lang.org/reference/names/preludes.html#the-no_std-attribute
|
|
#[stable(feature = "global_alloc", since = "1.28.0")]
|
|
#[rustc_const_unstable(feature = "const_alloc_error", issue = "92523")]
|
|
#[cfg(all(not(no_global_oom_handling), not(test)))]
|
|
@@ -387,13 +379,20 @@ const fn ct_error(_: Layout) -> ! {
|
|
panic!("allocation failed");
|
|
}
|
|
|
|
+ #[inline]
|
|
fn rt_error(layout: Layout) -> ! {
|
|
unsafe {
|
|
__rust_alloc_error_handler(layout.size(), layout.align());
|
|
}
|
|
}
|
|
|
|
- unsafe { core::intrinsics::const_eval_select((layout,), ct_error, rt_error) }
|
|
+ #[cfg(not(feature = "panic_immediate_abort"))]
|
|
+ unsafe {
|
|
+ core::intrinsics::const_eval_select((layout,), ct_error, rt_error)
|
|
+ }
|
|
+
|
|
+ #[cfg(feature = "panic_immediate_abort")]
|
|
+ ct_error(layout)
|
|
}
|
|
|
|
// For alloc test `std::alloc::handle_alloc_error` can be used directly.
|
|
@@ -415,13 +414,13 @@ pub unsafe fn __rdl_oom(size: usize, _align: usize) -> ! {
|
|
static __rust_alloc_error_handler_should_panic: u8;
|
|
}
|
|
|
|
- #[allow(unused_unsafe)]
|
|
if unsafe { __rust_alloc_error_handler_should_panic != 0 } {
|
|
panic!("memory allocation of {size} bytes failed")
|
|
} else {
|
|
- core::panicking::panic_nounwind_fmt(format_args!(
|
|
- "memory allocation of {size} bytes failed"
|
|
- ))
|
|
+ core::panicking::panic_nounwind_fmt(
|
|
+ format_args!("memory allocation of {size} bytes failed"),
|
|
+ /* force_no_backtrace */ false,
|
|
+ )
|
|
}
|
|
}
|
|
}
diff --git a/rust/alloc/boxed.rs b/rust/alloc/boxed.rs
index ed7e2f666178..f5f40778a193 100644
--- a/rust/alloc/boxed.rs
+++ b/rust/alloc/boxed.rs
@@ -159,12 +159,12 @@
|
|
use core::iter::FusedIterator;
|
|
use core::marker::Tuple;
|
|
use core::marker::Unsize;
|
|
-use core::mem;
|
|
+use core::mem::{self, SizedTypeProperties};
|
|
use core::ops::{
|
|
- CoerceUnsized, Deref, DerefMut, DispatchFromDyn, Generator, GeneratorState, Receiver,
|
|
+ CoerceUnsized, Coroutine, CoroutineState, Deref, DerefMut, DispatchFromDyn, Receiver,
|
|
};
|
|
use core::pin::Pin;
|
|
-use core::ptr::{self, Unique};
|
|
+use core::ptr::{self, NonNull, Unique};
|
|
use core::task::{Context, Poll};
|
|
|
|
#[cfg(not(no_global_oom_handling))]
|
|
@@ -211,7 +211,7 @@ impl<T> Box<T> {
|
|
/// ```
|
|
/// let five = Box::new(5);
|
|
/// ```
|
|
- #[cfg(all(not(no_global_oom_handling)))]
|
|
+ #[cfg(not(no_global_oom_handling))]
|
|
#[inline(always)]
|
|
#[stable(feature = "rust1", since = "1.0.0")]
|
|
#[must_use]
|
|
@@ -483,8 +483,12 @@ pub fn try_new_uninit_in(alloc: A) -> Result<Box<mem::MaybeUninit<T>, A>, AllocE
|
|
where
|
|
A: Allocator,
|
|
{
|
|
- let layout = Layout::new::<mem::MaybeUninit<T>>();
|
|
- let ptr = alloc.allocate(layout)?.cast();
|
|
+ let ptr = if T::IS_ZST {
|
|
+ NonNull::dangling()
|
|
+ } else {
|
|
+ let layout = Layout::new::<mem::MaybeUninit<T>>();
|
|
+ alloc.allocate(layout)?.cast()
|
|
+ };
|
|
unsafe { Ok(Box::from_raw_in(ptr.as_ptr(), alloc)) }
|
|
}
|
|
|
|
@@ -553,8 +557,12 @@ pub fn try_new_zeroed_in(alloc: A) -> Result<Box<mem::MaybeUninit<T>, A>, AllocE
|
|
where
|
|
A: Allocator,
|
|
{
|
|
- let layout = Layout::new::<mem::MaybeUninit<T>>();
|
|
- let ptr = alloc.allocate_zeroed(layout)?.cast();
|
|
+ let ptr = if T::IS_ZST {
|
|
+ NonNull::dangling()
|
|
+ } else {
|
|
+ let layout = Layout::new::<mem::MaybeUninit<T>>();
|
|
+ alloc.allocate_zeroed(layout)?.cast()
|
|
+ };
|
|
unsafe { Ok(Box::from_raw_in(ptr.as_ptr(), alloc)) }
|
|
}
|
|
|
|
@@ -679,14 +687,16 @@ pub fn new_zeroed_slice(len: usize) -> Box<[mem::MaybeUninit<T>]> {
|
|
#[unstable(feature = "allocator_api", issue = "32838")]
|
|
#[inline]
|
|
pub fn try_new_uninit_slice(len: usize) -> Result<Box<[mem::MaybeUninit<T>]>, AllocError> {
|
|
- unsafe {
|
|
+ let ptr = if T::IS_ZST || len == 0 {
|
|
+ NonNull::dangling()
|
|
+ } else {
|
|
let layout = match Layout::array::<mem::MaybeUninit<T>>(len) {
|
|
Ok(l) => l,
|
|
Err(_) => return Err(AllocError),
|
|
};
|
|
- let ptr = Global.allocate(layout)?;
|
|
- Ok(RawVec::from_raw_parts_in(ptr.as_mut_ptr() as *mut _, len, Global).into_box(len))
|
|
- }
|
|
+ Global.allocate(layout)?.cast()
|
|
+ };
|
|
+ unsafe { Ok(RawVec::from_raw_parts_in(ptr.as_ptr(), len, Global).into_box(len)) }
|
|
}
|
|
|
|
/// Constructs a new boxed slice with uninitialized contents, with the memory
|
|
@@ -711,14 +721,16 @@ pub fn try_new_uninit_slice(len: usize) -> Result<Box<[mem::MaybeUninit<T>]>, Al
|
|
#[unstable(feature = "allocator_api", issue = "32838")]
|
|
#[inline]
|
|
pub fn try_new_zeroed_slice(len: usize) -> Result<Box<[mem::MaybeUninit<T>]>, AllocError> {
|
|
- unsafe {
|
|
+ let ptr = if T::IS_ZST || len == 0 {
|
|
+ NonNull::dangling()
|
|
+ } else {
|
|
let layout = match Layout::array::<mem::MaybeUninit<T>>(len) {
|
|
Ok(l) => l,
|
|
Err(_) => return Err(AllocError),
|
|
};
|
|
- let ptr = Global.allocate_zeroed(layout)?;
|
|
- Ok(RawVec::from_raw_parts_in(ptr.as_mut_ptr() as *mut _, len, Global).into_box(len))
|
|
- }
|
|
+ Global.allocate_zeroed(layout)?.cast()
|
|
+ };
|
|
+ unsafe { Ok(RawVec::from_raw_parts_in(ptr.as_ptr(), len, Global).into_box(len)) }
|
|
}
|
|
}
|
|
|
|
@@ -1215,12 +1227,6 @@ pub const fn into_pin(boxed: Self) -> Pin<Self>
|
|
|
|
#[stable(feature = "rust1", since = "1.0.0")]
|
|
unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Box<T, A> {
|
|
- #[cfg(not(version("1.72")))]
|
|
- fn drop(&mut self) {
|
|
- // FIXME: Do nothing, drop is currently performed by compiler.
|
|
- }
|
|
-
|
|
- #[cfg(version("1.72"))]
|
|
#[inline]
|
|
fn drop(&mut self) {
|
|
// the T in the Box is dropped by the compiler before the destructor is run
|
|
@@ -1229,7 +1235,9 @@ fn drop(&mut self) {
|
|
|
|
unsafe {
|
|
let layout = Layout::for_value_raw(ptr.as_ptr());
|
|
- self.1.deallocate(From::from(ptr.cast()), layout)
|
|
+ if layout.size() != 0 {
|
|
+ self.1.deallocate(From::from(ptr.cast()), layout);
|
|
+ }
|
|
}
|
|
}
|
|
}
|
|
@@ -2102,28 +2110,28 @@ fn as_mut(&mut self) -> &mut T {
|
|
#[stable(feature = "pin", since = "1.33.0")]
|
|
impl<T: ?Sized, A: Allocator> Unpin for Box<T, A> where A: 'static {}
|
|
|
|
-#[unstable(feature = "generator_trait", issue = "43122")]
|
|
-impl<G: ?Sized + Generator<R> + Unpin, R, A: Allocator> Generator<R> for Box<G, A>
|
|
+#[unstable(feature = "coroutine_trait", issue = "43122")]
|
|
+impl<G: ?Sized + Coroutine<R> + Unpin, R, A: Allocator> Coroutine<R> for Box<G, A>
|
|
where
|
|
A: 'static,
|
|
{
|
|
type Yield = G::Yield;
|
|
type Return = G::Return;
|
|
|
|
- fn resume(mut self: Pin<&mut Self>, arg: R) -> GeneratorState<Self::Yield, Self::Return> {
|
|
+ fn resume(mut self: Pin<&mut Self>, arg: R) -> CoroutineState<Self::Yield, Self::Return> {
|
|
G::resume(Pin::new(&mut *self), arg)
|
|
}
|
|
}
|
|
|
|
-#[unstable(feature = "generator_trait", issue = "43122")]
|
|
-impl<G: ?Sized + Generator<R>, R, A: Allocator> Generator<R> for Pin<Box<G, A>>
|
|
+#[unstable(feature = "coroutine_trait", issue = "43122")]
|
|
+impl<G: ?Sized + Coroutine<R>, R, A: Allocator> Coroutine<R> for Pin<Box<G, A>>
|
|
where
|
|
A: 'static,
|
|
{
|
|
type Yield = G::Yield;
|
|
type Return = G::Return;
|
|
|
|
- fn resume(mut self: Pin<&mut Self>, arg: R) -> GeneratorState<Self::Yield, Self::Return> {
|
|
+ fn resume(mut self: Pin<&mut Self>, arg: R) -> CoroutineState<Self::Yield, Self::Return> {
|
|
G::resume((*self).as_mut(), arg)
|
|
}
|
|
}
|
|
@@ -2179,7 +2187,7 @@ pub fn downcast<T: Error + 'static>(self: Box<Self>) -> Result<Box<T>, Box<dyn E
|
|
let err: Box<dyn Error> = self;
|
|
<dyn Error>::downcast(err).map_err(|s| unsafe {
|
|
// Reapply the `Send` marker.
|
|
- mem::transmute::<Box<dyn Error>, Box<dyn Error + Send>>(s)
|
|
+ Box::from_raw(Box::into_raw(s) as *mut (dyn Error + Send))
|
|
})
|
|
}
|
|
}
|
|
@@ -2193,7 +2201,7 @@ pub fn downcast<T: Error + 'static>(self: Box<Self>) -> Result<Box<T>, Box<Self>
|
|
let err: Box<dyn Error> = self;
|
|
<dyn Error>::downcast(err).map_err(|s| unsafe {
|
|
// Reapply the `Send + Sync` marker.
|
|
- mem::transmute::<Box<dyn Error>, Box<dyn Error + Send + Sync>>(s)
|
|
+ Box::from_raw(Box::into_raw(s) as *mut (dyn Error + Send + Sync))
|
|
})
|
|
}
|
|
}
|
|
@@ -2440,4 +2448,8 @@ fn cause(&self) -> Option<&dyn core::error::Error> {
|
|
fn source(&self) -> Option<&(dyn core::error::Error + 'static)> {
|
|
core::error::Error::source(&**self)
|
|
}
|
|
+
|
|
+ fn provide<'b>(&'b self, request: &mut core::error::Request<'b>) {
|
|
+ core::error::Error::provide(&**self, request);
|
|
+ }
|
|
}
diff --git a/rust/alloc/lib.rs b/rust/alloc/lib.rs
index 65b7a02d0956..345cf5c9cf92 100644
--- a/rust/alloc/lib.rs
+++ b/rust/alloc/lib.rs
@@ -57,8 +57,12 @@
|
|
//! [`Cell`]: core::cell
|
|
//! [`Rc`]: rc
|
|
//! [`RefCell`]: core::cell
|
|
-#![feature(doc_cfg_hide)]
|
|
|
|
+// To run alloc tests without x.py without ending up with two copies of alloc, Miri needs to be
|
|
+// able to "empty" this crate. See <https://github.com/rust-lang/miri-test-libstd/issues/4>.
|
|
+// rustc itself never sets the feature, so this line has no effect there.
|
|
+#![cfg(any(not(feature = "miri-test-libstd"), test, doctest))]
|
|
+//
|
|
#![allow(unused_attributes)]
|
|
#![stable(feature = "alloc", since = "1.36.0")]
|
|
#![doc(
|
|
@@ -76,13 +80,10 @@
|
|
not(no_sync),
|
|
target_has_atomic = "ptr"
|
|
))]
|
|
+#![cfg_attr(not(bootstrap), doc(rust_logo))]
|
|
+#![cfg_attr(not(bootstrap), feature(rustdoc_internals))]
|
|
#![no_std]
|
|
#![needs_allocator]
|
|
-// To run alloc tests without x.py without ending up with two copies of alloc, Miri needs to be
|
|
-// able to "empty" this crate. See <https://github.com/rust-lang/miri-test-libstd/issues/4>.
|
|
-// rustc itself never sets the feature, so this line has no affect there.
|
|
-#![cfg(any(not(feature = "miri-test-libstd"), test, doctest))]
|
|
-//
|
|
// Lints:
|
|
#![deny(unsafe_op_in_unsafe_fn)]
|
|
#![deny(fuzzy_provenance_casts)]
|
|
@@ -91,6 +92,8 @@
|
|
#![warn(missing_docs)]
|
|
#![allow(explicit_outlives_requirements)]
|
|
#![warn(multiple_supertrait_upcastable)]
|
|
+#![allow(internal_features)]
|
|
+#![allow(rustdoc::redundant_explicit_links)]
|
|
//
|
|
// Library features:
|
|
// tidy-alphabetical-start
|
|
@@ -107,21 +110,20 @@
|
|
#![feature(ascii_char)]
|
|
#![feature(assert_matches)]
|
|
#![feature(async_iterator)]
|
|
-#![feature(cfg_version)]
|
|
#![feature(coerce_unsized)]
|
|
#![feature(const_align_of_val)]
|
|
-#![cfg_attr(not(version("1.73")), feature(const_box))]
|
|
+#![feature(const_box)]
|
|
#![cfg_attr(not(no_borrow), feature(const_cow_is_borrowed))]
|
|
#![feature(const_eval_select)]
|
|
#![feature(const_maybe_uninit_as_mut_ptr)]
|
|
#![feature(const_maybe_uninit_write)]
|
|
-#![feature(const_maybe_uninit_zeroed)]
|
|
#![feature(const_pin)]
|
|
#![feature(const_refs_to_cell)]
|
|
#![feature(const_size_of_val)]
|
|
#![feature(const_waker)]
|
|
#![feature(core_intrinsics)]
|
|
#![feature(core_panic)]
|
|
+#![feature(deprecated_suggestion)]
|
|
#![feature(dispatch_from_dyn)]
|
|
#![feature(error_generic_member_access)]
|
|
#![feature(error_in_core)]
|
|
@@ -140,13 +142,11 @@
|
|
#![feature(maybe_uninit_uninit_array)]
|
|
#![feature(maybe_uninit_uninit_array_transpose)]
|
|
#![feature(pattern)]
|
|
-#![feature(pointer_byte_offsets)]
|
|
-#![cfg_attr(not(version("1.73")), feature(provide_any))]
|
|
+#![feature(ptr_addr_eq)]
|
|
#![feature(ptr_internals)]
|
|
#![feature(ptr_metadata)]
|
|
#![feature(ptr_sub_ptr)]
|
|
#![feature(receiver_trait)]
|
|
-#![feature(saturating_int_impl)]
|
|
#![feature(set_ptr_value)]
|
|
#![feature(sized_type_properties)]
|
|
#![feature(slice_from_ptr_range)]
|
|
@@ -169,7 +169,7 @@
|
|
//
|
|
// Language features:
|
|
// tidy-alphabetical-start
|
|
-#![cfg_attr(not(test), feature(generator_trait))]
|
|
+#![cfg_attr(not(test), feature(coroutine_trait))]
|
|
#![cfg_attr(test, feature(panic_update_hook))]
|
|
#![cfg_attr(test, feature(test))]
|
|
#![feature(allocator_internals)]
|
|
@@ -204,6 +204,7 @@
|
|
//
|
|
// Rustdoc features:
|
|
#![feature(doc_cfg)]
|
|
+#![feature(doc_cfg_hide)]
|
|
// Technically, this is a bug in rustdoc: rustdoc sees the documentation on `#[lang = slice_alloc]`
|
|
// blocks is for `&[T]`, which also has documentation using this feature in `core`, and gets mad
|
|
// that the feature-gate isn't enabled. Ideally, it wouldn't check for the feature gate for docs
diff --git a/rust/alloc/raw_vec.rs b/rust/alloc/raw_vec.rs
index 65d5ce15828e..f1b8cec8cc62 100644
--- a/rust/alloc/raw_vec.rs
+++ b/rust/alloc/raw_vec.rs
@@ -338,10 +338,13 @@ pub fn reserve_for_push(&mut self, len: usize) {
|
|
/// The same as `reserve`, but returns on errors instead of panicking or aborting.
|
|
pub fn try_reserve(&mut self, len: usize, additional: usize) -> Result<(), TryReserveError> {
|
|
if self.needs_to_grow(len, additional) {
|
|
- self.grow_amortized(len, additional)
|
|
- } else {
|
|
- Ok(())
|
|
+ self.grow_amortized(len, additional)?;
|
|
+ }
|
|
+ unsafe {
|
|
+ // Inform the optimizer that the reservation has succeeded or wasn't needed
|
|
+ core::intrinsics::assume(!self.needs_to_grow(len, additional));
|
|
}
|
|
+ Ok(())
|
|
}
|
|
|
|
/// The same as `reserve_for_push`, but returns on errors instead of panicking or aborting.
|
|
@@ -378,7 +381,14 @@ pub fn try_reserve_exact(
|
|
len: usize,
|
|
additional: usize,
|
|
) -> Result<(), TryReserveError> {
|
|
- if self.needs_to_grow(len, additional) { self.grow_exact(len, additional) } else { Ok(()) }
|
|
+ if self.needs_to_grow(len, additional) {
|
|
+ self.grow_exact(len, additional)?;
|
|
+ }
|
|
+ unsafe {
|
|
+ // Inform the optimizer that the reservation has succeeded or wasn't needed
|
|
+ core::intrinsics::assume(!self.needs_to_grow(len, additional));
|
|
+ }
|
|
+ Ok(())
|
|
}
|
|
|
|
/// Shrinks the buffer down to the specified capacity. If the given amount
|
|
@@ -471,16 +481,26 @@ fn shrink(&mut self, cap: usize) -> Result<(), TryReserveError> {
|
|
let (ptr, layout) = if let Some(mem) = self.current_memory() { mem } else { return Ok(()) };
|
|
// See current_memory() why this assert is here
|
|
let _: () = const { assert!(mem::size_of::<T>() % mem::align_of::<T>() == 0) };
|
|
- let ptr = unsafe {
|
|
- // `Layout::array` cannot overflow here because it would have
|
|
- // overflowed earlier when capacity was larger.
|
|
- let new_size = mem::size_of::<T>().unchecked_mul(cap);
|
|
- let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
|
|
- self.alloc
|
|
- .shrink(ptr, layout, new_layout)
|
|
- .map_err(|_| AllocError { layout: new_layout, non_exhaustive: () })?
|
|
- };
|
|
- self.set_ptr_and_cap(ptr, cap);
|
|
+
|
|
+ // If shrinking to 0, deallocate the buffer. We don't reach this point
|
|
+ // for the T::IS_ZST case since current_memory() will have returned
|
|
+ // None.
|
|
+ if cap == 0 {
|
|
+ unsafe { self.alloc.deallocate(ptr, layout) };
|
|
+ self.ptr = Unique::dangling();
|
|
+ self.cap = 0;
|
|
+ } else {
|
|
+ let ptr = unsafe {
|
|
+ // `Layout::array` cannot overflow here because it would have
|
|
+ // overflowed earlier when capacity was larger.
|
|
+ let new_size = mem::size_of::<T>().unchecked_mul(cap);
|
|
+ let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
|
|
+ self.alloc
|
|
+ .shrink(ptr, layout, new_layout)
|
|
+ .map_err(|_| AllocError { layout: new_layout, non_exhaustive: () })?
|
|
+ };
|
|
+ self.set_ptr_and_cap(ptr, cap);
|
|
+ }
|
|
Ok(())
|
|
}
|
|
}
|
|
@@ -559,6 +579,7 @@ fn alloc_guard(alloc_size: usize) -> Result<(), TryReserveError> {
|
|
// ensure that the code generation related to these panics is minimal as there's
|
|
// only one location which panics rather than a bunch throughout the module.
|
|
#[cfg(not(no_global_oom_handling))]
|
|
+#[cfg_attr(not(feature = "panic_immediate_abort"), inline(never))]
|
|
fn capacity_overflow() -> ! {
|
|
panic!("capacity overflow");
|
|
}
diff --git a/rust/alloc/slice.rs b/rust/alloc/slice.rs
index 6ac463bd3edc..1181836da5f4 100644
--- a/rust/alloc/slice.rs
+++ b/rust/alloc/slice.rs
@@ -594,7 +594,7 @@ pub fn join<Separator>(&self, sep: Separator) -> <Self as Join<Separator>>::Outp
|
|
/// ```
|
|
#[rustc_allow_incoherent_impl]
|
|
#[stable(feature = "rust1", since = "1.0.0")]
|
|
- #[deprecated(since = "1.3.0", note = "renamed to join")]
|
|
+ #[deprecated(since = "1.3.0", note = "renamed to join", suggestion = "join")]
|
|
pub fn connect<Separator>(&self, sep: Separator) -> <Self as Join<Separator>>::Output
|
|
where
|
|
Self: Join<Separator>,
diff --git a/rust/alloc/vec/drain_filter.rs b/rust/alloc/vec/drain_filter.rs
deleted file mode 100644
index 09efff090e42..000000000000
--- a/rust/alloc/vec/drain_filter.rs
+++ /dev/null
@@ -1,199 +0,0 @@
|
|
-// SPDX-License-Identifier: Apache-2.0 OR MIT
|
|
-
|
|
-use crate::alloc::{Allocator, Global};
|
|
-use core::mem::{ManuallyDrop, SizedTypeProperties};
|
|
-use core::ptr;
|
|
-use core::slice;
|
|
-
|
|
-use super::Vec;
|
|
-
|
|
-/// An iterator which uses a closure to determine if an element should be removed.
|
|
-///
|
|
-/// This struct is created by [`Vec::drain_filter`].
|
|
-/// See its documentation for more.
|
|
-///
|
|
-/// # Example
|
|
-///
|
|
-/// ```
|
|
-/// #![feature(drain_filter)]
|
|
-///
|
|
-/// let mut v = vec![0, 1, 2];
|
|
-/// let iter: std::vec::DrainFilter<'_, _, _> = v.drain_filter(|x| *x % 2 == 0);
|
|
-/// ```
|
|
-#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
|
|
-#[derive(Debug)]
|
|
-pub struct DrainFilter<
|
|
- 'a,
|
|
- T,
|
|
- F,
|
|
- #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
|
|
-> where
|
|
- F: FnMut(&mut T) -> bool,
|
|
-{
|
|
- pub(super) vec: &'a mut Vec<T, A>,
|
|
- /// The index of the item that will be inspected by the next call to `next`.
|
|
- pub(super) idx: usize,
|
|
- /// The number of items that have been drained (removed) thus far.
|
|
- pub(super) del: usize,
|
|
- /// The original length of `vec` prior to draining.
|
|
- pub(super) old_len: usize,
|
|
- /// The filter test predicate.
|
|
- pub(super) pred: F,
|
|
- /// A flag that indicates a panic has occurred in the filter test predicate.
|
|
- /// This is used as a hint in the drop implementation to prevent consumption
|
|
- /// of the remainder of the `DrainFilter`. Any unprocessed items will be
|
|
- /// backshifted in the `vec`, but no further items will be dropped or
|
|
- /// tested by the filter predicate.
|
|
- pub(super) panic_flag: bool,
|
|
-}
|
|
-
|
|
-impl<T, F, A: Allocator> DrainFilter<'_, T, F, A>
|
|
-where
|
|
- F: FnMut(&mut T) -> bool,
|
|
-{
|
|
- /// Returns a reference to the underlying allocator.
|
|
- #[unstable(feature = "allocator_api", issue = "32838")]
|
|
- #[inline]
|
|
- pub fn allocator(&self) -> &A {
|
|
- self.vec.allocator()
|
|
- }
|
|
-
|
|
- /// Keep unyielded elements in the source `Vec`.
|
|
- ///
|
|
- /// # Examples
|
|
- ///
|
|
- /// ```
|
|
- /// #![feature(drain_filter)]
|
|
- /// #![feature(drain_keep_rest)]
|
|
- ///
|
|
- /// let mut vec = vec!['a', 'b', 'c'];
|
|
- /// let mut drain = vec.drain_filter(|_| true);
|
|
- ///
|
|
- /// assert_eq!(drain.next().unwrap(), 'a');
|
|
- ///
|
|
- /// // This call keeps 'b' and 'c' in the vec.
|
|
- /// drain.keep_rest();
|
|
- ///
|
|
- /// // If we wouldn't call `keep_rest()`,
|
|
- /// // `vec` would be empty.
|
|
- /// assert_eq!(vec, ['b', 'c']);
|
|
- /// ```
|
|
- #[unstable(feature = "drain_keep_rest", issue = "101122")]
|
|
- pub fn keep_rest(self) {
|
|
- // At this moment layout looks like this:
|
|
- //
|
|
- // _____________________/-- old_len
|
|
- // / \
|
|
- // [kept] [yielded] [tail]
|
|
- // \_______/ ^-- idx
|
|
- // \-- del
|
|
- //
|
|
- // Normally `Drop` impl would drop [tail] (via .for_each(drop), ie still calling `pred`)
|
|
- //
|
|
- // 1. Move [tail] after [kept]
|
|
- // 2. Update length of the original vec to `old_len - del`
|
|
- // a. In case of ZST, this is the only thing we want to do
|
|
- // 3. Do *not* drop self, as everything is put in a consistent state already, there is nothing to do
|
|
- let mut this = ManuallyDrop::new(self);
|
|
-
|
|
- unsafe {
|
|
- // ZSTs have no identity, so we don't need to move them around.
|
|
- if !T::IS_ZST && this.idx < this.old_len && this.del > 0 {
|
|
- let ptr = this.vec.as_mut_ptr();
|
|
- let src = ptr.add(this.idx);
|
|
- let dst = src.sub(this.del);
|
|
- let tail_len = this.old_len - this.idx;
|
|
- src.copy_to(dst, tail_len);
|
|
- }
|
|
-
|
|
- let new_len = this.old_len - this.del;
|
|
- this.vec.set_len(new_len);
|
|
- }
|
|
- }
|
|
-}
|
|
-
|
|
-#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
|
|
-impl<T, F, A: Allocator> Iterator for DrainFilter<'_, T, F, A>
|
|
-where
|
|
- F: FnMut(&mut T) -> bool,
|
|
-{
|
|
- type Item = T;
|
|
-
|
|
- fn next(&mut self) -> Option<T> {
|
|
- unsafe {
|
|
- while self.idx < self.old_len {
|
|
- let i = self.idx;
|
|
- let v = slice::from_raw_parts_mut(self.vec.as_mut_ptr(), self.old_len);
|
|
- self.panic_flag = true;
|
|
- let drained = (self.pred)(&mut v[i]);
|
|
- self.panic_flag = false;
|
|
- // Update the index *after* the predicate is called. If the index
|
|
- // is updated prior and the predicate panics, the element at this
|
|
- // index would be leaked.
|
|
- self.idx += 1;
|
|
- if drained {
|
|
- self.del += 1;
|
|
- return Some(ptr::read(&v[i]));
|
|
- } else if self.del > 0 {
|
|
- let del = self.del;
|
|
- let src: *const T = &v[i];
|
|
- let dst: *mut T = &mut v[i - del];
|
|
- ptr::copy_nonoverlapping(src, dst, 1);
|
|
- }
|
|
- }
|
|
- None
|
|
- }
|
|
- }
|
|
-
|
|
- fn size_hint(&self) -> (usize, Option<usize>) {
|
|
- (0, Some(self.old_len - self.idx))
|
|
- }
|
|
-}
|
|
-
|
|
-#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
|
|
-impl<T, F, A: Allocator> Drop for DrainFilter<'_, T, F, A>
|
|
-where
|
|
- F: FnMut(&mut T) -> bool,
|
|
-{
|
|
- fn drop(&mut self) {
|
|
- struct BackshiftOnDrop<'a, 'b, T, F, A: Allocator>
|
|
- where
|
|
- F: FnMut(&mut T) -> bool,
|
|
- {
|
|
- drain: &'b mut DrainFilter<'a, T, F, A>,
|
|
- }
|
|
-
|
|
- impl<'a, 'b, T, F, A: Allocator> Drop for BackshiftOnDrop<'a, 'b, T, F, A>
|
|
- where
|
|
- F: FnMut(&mut T) -> bool,
|
|
- {
|
|
- fn drop(&mut self) {
|
|
- unsafe {
|
|
- if self.drain.idx < self.drain.old_len && self.drain.del > 0 {
|
|
- // This is a pretty messed up state, and there isn't really an
|
|
- // obviously right thing to do. We don't want to keep trying
|
|
- // to execute `pred`, so we just backshift all the unprocessed
|
|
- // elements and tell the vec that they still exist. The backshift
|
|
- // is required to prevent a double-drop of the last successfully
|
|
- // drained item prior to a panic in the predicate.
|
|
- let ptr = self.drain.vec.as_mut_ptr();
|
|
- let src = ptr.add(self.drain.idx);
|
|
- let dst = src.sub(self.drain.del);
|
|
- let tail_len = self.drain.old_len - self.drain.idx;
|
|
- src.copy_to(dst, tail_len);
|
|
- }
|
|
- self.drain.vec.set_len(self.drain.old_len - self.drain.del);
|
|
- }
|
|
- }
|
|
- }
|
|
-
|
|
- let backshift = BackshiftOnDrop { drain: self };
|
|
-
|
|
- // Attempt to consume any remaining elements if the filter predicate
|
|
- // has not yet panicked. We'll backshift any remaining elements
|
|
- // whether we've already panicked or if the consumption here panics.
|
|
- if !backshift.drain.panic_flag {
|
|
- backshift.drain.for_each(drop);
|
|
- }
|
|
- }
|
|
-}
diff --git a/rust/alloc/vec/extract_if.rs b/rust/alloc/vec/extract_if.rs
new file mode 100644
index 000000000000..f314a51d4d3d
--- /dev/null
+++ b/rust/alloc/vec/extract_if.rs
@@ -0,0 +1,115 @@
|
|
+// SPDX-License-Identifier: Apache-2.0 OR MIT
|
|
+
|
|
+use crate::alloc::{Allocator, Global};
|
|
+use core::ptr;
|
|
+use core::slice;
|
|
+
|
|
+use super::Vec;
|
|
+
|
|
+/// An iterator which uses a closure to determine if an element should be removed.
|
|
+///
|
|
+/// This struct is created by [`Vec::extract_if`].
|
|
+/// See its documentation for more.
|
|
+///
|
|
+/// # Example
|
|
+///
|
|
+/// ```
|
|
+/// #![feature(extract_if)]
|
|
+///
|
|
+/// let mut v = vec![0, 1, 2];
|
|
+/// let iter: std::vec::ExtractIf<'_, _, _> = v.extract_if(|x| *x % 2 == 0);
|
|
+/// ```
|
|
+#[unstable(feature = "extract_if", reason = "recently added", issue = "43244")]
|
|
+#[derive(Debug)]
|
|
+#[must_use = "iterators are lazy and do nothing unless consumed"]
|
|
+pub struct ExtractIf<
|
|
+ 'a,
|
|
+ T,
|
|
+ F,
|
|
+ #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
|
|
+> where
|
|
+ F: FnMut(&mut T) -> bool,
|
|
+{
|
|
+ pub(super) vec: &'a mut Vec<T, A>,
|
|
+ /// The index of the item that will be inspected by the next call to `next`.
|
|
+ pub(super) idx: usize,
|
|
+ /// The number of items that have been drained (removed) thus far.
|
|
+ pub(super) del: usize,
|
|
+ /// The original length of `vec` prior to draining.
|
|
+ pub(super) old_len: usize,
|
|
+ /// The filter test predicate.
|
|
+ pub(super) pred: F,
|
|
+}
|
|
+
|
|
+impl<T, F, A: Allocator> ExtractIf<'_, T, F, A>
|
|
+where
|
|
+ F: FnMut(&mut T) -> bool,
|
|
+{
|
|
+ /// Returns a reference to the underlying allocator.
|
|
+ #[unstable(feature = "allocator_api", issue = "32838")]
|
|
+ #[inline]
|
|
+ pub fn allocator(&self) -> &A {
|
|
+ self.vec.allocator()
|
|
+ }
|
|
+}
|
|
+
|
|
+#[unstable(feature = "extract_if", reason = "recently added", issue = "43244")]
|
|
+impl<T, F, A: Allocator> Iterator for ExtractIf<'_, T, F, A>
|
|
+where
|
|
+ F: FnMut(&mut T) -> bool,
|
|
+{
|
|
+ type Item = T;
|
|
+
|
|
+ fn next(&mut self) -> Option<T> {
|
|
+ unsafe {
|
|
+ while self.idx < self.old_len {
|
|
+ let i = self.idx;
|
|
+ let v = slice::from_raw_parts_mut(self.vec.as_mut_ptr(), self.old_len);
|
|
+ let drained = (self.pred)(&mut v[i]);
|
|
+ // Update the index *after* the predicate is called. If the index
|
|
+ // is updated prior and the predicate panics, the element at this
|
|
+ // index would be leaked.
|
|
+ self.idx += 1;
|
|
+ if drained {
|
|
+ self.del += 1;
|
|
+ return Some(ptr::read(&v[i]));
|
|
+ } else if self.del > 0 {
|
|
+ let del = self.del;
|
|
+ let src: *const T = &v[i];
|
|
+ let dst: *mut T = &mut v[i - del];
|
|
+ ptr::copy_nonoverlapping(src, dst, 1);
|
|
+ }
|
|
+ }
|
|
+ None
|
|
+ }
|
|
+ }
|
|
+
|
|
+ fn size_hint(&self) -> (usize, Option<usize>) {
|
|
+ (0, Some(self.old_len - self.idx))
|
|
+ }
|
|
+}
|
|
+
|
|
+#[unstable(feature = "extract_if", reason = "recently added", issue = "43244")]
|
|
+impl<T, F, A: Allocator> Drop for ExtractIf<'_, T, F, A>
|
|
+where
|
|
+ F: FnMut(&mut T) -> bool,
|
|
+{
|
|
+ fn drop(&mut self) {
|
|
+ unsafe {
|
|
+ if self.idx < self.old_len && self.del > 0 {
|
|
+ // This is a pretty messed up state, and there isn't really an
|
|
+ // obviously right thing to do. We don't want to keep trying
|
|
+ // to execute `pred`, so we just backshift all the unprocessed
|
|
+ // elements and tell the vec that they still exist. The backshift
|
|
+ // is required to prevent a double-drop of the last successfully
|
|
+ // drained item prior to a panic in the predicate.
|
|
+ let ptr = self.vec.as_mut_ptr();
|
|
+ let src = ptr.add(self.idx);
|
|
+ let dst = src.sub(self.del);
|
|
+ let tail_len = self.old_len - self.idx;
|
|
+ src.copy_to(dst, tail_len);
|
|
+ }
|
|
+ self.vec.set_len(self.old_len - self.del);
|
|
+ }
|
|
+ }
|
|
+}
diff --git a/rust/alloc/vec/mod.rs b/rust/alloc/vec/mod.rs
index 05c70de0227e..0d95fd7ef337 100644
--- a/rust/alloc/vec/mod.rs
+++ b/rust/alloc/vec/mod.rs
@@ -74,10 +74,10 @@
|
|
use crate::collections::{TryReserveError, TryReserveErrorKind};
|
|
use crate::raw_vec::RawVec;
|
|
|
|
-#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
|
|
-pub use self::drain_filter::DrainFilter;
|
|
+#[unstable(feature = "extract_if", reason = "recently added", issue = "43244")]
|
|
+pub use self::extract_if::ExtractIf;
|
|
|
|
-mod drain_filter;
|
|
+mod extract_if;
|
|
|
|
#[cfg(not(no_global_oom_handling))]
|
|
#[stable(feature = "vec_splice", since = "1.21.0")]
|
|
@@ -216,7 +216,7 @@
|
|
///
|
|
/// # Indexing
|
|
///
|
|
-/// The `Vec` type allows to access values by index, because it implements the
|
|
+/// The `Vec` type allows access to values by index, because it implements the
|
|
/// [`Index`] trait. An example will be more explicit:
|
|
///
|
|
/// ```
|
|
@@ -618,22 +618,20 @@ pub fn try_with_capacity(capacity: usize) -> Result<Self, TryReserveError> {
|
|
/// Using memory that was allocated elsewhere:
|
|
///
|
|
/// ```rust
|
|
- /// #![feature(allocator_api)]
|
|
- ///
|
|
- /// use std::alloc::{AllocError, Allocator, Global, Layout};
|
|
+ /// use std::alloc::{alloc, Layout};
|
|
///
|
|
/// fn main() {
|
|
/// let layout = Layout::array::<u32>(16).expect("overflow cannot happen");
|
|
///
|
|
/// let vec = unsafe {
|
|
- /// let mem = match Global.allocate(layout) {
|
|
- /// Ok(mem) => mem.cast::<u32>().as_ptr(),
|
|
- /// Err(AllocError) => return,
|
|
- /// };
|
|
+ /// let mem = alloc(layout).cast::<u32>();
|
|
+ /// if mem.is_null() {
|
|
+ /// return;
|
|
+ /// }
|
|
///
|
|
/// mem.write(1_000_000);
|
|
///
|
|
- /// Vec::from_raw_parts_in(mem, 1, 16, Global)
|
|
+ /// Vec::from_raw_parts(mem, 1, 16)
|
|
/// };
|
|
///
|
|
/// assert_eq!(vec, &[1_000_000]);
|
|
@@ -876,19 +874,22 @@ pub fn try_with_capacity_in(capacity: usize, alloc: A) -> Result<Self, TryReserv
|
|
/// Using memory that was allocated elsewhere:
|
|
///
|
|
/// ```rust
|
|
- /// use std::alloc::{alloc, Layout};
|
|
+ /// #![feature(allocator_api)]
|
|
+ ///
|
|
+ /// use std::alloc::{AllocError, Allocator, Global, Layout};
|
|
///
|
|
/// fn main() {
|
|
/// let layout = Layout::array::<u32>(16).expect("overflow cannot happen");
|
|
+ ///
|
|
/// let vec = unsafe {
|
|
- /// let mem = alloc(layout).cast::<u32>();
|
|
- /// if mem.is_null() {
|
|
- /// return;
|
|
- /// }
|
|
+ /// let mem = match Global.allocate(layout) {
|
|
+ /// Ok(mem) => mem.cast::<u32>().as_ptr(),
|
|
+ /// Err(AllocError) => return,
|
|
+ /// };
|
|
///
|
|
/// mem.write(1_000_000);
|
|
///
|
|
- /// Vec::from_raw_parts(mem, 1, 16)
|
|
+ /// Vec::from_raw_parts_in(mem, 1, 16, Global)
|
|
/// };
|
|
///
|
|
/// assert_eq!(vec, &[1_000_000]);
|
|
@@ -1227,8 +1228,8 @@ pub fn into_boxed_slice(mut self) -> Box<[T], A> {
|
|
/// Shortens the vector, keeping the first `len` elements and dropping
|
|
/// the rest.
|
|
///
|
|
- /// If `len` is greater than the vector's current length, this has no
|
|
- /// effect.
|
|
+ /// If `len` is greater or equal to the vector's current length, this has
|
|
+ /// no effect.
|
|
///
|
|
/// The [`drain`] method can emulate `truncate`, but causes the excess
|
|
/// elements to be returned instead of dropped.
|
|
@@ -1335,6 +1336,15 @@ pub fn as_mut_slice(&mut self) -> &mut [T] {
|
|
/// is never written to (except inside an `UnsafeCell`) using this pointer or any pointer
|
|
/// derived from it. If you need to mutate the contents of the slice, use [`as_mut_ptr`].
|
|
///
|
|
+ /// This method guarantees that for the purpose of the aliasing model, this method
|
|
+ /// does not materialize a reference to the underlying slice, and thus the returned pointer
|
|
+ /// will remain valid when mixed with other calls to [`as_ptr`] and [`as_mut_ptr`].
|
|
+ /// Note that calling other methods that materialize mutable references to the slice,
|
|
+ /// or mutable references to specific elements you are planning on accessing through this pointer,
|
|
+ /// as well as writing to those elements, may still invalidate this pointer.
|
|
+ /// See the second example below for how this guarantee can be used.
|
|
+ ///
|
|
+ ///
|
|
/// # Examples
|
|
///
|
|
/// ```
|
|
@@ -1348,8 +1358,25 @@ pub fn as_mut_slice(&mut self) -> &mut [T] {
|
|
/// }
|
|
/// ```
|
|
///
|
|
+ /// Due to the aliasing guarantee, the following code is legal:
|
|
+ ///
|
|
+ /// ```rust
|
|
+ /// unsafe {
|
|
+ /// let mut v = vec![0, 1, 2];
|
|
+ /// let ptr1 = v.as_ptr();
|
|
+ /// let _ = ptr1.read();
|
|
+ /// let ptr2 = v.as_mut_ptr().offset(2);
|
|
+ /// ptr2.write(2);
|
|
+ /// // Notably, the write to `ptr2` did *not* invalidate `ptr1`
|
|
+ /// // because it mutated a different element:
|
|
+ /// let _ = ptr1.read();
|
|
+ /// }
|
|
+ /// ```
|
|
+ ///
|
|
/// [`as_mut_ptr`]: Vec::as_mut_ptr
|
|
+ /// [`as_ptr`]: Vec::as_ptr
|
|
#[stable(feature = "vec_as_ptr", since = "1.37.0")]
|
|
+ #[rustc_never_returns_null_ptr]
|
|
#[inline]
|
|
pub fn as_ptr(&self) -> *const T {
|
|
// We shadow the slice method of the same name to avoid going through
|
|
@@ -1365,6 +1392,15 @@ pub fn as_ptr(&self) -> *const T {
|
|
/// Modifying the vector may cause its buffer to be reallocated,
|
|
/// which would also make any pointers to it invalid.
|
|
///
|
|
+ /// This method guarantees that for the purpose of the aliasing model, this method
|
|
+ /// does not materialize a reference to the underlying slice, and thus the returned pointer
|
|
+ /// will remain valid when mixed with other calls to [`as_ptr`] and [`as_mut_ptr`].
|
|
+ /// Note that calling other methods that materialize references to the slice,
|
|
+ /// or references to specific elements you are planning on accessing through this pointer,
|
|
+ /// may still invalidate this pointer.
|
|
+ /// See the second example below for how this guarantee can be used.
|
|
+ ///
|
|
+ ///
|
|
/// # Examples
|
|
///
|
|
/// ```
|
|
@@ -1382,7 +1418,25 @@ pub fn as_ptr(&self) -> *const T {
|
|
/// }
|
|
/// assert_eq!(&*x, &[0, 1, 2, 3]);
|
|
/// ```
|
|
+ ///
|
|
+ /// Due to the aliasing guarantee, the following code is legal:
|
|
+ ///
|
|
+ /// ```rust
|
|
+ /// unsafe {
|
|
+ /// let mut v = vec![0];
|
|
+ /// let ptr1 = v.as_mut_ptr();
|
|
+ /// ptr1.write(1);
|
|
+ /// let ptr2 = v.as_mut_ptr();
|
|
+ /// ptr2.write(2);
|
|
+ /// // Notably, the write to `ptr2` did *not* invalidate `ptr1`:
|
|
+ /// ptr1.write(3);
|
|
+ /// }
|
|
+ /// ```
|
|
+ ///
|
|
+ /// [`as_mut_ptr`]: Vec::as_mut_ptr
|
|
+ /// [`as_ptr`]: Vec::as_ptr
|
|
#[stable(feature = "vec_as_ptr", since = "1.37.0")]
|
|
+ #[rustc_never_returns_null_ptr]
|
|
#[inline]
|
|
pub fn as_mut_ptr(&mut self) -> *mut T {
|
|
// We shadow the slice method of the same name to avoid going through
|
|
@@ -1511,7 +1565,8 @@ pub unsafe fn set_len(&mut self, new_len: usize) {
|
|
#[stable(feature = "rust1", since = "1.0.0")]
|
|
pub fn swap_remove(&mut self, index: usize) -> T {
|
|
#[cold]
|
|
- #[inline(never)]
|
|
+ #[cfg_attr(not(feature = "panic_immediate_abort"), inline(never))]
|
|
+ #[track_caller]
|
|
fn assert_failed(index: usize, len: usize) -> ! {
|
|
panic!("swap_remove index (is {index}) should be < len (is {len})");
|
|
}
|
|
@@ -1552,7 +1607,8 @@ fn assert_failed(index: usize, len: usize) -> ! {
|
|
#[stable(feature = "rust1", since = "1.0.0")]
|
|
pub fn insert(&mut self, index: usize, element: T) {
|
|
#[cold]
|
|
- #[inline(never)]
|
|
+ #[cfg_attr(not(feature = "panic_immediate_abort"), inline(never))]
|
|
+ #[track_caller]
|
|
fn assert_failed(index: usize, len: usize) -> ! {
|
|
panic!("insertion index (is {index}) should be <= len (is {len})");
|
|
}
|
|
@@ -1613,7 +1669,7 @@ fn assert_failed(index: usize, len: usize) -> ! {
|
|
#[track_caller]
|
|
pub fn remove(&mut self, index: usize) -> T {
|
|
#[cold]
|
|
- #[inline(never)]
|
|
+ #[cfg_attr(not(feature = "panic_immediate_abort"), inline(never))]
|
|
#[track_caller]
|
|
fn assert_failed(index: usize, len: usize) -> ! {
|
|
panic!("removal index (is {index}) should be < len (is {len})");
|
|
@@ -2043,6 +2099,7 @@ pub fn pop(&mut self) -> Option<T> {
|
|
} else {
|
|
unsafe {
|
|
self.len -= 1;
|
|
+ core::intrinsics::assume(self.len < self.capacity());
|
|
Some(ptr::read(self.as_ptr().add(self.len())))
|
|
}
|
|
}
|
|
@@ -2245,7 +2302,8 @@ pub fn split_off(&mut self, at: usize) -> Self
|
|
A: Clone,
|
|
{
|
|
#[cold]
|
|
- #[inline(never)]
|
|
+ #[cfg_attr(not(feature = "panic_immediate_abort"), inline(never))]
|
|
+ #[track_caller]
|
|
fn assert_failed(at: usize, len: usize) -> ! {
|
|
panic!("`at` split index (is {at}) should be <= len (is {len})");
|
|
}
|
|
@@ -2507,7 +2565,7 @@ pub fn resize(&mut self, new_len: usize, value: T) {
|
|
let len = self.len();
|
|
|
|
if new_len > len {
|
|
- self.extend_with(new_len - len, ExtendElement(value))
|
|
+ self.extend_with(new_len - len, value)
|
|
} else {
|
|
self.truncate(new_len);
|
|
}
|
|
@@ -2545,7 +2603,7 @@ pub fn try_resize(&mut self, new_len: usize, value: T) -> Result<(), TryReserveE
|
|
let len = self.len();
|
|
|
|
if new_len > len {
|
|
- self.try_extend_with(new_len - len, ExtendElement(value))
|
|
+ self.try_extend_with(new_len - len, value)
|
|
} else {
|
|
self.truncate(new_len);
|
|
Ok(())
|
|
@@ -2684,26 +2742,10 @@ pub fn into_flattened(self) -> Vec<T, A> {
|
|
}
|
|
}
|
|
|
|
-// This code generalizes `extend_with_{element,default}`.
|
|
-trait ExtendWith<T> {
|
|
- fn next(&mut self) -> T;
|
|
- fn last(self) -> T;
|
|
-}
|
|
-
|
|
-struct ExtendElement<T>(T);
|
|
-impl<T: Clone> ExtendWith<T> for ExtendElement<T> {
|
|
- fn next(&mut self) -> T {
|
|
- self.0.clone()
|
|
- }
|
|
- fn last(self) -> T {
|
|
- self.0
|
|
- }
|
|
-}
|
|
-
|
|
-impl<T, A: Allocator> Vec<T, A> {
|
|
+impl<T: Clone, A: Allocator> Vec<T, A> {
|
|
#[cfg(not(no_global_oom_handling))]
|
|
- /// Extend the vector by `n` values, using the given generator.
|
|
- fn extend_with<E: ExtendWith<T>>(&mut self, n: usize, mut value: E) {
|
|
+ /// Extend the vector by `n` clones of value.
|
|
+ fn extend_with(&mut self, n: usize, value: T) {
|
|
self.reserve(n);
|
|
|
|
unsafe {
|
|
@@ -2715,15 +2757,15 @@ fn extend_with<E: ExtendWith<T>>(&mut self, n: usize, mut value: E) {
|
|
|
|
// Write all elements except the last one
|
|
for _ in 1..n {
|
|
- ptr::write(ptr, value.next());
|
|
+ ptr::write(ptr, value.clone());
|
|
ptr = ptr.add(1);
|
|
- // Increment the length in every step in case next() panics
|
|
+ // Increment the length in every step in case clone() panics
|
|
local_len.increment_len(1);
|
|
}
|
|
|
|
if n > 0 {
|
|
// We can write the last element directly without cloning needlessly
|
|
- ptr::write(ptr, value.last());
|
|
+ ptr::write(ptr, value);
|
|
local_len.increment_len(1);
|
|
}
|
|
|
|
@@ -2731,8 +2773,8 @@ fn extend_with<E: ExtendWith<T>>(&mut self, n: usize, mut value: E) {
|
|
}
|
|
}
|
|
|
|
- /// Try to extend the vector by `n` values, using the given generator.
|
|
- fn try_extend_with<E: ExtendWith<T>>(&mut self, n: usize, mut value: E) -> Result<(), TryReserveError> {
|
|
+ /// Try to extend the vector by `n` clones of value.
|
|
+ fn try_extend_with(&mut self, n: usize, value: T) -> Result<(), TryReserveError> {
|
|
self.try_reserve(n)?;
|
|
|
|
unsafe {
|
|
@@ -2744,15 +2786,15 @@ fn try_extend_with<E: ExtendWith<T>>(&mut self, n: usize, mut value: E) -> Resul
|
|
|
|
// Write all elements except the last one
|
|
for _ in 1..n {
|
|
- ptr::write(ptr, value.next());
|
|
+ ptr::write(ptr, value.clone());
|
|
ptr = ptr.add(1);
|
|
- // Increment the length in every step in case next() panics
|
|
+ // Increment the length in every step in case clone() panics
|
|
local_len.increment_len(1);
|
|
}
|
|
|
|
if n > 0 {
|
|
// We can write the last element directly without cloning needlessly
|
|
- ptr::write(ptr, value.last());
|
|
+ ptr::write(ptr, value);
|
|
local_len.increment_len(1);
|
|
}
|
|
|
|
@@ -3210,6 +3252,12 @@ pub fn splice<R, I>(&mut self, range: R, replace_with: I) -> Splice<'_, I::IntoI
|
|
/// If the closure returns false, the element will remain in the vector and will not be yielded
|
|
/// by the iterator.
|
|
///
|
|
+ /// If the returned `ExtractIf` is not exhausted, e.g. because it is dropped without iterating
|
|
+ /// or the iteration short-circuits, then the remaining elements will be retained.
|
|
+ /// Use [`retain`] with a negated predicate if you do not need the returned iterator.
|
|
+ ///
|
|
+ /// [`retain`]: Vec::retain
|
|
+ ///
|
|
/// Using this method is equivalent to the following code:
|
|
///
|
|
/// ```
|
|
@@ -3228,10 +3276,10 @@ pub fn splice<R, I>(&mut self, range: R, replace_with: I) -> Splice<'_, I::IntoI
|
|
/// # assert_eq!(vec, vec![1, 4, 5]);
|
|
/// ```
|
|
///
|
|
- /// But `drain_filter` is easier to use. `drain_filter` is also more efficient,
|
|
+ /// But `extract_if` is easier to use. `extract_if` is also more efficient,
|
|
/// because it can backshift the elements of the array in bulk.
|
|
///
|
|
- /// Note that `drain_filter` also lets you mutate every element in the filter closure,
|
|
+ /// Note that `extract_if` also lets you mutate every element in the filter closure,
|
|
/// regardless of whether you choose to keep or remove it.
|
|
///
|
|
/// # Examples
|
|
@@ -3239,17 +3287,17 @@ pub fn splice<R, I>(&mut self, range: R, replace_with: I) -> Splice<'_, I::IntoI
|
|
/// Splitting an array into evens and odds, reusing the original allocation:
|
|
///
|
|
/// ```
|
|
- /// #![feature(drain_filter)]
|
|
+ /// #![feature(extract_if)]
|
|
/// let mut numbers = vec![1, 2, 3, 4, 5, 6, 8, 9, 11, 13, 14, 15];
|
|
///
|
|
- /// let evens = numbers.drain_filter(|x| *x % 2 == 0).collect::<Vec<_>>();
|
|
+ /// let evens = numbers.extract_if(|x| *x % 2 == 0).collect::<Vec<_>>();
|
|
/// let odds = numbers;
|
|
///
|
|
/// assert_eq!(evens, vec![2, 4, 6, 8, 14]);
|
|
/// assert_eq!(odds, vec![1, 3, 5, 9, 11, 13, 15]);
|
|
/// ```
|
|
- #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
|
|
- pub fn drain_filter<F>(&mut self, filter: F) -> DrainFilter<'_, T, F, A>
|
|
+ #[unstable(feature = "extract_if", reason = "recently added", issue = "43244")]
|
|
+ pub fn extract_if<F>(&mut self, filter: F) -> ExtractIf<'_, T, F, A>
|
|
where
|
|
F: FnMut(&mut T) -> bool,
|
|
{
|
|
@@ -3260,7 +3308,7 @@ pub fn drain_filter<F>(&mut self, filter: F) -> DrainFilter<'_, T, F, A>
|
|
self.set_len(0);
|
|
}
|
|
|
|
- DrainFilter { vec: self, idx: 0, del: 0, old_len, pred: filter, panic_flag: false }
|
|
+ ExtractIf { vec: self, idx: 0, del: 0, old_len, pred: filter }
|
|
}
|
|
}
|
|
|
|
@@ -3272,7 +3320,7 @@ pub fn drain_filter<F>(&mut self, filter: F) -> DrainFilter<'_, T, F, A>
|
|
/// [`copy_from_slice`]: slice::copy_from_slice
|
|
#[cfg(not(no_global_oom_handling))]
|
|
#[stable(feature = "extend_ref", since = "1.2.0")]
|
|
-impl<'a, T: Copy + 'a, A: Allocator + 'a> Extend<&'a T> for Vec<T, A> {
|
|
+impl<'a, T: Copy + 'a, A: Allocator> Extend<&'a T> for Vec<T, A> {
|
|
fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
|
|
self.spec_extend(iter.into_iter())
|
|
}
|
|
@@ -3290,9 +3338,14 @@ fn extend_reserve(&mut self, additional: usize) {
|
|
|
|
/// Implements comparison of vectors, [lexicographically](Ord#lexicographical-comparison).
|
|
#[stable(feature = "rust1", since = "1.0.0")]
|
|
-impl<T: PartialOrd, A: Allocator> PartialOrd for Vec<T, A> {
|
|
+impl<T, A1, A2> PartialOrd<Vec<T, A2>> for Vec<T, A1>
|
|
+where
|
|
+ T: PartialOrd,
|
|
+ A1: Allocator,
|
|
+ A2: Allocator,
|
|
+{
|
|
#[inline]
|
|
- fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
|
|
+ fn partial_cmp(&self, other: &Vec<T, A2>) -> Option<Ordering> {
|
|
PartialOrd::partial_cmp(&**self, &**other)
|
|
}
|
|
}
|
|
@@ -3407,6 +3460,36 @@ fn from(s: &mut [T]) -> Vec<T> {
|
|
}
|
|
}
|
|
|
|
+#[cfg(not(no_global_oom_handling))]
|
|
+#[stable(feature = "vec_from_array_ref", since = "1.74.0")]
|
|
+impl<T: Clone, const N: usize> From<&[T; N]> for Vec<T> {
|
|
+ /// Allocate a `Vec<T>` and fill it by cloning `s`'s items.
|
|
+ ///
|
|
+ /// # Examples
|
|
+ ///
|
|
+ /// ```
|
|
+ /// assert_eq!(Vec::from(&[1, 2, 3]), vec![1, 2, 3]);
|
|
+ /// ```
|
|
+ fn from(s: &[T; N]) -> Vec<T> {
|
|
+ Self::from(s.as_slice())
|
|
+ }
|
|
+}
|
|
+
|
|
+#[cfg(not(no_global_oom_handling))]
|
|
+#[stable(feature = "vec_from_array_ref", since = "1.74.0")]
|
|
+impl<T: Clone, const N: usize> From<&mut [T; N]> for Vec<T> {
|
|
+ /// Allocate a `Vec<T>` and fill it by cloning `s`'s items.
|
|
+ ///
|
|
+ /// # Examples
|
|
+ ///
|
|
+ /// ```
|
|
+ /// assert_eq!(Vec::from(&mut [1, 2, 3]), vec![1, 2, 3]);
|
|
+ /// ```
|
|
+ fn from(s: &mut [T; N]) -> Vec<T> {
|
|
+ Self::from(s.as_mut_slice())
|
|
+ }
|
|
+}
|
|
+
|
|
#[cfg(not(no_global_oom_handling))]
|
|
#[stable(feature = "vec_from_array", since = "1.44.0")]
|
|
impl<T, const N: usize> From<[T; N]> for Vec<T> {
diff --git a/rust/alloc/vec/spec_extend.rs b/rust/alloc/vec/spec_extend.rs
index a6a735201e59..ada919537446 100644
--- a/rust/alloc/vec/spec_extend.rs
+++ b/rust/alloc/vec/spec_extend.rs
@@ -77,7 +77,7 @@ fn try_spec_extend(&mut self, mut iterator: IntoIter<T>) -> Result<(), TryReserv
|
|
}
|
|
|
|
#[cfg(not(no_global_oom_handling))]
|
|
-impl<'a, T: 'a, I, A: Allocator + 'a> SpecExtend<&'a T, I> for Vec<T, A>
|
|
+impl<'a, T: 'a, I, A: Allocator> SpecExtend<&'a T, I> for Vec<T, A>
|
|
where
|
|
I: Iterator<Item = &'a T>,
|
|
T: Clone,
|
|
@@ -87,7 +87,7 @@ impl<'a, T: 'a, I, A: Allocator + 'a> SpecExtend<&'a T, I> for Vec<T, A>
|
|
}
|
|
}
|
|
|
|
-impl<'a, T: 'a, I, A: Allocator + 'a> TrySpecExtend<&'a T, I> for Vec<T, A>
|
|
+impl<'a, T: 'a, I, A: Allocator> TrySpecExtend<&'a T, I> for Vec<T, A>
|
|
where
|
|
I: Iterator<Item = &'a T>,
|
|
T: Clone,
|
|
@@ -98,7 +98,7 @@ impl<'a, T: 'a, I, A: Allocator + 'a> TrySpecExtend<&'a T, I> for Vec<T, A>
|
|
}
|
|
|
|
#[cfg(not(no_global_oom_handling))]
|
|
-impl<'a, T: 'a, A: Allocator + 'a> SpecExtend<&'a T, slice::Iter<'a, T>> for Vec<T, A>
|
|
+impl<'a, T: 'a, A: Allocator> SpecExtend<&'a T, slice::Iter<'a, T>> for Vec<T, A>
|
|
where
|
|
T: Copy,
|
|
{
|
|
@@ -108,7 +108,7 @@ fn spec_extend(&mut self, iterator: slice::Iter<'a, T>) {
|
|
}
|
|
}
|
|
|
|
-impl<'a, T: 'a, A: Allocator + 'a> TrySpecExtend<&'a T, slice::Iter<'a, T>> for Vec<T, A>
|
|
+impl<'a, T: 'a, A: Allocator> TrySpecExtend<&'a T, slice::Iter<'a, T>> for Vec<T, A>
|
|
where
|
|
T: Copy,
|
|
{
--
2.42.0