From eb4fc640b0f881e240ad0ebb9d05f56547e42036 Mon Sep 17 00:00:00 2001
From: woppopo
Date: Thu, 23 Dec 2021 22:03:12 +0900
Subject: [PATCH] Constify `Box` methods

---
 library/alloc/src/alloc.rs   |  23 +++++--
 library/alloc/src/boxed.rs   |  92 ++++++++++++++++++++--------
 library/alloc/src/lib.rs     |  15 +++++
 library/alloc/tests/boxed.rs | 114 ++++++++++++++++++++++++++++++++++-
 library/alloc/tests/lib.rs   |  12 ++++
 5 files changed, 224 insertions(+), 32 deletions(-)

diff --git a/library/alloc/src/alloc.rs b/library/alloc/src/alloc.rs
index 66ef92558d8..fd4b37ca6a8 100644
--- a/library/alloc/src/alloc.rs
+++ b/library/alloc/src/alloc.rs
@@ -323,17 +323,21 @@ unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
 
 #[cfg_attr(not(test), lang = "box_free")]
 #[inline]
+#[rustc_const_unstable(feature = "const_box", issue = "none")]
 // This signature has to be the same as `Box`, otherwise an ICE will happen.
 // When an additional parameter to `Box` is added (like `A: Allocator`), this has to be added here as
 // well.
 // For example if `Box` is changed to `struct Box<T: ?Sized, A: Allocator>(Unique<T>, A)`,
 // this function has to be changed to `fn box_free<T: ?Sized, A: Allocator>(Unique<T>, A)` as well.
-pub(crate) unsafe fn box_free<T: ?Sized, A: Allocator>(ptr: Unique<T>, alloc: A) {
+pub(crate) const unsafe fn box_free<T: ?Sized, A: ~const Allocator + ~const Drop>(
+    ptr: Unique<T>,
+    alloc: A,
+) {
     unsafe {
         let size = size_of_val(ptr.as_ref());
         let align = min_align_of_val(ptr.as_ref());
         let layout = Layout::from_size_align_unchecked(size, align);
-        alloc.deallocate(ptr.cast().into(), layout)
+        alloc.deallocate(From::from(ptr.cast()), layout)
     }
 }
 
@@ -361,13 +365,22 @@ pub(crate) unsafe fn box_free<T: ?Sized, A: Allocator>(ptr: Unique<T>, alloc: A)
 /// [`set_alloc_error_hook`]: ../../std/alloc/fn.set_alloc_error_hook.html
 /// [`take_alloc_error_hook`]: ../../std/alloc/fn.take_alloc_error_hook.html
 #[stable(feature = "global_alloc", since = "1.28.0")]
+#[rustc_const_unstable(feature = "const_box", issue = "none")]
 #[cfg(all(not(no_global_oom_handling), not(test)))]
 #[rustc_allocator_nounwind]
 #[cold]
-pub fn handle_alloc_error(layout: Layout) -> ! {
-    unsafe {
-        __rust_alloc_error_handler(layout.size(), layout.align());
+pub const fn handle_alloc_error(layout: Layout) -> ! {
+    const fn ct_error(_: Layout) -> ! {
+        panic!("allocation failed");
     }
+
+    fn rt_error(layout: Layout) -> ! {
+        unsafe {
+            __rust_alloc_error_handler(layout.size(), layout.align());
+        }
+    }
+
+    unsafe { core::intrinsics::const_eval_select((layout,), ct_error, rt_error) }
 }
 
 // For alloc test `std::alloc::handle_alloc_error` can be used directly.
diff --git a/library/alloc/src/boxed.rs b/library/alloc/src/boxed.rs
index ab41f5646e5..59e4ebb181f 100644
--- a/library/alloc/src/boxed.rs
+++ b/library/alloc/src/boxed.rs
@@ -346,9 +346,13 @@ impl<T, A: Allocator> Box<T, A> {
     /// ```
     #[cfg(not(no_global_oom_handling))]
     #[unstable(feature = "allocator_api", issue = "32838")]
+    #[rustc_const_unstable(feature = "const_box", issue = "none")]
     #[must_use]
     #[inline]
-    pub fn new_in(x: T, alloc: A) -> Self {
+    pub const fn new_in(x: T, alloc: A) -> Self
+    where
+        A: ~const Allocator + ~const Drop,
+    {
         let mut boxed = Self::new_uninit_in(alloc);
         unsafe {
             boxed.as_mut_ptr().write(x);
@@ -372,8 +376,13 @@ pub fn new_in(x: T, alloc: A) -> Self {
     /// # Ok::<(), std::alloc::AllocError>(())
     /// ```
     #[unstable(feature = "allocator_api", issue = "32838")]
+    #[rustc_const_unstable(feature = "const_box", issue = "none")]
     #[inline]
-    pub fn try_new_in(x: T, alloc: A) -> Result<Self, AllocError> {
+    pub const fn try_new_in(x: T, alloc: A) -> Result<Self, AllocError>
+    where
+        T: ~const Drop,
+        A: ~const Allocator + ~const Drop,
+    {
         let mut boxed = Self::try_new_uninit_in(alloc)?;
         unsafe {
             boxed.as_mut_ptr().write(x);
@@ -402,10 +411,14 @@ pub fn try_new_in(x: T, alloc: A) -> Result<Self, AllocError> {
     /// assert_eq!(*five, 5)
     /// ```
     #[unstable(feature = "allocator_api", issue = "32838")]
+    #[rustc_const_unstable(feature = "const_box", issue = "none")]
     #[cfg(not(no_global_oom_handling))]
     #[must_use]
     // #[unstable(feature = "new_uninit", issue = "63291")]
-    pub fn new_uninit_in(alloc: A) -> Box<mem::MaybeUninit<T>, A> {
+    pub const fn new_uninit_in(alloc: A) -> Box<mem::MaybeUninit<T>, A>
+    where
+        A: ~const Allocator + ~const Drop,
+    {
         let layout = Layout::new::<mem::MaybeUninit<T>>();
         // NOTE: Prefer match over unwrap_or_else since closure sometimes not inlineable.
         // That would make code size bigger.
@@ -439,7 +452,11 @@ pub fn new_uninit_in(alloc: A) -> Box<mem::MaybeUninit<T>, A> {
     /// ```
     #[unstable(feature = "allocator_api", issue = "32838")]
     // #[unstable(feature = "new_uninit", issue = "63291")]
-    pub fn try_new_uninit_in(alloc: A) -> Result<Box<mem::MaybeUninit<T>, A>, AllocError> {
+    #[rustc_const_unstable(feature = "const_box", issue = "none")]
+    pub const fn try_new_uninit_in(alloc: A) -> Result<Box<mem::MaybeUninit<T>, A>, AllocError>
+    where
+        A: ~const Allocator + ~const Drop,
+    {
         let layout = Layout::new::<mem::MaybeUninit<T>>();
         let ptr = alloc.allocate(layout)?.cast();
         unsafe { Ok(Box::from_raw_in(ptr.as_ptr(), alloc)) }
@@ -466,10 +483,14 @@ pub fn try_new_uninit_in(alloc: A) -> Result<Box<mem::MaybeUninit<T>, A>, AllocE
     ///
     /// [zeroed]: mem::MaybeUninit::zeroed
     #[unstable(feature = "allocator_api", issue = "32838")]
+    #[rustc_const_unstable(feature = "const_box", issue = "none")]
     #[cfg(not(no_global_oom_handling))]
     // #[unstable(feature = "new_uninit", issue = "63291")]
     #[must_use]
-    pub fn new_zeroed_in(alloc: A) -> Box<mem::MaybeUninit<T>, A> {
+    pub const fn new_zeroed_in(alloc: A) -> Box<mem::MaybeUninit<T>, A>
+    where
+        A: ~const Allocator + ~const Drop,
+    {
         let layout = Layout::new::<mem::MaybeUninit<T>>();
         // NOTE: Prefer match over unwrap_or_else since closure sometimes not inlineable.
         // That would make code size bigger.
@@ -503,7 +524,11 @@ pub fn new_zeroed_in(alloc: A) -> Box<mem::MaybeUninit<T>, A> {
     /// [zeroed]: mem::MaybeUninit::zeroed
     #[unstable(feature = "allocator_api", issue = "32838")]
     // #[unstable(feature = "new_uninit", issue = "63291")]
-    pub fn try_new_zeroed_in(alloc: A) -> Result<Box<mem::MaybeUninit<T>, A>, AllocError> {
+    #[rustc_const_unstable(feature = "const_box", issue = "none")]
+    pub const fn try_new_zeroed_in(alloc: A) -> Result<Box<mem::MaybeUninit<T>, A>, AllocError>
+    where
+        A: ~const Allocator + ~const Drop,
+    {
         let layout = Layout::new::<mem::MaybeUninit<T>>();
         let ptr = alloc.allocate_zeroed(layout)?.cast();
         unsafe { Ok(Box::from_raw_in(ptr.as_ptr(), alloc)) }
@@ -513,20 +538,22 @@ pub fn try_new_zeroed_in(alloc: A) -> Result<Box<mem::MaybeUninit<T>, A>, AllocE
     /// `x` will be pinned in memory and unable to be moved.
     #[cfg(not(no_global_oom_handling))]
     #[unstable(feature = "allocator_api", issue = "32838")]
+    #[rustc_const_unstable(feature = "const_box", issue = "none")]
     #[must_use]
     #[inline(always)]
-    pub fn pin_in(x: T, alloc: A) -> Pin<Self>
+    pub const fn pin_in(x: T, alloc: A) -> Pin<Self>
     where
-        A: 'static,
+        A: 'static + ~const Allocator + ~const Drop,
     {
-        Self::new_in(x, alloc).into()
+        Self::into_pin(Self::new_in(x, alloc))
     }
 
     /// Converts a `Box<T>` into a `Box<[T]>`
     ///
     /// This conversion does not allocate on the heap and happens in place.
     #[unstable(feature = "box_into_boxed_slice", issue = "71582")]
-    pub fn into_boxed_slice(boxed: Self) -> Box<[T], A> {
+    #[rustc_const_unstable(feature = "const_box", issue = "none")]
+    pub const fn into_boxed_slice(boxed: Self) -> Box<[T], A> {
         let (raw, alloc) = Box::into_raw_with_allocator(boxed);
         unsafe { Box::from_raw_in(raw as *mut [T; 1], alloc) }
     }
@@ -543,8 +570,12 @@ pub fn into_boxed_slice(boxed: Self) -> Box<[T], A> {
     /// assert_eq!(Box::into_inner(c), 5);
     /// ```
     #[unstable(feature = "box_into_inner", issue = "80437")]
+    #[rustc_const_unstable(feature = "const_box", issue = "none")]
     #[inline]
-    pub fn into_inner(boxed: Self) -> T {
+    pub const fn into_inner(boxed: Self) -> T
+    where
+        Self: ~const Drop,
+    {
         *boxed
     }
 }
@@ -758,8 +789,9 @@ impl<T, A: Allocator> Box<mem::MaybeUninit<T>, A> {
     /// assert_eq!(*five, 5)
     /// ```
     #[unstable(feature = "new_uninit", issue = "63291")]
+    #[rustc_const_unstable(feature = "const_box", issue = "none")]
     #[inline]
-    pub unsafe fn assume_init(self) -> Box<T, A> {
+    pub const unsafe fn assume_init(self) -> Box<T, A> {
         let (raw, alloc) = Box::into_raw_with_allocator(self);
         unsafe { Box::from_raw_in(raw as *mut T, alloc) }
     }
@@ -792,8 +824,9 @@ pub unsafe fn assume_init(self) -> Box<T, A> {
     /// }
     /// ```
     #[unstable(feature = "new_uninit", issue = "63291")]
+    #[rustc_const_unstable(feature = "const_box", issue = "none")]
     #[inline]
-    pub fn write(mut boxed: Self, value: T) -> Box<T, A> {
+    pub const fn write(mut boxed: Self, value: T) -> Box<T, A> {
         unsafe {
             (*boxed).write(value);
             boxed.assume_init()
@@ -938,8 +971,9 @@ impl<T: ?Sized, A: Allocator> Box<T, A> {
     /// [memory layout]: self#memory-layout
     /// [`Layout`]: crate::Layout
     #[unstable(feature = "allocator_api", issue = "32838")]
+    #[rustc_const_unstable(feature = "const_box", issue = "none")]
     #[inline]
-    pub unsafe fn from_raw_in(raw: *mut T, alloc: A) -> Self {
+    pub const unsafe fn from_raw_in(raw: *mut T, alloc: A) -> Self {
         Box(unsafe { Unique::new_unchecked(raw) }, alloc)
     }
 
@@ -1035,8 +1069,9 @@ pub fn into_raw(b: Self) -> *mut T {
     ///
     /// [memory layout]: self#memory-layout
     #[unstable(feature = "allocator_api", issue = "32838")]
+    #[rustc_const_unstable(feature = "const_box", issue = "none")]
     #[inline]
-    pub fn into_raw_with_allocator(b: Self) -> (*mut T, A) {
+    pub const fn into_raw_with_allocator(b: Self) -> (*mut T, A) {
         let (leaked, alloc) = Box::into_unique(b);
         (leaked.as_ptr(), alloc)
     }
@@ -1046,9 +1081,10 @@ pub fn into_raw_with_allocator(b: Self) -> (*mut T, A) {
         issue = "none",
         reason = "use `Box::leak(b).into()` or `Unique::from(Box::leak(b))` instead"
     )]
+    #[rustc_const_unstable(feature = "const_box", issue = "none")]
     #[inline]
     #[doc(hidden)]
-    pub fn into_unique(b: Self) -> (Unique<T>, A) {
+    pub const fn into_unique(b: Self) -> (Unique<T>, A) {
         // Box is recognized as a "unique pointer" by Stacked Borrows, but internally it is a
         // raw pointer for the type system. Turning it directly into a raw pointer would not be
         // recognized as "releasing" the unique pointer to permit aliased raw accesses,
@@ -1064,8 +1100,9 @@ pub fn into_unique(b: Self) -> (Unique<T>, A) {
     /// to call it as `Box::allocator(&b)` instead of `b.allocator()`. This
     /// is so that there is no conflict with a method on the inner type.
     #[unstable(feature = "allocator_api", issue = "32838")]
+    #[rustc_const_unstable(feature = "const_box", issue = "none")]
     #[inline]
-    pub fn allocator(b: &Self) -> &A {
+    pub const fn allocator(b: &Self) -> &A {
         &b.1
     }
 
@@ -1105,8 +1142,9 @@ pub fn allocator(b: &Self) -> &A {
     /// assert_eq!(*static_ref, [4, 2, 3]);
     /// ```
     #[stable(feature = "box_leak", since = "1.26.0")]
+    #[rustc_const_unstable(feature = "const_box", issue = "none")]
     #[inline]
-    pub fn leak<'a>(b: Self) -> &'a mut T
+    pub const fn leak<'a>(b: Self) -> &'a mut T
     where
         A: 'a,
     {
@@ -1119,7 +1157,8 @@ pub fn leak<'a>(b: Self) -> &'a mut T
     ///
     /// This is also available via [`From`].
     #[unstable(feature = "box_into_pin", issue = "62370")]
-    pub fn into_pin(boxed: Self) -> Pin<Self>
+    #[rustc_const_unstable(feature = "const_box", issue = "none")]
+    pub const fn into_pin(boxed: Self) -> Pin<Self>
     where
         A: 'static,
     {
@@ -1131,7 +1170,8 @@ pub fn into_pin(boxed: Self) -> Pin<Self>
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
-unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Box<T, A> {
+#[rustc_const_unstable(feature = "const_box", issue = "none")]
+unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> const Drop for Box<T, A> {
     fn drop(&mut self) {
         // FIXME: Do nothing, drop is currently performed by compiler.
     }
@@ -1341,7 +1381,8 @@ fn from(t: T) -> Self {
 }
 
 #[stable(feature = "pin", since = "1.33.0")]
-impl<T: ?Sized, A: Allocator> From<Box<T, A>> for Pin<Box<T, A>>
+#[rustc_const_unstable(feature = "const_box", issue = "none")]
+impl<T: ?Sized, A: Allocator> const From<Box<T, A>> for Pin<Box<T, A>>
 where
     A: 'static,
 {
@@ -1720,7 +1761,8 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized, A: Allocator> Deref for Box<T, A> {
+#[rustc_const_unstable(feature = "const_box", issue = "none")]
+impl<T: ?Sized, A: Allocator> const Deref for Box<T, A> {
     type Target = T;
 
     fn deref(&self) -> &T {
@@ -1729,7 +1771,8 @@ fn deref(&self) -> &T {
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized, A: Allocator> DerefMut for Box<T, A> {
+#[rustc_const_unstable(feature = "const_box", issue = "none")]
+impl<T: ?Sized, A: Allocator> const DerefMut for Box<T, A> {
     fn deref_mut(&mut self) -> &mut T {
         &mut **self
     }
@@ -1908,7 +1951,8 @@ fn as_mut(&mut self) -> &mut T {
  *  could have a method to project a Pin<T> from it.
  */
 #[stable(feature = "pin", since = "1.33.0")]
-impl<T: ?Sized, A: Allocator> Unpin for Box<T, A> where A: 'static {}
+#[rustc_const_unstable(feature = "const_box", issue = "none")]
+impl<T: ?Sized, A: Allocator> const Unpin for Box<T, A> where A: 'static {}
 
 #[unstable(feature = "generator_trait", issue = "43122")]
 impl<G: ?Sized + Generator<R> + Unpin, R, A: Allocator> Generator<R> for Box<G, A>
diff --git a/library/alloc/src/lib.rs b/library/alloc/src/lib.rs
index 600862c4224..d1a34e49175 100644
--- a/library/alloc/src/lib.rs
+++ b/library/alloc/src/lib.rs
@@ -97,8 +97,18 @@
 #![feature(async_stream)]
 #![feature(coerce_unsized)]
 #![cfg_attr(not(no_global_oom_handling), feature(const_btree_new))]
+#![feature(const_box)]
 #![feature(const_cow_is_borrowed)]
+#![feature(const_convert)]
+#![feature(const_size_of_val)]
+#![feature(const_align_of_val)]
+#![feature(const_ptr_read)]
+#![feature(const_maybe_uninit_write)]
+#![feature(const_maybe_uninit_as_mut_ptr)]
+#![feature(const_refs_to_cell)]
 #![feature(core_intrinsics)]
+#![feature(const_eval_select)]
+#![feature(const_pin)]
 #![feature(dispatch_from_dyn)]
 #![feature(exact_size_is_empty)]
 #![feature(extend_one)]
@@ -134,8 +144,13 @@
 #![feature(box_syntax)]
 #![feature(cfg_sanitize)]
 #![feature(cfg_target_has_atomic)]
+#![feature(const_deref)]
 #![feature(const_fn_trait_bound)]
+#![feature(const_mut_refs)]
+#![feature(const_ptr_write)]
+#![feature(const_precise_live_drops)]
 #![feature(const_trait_impl)]
+#![feature(const_try)]
 #![cfg_attr(bootstrap, feature(destructuring_assignment))]
 #![feature(dropck_eyepatch)]
 #![feature(exclusive_range_pattern)]
diff --git a/library/alloc/tests/boxed.rs b/library/alloc/tests/boxed.rs
index bfe66b2687e..0d7acfed8c6 100644
--- a/library/alloc/tests/boxed.rs
+++ b/library/alloc/tests/boxed.rs
@@ -1,6 +1,7 @@
-use std::cell::Cell;
-use std::mem::MaybeUninit;
-use std::ptr::NonNull;
+use core::alloc::{AllocError, Allocator, Layout};
+use core::cell::Cell;
+use core::mem::MaybeUninit;
+use core::ptr::NonNull;
 
 #[test]
 fn uninitialized_zero_size_box() {
@@ -57,3 +58,110 @@ fn box_deref_lval() {
     x.set(1000);
     assert_eq!(x.get(), 1000);
 }
+
+pub struct ConstAllocator;
+
+unsafe impl const Allocator for ConstAllocator {
+    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
+        match layout.size() {
+            0 => Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0)),
+            _ => unsafe {
+                let ptr = core::intrinsics::const_allocate(layout.size(), layout.align());
+                Ok(NonNull::new_unchecked(ptr as *mut [u8; 0] as *mut [u8]))
+            },
+        }
+    }
+
+    unsafe fn deallocate(&self, _ptr: NonNull<u8>, layout: Layout) {
+        match layout.size() {
+            0 => { /* do nothing */ }
+            _ => { /* do nothing too */ }
+        }
+    }
+
+    fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
+        let ptr = self.allocate(layout)?;
+        if layout.size() > 0 {
+            unsafe {
+                ptr.as_mut_ptr().write_bytes(0, layout.size());
+            }
+        }
+        Ok(ptr)
+    }
+
+    unsafe fn grow(
+        &self,
+        ptr: NonNull<u8>,
+        old_layout: Layout,
+        new_layout: Layout,
+    ) -> Result<NonNull<[u8]>, AllocError> {
+        debug_assert!(
+            new_layout.size() >= old_layout.size(),
+            "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
+        );
+
+        let new_ptr = self.allocate(new_layout)?;
+        if new_layout.size() > 0 {
+            new_ptr.as_mut_ptr().copy_from_nonoverlapping(ptr.as_ptr(), old_layout.size());
+            self.deallocate(ptr, old_layout);
+        }
+        Ok(new_ptr)
+    }
+
+    unsafe fn grow_zeroed(
+        &self,
+        ptr: NonNull<u8>,
+        old_layout: Layout,
+        new_layout: Layout,
+    ) -> Result<NonNull<[u8]>, AllocError> {
+        let new_ptr = self.grow(ptr, old_layout, new_layout)?;
+        if new_layout.size() > 0 {
+            let old_size = old_layout.size();
+            let new_size = new_layout.size();
+            let raw_ptr = new_ptr.as_mut_ptr();
+            raw_ptr.add(old_size).write_bytes(0, new_size - old_size);
+        }
+        Ok(new_ptr)
+    }
+
+    unsafe fn shrink(
+        &self,
+        ptr: NonNull<u8>,
+        old_layout: Layout,
+        new_layout: Layout,
+    ) -> Result<NonNull<[u8]>, AllocError> {
+        debug_assert!(
+            new_layout.size() <= old_layout.size(),
+            "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
+        );
+
+        let new_ptr = self.allocate(new_layout)?;
+        if new_layout.size() > 0 {
+            new_ptr.as_mut_ptr().copy_from_nonoverlapping(ptr.as_ptr(), new_layout.size());
+            self.deallocate(ptr, old_layout);
+        }
+        Ok(new_ptr)
+    }
+
+    fn by_ref(&self) -> &Self
+    where
+        Self: Sized,
+    {
+        self
+    }
+}
+
+#[test]
+fn const_box() {
+    const VALUE: u32 = {
+        let mut boxed = Box::new_in(1u32, ConstAllocator);
+        assert!(*boxed == 1);
+
+        *boxed = 42;
+        assert!(*boxed == 42);
+
+        *boxed
+    };
+
+    assert!(VALUE == 42);
+}
diff --git a/library/alloc/tests/lib.rs b/library/alloc/tests/lib.rs
index 68e48348b07..eec24a5c3f7 100644
--- a/library/alloc/tests/lib.rs
+++ b/library/alloc/tests/lib.rs
@@ -1,8 +1,19 @@
 #![feature(allocator_api)]
+#![feature(alloc_layout_extra)]
 #![feature(assert_matches)]
 #![feature(box_syntax)]
 #![feature(cow_is_borrowed)]
+#![feature(const_box)]
+#![feature(const_convert)]
 #![feature(const_cow_is_borrowed)]
+#![feature(const_heap)]
+#![feature(const_intrinsic_copy)]
+#![feature(const_mut_refs)]
+#![feature(const_nonnull_slice_from_raw_parts)]
+#![feature(const_ptr_offset)]
+#![feature(const_ptr_write)]
+#![feature(const_try)]
+#![feature(core_intrinsics)]
 #![feature(drain_filter)]
 #![feature(exact_size_is_empty)]
 #![feature(new_uninit)]
@@ -26,6 +37,7 @@
 #![feature(const_default_impls)]
 #![feature(const_trait_impl)]
 #![feature(const_str_from_utf8)]
+#![feature(nonnull_slice_from_raw_parts)]
 
 use std::collections::hash_map::DefaultHasher;
 use std::hash::{Hash, Hasher};
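
For readers unfamiliar with `const_eval_select`, here is a minimal standalone sketch, not part of the patch, of the dispatch pattern the constified `handle_alloc_error` above relies on: the intrinsic takes an argument tuple plus two functions and runs the first during compile-time evaluation and the second at runtime. It assumes a nightly toolchain from roughly the era of this patch with the listed features; the names `double`, `ct_double`, and `rt_double` are invented for this illustration and do not appear in the patch.

// Sketch only; assumes nightly with these features, mirroring the call shape
// `unsafe { core::intrinsics::const_eval_select((layout,), ct_error, rt_error) }` above.
#![feature(core_intrinsics, const_eval_select)]

use core::intrinsics::const_eval_select;

// Compile-time path: must be a `const fn`, so it can only use const-evaluable operations.
const fn ct_double(x: u32) -> u32 {
    x * 2
}

// Runtime path: free to use non-const machinery (the patched `handle_alloc_error`
// calls the registered `__rust_alloc_error_handler` here, while its const path panics).
fn rt_double(x: u32) -> u32 {
    x * 2
}

// A `const fn` front end that dispatches on the evaluation context.
pub const fn double(x: u32) -> u32 {
    // SAFETY: both functions take the same argument tuple and return the same type;
    // keeping their observable behavior identical is what the intrinsic asks of callers.
    unsafe { const_eval_select((x,), ct_double, rt_double) }
}

fn main() {
    const AT_COMPILE_TIME: u32 = double(21); // evaluated by the const path
    assert_eq!(AT_COMPILE_TIME, 42);
    assert_eq!(double(21), 42); // evaluated by the runtime path
}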