Commit 8ebd675
Generalize {Rc,Arc}::make_mut() to unsized types.
This requires introducing a new internal type `RcUninit` (and its `Arc` counterpart, `ArcUninit`), which can own an `RcBox<T>` without requiring it to be initialized, sized, or a slice.
1 parent c3d98de commit 8ebd675
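For orientation before the diffs: this is the capability the relaxed bound unlocks. A minimal sketch, assuming this change is in place; the `Rc<[i32]>` usage mirrors the new tests added below:

use std::rc::Rc;

fn main() {
    // `[i32]` is unsized, so this did not compile before this commit:
    // make_mut() previously required `T: Clone`, which implies `T: Sized`.
    let mut data: Rc<[i32]> = Rc::new([1, 2, 3]);
    let other = Rc::clone(&data);
    Rc::make_mut(&mut data)[0] = 99; // clones the slice, since `other` exists
    assert_eq!(*data, [99, 2, 3]);
    assert_eq!(*other, [1, 2, 3]);
}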

File tree: 5 files changed, +224 −26

library/alloc/src/lib.rs

Lines changed: 1 addition & 0 deletions
@@ -141,6 +141,7 @@
 #![feature(maybe_uninit_uninit_array_transpose)]
 #![feature(pattern)]
 #![feature(pointer_byte_offsets)]
+#![feature(ptr_from_ref)]
 #![feature(ptr_internals)]
 #![feature(ptr_metadata)]
 #![feature(ptr_sub_ptr)]

library/alloc/src/rc.rs

Lines changed: 95 additions & 14 deletions
@@ -258,8 +258,7 @@ use core::intrinsics::abort;
 use core::iter;
 use core::marker::{PhantomData, Unsize};
 #[cfg(not(no_global_oom_handling))]
-use core::mem::size_of_val;
-use core::mem::{self, align_of_val_raw, forget, ManuallyDrop};
+use core::mem::{self, align_of_val_raw, forget, size_of_val, ManuallyDrop};
 use core::ops::{CoerceUnsized, Deref, DerefMut, DispatchFromDyn, Receiver};
 use core::panic::{RefUnwindSafe, UnwindSafe};
 #[cfg(not(no_global_oom_handling))]
@@ -1651,7 +1650,7 @@ impl<T: ?Sized, A: Allocator> Rc<T, A> {
     }
 }
 
-impl<T: Clone, A: Allocator + Clone> Rc<T, A> {
+impl<T: ?Sized + CloneToUninit, A: Allocator + Clone> Rc<T, A> {
     /// Makes a mutable reference into the given `Rc`.
     ///
     /// If there are other `Rc` pointers to the same allocation, then `make_mut` will
@@ -1706,27 +1705,47 @@ impl<T: Clone, A: Allocator + Clone> Rc<T, A> {
     #[inline]
     #[stable(feature = "rc_unique", since = "1.4.0")]
     pub fn make_mut(this: &mut Self) -> &mut T {
+        let size_of_val = mem::size_of_val::<T>(&**this);
+
         if Rc::strong_count(this) != 1 {
             // Gotta clone the data, there are other Rcs.
-            // Pre-allocate memory to allow writing the cloned value directly.
-            let mut rc = Self::new_uninit_in(this.alloc.clone());
-            unsafe {
-                let data = Rc::get_mut_unchecked(&mut rc);
-                (**this).clone_to_uninit(data.as_mut_ptr());
-                *this = rc.assume_init();
-            }
+
+            let this_data_ref: &T = &**this;
+            // `in_progress` drops the allocation if we panic before finishing initializing it.
+            let mut in_progress: RcUninit<T, A> = RcUninit::new(this_data_ref, this.alloc.clone());
+
+            // Initialize with clone of this.
+            let initialized_clone = unsafe {
+                // Clone. If the clone panics, `in_progress` will be dropped and clean up.
+                this_data_ref.clone_to_uninit(in_progress.data_ptr());
+                // Cast type of pointer, now that it is initialized.
+                in_progress.into_rc()
+            };
+
+            // Replace `this` with newly constructed Rc.
+            *this = initialized_clone;
         } else if Rc::weak_count(this) != 0 {
             // Can just steal the data, all that's left is Weaks
-            let mut rc = Self::new_uninit_in(this.alloc.clone());
+
+            // We don't need panic-protection like the above branch does, but we might as well
+            // use the same mechanism.
+            let mut in_progress: RcUninit<T, A> = RcUninit::new(&**this, this.alloc.clone());
             unsafe {
-                let data = Rc::get_mut_unchecked(&mut rc);
-                data.as_mut_ptr().copy_from_nonoverlapping(&**this, 1);
+                // Initialize `in_progress` with move of **this.
+                // We have to express this in terms of bytes because `T: ?Sized`; there is no
+                // operation that just copies a value based on its `size_of_val()`.
+                ptr::copy_nonoverlapping(
+                    ptr::from_ref(&**this).cast::<u8>(),
+                    in_progress.data_ptr().cast::<u8>(),
+                    size_of_val,
+                );
 
                 this.inner().dec_strong();
                 // Remove implicit strong-weak ref (no need to craft a fake
                 // Weak here -- we know other Weaks can clean up for us)
                 this.inner().dec_weak();
-                ptr::write(this, rc.assume_init());
+                // Replace `this` with newly constructed Rc that has the moved data.
+                ptr::write(this, in_progress.into_rc());
             }
         }
         // This unsafety is ok because we're guaranteed that the pointer
@@ -1736,7 +1755,9 @@ impl<T: Clone, A: Allocator + Clone> Rc<T, A> {
         // reference to the allocation.
         unsafe { &mut this.ptr.as_mut().value }
     }
+}
 
+impl<T: Clone, A: Allocator + Clone> Rc<T, A> {
     /// If we have the only reference to `T` then unwrap it. Otherwise, clone `T` and return the
     /// clone.
     ///
@@ -3360,6 +3381,66 @@ fn data_offset_align(align: usize) -> usize {
     layout.size() + layout.padding_needed_for(align)
 }
 
+/// A unique owning pointer to a [`RcBox`] **that does not imply the contents are initialized,**
+/// but will deallocate it (without dropping the value) when dropped.
+///
+/// This is a helper for [`Rc::make_mut()`] to ensure correct cleanup on panic.
+struct RcUninit<T: ?Sized, A: Allocator> {
+    ptr: NonNull<RcBox<T>>,
+    layout_for_value: Layout,
+    alloc: Option<A>,
+}
+
+impl<T: ?Sized, A: Allocator> RcUninit<T, A> {
+    /// Allocate a RcBox with layout suitable to contain `for_value` or a clone of it.
+    #[cfg(not(no_global_oom_handling))]
+    fn new(for_value: &T, alloc: A) -> RcUninit<T, A> {
+        let layout = Layout::for_value(for_value);
+        let ptr = unsafe {
+            Rc::allocate_for_layout(
+                layout,
+                |layout_for_rcbox| alloc.allocate(layout_for_rcbox),
+                |mem| mem.with_metadata_of(ptr::from_ref(for_value) as *const RcBox<T>),
+            )
+        };
+        Self { ptr: NonNull::new(ptr).unwrap(), layout_for_value: layout, alloc: Some(alloc) }
+    }
+
+    /// Returns the pointer to be written into to initialize the [`Rc`].
+    fn data_ptr(&mut self) -> *mut T {
+        let offset = data_offset_align(self.layout_for_value.align());
+        unsafe { self.ptr.as_ptr().byte_add(offset) as *mut T }
+    }
+
+    /// Upgrade this into a normal [`Rc`].
+    ///
+    /// # Safety
+    ///
+    /// The data must have been initialized (by writing to [`Self::data_ptr()`]).
+    unsafe fn into_rc(mut self) -> Rc<T, A> {
+        let ptr = self.ptr;
+        let alloc = self.alloc.take().unwrap();
+        mem::forget(self);
+        // SAFETY: The pointer is valid as per `RcUninit::new`, and the caller is responsible
+        // for having initialized the data.
+        unsafe { Rc::from_ptr_in(ptr.as_ptr(), alloc) }
+    }
+}
+
+impl<T: ?Sized, A: Allocator> Drop for RcUninit<T, A> {
+    fn drop(&mut self) {
+        // SAFETY:
+        // * new() produced a pointer safe to deallocate.
+        // * We own the pointer unless into_rc() was called, which forgets us.
+        unsafe {
+            self.alloc
+                .take()
+                .unwrap()
+                .deallocate(self.ptr.cast(), rcbox_layout_for_value_layout(self.layout_for_value));
+        }
+    }
+}
+
 /// A uniquely owned `Rc`
 ///
 /// This represents an `Rc` that is known to be uniquely owned -- that is, have exactly one strong
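The reason `RcUninit` exists is the panic path: if `clone_to_uninit()` unwinds partway through initialization, `RcUninit`'s `Drop` deallocates the half-initialized `RcBox` without dropping its value. A hypothetical sketch of that scenario, not part of the commit (`PanicOnClone` is an illustrative type, and cleaning up any elements already cloned is `clone_to_uninit`'s own job):

use std::panic::{catch_unwind, AssertUnwindSafe};
use std::rc::Rc;

struct PanicOnClone;

impl Clone for PanicOnClone {
    fn clone(&self) -> Self {
        panic!("clone failed");
    }
}

fn main() {
    let mut a: Rc<[PanicOnClone]> = Rc::new([PanicOnClone]);
    let b = Rc::clone(&a); // a second strong ref forces the clone branch
    let result = catch_unwind(AssertUnwindSafe(|| {
        let _ = Rc::make_mut(&mut a); // unwinds inside the element clone
    }));
    // The panic propagates, `a` and `b` are untouched, and the freshly
    // allocated RcBox has been deallocated rather than leaked.
    assert!(result.is_err());
    drop((a, b));
}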

library/alloc/src/rc/tests.rs

Lines changed: 18 additions & 0 deletions
@@ -321,6 +321,24 @@ fn test_cowrc_clone_weak() {
     assert!(cow1_weak.upgrade().is_none());
 }
 
+/// This is similar to the doc-test for `Rc::make_mut()`, but on an unsized type (slice).
+#[test]
+fn test_cowrc_unsized() {
+    use std::rc::Rc;
+
+    let mut data: Rc<[i32]> = Rc::new([10, 20, 30]);
+
+    Rc::make_mut(&mut data)[0] += 1; // Won't clone anything
+    let mut other_data = Rc::clone(&data); // Won't clone inner data
+    Rc::make_mut(&mut data)[1] += 1; // Clones inner data
+    Rc::make_mut(&mut data)[2] += 1; // Won't clone anything
+    Rc::make_mut(&mut other_data)[0] *= 10; // Won't clone anything
+
+    // Now `data` and `other_data` point to different allocations.
+    assert_eq!(*data, [11, 21, 31]);
+    assert_eq!(*other_data, [110, 20, 30]);
+}
+
 #[test]
 fn test_show() {
     let foo = Rc::new(75);
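Slices are not the only unsized type this serves; any `T: ?Sized + CloneToUninit` qualifies. A sketch assuming a `CloneToUninit` impl for `str` is also available (the commit's tests only cover slices):

use std::rc::Rc;

fn main() {
    let mut name: Rc<str> = Rc::from("hello");
    let alias = Rc::clone(&name);
    // `alias` holds a second strong reference, so this clones the str
    // into a fresh allocation before mutating it.
    Rc::make_mut(&mut name).make_ascii_uppercase();
    assert_eq!(&*name, "HELLO");
    assert_eq!(&*alias, "hello");
}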

library/alloc/src/sync.rs

Lines changed: 92 additions & 12 deletions
@@ -2055,7 +2055,7 @@ impl<T: ?Sized, A: Allocator> Deref for Arc<T, A> {
 #[unstable(feature = "receiver_trait", issue = "none")]
 impl<T: ?Sized> Receiver for Arc<T> {}
 
-impl<T: Clone, A: Allocator + Clone> Arc<T, A> {
+impl<T: ?Sized + CloneToUninit, A: Allocator + Clone> Arc<T, A> {
     /// Makes a mutable reference into the given `Arc`.
     ///
     /// If there are other `Arc` pointers to the same allocation, then `make_mut` will
@@ -2110,6 +2110,8 @@ impl<T: Clone, A: Allocator + Clone> Arc<T, A> {
     #[inline]
     #[stable(feature = "arc_unique", since = "1.4.0")]
     pub fn make_mut(this: &mut Self) -> &mut T {
+        let size_of_val = mem::size_of_val::<T>(&**this);
+
         // Note that we hold both a strong reference and a weak reference.
         // Thus, releasing our strong reference only will not, by itself, cause
         // the memory to be deallocated.
@@ -2120,13 +2122,19 @@ impl<T: Clone, A: Allocator + Clone> Arc<T, A> {
         // deallocated.
         if this.inner().strong.compare_exchange(1, 0, Acquire, Relaxed).is_err() {
             // Another strong pointer exists, so we must clone.
-            // Pre-allocate memory to allow writing the cloned value directly.
-            let mut arc = Self::new_uninit_in(this.alloc.clone());
-            unsafe {
-                let data = Arc::get_mut_unchecked(&mut arc);
-                (**this).clone_to_uninit(data.as_mut_ptr());
-                *this = arc.assume_init();
-            }
+
+            let this_data_ref: &T = &**this;
+            // `in_progress` drops the allocation if we panic before finishing initializing it.
+            let mut in_progress: ArcUninit<T, A> =
+                ArcUninit::new(this_data_ref, this.alloc.clone());
+
+            let initialized_clone = unsafe {
+                // Clone. If the clone panics, `in_progress` will be dropped and clean up.
+                this_data_ref.clone_to_uninit(in_progress.data_ptr());
+                // Cast type of pointer, now that it is initialized.
+                in_progress.into_arc()
+            };
+            *this = initialized_clone;
         } else if this.inner().weak.load(Relaxed) != 1 {
             // Relaxed suffices in the above because this is fundamentally an
             // optimization: we are always racing with weak pointers being
@@ -2145,11 +2153,21 @@ impl<T: Clone, A: Allocator + Clone> Arc<T, A> {
             let _weak = Weak { ptr: this.ptr, alloc: this.alloc.clone() };
 
             // Can just steal the data, all that's left is Weaks
-            let mut arc = Self::new_uninit_in(this.alloc.clone());
+            //
+            // We don't need panic-protection like the above branch does, but we might as well
+            // use the same mechanism.
+            let mut in_progress: ArcUninit<T, A> = ArcUninit::new(&**this, this.alloc.clone());
             unsafe {
-                let data = Arc::get_mut_unchecked(&mut arc);
-                data.as_mut_ptr().copy_from_nonoverlapping(&**this, 1);
-                ptr::write(this, arc.assume_init());
+                // Initialize `in_progress` with move of **this.
+                // We have to express this in terms of bytes because `T: ?Sized`; there is no
+                // operation that just copies a value based on its `size_of_val()`.
+                ptr::copy_nonoverlapping(
+                    ptr::from_ref(&**this).cast::<u8>(),
+                    in_progress.data_ptr().cast::<u8>(),
+                    size_of_val,
+                );
+
+                ptr::write(this, in_progress.into_arc());
             }
         } else {
             // We were the sole reference of either kind; bump back up the
@@ -2161,7 +2179,9 @@ impl<T: Clone, A: Allocator + Clone> Arc<T, A> {
         // either unique to begin with, or became one upon cloning the contents.
         unsafe { Self::get_mut_unchecked(this) }
     }
+}
 
+impl<T: Clone, A: Allocator + Clone> Arc<T, A> {
     /// If we have the only reference to `T` then unwrap it. Otherwise, clone `T` and return the
     /// clone.
     ///
@@ -3557,6 +3577,66 @@ fn data_offset_align(align: usize) -> usize {
     layout.size() + layout.padding_needed_for(align)
 }
 
+/// A unique owning pointer to a [`ArcInner`] **that does not imply the contents are initialized,**
+/// but will deallocate it (without dropping the value) when dropped.
+///
+/// This is a helper for [`Arc::make_mut()`] to ensure correct cleanup on panic.
+struct ArcUninit<T: ?Sized, A: Allocator> {
+    ptr: NonNull<ArcInner<T>>,
+    layout_for_value: Layout,
+    alloc: Option<A>,
+}
+
+impl<T: ?Sized, A: Allocator> ArcUninit<T, A> {
+    /// Allocate a ArcInner with layout suitable to contain `for_value` or a clone of it.
+    #[cfg(not(no_global_oom_handling))]
+    fn new(for_value: &T, alloc: A) -> ArcUninit<T, A> {
+        let layout = Layout::for_value(for_value);
+        let ptr = unsafe {
+            Arc::allocate_for_layout(
+                layout,
+                |layout_for_arcinner| alloc.allocate(layout_for_arcinner),
+                |mem| mem.with_metadata_of(ptr::from_ref(for_value) as *const ArcInner<T>),
+            )
+        };
+        Self { ptr: NonNull::new(ptr).unwrap(), layout_for_value: layout, alloc: Some(alloc) }
+    }
+
+    /// Returns the pointer to be written into to initialize the [`Arc`].
+    fn data_ptr(&mut self) -> *mut T {
+        let offset = data_offset_align(self.layout_for_value.align());
+        unsafe { self.ptr.as_ptr().byte_add(offset) as *mut T }
+    }
+
+    /// Upgrade this into a normal [`Arc`].
+    ///
+    /// # Safety
+    ///
+    /// The data must have been initialized (by writing to [`Self::data_ptr()`]).
+    unsafe fn into_arc(mut self) -> Arc<T, A> {
+        let ptr = self.ptr;
+        let alloc = self.alloc.take().unwrap();
+        mem::forget(self);
+        // SAFETY: The pointer is valid as per `ArcUninit::new`, and the caller is responsible
+        // for having initialized the data.
+        unsafe { Arc::from_ptr_in(ptr.as_ptr(), alloc) }
+    }
+}
+
+impl<T: ?Sized, A: Allocator> Drop for ArcUninit<T, A> {
+    fn drop(&mut self) {
+        // SAFETY:
+        // * new() produced a pointer safe to deallocate.
+        // * We own the pointer unless into_arc() was called, which forgets us.
+        unsafe {
+            self.alloc.take().unwrap().deallocate(
+                self.ptr.cast(),
+                arcinner_layout_for_value_layout(self.layout_for_value),
+            );
+        }
+    }
+}
+
 #[stable(feature = "arc_error", since = "1.52.0")]
 impl<T: core::error::Error + ?Sized> core::error::Error for Arc<T> {
     #[allow(deprecated, deprecated_in_future)]
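The weak-only branch above has to move the value between allocations as raw bytes, since `ptr::read` has no unsized equivalent. A standalone illustration of that byte-wise move under simplifying assumptions (a `Copy` element type and a plain allocation instead of an `ArcInner` header):

use std::alloc::{alloc, dealloc, Layout};
use std::{mem, ptr};

fn main() {
    let src: Box<[u32]> = vec![1, 2, 3].into_boxed_slice();
    let size = mem::size_of_val(&*src); // byte size known only at runtime
    let layout = Layout::for_value(&*src);
    unsafe {
        let dst = alloc(layout).cast::<u32>();
        assert!(!dst.is_null());
        // The unsized analogue of a typed copy: move size_of_val() raw bytes.
        ptr::copy_nonoverlapping(ptr::from_ref(&*src).cast::<u8>(), dst.cast::<u8>(), size);
        // Reattach the metadata (the element count) to get a fat pointer back.
        let moved: *mut [u32] = ptr::slice_from_raw_parts_mut(dst, src.len());
        assert_eq!(*moved, [1, 2, 3]);
        dealloc(dst.cast::<u8>(), layout);
    }
    // `u32` is Copy, so letting `src` drop normally is fine here; with Drop
    // types the source must not be dropped after the byte move, which is what
    // make_mut's dec_strong/dec_weak bookkeeping takes care of.
}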

library/alloc/tests/arc.rs

Lines changed: 18 additions & 0 deletions
@@ -210,3 +210,21 @@ fn weak_may_dangle() {
     // `val` dropped here while still borrowed
     // borrow might be used here, when `val` is dropped and runs the `Drop` code for type `std::sync::Weak`
 }
+
+/// This is similar to the doc-test for `Arc::make_mut()`, but on an unsized type (slice).
+#[test]
+fn make_mut_unsized() {
+    use alloc::sync::Arc;
+
+    let mut data: Arc<[i32]> = Arc::new([10, 20, 30]);
+
+    Arc::make_mut(&mut data)[0] += 1; // Won't clone anything
+    let mut other_data = Arc::clone(&data); // Won't clone inner data
+    Arc::make_mut(&mut data)[1] += 1; // Clones inner data
+    Arc::make_mut(&mut data)[2] += 1; // Won't clone anything
+    Arc::make_mut(&mut other_data)[0] *= 10; // Won't clone anything
+
+    // Now `data` and `other_data` point to different allocations.
+    assert_eq!(*data, [11, 21, 31]);
+    assert_eq!(*other_data, [110, 20, 30]);
+}
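Since `Arc` is the thread-safe counterpart, the same clone-on-write behavior is observable across threads. A usage sketch, not part of the commit:

use std::sync::Arc;
use std::thread;

fn main() {
    let mut data: Arc<[i32]> = Arc::new([1, 2, 3]);
    let reader = Arc::clone(&data);
    let handle = thread::spawn(move || reader.iter().sum::<i32>());
    // If the reader thread still holds its strong reference, make_mut clones
    // into a fresh allocation; either way the reader can only ever observe
    // the original values.
    Arc::make_mut(&mut data)[0] = 100;
    assert_eq!(handle.join().unwrap(), 6);
    assert_eq!(*data, [100, 2, 3]);
}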
