
Commit 88c3db5

Generalize {Rc,Arc}::make_mut() to unsized types.
This requires introducing a new internal type `RcUninit` (and `ArcUninit`), which can own an `RcBox<T>` without requiring it to be initialized, sized, or a slice. This is similar to `UniqueRc`, but `UniqueRc` doesn't support the allocator parameter, and there is no `UniqueArc`.
1 parent a9a4830 commit 88c3db5
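
To make the effect of the change concrete, here is a minimal usage sketch (not part of the diff) of `Rc::make_mut` on an unsized pointee, assuming a toolchain in which this commit has landed:

use std::rc::Rc;

fn main() {
    // `Rc<str>` is unsized, so `make_mut` was previously unavailable here.
    let mut text: Rc<str> = Rc::from("hello");
    let shared = Rc::clone(&text);

    // Another strong reference exists, so `make_mut` clones the string data
    // into a fresh allocation before handing out `&mut str`.
    Rc::make_mut(&mut text).make_ascii_uppercase();

    assert_eq!(&*text, "HELLO");
    assert_eq!(&*shared, "hello"); // the original allocation is untouched
}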

File tree

4 files changed: +229 -26 lines changed

library/alloc/src/rc.rs
library/alloc/src/rc/tests.rs
library/alloc/src/sync.rs
library/alloc/tests/arc.rs

library/alloc/src/rc.rs

Lines changed: 99 additions & 13 deletions
@@ -1749,7 +1749,8 @@ impl<T: ?Sized, A: Allocator> Rc<T, A> {
     }
 }
 
-impl<T: Clone, A: Allocator + Clone> Rc<T, A> {
+#[cfg(not(no_global_oom_handling))]
+impl<T: ?Sized + CloneToUninit, A: Allocator + Clone> Rc<T, A> {
     /// Makes a mutable reference into the given `Rc`.
     ///
     /// If there are other `Rc` pointers to the same allocation, then `make_mut` will
@@ -1800,31 +1801,52 @@ impl<T: Clone, A: Allocator + Clone> Rc<T, A> {
     /// assert!(76 == *data);
     /// assert!(weak.upgrade().is_none());
     /// ```
-    #[cfg(not(no_global_oom_handling))]
     #[inline]
     #[stable(feature = "rc_unique", since = "1.4.0")]
     pub fn make_mut(this: &mut Self) -> &mut T {
+        let size_of_val = size_of_val::<T>(&**this);
+
         if Rc::strong_count(this) != 1 {
             // Gotta clone the data, there are other Rcs.
-            // Pre-allocate memory to allow writing the cloned value directly.
-            let mut rc = Self::new_uninit_in(this.alloc.clone());
-            unsafe {
-                let data = Rc::get_mut_unchecked(&mut rc);
-                (**this).clone_to_uninit(data.as_mut_ptr());
-                *this = rc.assume_init();
-            }
+
+            let this_data_ref: &T = &**this;
+            // `in_progress` drops the allocation if we panic before finishing initializing it.
+            let mut in_progress: UniqueRcUninit<T, A> =
+                UniqueRcUninit::new(this_data_ref, this.alloc.clone());
+
+            // Initialize with clone of this.
+            let initialized_clone = unsafe {
+                // Clone. If the clone panics, `in_progress` will be dropped and clean up.
+                this_data_ref.clone_to_uninit(in_progress.data_ptr());
+                // Cast type of pointer, now that it is initialized.
+                in_progress.into_rc()
+            };
+
+            // Replace `this` with newly constructed Rc.
+            *this = initialized_clone;
         } else if Rc::weak_count(this) != 0 {
             // Can just steal the data, all that's left is Weaks
-            let mut rc = Self::new_uninit_in(this.alloc.clone());
+
+            // We don't need panic-protection like the above branch does, but we might as well
+            // use the same mechanism.
+            let mut in_progress: UniqueRcUninit<T, A> =
+                UniqueRcUninit::new(&**this, this.alloc.clone());
             unsafe {
-                let data = Rc::get_mut_unchecked(&mut rc);
-                data.as_mut_ptr().copy_from_nonoverlapping(&**this, 1);
+                // Initialize `in_progress` with move of **this.
+                // We have to express this in terms of bytes because `T: ?Sized`; there is no
+                // operation that just copies a value based on its `size_of_val()`.
+                ptr::copy_nonoverlapping(
+                    ptr::from_ref(&**this).cast::<u8>(),
+                    in_progress.data_ptr().cast::<u8>(),
+                    size_of_val,
+                );
 
                 this.inner().dec_strong();
                 // Remove implicit strong-weak ref (no need to craft a fake
                 // Weak here -- we know other Weaks can clean up for us)
                 this.inner().dec_weak();
-                ptr::write(this, rc.assume_init());
+                // Replace `this` with newly constructed Rc that has the moved data.
+                ptr::write(this, in_progress.into_rc());
             }
         }
         // This unsafety is ok because we're guaranteed that the pointer
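
The `else if` branch above (one strong reference, some weak references) moves the value byte-for-byte via `ptr::copy_nonoverlapping` rather than cloning it, and the old allocation is left behind for the remaining `Weak`s, which can no longer upgrade. A small illustration of that behavior, again assuming the change is available (this example is not part of the diff):

use std::rc::{Rc, Weak};

fn main() {
    let mut data: Rc<[i32]> = Rc::new([1, 2, 3]);
    let weak: Weak<[i32]> = Rc::downgrade(&data);

    // Only weak references remain besides `data`, so `make_mut` takes the
    // "steal the data" path: the slice is moved into a new allocation without
    // invoking `Clone`, and the weak pointer is disassociated.
    Rc::make_mut(&mut data)[0] = 100;

    assert_eq!(*data, [100, 2, 3]);
    assert!(weak.upgrade().is_none());
}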
@@ -3686,3 +3708,67 @@ unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for UniqueRc<T, A> {
         }
     }
 }
+
+/// A unique owning pointer to a [`RcBox`] **that does not imply the contents are initialized,**
+/// but will deallocate it (without dropping the value) when dropped.
+///
+/// This is a helper for [`Rc::make_mut()`] to ensure correct cleanup on panic.
+/// It is nearly a duplicate of `UniqueRc<MaybeUninit<T>, A>` except that it allows `T: !Sized`,
+/// which `MaybeUninit` does not.
+#[cfg(not(no_global_oom_handling))]
+struct UniqueRcUninit<T: ?Sized, A: Allocator> {
+    ptr: NonNull<RcBox<T>>,
+    layout_for_value: Layout,
+    alloc: Option<A>,
+}
+
+#[cfg(not(no_global_oom_handling))]
+impl<T: ?Sized, A: Allocator> UniqueRcUninit<T, A> {
+    /// Allocate a RcBox with layout suitable to contain `for_value` or a clone of it.
+    fn new(for_value: &T, alloc: A) -> UniqueRcUninit<T, A> {
+        let layout = Layout::for_value(for_value);
+        let ptr = unsafe {
+            Rc::allocate_for_layout(
+                layout,
+                |layout_for_rcbox| alloc.allocate(layout_for_rcbox),
+                |mem| mem.with_metadata_of(ptr::from_ref(for_value) as *const RcBox<T>),
+            )
+        };
+        Self { ptr: NonNull::new(ptr).unwrap(), layout_for_value: layout, alloc: Some(alloc) }
+    }
+
+    /// Returns the pointer to be written into to initialize the [`Rc`].
+    fn data_ptr(&mut self) -> *mut T {
+        let offset = data_offset_align(self.layout_for_value.align());
+        unsafe { self.ptr.as_ptr().byte_add(offset) as *mut T }
+    }
+
+    /// Upgrade this into a normal [`Rc`].
+    ///
+    /// # Safety
+    ///
+    /// The data must have been initialized (by writing to [`Self::data_ptr()`]).
+    unsafe fn into_rc(mut self) -> Rc<T, A> {
+        let ptr = self.ptr;
+        let alloc = self.alloc.take().unwrap();
+        mem::forget(self);
+        // SAFETY: The pointer is valid as per `UniqueRcUninit::new`, and the caller is responsible
+        // for having initialized the data.
+        unsafe { Rc::from_ptr_in(ptr.as_ptr(), alloc) }
+    }
+}
+
+#[cfg(not(no_global_oom_handling))]
+impl<T: ?Sized, A: Allocator> Drop for UniqueRcUninit<T, A> {
+    fn drop(&mut self) {
+        // SAFETY:
+        // * new() produced a pointer safe to deallocate.
+        // * We own the pointer unless into_rc() was called, which forgets us.
+        unsafe {
+            self.alloc
+                .take()
+                .unwrap()
+                .deallocate(self.ptr.cast(), rcbox_layout_for_value_layout(self.layout_for_value));
+        }
+    }
+}
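
`UniqueRcUninit` is essentially a drop guard: it owns an uninitialized allocation and frees it, without running any destructor, unless ownership is explicitly released once initialization has finished. The standalone sketch below shows the same pattern against a plain heap allocation; `UninitGuard`, its `armed` flag, and `into_raw` are illustrative names for this sketch, not the library's internals (the real type releases ownership with `mem::forget` and an `Option<A>` allocator rather than a flag).

use std::alloc::{alloc, dealloc, Layout};
use std::ptr::NonNull;

struct UninitGuard {
    ptr: NonNull<u8>,
    layout: Layout,
    armed: bool,
}

impl UninitGuard {
    fn new(layout: Layout) -> Self {
        // SAFETY: `layout` has a nonzero size in this example.
        let ptr = NonNull::new(unsafe { alloc(layout) }).expect("allocation failed");
        UninitGuard { ptr, layout, armed: true }
    }

    /// The caller asserts the memory is now initialized and takes over ownership.
    fn into_raw(mut self) -> NonNull<u8> {
        self.armed = false;
        self.ptr
    }
}

impl Drop for UninitGuard {
    fn drop(&mut self) {
        if self.armed {
            // We still own a possibly-uninitialized allocation: free the memory
            // without dropping any value, as `UniqueRcUninit::drop` does.
            unsafe { dealloc(self.ptr.as_ptr(), self.layout) };
        }
    }
}

fn main() {
    let layout = Layout::new::<u64>();
    let guard = UninitGuard::new(layout);
    // If initialization (e.g. a `clone_to_uninit` call) panicked here,
    // dropping `guard` would release the allocation.
    unsafe { guard.ptr.as_ptr().cast::<u64>().write(42) };
    let raw = guard.into_raw();
    // From here on we are responsible for the now-initialized memory.
    unsafe {
        assert_eq!(raw.as_ptr().cast::<u64>().read(), 42);
        dealloc(raw.as_ptr(), layout);
    }
}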

library/alloc/src/rc/tests.rs

Lines changed: 18 additions & 0 deletions
@@ -316,6 +316,24 @@ fn test_cowrc_clone_weak() {
     assert!(cow1_weak.upgrade().is_none());
 }
 
+/// This is similar to the doc-test for `Rc::make_mut()`, but on an unsized type (slice).
+#[test]
+fn test_cowrc_unsized() {
+    use std::rc::Rc;
+
+    let mut data: Rc<[i32]> = Rc::new([10, 20, 30]);
+
+    Rc::make_mut(&mut data)[0] += 1; // Won't clone anything
+    let mut other_data = Rc::clone(&data); // Won't clone inner data
+    Rc::make_mut(&mut data)[1] += 1; // Clones inner data
+    Rc::make_mut(&mut data)[2] += 1; // Won't clone anything
+    Rc::make_mut(&mut other_data)[0] *= 10; // Won't clone anything
+
+    // Now `data` and `other_data` point to different allocations.
+    assert_eq!(*data, [11, 21, 31]);
+    assert_eq!(*other_data, [110, 20, 30]);
+}
+
 #[test]
 fn test_show() {
     let foo = Rc::new(75);

library/alloc/src/sync.rs

Lines changed: 94 additions & 13 deletions
@@ -2150,7 +2150,8 @@ unsafe impl<T: ?Sized, A: Allocator> DerefPure for Arc<T, A> {}
 #[unstable(feature = "receiver_trait", issue = "none")]
 impl<T: ?Sized> Receiver for Arc<T> {}
 
-impl<T: Clone, A: Allocator + Clone> Arc<T, A> {
+#[cfg(not(no_global_oom_handling))]
+impl<T: ?Sized + CloneToUninit, A: Allocator + Clone> Arc<T, A> {
     /// Makes a mutable reference into the given `Arc`.
     ///
     /// If there are other `Arc` pointers to the same allocation, then `make_mut` will
@@ -2201,10 +2202,11 @@ impl<T: Clone, A: Allocator + Clone> Arc<T, A> {
     /// assert!(76 == *data);
     /// assert!(weak.upgrade().is_none());
     /// ```
-    #[cfg(not(no_global_oom_handling))]
     #[inline]
     #[stable(feature = "arc_unique", since = "1.4.0")]
     pub fn make_mut(this: &mut Self) -> &mut T {
+        let size_of_val = mem::size_of_val::<T>(&**this);
+
         // Note that we hold both a strong reference and a weak reference.
         // Thus, releasing our strong reference only will not, by itself, cause
         // the memory to be deallocated.
@@ -2215,13 +2217,19 @@ impl<T: Clone, A: Allocator + Clone> Arc<T, A> {
         // deallocated.
         if this.inner().strong.compare_exchange(1, 0, Acquire, Relaxed).is_err() {
             // Another strong pointer exists, so we must clone.
-            // Pre-allocate memory to allow writing the cloned value directly.
-            let mut arc = Self::new_uninit_in(this.alloc.clone());
-            unsafe {
-                let data = Arc::get_mut_unchecked(&mut arc);
-                (**this).clone_to_uninit(data.as_mut_ptr());
-                *this = arc.assume_init();
-            }
+
+            let this_data_ref: &T = &**this;
+            // `in_progress` drops the allocation if we panic before finishing initializing it.
+            let mut in_progress: UniqueArcUninit<T, A> =
+                UniqueArcUninit::new(this_data_ref, this.alloc.clone());
+
+            let initialized_clone = unsafe {
+                // Clone. If the clone panics, `in_progress` will be dropped and clean up.
+                this_data_ref.clone_to_uninit(in_progress.data_ptr());
+                // Cast type of pointer, now that it is initialized.
+                in_progress.into_arc()
+            };
+            *this = initialized_clone;
         } else if this.inner().weak.load(Relaxed) != 1 {
             // Relaxed suffices in the above because this is fundamentally an
             // optimization: we are always racing with weak pointers being
@@ -2240,11 +2248,22 @@ impl<T: Clone, A: Allocator + Clone> Arc<T, A> {
             let _weak = Weak { ptr: this.ptr, alloc: this.alloc.clone() };
 
             // Can just steal the data, all that's left is Weaks
-            let mut arc = Self::new_uninit_in(this.alloc.clone());
+            //
+            // We don't need panic-protection like the above branch does, but we might as well
+            // use the same mechanism.
+            let mut in_progress: UniqueArcUninit<T, A> =
+                UniqueArcUninit::new(&**this, this.alloc.clone());
             unsafe {
-                let data = Arc::get_mut_unchecked(&mut arc);
-                data.as_mut_ptr().copy_from_nonoverlapping(&**this, 1);
-                ptr::write(this, arc.assume_init());
+                // Initialize `in_progress` with move of **this.
+                // We have to express this in terms of bytes because `T: ?Sized`; there is no
+                // operation that just copies a value based on its `size_of_val()`.
+                ptr::copy_nonoverlapping(
+                    ptr::from_ref(&**this).cast::<u8>(),
+                    in_progress.data_ptr().cast::<u8>(),
+                    size_of_val,
+                );
+
+                ptr::write(this, in_progress.into_arc());
             }
         } else {
             // We were the sole reference of either kind; bump back up the
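
The `Arc` version follows the same structure, but the uniqueness check is a compare-exchange on the strong count so it remains sound under concurrent access. A hedged usage sketch (not part of the diff, assuming `Arc::make_mut` on slices is available):

use std::sync::Arc;
use std::thread;

fn main() {
    let mut data: Arc<[i32]> = Arc::new([10, 20, 30]);
    let reader = Arc::clone(&data);

    let handle = thread::spawn(move || reader.iter().sum::<i32>());

    // If the spawned thread still holds its reference, the compare-exchange on
    // the strong count fails and `make_mut` clones the slice; otherwise it
    // mutates in place. Either way the reader's own view is unaffected.
    Arc::make_mut(&mut data)[0] = 0;

    assert_eq!(handle.join().unwrap(), 60);
    assert_eq!(*data, [0, 20, 30]);
}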
@@ -3809,6 +3828,68 @@ fn data_offset_align(align: usize) -> usize {
     layout.size() + layout.padding_needed_for(align)
 }
 
+/// A unique owning pointer to a [`ArcInner`] **that does not imply the contents are initialized,**
+/// but will deallocate it (without dropping the value) when dropped.
+///
+/// This is a helper for [`Arc::make_mut()`] to ensure correct cleanup on panic.
+#[cfg(not(no_global_oom_handling))]
+struct UniqueArcUninit<T: ?Sized, A: Allocator> {
+    ptr: NonNull<ArcInner<T>>,
+    layout_for_value: Layout,
+    alloc: Option<A>,
+}
+
+#[cfg(not(no_global_oom_handling))]
+impl<T: ?Sized, A: Allocator> UniqueArcUninit<T, A> {
+    /// Allocate a ArcInner with layout suitable to contain `for_value` or a clone of it.
+    fn new(for_value: &T, alloc: A) -> UniqueArcUninit<T, A> {
+        let layout = Layout::for_value(for_value);
+        let ptr = unsafe {
+            Arc::allocate_for_layout(
+                layout,
+                |layout_for_arcinner| alloc.allocate(layout_for_arcinner),
+                |mem| mem.with_metadata_of(ptr::from_ref(for_value) as *const ArcInner<T>),
+            )
+        };
+        Self { ptr: NonNull::new(ptr).unwrap(), layout_for_value: layout, alloc: Some(alloc) }
+    }
+
+    /// Returns the pointer to be written into to initialize the [`Arc`].
+    fn data_ptr(&mut self) -> *mut T {
+        let offset = data_offset_align(self.layout_for_value.align());
+        unsafe { self.ptr.as_ptr().byte_add(offset) as *mut T }
+    }
+
+    /// Upgrade this into a normal [`Arc`].
+    ///
+    /// # Safety
+    ///
+    /// The data must have been initialized (by writing to [`Self::data_ptr()`]).
+    unsafe fn into_arc(mut self) -> Arc<T, A> {
+        let ptr = self.ptr;
+        let alloc = self.alloc.take().unwrap();
+        mem::forget(self);
+        // SAFETY: The pointer is valid as per `UniqueArcUninit::new`, and the caller is responsible
+        // for having initialized the data.
+        unsafe { Arc::from_ptr_in(ptr.as_ptr(), alloc) }
+    }
+}
+
+#[cfg(not(no_global_oom_handling))]
+impl<T: ?Sized, A: Allocator> Drop for UniqueArcUninit<T, A> {
+    fn drop(&mut self) {
+        // SAFETY:
+        // * new() produced a pointer safe to deallocate.
+        // * We own the pointer unless into_arc() was called, which forgets us.
+        unsafe {
+            self.alloc.take().unwrap().deallocate(
+                self.ptr.cast(),
+                arcinner_layout_for_value_layout(self.layout_for_value),
+            );
+        }
+    }
+}
+
 #[stable(feature = "arc_error", since = "1.52.0")]
 impl<T: core::error::Error + ?Sized> core::error::Error for Arc<T> {
     #[allow(deprecated, deprecated_in_future)]

library/alloc/tests/arc.rs

Lines changed: 18 additions & 0 deletions
@@ -209,3 +209,21 @@ fn weak_may_dangle() {
     // `val` dropped here while still borrowed
     // borrow might be used here, when `val` is dropped and runs the `Drop` code for type `std::sync::Weak`
 }
+
+/// This is similar to the doc-test for `Arc::make_mut()`, but on an unsized type (slice).
+#[test]
+fn make_mut_unsized() {
+    use alloc::sync::Arc;
+
+    let mut data: Arc<[i32]> = Arc::new([10, 20, 30]);
+
+    Arc::make_mut(&mut data)[0] += 1; // Won't clone anything
+    let mut other_data = Arc::clone(&data); // Won't clone inner data
+    Arc::make_mut(&mut data)[1] += 1; // Clones inner data
+    Arc::make_mut(&mut data)[2] += 1; // Won't clone anything
+    Arc::make_mut(&mut other_data)[0] *= 10; // Won't clone anything
+
+    // Now `data` and `other_data` point to different allocations.
+    assert_eq!(*data, [11, 21, 31]);
+    assert_eq!(*other_data, [110, 20, 30]);
+}
