Generalize {Rc,Arc}::make_mut()
to unsized types. · model-checking/verify-rust-std@49d4fdb (original) (raw)
`@@ -2150,7 +2150,8 @@ unsafe impl<T: ?Sized, A: Allocator> DerefPure for Arc<T, A> {}
`
2150
2150
`#[unstable(feature = "receiver_trait", issue = "none")]
`
2151
2151
`impl<T: ?Sized> Receiver for Arc<T> {}
`
2152
2152
``
2153
``
`-
impl<T: Clone, A: Allocator + Clone> Arc<T, A> {
`
``
2153
`+
#[cfg(not(no_global_oom_handling))]
`
``
2154
`+
impl<T: ?Sized + CloneToUninit, A: Allocator + Clone> Arc<T, A> {
`
2154
2155
`` /// Makes a mutable reference into the given `Arc`.
``
2155
2156
`///
`
2156
2157
`` /// If there are other `Arc` pointers to the same allocation, then `make_mut` will
``
`@@ -2201,10 +2202,11 @@ impl<T: Clone, A: Allocator + Clone> Arc<T, A> {
`
2201
2202
`/// assert!(76 == *data);
`
2202
2203
`/// assert!(weak.upgrade().is_none());
`
2203
2204
```` /// ```
````
2204
``
`-
#[cfg(not(no_global_oom_handling))]
`
2205
2205
`#[inline]
`
2206
2206
`#[stable(feature = "arc_unique", since = "1.4.0")]
`
2207
2207
`pub fn make_mut(this: &mut Self) -> &mut T {
`
``
2208
`+
let size_of_val = mem::size_of_val::<T>(&**this);
`
``
2209
+
2208
2210
`// Note that we hold both a strong reference and a weak reference.
`
2209
2211
`// Thus, releasing our strong reference only will not, by itself, cause
`
2210
2212
`// the memory to be deallocated.
`
`@@ -2215,13 +2217,19 @@ impl<T: Clone, A: Allocator + Clone> Arc<T, A> {
`
2215
2217
`// deallocated.
`
2216
2218
`if this.inner().strong.compare_exchange(1, 0, Acquire, Relaxed).is_err() {
`
2217
2219
`// Another strong pointer exists, so we must clone.
`
2218
``
`-
// Pre-allocate memory to allow writing the cloned value directly.
`
2219
``
`-
let mut arc = Self::new_uninit_in(this.alloc.clone());
`
2220
``
`-
unsafe {
`
2221
``
`-
let data = Arc::get_mut_unchecked(&mut arc);
`
2222
``
`-
(**this).clone_to_uninit(data.as_mut_ptr());
`
2223
``
`-
*this = arc.assume_init();
`
2224
``
`-
}
`
``
2220
+
``
2221
`+
let this_data_ref: &T = &**this;
`
``
2222
`` +
// `in_progress` drops the allocation if we panic before finishing initializing it.
``
``
2223
`+
let mut in_progress: UniqueArcUninit<T, A> =
`
``
2224
`+
UniqueArcUninit::new(this_data_ref, this.alloc.clone());
`
``
2225
+
``
2226
`+
let initialized_clone = unsafe {
`
``
2227
`` +
// Clone. If the clone panics, in_progress
will be dropped and clean up.
``
``
2228
`+
this_data_ref.clone_to_uninit(in_progress.data_ptr());
`
``
2229
`+
// Cast type of pointer, now that it is initialized.
`
``
2230
`+
in_progress.into_arc()
`
``
2231
`+
};
`
``
2232
`+
*this = initialized_clone;
`
2225
2233
`} else if this.inner().weak.load(Relaxed) != 1 {
`
2226
2234
`// Relaxed suffices in the above because this is fundamentally an
`
2227
2235
`// optimization: we are always racing with weak pointers being
`
`@@ -2240,11 +2248,22 @@ impl<T: Clone, A: Allocator + Clone> Arc<T, A> {
`
2240
2248
`let _weak = Weak { ptr: this.ptr, alloc: this.alloc.clone() };
`
2241
2249
``
2242
2250
`// Can just steal the data, all that's left is Weaks
`
2243
``
`-
let mut arc = Self::new_uninit_in(this.alloc.clone());
`
``
2251
`+
//
`
``
2252
`+
// We don't need panic-protection like the above branch does, but we might as well
`
``
2253
`+
// use the same mechanism.
`
``
2254
`+
let mut in_progress: UniqueArcUninit<T, A> =
`
``
2255
`+
UniqueArcUninit::new(&**this, this.alloc.clone());
`
2244
2256
`unsafe {
`
2245
``
`-
let data = Arc::get_mut_unchecked(&mut arc);
`
2246
``
`-
data.as_mut_ptr().copy_from_nonoverlapping(&**this, 1);
`
2247
``
`-
ptr::write(this, arc.assume_init());
`
``
2257
`` +
// Initialize in_progress
with move of **this.
``
``
2258
`` +
// We have to express this in terms of bytes because T: ?Sized
; there is no
``
``
2259
`` +
// operation that just copies a value based on its size_of_val()
.
``
``
2260
`+
ptr::copy_nonoverlapping(
`
``
2261
`+
ptr::from_ref(&**this).cast::<u8>(),
`
``
2262
`+
in_progress.data_ptr().cast::<u8>(),
`
``
2263
`+
size_of_val,
`
``
2264
`+
);
`
``
2265
+
``
2266
`+
ptr::write(this, in_progress.into_arc());
`
2248
2267
`}
`
2249
2268
`} else {
`
2250
2269
`// We were the sole reference of either kind; bump back up the
`
`@@ -3809,6 +3828,68 @@ fn data_offset_align(align: usize) -> usize {
`
3809
3828
` layout.size() + layout.padding_needed_for(align)
`
3810
3829
`}
`
3811
3830
``
``
3831
`` +
/// A unique owning pointer to a [ArcInner
] that does not imply the contents are initialized,
``
``
3832
`+
/// but will deallocate it (without dropping the value) when dropped.
`
``
3833
`+
///
`
``
3834
`` +
/// This is a helper for [Arc::make_mut()
] to ensure correct cleanup on panic.
``
``
3835
`+
#[cfg(not(no_global_oom_handling))]
`
``
3836
`+
struct UniqueArcUninit<T: ?Sized, A: Allocator> {
`
``
3837
`+
ptr: NonNull<ArcInner<T>>,
`
``
3838
`+
layout_for_value: Layout,
`
``
3839
`+
alloc: Option<A>,
`
``
3840
`+
}
`
``
3841
+
``
3842
`+
#[cfg(not(no_global_oom_handling))]
`
``
3843
`+
impl<T: ?Sized, A: Allocator> UniqueArcUninit<T, A> {
`
``
3844
`` +
/// Allocates an `ArcInner<T>` with layout suitable to contain `for_value` or a clone of it.
``
``
3845
`+
fn new(for_value: &T, alloc: A) -> UniqueArcUninit<T, A> {
`
``
3846
`+
let layout = Layout::for_value(for_value);
`
``
3847
`+
let ptr = unsafe {
`
``
3848
`+
Arc::allocate_for_layout(
`
``
3849
`+
layout,
`
``
3850
`+
|layout_for_arcinner| alloc.allocate(layout_for_arcinner),
`
``
3851
`+
|mem| mem.with_metadata_of(ptr::from_ref(for_value) as *const ArcInner<T>),
`
``
3852
`+
)
`
``
3853
`+
};
`
``
3854
`+
Self { ptr: NonNull::new(ptr).unwrap(), layout_for_value: layout, alloc: Some(alloc) }
`
``
3855
`+
}
`
``
3856
+
``
3857
`` +
/// Returns the pointer to be written into to initialize the [Arc
].
``
``
3858
`+
fn data_ptr(&mut self) -> *mut T {
`
``
3859
`+
let offset = data_offset_align(self.layout_for_value.align());
`
``
3860
`+
unsafe { self.ptr.as_ptr().byte_add(offset) as *mut T }
`
``
3861
`+
}
`
``
3862
+
``
3863
`` +
/// Upgrade this into a normal [Arc
].
``
``
3864
`+
///
`
``
3865
`+
/// # Safety
`
``
3866
`+
///
`
``
3867
`` +
/// The data must have been initialized (by writing to [Self::data_ptr()
]).
``
``
3868
`+
unsafe fn into_arc(mut self) -> Arc<T, A> {
`
``
3869
`+
let ptr = self.ptr;
`
``
3870
`+
let alloc = self.alloc.take().unwrap();
`
``
3871
`+
mem::forget(self);
`
``
3872
`` +
// SAFETY: The pointer is valid as per UniqueArcUninit::new
, and the caller is responsible
``
``
3873
`+
// for having initialized the data.
`
``
3874
`+
unsafe { Arc::from_ptr_in(ptr.as_ptr(), alloc) }
`
``
3875
`+
}
`
``
3876
`+
}
`
``
3877
+
``
3878
`+
#[cfg(not(no_global_oom_handling))]
`
``
3879
`+
impl<T: ?Sized, A: Allocator> Drop for UniqueArcUninit<T, A> {
`
``
3880
`+
fn drop(&mut self) {
`
``
3881
`+
// SAFETY:
`
``
3882
`+
// * new() produced a pointer safe to deallocate.
`
``
3883
`+
// * We own the pointer unless into_arc() was called, which forgets us.
`
``
3884
`+
unsafe {
`
``
3885
`+
self.alloc.take().unwrap().deallocate(
`
``
3886
`+
self.ptr.cast(),
`
``
3887
`+
arcinner_layout_for_value_layout(self.layout_for_value),
`
``
3888
`+
);
`
``
3889
`+
}
`
``
3890
`+
}
`
``
3891
`+
}
`
``
3892
+
3812
3893
`#[stable(feature = "arc_error", since = "1.52.0")]
`
3813
3894
`impl<T: core::error::Error + ?Sized> core::error::Error for Arc<T> {
`
3814
3895
`#[allow(deprecated, deprecated_in_future)]
`