Remove duplicate impl · qinheping/verify-rust-std@a9cf084
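This commit deletes the hand-rolled body of `Arc::new_cyclic` and forwards it to the allocator-generic `Arc::new_cyclic_in`, so the cyclic-construction logic lives in a single place.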
```diff
@@ -450,54 +450,7 @@ impl<T> Arc<T> {
     where
         F: FnOnce(&Weak<T>) -> T,
     {
-        // Construct the inner in the "uninitialized" state with a single
-        // weak reference.
-        let uninit_ptr: NonNull<_> = Box::leak(Box::new(ArcInner {
-            strong: atomic::AtomicUsize::new(0),
-            weak: atomic::AtomicUsize::new(1),
-            data: mem::MaybeUninit::<T>::uninit(),
-        }))
-        .into();
-        let init_ptr: NonNull<ArcInner<T>> = uninit_ptr.cast();
-
-        let weak = Weak { ptr: init_ptr, alloc: Global };
-
-        // It's important we don't give up ownership of the weak pointer, or
-        // else the memory might be freed by the time `data_fn` returns. If
-        // we really wanted to pass ownership, we could create an additional
-        // weak pointer for ourselves, but this would result in additional
-        // updates to the weak reference count which might not be necessary
-        // otherwise.
-        let data = data_fn(&weak);
-
-        // Now we can properly initialize the inner value and turn our weak
-        // reference into a strong reference.
-        let strong = unsafe {
-            let inner = init_ptr.as_ptr();
-            ptr::write(ptr::addr_of_mut!((*inner).data), data);
-
-            // The above write to the data field must be visible to any threads which
-            // observe a non-zero strong count. Therefore we need at least "Release" ordering
-            // in order to synchronize with the `compare_exchange_weak` in `Weak::upgrade`.
-            //
-            // "Acquire" ordering is not required. When considering the possible behaviours
-            // of `data_fn` we only need to look at what it could do with a reference to a
-            // non-upgradeable `Weak`:
-            // - It can clone the `Weak`, increasing the weak reference count.
-            // - It can drop those clones, decreasing the weak reference count (but never to zero).
-            //
-            // These side effects do not impact us in any way, and no other side effects are
-            // possible with safe code alone.
-            let prev_value = (*inner).strong.fetch_add(1, Release);
-            debug_assert_eq!(prev_value, 0, "No prior strong references should exist");
-
-            Arc::from_inner(init_ptr)
-        };
-
-        // Strong references should collectively own a shared weak reference,
-        // so don't run the destructor for our old weak reference.
-        mem::forget(weak);
-        strong
+        Self::new_cyclic_in(data_fn, Global)
     }
 
     /// Constructs a new `Arc` with uninitialized contents.
```
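The deleted block is the classic `new_cyclic` dance: allocate the `ArcInner` with `strong == 0` and `weak == 1`, let `data_fn` observe a deliberately non-upgradeable `Weak`, then publish the initialized data with a `Release` increment of the strong count. That logic is unchanged; it now lives only in `Arc::new_cyclic_in`. A minimal sketch of the observable behaviour on stable Rust, using a hypothetical `Gadget` type:

```rust
use std::sync::{Arc, Weak};

// Hypothetical self-referential type: it stores a Weak back-pointer
// to the Arc that owns it.
struct Gadget {
    me: Weak<Gadget>,
}

fn main() {
    let gadget = Arc::new_cyclic(|weak: &Weak<Gadget>| {
        // While data_fn runs, the strong count is still 0, so the
        // Weak is deliberately non-upgradeable here.
        assert!(weak.upgrade().is_none());
        Gadget { me: weak.clone() }
    });
    // After the Release fetch_add makes the strong count non-zero,
    // upgrading succeeds and sees the fully initialized data.
    assert!(gadget.me.upgrade().is_some());
}
```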
```diff
@@ -821,8 +774,6 @@ impl<T, A: Allocator> Arc<T, A> {
     where
         F: FnOnce(&Weak<T, A>) -> T,
     {
-        // Note: these comments and much of the implementation is copied from Arc::new_cyclic.
-
         // Construct the inner in the "uninitialized" state with a single
         // weak reference.
         let (uninit_raw_ptr, alloc) = Box::into_raw_with_allocator(Box::new_in(
```
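With the duplication gone, the "copied from Arc::new_cyclic" note no longer describes anything, so it is dropped. On a nightly toolchain the delegation is directly observable; a sketch, assuming `Arc::new_cyclic_in` is still gated behind the `allocator_api` feature:

```rust
// Nightly-only sketch: Arc::new_cyclic_in and std::alloc::Global are
// assumed to be behind the allocator_api feature gate.
#![feature(allocator_api)]

use std::alloc::Global;
use std::sync::Arc;

fn main() {
    // After this change the two calls are equivalent by construction:
    // new_cyclic is nothing more than new_cyclic_in with Global.
    let a = Arc::new_cyclic(|_| 5u32);
    let b = Arc::new_cyclic_in(|_| 5u32, Global);
    assert_eq!(*a, *b);
}
```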
```diff
@@ -869,6 +820,8 @@ impl<T, A: Allocator> Arc<T, A> {
 
         // Strong references should collectively own a shared weak reference,
         // so don't run the destructor for our old weak reference.
+        // Calling into_raw_with_allocator has the double effect of giving us back the allocator,
+        // and forgetting the weak reference.
         let alloc = weak.into_raw_with_allocator().1;
 
         Arc::from_inner_in(init_ptr, alloc)
```
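The new comment records a subtle point: `into_raw_with_allocator` both hands back the allocator and consumes the `Weak` without running its destructor, replacing the explicit `mem::forget(weak)` of the old non-allocator path. The same forget-without-decrement behaviour can be seen through the stable `Weak::into_raw`; a minimal sketch:

```rust
use std::sync::{Arc, Weak};

fn main() {
    let strong = Arc::new(1u8);
    let weak = Arc::downgrade(&strong);
    assert_eq!(Arc::weak_count(&strong), 1);

    // Like into_raw_with_allocator, into_raw consumes the Weak without
    // running its destructor, so the weak count is not decremented and
    // the "strong refs collectively own one weak ref" invariant holds.
    let raw = Weak::into_raw(weak);
    assert_eq!(Arc::weak_count(&strong), 1);

    // Rebuild the Weak so the count is balanced again when it drops.
    let weak = unsafe { Weak::from_raw(raw) };
    drop(weak);
}
```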