Skip to content

Commit 550e55f

Browse files
author
Matthew Giordano
committed
Remove duplicate impl
1 parent 97df334 commit 550e55f

File tree

2 files changed

+6
-88
lines changed

2 files changed

+6
-88
lines changed

library/alloc/src/rc.rs

+3-38
Original file line numberDiff line numberDiff line change
@@ -460,42 +460,7 @@ impl<T> Rc<T> {
460460
where
461461
F: FnOnce(&Weak<T>) -> T,
462462
{
463-
// Construct the inner in the "uninitialized" state with a single
464-
// weak reference.
465-
let uninit_ptr: NonNull<_> = Box::leak(Box::new(RcBox {
466-
strong: Cell::new(0),
467-
weak: Cell::new(1),
468-
value: mem::MaybeUninit::<T>::uninit(),
469-
}))
470-
.into();
471-
472-
let init_ptr: NonNull<RcBox<T>> = uninit_ptr.cast();
473-
474-
let weak = Weak { ptr: init_ptr, alloc: Global };
475-
476-
// It's important we don't give up ownership of the weak pointer, or
477-
// else the memory might be freed by the time `data_fn` returns. If
478-
// we really wanted to pass ownership, we could create an additional
479-
// weak pointer for ourselves, but this would result in additional
480-
// updates to the weak reference count which might not be necessary
481-
// otherwise.
482-
let data = data_fn(&weak);
483-
484-
let strong = unsafe {
485-
let inner = init_ptr.as_ptr();
486-
ptr::write(ptr::addr_of_mut!((*inner).value), data);
487-
488-
let prev_value = (*inner).strong.get();
489-
debug_assert_eq!(prev_value, 0, "No prior strong references should exist");
490-
(*inner).strong.set(1);
491-
492-
Rc::from_inner(init_ptr)
493-
};
494-
495-
// Strong references should collectively own a shared weak reference,
496-
// so don't run the destructor for our old weak reference.
497-
mem::forget(weak);
498-
strong
463+
Self::new_cyclic_in(data_fn, Global)
499464
}
500465

501466
/// Constructs a new `Rc` with uninitialized contents.
@@ -801,8 +766,6 @@ impl<T, A: Allocator> Rc<T, A> {
801766
where
802767
F: FnOnce(&Weak<T, A>) -> T,
803768
{
804-
// Note: comments and implementation are copied from Rc::new_cyclic.
805-
806769
// Construct the inner in the "uninitialized" state with a single
807770
// weak reference.
808771
let (uninit_raw_ptr, alloc) = Box::into_raw_with_allocator(Box::new_in(
@@ -836,6 +799,8 @@ impl<T, A: Allocator> Rc<T, A> {
836799

837800
// Strong references should collectively own a shared weak reference,
838801
// so don't run the destructor for our old weak reference.
802+
// Calling into_raw_with_allocator has the double effect of giving us back the allocator,
803+
// and forgetting the weak reference.
839804
let alloc = weak.into_raw_with_allocator().1;
840805

841806
Rc::from_inner_in(init_ptr, alloc)

library/alloc/src/sync.rs

+3-50
Original file line numberDiff line numberDiff line change
@@ -450,54 +450,7 @@ impl<T> Arc<T> {
450450
where
451451
F: FnOnce(&Weak<T>) -> T,
452452
{
453-
// Construct the inner in the "uninitialized" state with a single
454-
// weak reference.
455-
let uninit_ptr: NonNull<_> = Box::leak(Box::new(ArcInner {
456-
strong: atomic::AtomicUsize::new(0),
457-
weak: atomic::AtomicUsize::new(1),
458-
data: mem::MaybeUninit::<T>::uninit(),
459-
}))
460-
.into();
461-
let init_ptr: NonNull<ArcInner<T>> = uninit_ptr.cast();
462-
463-
let weak = Weak { ptr: init_ptr, alloc: Global };
464-
465-
// It's important we don't give up ownership of the weak pointer, or
466-
// else the memory might be freed by the time `data_fn` returns. If
467-
// we really wanted to pass ownership, we could create an additional
468-
// weak pointer for ourselves, but this would result in additional
469-
// updates to the weak reference count which might not be necessary
470-
// otherwise.
471-
let data = data_fn(&weak);
472-
473-
// Now we can properly initialize the inner value and turn our weak
474-
// reference into a strong reference.
475-
let strong = unsafe {
476-
let inner = init_ptr.as_ptr();
477-
ptr::write(ptr::addr_of_mut!((*inner).data), data);
478-
479-
// The above write to the data field must be visible to any threads which
480-
// observe a non-zero strong count. Therefore we need at least "Release" ordering
481-
// in order to synchronize with the `compare_exchange_weak` in `Weak::upgrade`.
482-
//
483-
// "Acquire" ordering is not required. When considering the possible behaviours
484-
// of `data_fn` we only need to look at what it could do with a reference to a
485-
// non-upgradeable `Weak`:
486-
// - It can *clone* the `Weak`, increasing the weak reference count.
487-
// - It can drop those clones, decreasing the weak reference count (but never to zero).
488-
//
489-
// These side effects do not impact us in any way, and no other side effects are
490-
// possible with safe code alone.
491-
let prev_value = (*inner).strong.fetch_add(1, Release);
492-
debug_assert_eq!(prev_value, 0, "No prior strong references should exist");
493-
494-
Arc::from_inner(init_ptr)
495-
};
496-
497-
// Strong references should collectively own a shared weak reference,
498-
// so don't run the destructor for our old weak reference.
499-
mem::forget(weak);
500-
strong
453+
Self::new_cyclic_in(data_fn, Global)
501454
}
502455

503456
/// Constructs a new `Arc` with uninitialized contents.
@@ -821,8 +774,6 @@ impl<T, A: Allocator> Arc<T, A> {
821774
where
822775
F: FnOnce(&Weak<T, A>) -> T,
823776
{
824-
// Note: these comments and much of the implementation is copied from Arc::new_cyclic.
825-
826777
// Construct the inner in the "uninitialized" state with a single
827778
// weak reference.
828779
let (uninit_raw_ptr, alloc) = Box::into_raw_with_allocator(Box::new_in(
@@ -869,6 +820,8 @@ impl<T, A: Allocator> Arc<T, A> {
869820

870821
// Strong references should collectively own a shared weak reference,
871822
// so don't run the destructor for our old weak reference.
823+
// Calling into_raw_with_allocator has the double effect of giving us back the allocator,
824+
// and forgetting the weak reference.
872825
let alloc = weak.into_raw_with_allocator().1;
873826

874827
Arc::from_inner_in(init_ptr, alloc)

0 commit comments

Comments (0)