@@ -687,6 +687,54 @@ impl<T, A: Allocator> Rc<T, A> {
         }
     }
 
+    /// Constructs a new `Rc<T, A>` in the given allocator while giving you a
+    /// `Weak<T, A>` to the allocation, to allow you to construct a `T` which
+    /// holds a weak pointer to itself.
+    ///
+    /// See [`Rc::new_cyclic`] for details.
+    #[cfg(not(no_global_oom_handling))]
+    #[unstable(feature = "allocator_api", issue = "32838")]
+    pub fn new_cyclic_in<F>(data_fn: F, alloc: A) -> Rc<T, A>
+    where
+        F: FnOnce(&Weak<T, A>) -> T,
+    {
+        // Construct the inner in the "uninitialized" state with a single
+        // weak reference, and recover the allocator so it can be stored in
+        // the `Weak` that is handed to `data_fn`.
+        let (uninit_raw_ptr, alloc) = Box::into_raw_with_allocator(Box::new_in(
+            RcBox {
+                strong: Cell::new(0),
+                weak: Cell::new(1),
+                value: mem::MaybeUninit::<T>::uninit(),
+            },
+            alloc,
+        ));
+        let uninit_ptr: NonNull<_> = NonNull::new(uninit_raw_ptr).unwrap();
+
+        let init_ptr: NonNull<RcBox<T>> = uninit_ptr.cast();
+
+        let weak = Weak { ptr: init_ptr, alloc };
+
+        // It's important we don't give up ownership of the weak pointer, or
+        // else the memory might be freed by the time `data_fn` returns. If
+        // we really wanted to pass ownership, we could create an additional
+        // weak pointer for ourselves, but this would result in additional
+        // updates to the weak reference count which might not be necessary
+        // otherwise.
+        let data = data_fn(&weak);
+
+        let strong = unsafe {
+            let inner = init_ptr.as_ptr();
+            ptr::write(ptr::addr_of_mut!((*inner).value), data);
+
+            let prev_value = (*inner).strong.get();
+            debug_assert_eq!(prev_value, 0, "No prior strong references should exist");
+            (*inner).strong.set(1);
+
+            // Move the allocator back out of the weak reference so the strong
+            // pointer can own it; the `mem::forget` below keeps the weak
+            // destructor from dropping it again.
+            Rc::from_inner_in(init_ptr, ptr::read(&weak.alloc))
+        };
+
+        // Strong references should collectively own a shared weak reference,
+        // so don't run the destructor for our old weak reference.
+        mem::forget(weak);
+        strong
+    }
+
     /// Constructs a new `Rc` with uninitialized contents in the provided allocator.
     ///
     /// # Examples
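For context, here is a brief usage sketch of the `new_cyclic_in` constructor this hunk adds. It is not part of the diff: the `Gadget` struct and its `me` field are invented for illustration, and the snippet assumes a nightly toolchain with `allocator_api` enabled and this change applied, passing the `Global` allocator explicitly.

```rust
#![feature(allocator_api)]

use std::alloc::Global;
use std::rc::{Rc, Weak};

// A self-referential node: it stores a weak handle to its own allocation,
// which is the use case `new_cyclic` / `new_cyclic_in` exist for.
struct Gadget {
    me: Weak<Gadget>,
}

fn main() {
    // The closure receives a `Weak` to the still-uninitialized allocation
    // and must return the fully constructed value.
    let gadget: Rc<Gadget> = Rc::new_cyclic_in(|me| Gadget { me: me.clone() }, Global);

    // The stored weak pointer upgrades to the same allocation.
    assert!(gadget.me.upgrade().is_some());
    assert_eq!(Rc::strong_count(&gadget), 1);
}
```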
@@ -1662,7 +1710,11 @@ impl<T: ?Sized, A: Allocator> Rc<T, A> {
     #[inline]
     #[stable(feature = "rc_unique", since = "1.4.0")]
     pub fn get_mut(this: &mut Self) -> Option<&mut T> {
-        if Rc::is_unique(this) { unsafe { Some(Rc::get_mut_unchecked(this)) } } else { None }
+        if Rc::is_unique(this) {
+            unsafe { Some(Rc::get_mut_unchecked(this)) }
+        } else {
+            None
+        }
     }
 
     /// Returns a mutable reference into the given `Rc`,
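The hunk above only reflows `get_mut` across multiple lines; its behavior is unchanged. As a quick illustration of that behavior (a stable-Rust sketch, independent of this diff): `get_mut` yields a mutable reference only while the `Rc` is unique.

```rust
use std::rc::Rc;

fn main() {
    let mut x = Rc::new(3);

    // Unique: `get_mut` hands out a mutable reference.
    *Rc::get_mut(&mut x).unwrap() = 4;
    assert_eq!(*x, 4);

    // A second strong reference makes the allocation shared,
    // so `get_mut` returns `None`.
    let _y = Rc::clone(&x);
    assert!(Rc::get_mut(&mut x).is_none());
}
```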
@@ -3239,7 +3291,11 @@ impl<T: ?Sized, A: Allocator> Weak<T, A> {
     #[must_use]
     #[stable(feature = "weak_counts", since = "1.41.0")]
     pub fn strong_count(&self) -> usize {
-        if let Some(inner) = self.inner() { inner.strong() } else { 0 }
+        if let Some(inner) = self.inner() {
+            inner.strong()
+        } else {
+            0
+        }
     }
 
     /// Gets the number of `Weak` pointers pointing to this allocation.
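Likewise, the `Weak::strong_count` hunk is formatting-only: the `else` branch still reports zero once the value has been dropped. A small stable-Rust sketch of that behavior, independent of this diff:

```rust
use std::rc::Rc;

fn main() {
    let five = Rc::new(5);
    let weak_five = Rc::downgrade(&five);

    // One strong reference is alive, so the weak handle sees it.
    assert_eq!(weak_five.strong_count(), 1);

    // After the last strong reference is dropped, the count reads as zero.
    drop(five);
    assert_eq!(weak_five.strong_count(), 0);
}
```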