From 13e36cf8870a88ab0ac5651d9513b13085baa8cc Mon Sep 17 00:00:00 2001
From: Matthew Giordano
Date: Tue, 27 Aug 2024 18:06:00 -0700
Subject: [PATCH 1/8] add new_cyclic_in for rc

---
 library/alloc/src/rc.rs | 60 +++++++++++++++++++++++++++++++++++++++--
 1 file changed, 58 insertions(+), 2 deletions(-)

diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs
index f153aa6d3be9a..151eec532613c 100644
--- a/library/alloc/src/rc.rs
+++ b/library/alloc/src/rc.rs
@@ -687,6 +687,54 @@ impl<T, A: Allocator> Rc<T, A> {
         }
     }
 
+    /// TODO: document
+    #[cfg(not(no_global_oom_handling))]
+    #[unstable(feature = "allocator_api", issue = "32838")]
+    pub fn new_cyclic_in<F>(data_fn: F, alloc: A) -> Rc<T, A>
+    where
+        F: FnOnce(&Weak<T, A>) -> T,
+    {
+        // Construct the inner in the "uninitialized" state with a single
+        // weak reference.
+        let uninit_ptr: NonNull<_> = Box::leak(
+            Box::new_in(RcBox {
+                strong: Cell::new(0),
+                weak: Cell::new(1),
+                value: mem::MaybeUninit::<T>::uninit(),
+            },
+            alloc,
+        ))
+        .into();
+
+        let init_ptr: NonNull<RcBox<T>> = uninit_ptr.cast();
+
+        let weak = Weak { ptr: init_ptr, alloc: Global };
+
+        // It's important we don't give up ownership of the weak pointer, or
+        // else the memory might be freed by the time `data_fn` returns. If
+        // we really wanted to pass ownership, we could create an additional
+        // weak pointer for ourselves, but this would result in additional
+        // updates to the weak reference count which might not be necessary
+        // otherwise.
+        let data = data_fn(&weak);
+
+        let strong = unsafe {
+            let inner = init_ptr.as_ptr();
+            ptr::write(ptr::addr_of_mut!((*inner).value), data);
+
+            let prev_value = (*inner).strong.get();
+            debug_assert_eq!(prev_value, 0, "No prior strong references should exist");
+            (*inner).strong.set(1);
+
+            Rc::from_inner(init_ptr)
+        };
+
+        // Strong references should collectively own a shared weak reference,
+        // so don't run the destructor for our old weak reference.
+        mem::forget(weak);
+        strong
+    }
+
     /// Constructs a new `Rc` with uninitialized contents in the provided allocator.
     ///
     /// # Examples
@@ -1662,7 +1710,11 @@ impl<T: ?Sized, A: Allocator> Rc<T, A> {
     #[inline]
     #[stable(feature = "rc_unique", since = "1.4.0")]
     pub fn get_mut(this: &mut Self) -> Option<&mut T> {
-        if Rc::is_unique(this) { unsafe { Some(Rc::get_mut_unchecked(this)) } } else { None }
+        if Rc::is_unique(this) {
+            unsafe { Some(Rc::get_mut_unchecked(this)) }
+        } else {
+            None
+        }
     }
 
     /// Returns a mutable reference into the given `Rc`,
@@ -3239,7 +3291,11 @@ impl<T: ?Sized, A: Allocator> Weak<T, A> {
     #[must_use]
     #[stable(feature = "weak_counts", since = "1.41.0")]
     pub fn strong_count(&self) -> usize {
-        if let Some(inner) = self.inner() { inner.strong() } else { 0 }
+        if let Some(inner) = self.inner() {
+            inner.strong()
+        } else {
+            0
+        }
     }
 
     /// Gets the number of `Weak` pointers pointing to this allocation.
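This first cut still carries a `TODO: document` marker and threads the allocator inconsistently (the temporary `Weak` is built with `Global` even though the box lives in `alloc`); both points are addressed by later commits in the series. For orientation, here is a minimal sketch of how the constructor is meant to be called, written against the final shape of the API on a nightly toolchain; the `Gadget` type is invented for illustration:

```rust
#![feature(allocator_api)]

use std::alloc::Global;
use std::rc::{Rc, Weak};

// A toy self-referential type (hypothetical, for illustration only).
struct Gadget {
    me: Weak<Gadget>,
}

fn main() {
    // The closure is handed a `&Weak<Gadget>` before the `Rc` exists.
    // Upgrading it inside the closure would yield `None`; cloning it into
    // the value under construction is the intended use.
    let gadget = Rc::new_cyclic_in(|me: &Weak<Gadget>| Gadget { me: me.clone() }, Global);
    assert!(gadget.me.upgrade().is_some());
}
```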
From 21cb84763cf942dc2fa70d582cf2b8ba50163750 Mon Sep 17 00:00:00 2001
From: Matthew Giordano
Date: Thu, 29 Aug 2024 12:32:33 -0700
Subject: [PATCH 2/8] fix fmt

---
 library/alloc/src/rc.rs | 12 ++----------
 1 file changed, 2 insertions(+), 10 deletions(-)

diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs
index 151eec532613c..0afb138db8ada 100644
--- a/library/alloc/src/rc.rs
+++ b/library/alloc/src/rc.rs
@@ -1710,11 +1710,7 @@ impl<T: ?Sized, A: Allocator> Rc<T, A> {
     #[inline]
     #[stable(feature = "rc_unique", since = "1.4.0")]
     pub fn get_mut(this: &mut Self) -> Option<&mut T> {
-        if Rc::is_unique(this) {
-            unsafe { Some(Rc::get_mut_unchecked(this)) }
-        } else {
-            None
-        }
+        if Rc::is_unique(this) { unsafe { Some(Rc::get_mut_unchecked(this)) } } else { None }
     }
 
     /// Returns a mutable reference into the given `Rc`,
@@ -3291,11 +3287,7 @@ impl<T: ?Sized, A: Allocator> Weak<T, A> {
     #[must_use]
     #[stable(feature = "weak_counts", since = "1.41.0")]
     pub fn strong_count(&self) -> usize {
-        if let Some(inner) = self.inner() {
-            inner.strong()
-        } else {
-            0
-        }
+        if let Some(inner) = self.inner() { inner.strong() } else { 0 }
     }
 
     /// Gets the number of `Weak` pointers pointing to this allocation.
From 4abb8e2d0d1821a98e77109dae4001a613229074 Mon Sep 17 00:00:00 2001
From: Matthew Giordano
Date: Thu, 29 Aug 2024 13:26:39 -0700
Subject: [PATCH 3/8] fix new_cyclic_in for rc

---
 library/alloc/src/rc.rs | 127 +++++++++++++++++++++++++---------------
 1 file changed, 79 insertions(+), 48 deletions(-)

diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs
index 0afb138db8ada..726c0f2de9f25 100644
--- a/library/alloc/src/rc.rs
+++ b/library/alloc/src/rc.rs
@@ -687,54 +687,6 @@ impl<T, A: Allocator> Rc<T, A> {
         }
     }
 
-    /// TODO: document
-    #[cfg(not(no_global_oom_handling))]
-    #[unstable(feature = "allocator_api", issue = "32838")]
-    pub fn new_cyclic_in<F>(data_fn: F, alloc: A) -> Rc<T, A>
-    where
-        F: FnOnce(&Weak<T, A>) -> T,
-    {
-        // Construct the inner in the "uninitialized" state with a single
-        // weak reference.
-        let uninit_ptr: NonNull<_> = Box::leak(
-            Box::new_in(RcBox {
-                strong: Cell::new(0),
-                weak: Cell::new(1),
-                value: mem::MaybeUninit::<T>::uninit(),
-            },
-            alloc,
-        ))
-        .into();
-
-        let init_ptr: NonNull<RcBox<T>> = uninit_ptr.cast();
-
-        let weak = Weak { ptr: init_ptr, alloc: Global };
-
-        // It's important we don't give up ownership of the weak pointer, or
-        // else the memory might be freed by the time `data_fn` returns. If
-        // we really wanted to pass ownership, we could create an additional
-        // weak pointer for ourselves, but this would result in additional
-        // updates to the weak reference count which might not be necessary
-        // otherwise.
-        let data = data_fn(&weak);
-
-        let strong = unsafe {
-            let inner = init_ptr.as_ptr();
-            ptr::write(ptr::addr_of_mut!((*inner).value), data);
-
-            let prev_value = (*inner).strong.get();
-            debug_assert_eq!(prev_value, 0, "No prior strong references should exist");
-            (*inner).strong.set(1);
-
-            Rc::from_inner(init_ptr)
-        };
-
-        // Strong references should collectively own a shared weak reference,
-        // so don't run the destructor for our old weak reference.
-        mem::forget(weak);
-        strong
-    }
-
     /// Constructs a new `Rc` with uninitialized contents in the provided allocator.
     ///
     /// # Examples
@@ -2312,6 +2264,85 @@ impl<T: ?Sized, A: Allocator + Clone> Clone for Rc<T, A> {
     }
 }
 
+impl<T, A: Allocator + Clone> Rc<T, A> {
+    /// Constructs a new `Rc<T, A>` in the given allocator while giving you a `Weak<T, A>` to the allocation,
+    /// to allow you to construct a `T` which holds a weak pointer to itself.
+    ///
+    /// Generally, a structure circularly referencing itself, either directly or
+    /// indirectly, should not hold a strong reference to itself to prevent a memory leak.
+    /// Using this function, you get access to the weak pointer during the
+    /// initialization of `T`, before the `Rc<T, A>` is created, such that you can
+    /// clone and store it inside the `T`.
+    ///
+    /// `new_cyclic` first allocates the managed allocation for the `Rc<T, A>`,
+    /// then calls your closure, giving it a `Weak<T, A>` to this allocation,
+    /// and only afterwards completes the construction of the `Rc<T, A>` by placing
+    /// the `T` returned from your closure into the allocation.
+    ///
+    /// Since the new `Rc<T, A>` is not fully-constructed until `Rc<T, A>::new_cyclic`
+    /// returns, calling [`upgrade`] on the weak reference inside your closure will
+    /// fail and result in a `None` value.
+    ///
+    /// # Panics
+    ///
+    /// If `data_fn` panics, the panic is propagated to the caller, and the
+    /// temporary [`Weak<T, A>`] is dropped normally.
+    ///
+    /// # Examples
+    /// See [`new_cyclic`].
+    ///
+    /// [`new_cyclic`]: Rc::new_cyclic
+    /// [`upgrade`]: Weak::upgrade
+    #[cfg(not(no_global_oom_handling))]
+    #[unstable(feature = "allocator_api", issue = "32838")]
+    pub fn new_cyclic_in<F>(data_fn: F, alloc: A) -> Rc<T, A>
+    where
+        F: FnOnce(&Weak<T, A>) -> T,
+    {
+        // Note: comments and implementation are copied from Rc::new_cyclic.
+
+        // Construct the inner in the "uninitialized" state with a single
+        // weak reference.
+        let uninit_ptr: NonNull<_> = Box::leak(Box::new_in(
+            RcBox {
+                strong: Cell::new(0),
+                weak: Cell::new(1),
+                value: mem::MaybeUninit::<T>::uninit(),
+            },
+            alloc.clone(),
+        ))
+        .into();
+
+        let init_ptr: NonNull<RcBox<T>> = uninit_ptr.cast();
+
+        let weak = Weak { ptr: init_ptr, alloc: alloc.clone() };
+
+        // It's important we don't give up ownership of the weak pointer, or
+        // else the memory might be freed by the time `data_fn` returns. If
+        // we really wanted to pass ownership, we could create an additional
+        // weak pointer for ourselves, but this would result in additional
+        // updates to the weak reference count which might not be necessary
+        // otherwise.
+        let data = data_fn(&weak);
+
+        let strong = unsafe {
+            let inner = init_ptr.as_ptr();
+            ptr::write(ptr::addr_of_mut!((*inner).value), data);
+
+            let prev_value = (*inner).strong.get();
+            debug_assert_eq!(prev_value, 0, "No prior strong references should exist");
+            (*inner).strong.set(1);
+
+            Rc::from_inner_in(init_ptr, alloc)
+        };
+
+        // Strong references should collectively own a shared weak reference,
+        // so don't run the destructor for our old weak reference.
+        mem::forget(weak);
+        strong
+    }
+}
+
 #[cfg(not(no_global_oom_handling))]
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T: Default> Default for Rc<T> {
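The fix makes the constructor honor the caller's allocator throughout: the inner `RcBox` is allocated in `A`, the temporary `Weak` carries a clone of `A`, and the final handle is produced with `Rc::from_inner_in`. The `A: Allocator + Clone` bound this introduces is visible to callers; a sketch with a hypothetical pass-through allocator shows the shape (nightly only, all names invented for illustration):

```rust
#![feature(allocator_api)]

use std::alloc::{AllocError, Allocator, Global, Layout};
use std::ptr::NonNull;
use std::rc::{Rc, Weak};

// A pass-through allocator; it must be `Clone` because `new_cyclic_in`
// clones it into the temporary `Weak` at this point in the series.
#[derive(Clone)]
struct Logging;

unsafe impl Allocator for Logging {
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        println!("allocating {} bytes", layout.size());
        Global.allocate(layout)
    }
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        println!("deallocating {} bytes", layout.size());
        unsafe { Global.deallocate(ptr, layout) }
    }
}

struct Node {
    me: Weak<Node, Logging>,
}

fn main() {
    let node = Rc::new_cyclic_in(|me: &Weak<Node, Logging>| Node { me: me.clone() }, Logging);
    // After construction the stored weak pointer upgrades to the same allocation.
    assert!(Rc::ptr_eq(&node, &node.me.upgrade().unwrap()));
}
```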
From 2383cc991020b9dd678074072fcd3ed4f459eb6c Mon Sep 17 00:00:00 2001
From: Matthew Giordano
Date: Thu, 29 Aug 2024 13:41:37 -0700
Subject: [PATCH 4/8] improve comments

---
 library/alloc/src/rc.rs | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs
index 726c0f2de9f25..f6d4174f5ebb5 100644
--- a/library/alloc/src/rc.rs
+++ b/library/alloc/src/rc.rs
@@ -2279,7 +2279,7 @@ impl<T, A: Allocator + Clone> Rc<T, A> {
     /// and only afterwards completes the construction of the `Rc<T, A>` by placing
     /// the `T` returned from your closure into the allocation.
     ///
-    /// Since the new `Rc<T, A>` is not fully-constructed until `Rc<T, A>::new_cyclic`
+    /// Since the new `Rc<T, A>` is not fully-constructed until `Rc<T, A>::new_cyclic_in`
     /// returns, calling [`upgrade`] on the weak reference inside your closure will
     /// fail and result in a `None` value.
    ///
@@ -2289,6 +2289,7 @@ impl<T, A: Allocator + Clone> Rc<T, A> {
     /// temporary [`Weak<T, A>`] is dropped normally.
     ///
     /// # Examples
+    ///
     /// See [`new_cyclic`].
     ///
     /// [`new_cyclic`]: Rc::new_cyclic
From 68169d3103dca63330b7e3e0e03f41b43b3b0490 Mon Sep 17 00:00:00 2001
From: Matthew Giordano
Date: Thu, 29 Aug 2024 13:41:57 -0700
Subject: [PATCH 5/8] add new_cyclic_in for Arc

---
 library/alloc/src/sync.rs | 93 +++++++++++++++++++++++++++++++++++++++
 1 file changed, 93 insertions(+)

diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs
index 4a3522f1a641b..f808f3313297b 100644
--- a/library/alloc/src/sync.rs
+++ b/library/alloc/src/sync.rs
@@ -1322,6 +1322,99 @@ impl<T, A: Allocator> Arc<[mem::MaybeUninit<T>], A> {
     }
 }
 
+impl<T, A: Allocator + Clone> Arc<T, A> {
+    /// Constructs a new `Arc<T, A>` in the given allocator while giving you a `Weak<T, A>` to the allocation,
+    /// to allow you to construct a `T` which holds a weak pointer to itself.
+    ///
+    /// Generally, a structure circularly referencing itself, either directly or
+    /// indirectly, should not hold a strong reference to itself to prevent a memory leak.
+    /// Using this function, you get access to the weak pointer during the
+    /// initialization of `T`, before the `Arc<T, A>` is created, such that you can
+    /// clone and store it inside the `T`.
+    ///
+    /// `new_cyclic` first allocates the managed allocation for the `Arc<T, A>`,
+    /// then calls your closure, giving it a `Weak<T, A>` to this allocation,
+    /// and only afterwards completes the construction of the `Arc<T, A>` by placing
+    /// the `T` returned from your closure into the allocation.
+    ///
+    /// Since the new `Arc<T, A>` is not fully-constructed until `Arc<T, A>::new_cyclic_in`
+    /// returns, calling [`upgrade`] on the weak reference inside your closure will
+    /// fail and result in a `None` value.
+    ///
+    /// # Panics
+    ///
+    /// If `data_fn` panics, the panic is propagated to the caller, and the
+    /// temporary [`Weak<T, A>`] is dropped normally.
+    ///
+    /// # Example
+    ///
+    /// See [`new_cyclic`]
+    ///
+    /// [`new_cyclic`]: Arc::new_cyclic
+    /// [`upgrade`]: Weak::upgrade
+    #[cfg(not(no_global_oom_handling))]
+    #[inline]
+    #[stable(feature = "arc_new_cyclic", since = "1.60.0")]
+    pub fn new_cyclic_in<F>(data_fn: F, alloc: A) -> Arc<T, A>
+    where
+        F: FnOnce(&Weak<T, A>) -> T,
+    {
+        // Note: these comments and much of the implementation is copied from Arc::new_cyclic.
+
+        // Construct the inner in the "uninitialized" state with a single
+        // weak reference.
+        let uninit_ptr: NonNull<_> = Box::leak(Box::new_in(
+            ArcInner {
+                strong: atomic::AtomicUsize::new(0),
+                weak: atomic::AtomicUsize::new(1),
+                data: mem::MaybeUninit::<T>::uninit(),
+            },
+            alloc.clone(),
+        ))
+        .into();
+        let init_ptr: NonNull<ArcInner<T>> = uninit_ptr.cast();
+
+        let weak = Weak { ptr: init_ptr, alloc: alloc.clone() };
+
+        // It's important we don't give up ownership of the weak pointer, or
+        // else the memory might be freed by the time `data_fn` returns. If
+        // we really wanted to pass ownership, we could create an additional
+        // weak pointer for ourselves, but this would result in additional
+        // updates to the weak reference count which might not be necessary
+        // otherwise.
+        let data = data_fn(&weak);
+
+        // Now we can properly initialize the inner value and turn our weak
+        // reference into a strong reference.
+        let strong = unsafe {
+            let inner = init_ptr.as_ptr();
+            ptr::write(ptr::addr_of_mut!((*inner).data), data);
+
+            // The above write to the data field must be visible to any threads which
+            // observe a non-zero strong count. Therefore we need at least "Release" ordering
+            // in order to synchronize with the `compare_exchange_weak` in `Weak::upgrade`.
+            //
+            // "Acquire" ordering is not required. When considering the possible behaviours
+            // of `data_fn` we only need to look at what it could do with a reference to a
+            // non-upgradeable `Weak`:
+            // - It can *clone* the `Weak`, increasing the weak reference count.
+            // - It can drop those clones, decreasing the weak reference count (but never to zero).
+            //
+            // These side effects do not impact us in any way, and no other side effects are
+            // possible with safe code alone.
+            let prev_value = (*inner).strong.fetch_add(1, Release);
+            debug_assert_eq!(prev_value, 0, "No prior strong references should exist");
+
+            Arc::from_inner_in(init_ptr, alloc)
+        };
+
+        // Strong references should collectively own a shared weak reference,
+        // so don't run the destructor for our old weak reference.
+        mem::forget(weak);
+        strong
+    }
+}
+
 impl<T: ?Sized> Arc<T> {
     /// Constructs an `Arc<T>` from a raw pointer.
     ///
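The `Arc` version mirrors the `Rc` one, with the extra memory-ordering argument in the comments: the `Release` store on the strong count is what publishes the `ptr::write` of the data to any thread that later upgrades a `Weak`. A usage sketch in the same spirit as the `Rc` example, with an invented `Notifier` type and the default `Global` allocator (nightly only):

```rust
#![feature(allocator_api)]

use std::alloc::Global;
use std::sync::{Arc, Weak};
use std::thread;

// Hypothetical type that can hand out new strong handles to itself.
struct Notifier {
    me: Weak<Notifier>,
}

impl Notifier {
    fn handle(&self) -> Arc<Notifier> {
        // By the time any method runs, construction has completed, so the
        // stored weak pointer is upgradeable.
        self.me.upgrade().unwrap()
    }
}

fn main() {
    let n = Arc::new_cyclic_in(|me: &Weak<Notifier>| Notifier { me: me.clone() }, Global);
    let h = n.handle();
    // The Release/Acquire pairing makes the initialized data visible here.
    thread::spawn(move || assert!(Arc::ptr_eq(&h, &h.handle()))).join().unwrap();
}
```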
From 97df334d5fd97725ffc2836c80a9bfae501085d1 Mon Sep 17 00:00:00 2001
From: Matthew Giordano
Date: Fri, 6 Sep 2024 14:24:25 -0700
Subject: [PATCH 6/8] remove the Clone requirement

---
 library/alloc/src/rc.rs   | 158 ++++++++++++++++----------------
 library/alloc/src/sync.rs | 185 +++++++++++++++++++--------------------
 2 files changed, 170 insertions(+), 173 deletions(-)

diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs
index f6d4174f5ebb5..664bc5ffc3441 100644
--- a/library/alloc/src/rc.rs
+++ b/library/alloc/src/rc.rs
@@ -766,6 +766,84 @@ impl<T, A: Allocator> Rc<T, A> {
         }
     }
 
+    /// Constructs a new `Rc<T, A>` in the given allocator while giving you a `Weak<T, A>` to the allocation,
+    /// to allow you to construct a `T` which holds a weak pointer to itself.
+    ///
+    /// Generally, a structure circularly referencing itself, either directly or
+    /// indirectly, should not hold a strong reference to itself to prevent a memory leak.
+    /// Using this function, you get access to the weak pointer during the
+    /// initialization of `T`, before the `Rc<T, A>` is created, such that you can
+    /// clone and store it inside the `T`.
+    ///
+    /// `new_cyclic_in` first allocates the managed allocation for the `Rc<T, A>`,
+    /// then calls your closure, giving it a `Weak<T, A>` to this allocation,
+    /// and only afterwards completes the construction of the `Rc<T, A>` by placing
+    /// the `T` returned from your closure into the allocation.
+    ///
+    /// Since the new `Rc<T, A>` is not fully-constructed until `Rc<T, A>::new_cyclic_in`
+    /// returns, calling [`upgrade`] on the weak reference inside your closure will
+    /// fail and result in a `None` value.
+    ///
+    /// # Panics
+    ///
+    /// If `data_fn` panics, the panic is propagated to the caller, and the
+    /// temporary [`Weak<T, A>`] is dropped normally.
+    ///
+    /// # Examples
+    ///
+    /// See [`new_cyclic`].
+    ///
+    /// [`new_cyclic`]: Rc::new_cyclic
+    /// [`upgrade`]: Weak::upgrade
+    #[cfg(not(no_global_oom_handling))]
+    #[unstable(feature = "allocator_api", issue = "32838")]
+    pub fn new_cyclic_in<F>(data_fn: F, alloc: A) -> Rc<T, A>
+    where
+        F: FnOnce(&Weak<T, A>) -> T,
+    {
+        // Note: comments and implementation are copied from Rc::new_cyclic.
+
+        // Construct the inner in the "uninitialized" state with a single
+        // weak reference.
+        let (uninit_raw_ptr, alloc) = Box::into_raw_with_allocator(Box::new_in(
+            RcBox {
+                strong: Cell::new(0),
+                weak: Cell::new(1),
+                value: mem::MaybeUninit::<T>::uninit(),
+            },
+            alloc,
+        ));
+        let uninit_ptr: NonNull<_> = (unsafe { &mut *uninit_raw_ptr }).into();
+        let init_ptr: NonNull<RcBox<T>> = uninit_ptr.cast();
+
+        let weak = Weak { ptr: init_ptr, alloc: alloc };
+
+        // It's important we don't give up ownership of the weak pointer, or
+        // else the memory might be freed by the time `data_fn` returns. If
+        // we really wanted to pass ownership, we could create an additional
+        // weak pointer for ourselves, but this would result in additional
+        // updates to the weak reference count which might not be necessary
+        // otherwise.
+        let data = data_fn(&weak);
+
+        let strong = unsafe {
+            let inner = init_ptr.as_ptr();
+            ptr::write(ptr::addr_of_mut!((*inner).value), data);
+
+            let prev_value = (*inner).strong.get();
+            debug_assert_eq!(prev_value, 0, "No prior strong references should exist");
+            (*inner).strong.set(1);
+
+            // Strong references should collectively own a shared weak reference,
+            // so don't run the destructor for our old weak reference.
+            let alloc = weak.into_raw_with_allocator().1;
+
+            Rc::from_inner_in(init_ptr, alloc)
+        };
+
+        strong
+    }
+
     /// Constructs a new `Rc<T>` in the provided allocator, returning an error if the allocation
     /// fails
     ///
@@ -2264,86 +2342,6 @@ impl<T: ?Sized, A: Allocator + Clone> Clone for Rc<T, A> {
     }
 }
 
-impl<T, A: Allocator + Clone> Rc<T, A> {
-    /// Constructs a new `Rc<T, A>` in the given allocator while giving you a `Weak<T, A>` to the allocation,
-    /// to allow you to construct a `T` which holds a weak pointer to itself.
-    ///
-    /// Generally, a structure circularly referencing itself, either directly or
-    /// indirectly, should not hold a strong reference to itself to prevent a memory leak.
-    /// Using this function, you get access to the weak pointer during the
-    /// initialization of `T`, before the `Rc<T, A>` is created, such that you can
-    /// clone and store it inside the `T`.
-    ///
-    /// `new_cyclic` first allocates the managed allocation for the `Rc<T, A>`,
-    /// then calls your closure, giving it a `Weak<T, A>` to this allocation,
-    /// and only afterwards completes the construction of the `Rc<T, A>` by placing
-    /// the `T` returned from your closure into the allocation.
-    ///
-    /// Since the new `Rc<T, A>` is not fully-constructed until `Rc<T, A>::new_cyclic_in`
-    /// returns, calling [`upgrade`] on the weak reference inside your closure will
-    /// fail and result in a `None` value.
-    ///
-    /// # Panics
-    ///
-    /// If `data_fn` panics, the panic is propagated to the caller, and the
-    /// temporary [`Weak<T, A>`] is dropped normally.
-    ///
-    /// # Examples
-    ///
-    /// See [`new_cyclic`].
-    ///
-    /// [`new_cyclic`]: Rc::new_cyclic
-    /// [`upgrade`]: Weak::upgrade
-    #[cfg(not(no_global_oom_handling))]
-    #[unstable(feature = "allocator_api", issue = "32838")]
-    pub fn new_cyclic_in<F>(data_fn: F, alloc: A) -> Rc<T, A>
-    where
-        F: FnOnce(&Weak<T, A>) -> T,
-    {
-        // Note: comments and implementation are copied from Rc::new_cyclic.
-
-        // Construct the inner in the "uninitialized" state with a single
-        // weak reference.
-        let uninit_ptr: NonNull<_> = Box::leak(Box::new_in(
-            RcBox {
-                strong: Cell::new(0),
-                weak: Cell::new(1),
-                value: mem::MaybeUninit::<T>::uninit(),
-            },
-            alloc.clone(),
-        ))
-        .into();
-
-        let init_ptr: NonNull<RcBox<T>> = uninit_ptr.cast();
-
-        let weak = Weak { ptr: init_ptr, alloc: alloc.clone() };
-
-        // It's important we don't give up ownership of the weak pointer, or
-        // else the memory might be freed by the time `data_fn` returns. If
-        // we really wanted to pass ownership, we could create an additional
-        // weak pointer for ourselves, but this would result in additional
-        // updates to the weak reference count which might not be necessary
-        // otherwise.
-        let data = data_fn(&weak);
-
-        let strong = unsafe {
-            let inner = init_ptr.as_ptr();
-            ptr::write(ptr::addr_of_mut!((*inner).value), data);
-
-            let prev_value = (*inner).strong.get();
-            debug_assert_eq!(prev_value, 0, "No prior strong references should exist");
-            (*inner).strong.set(1);
-
-            Rc::from_inner_in(init_ptr, alloc)
-        };
-
-        // Strong references should collectively own a shared weak reference,
-        // so don't run the destructor for our old weak reference.
-        mem::forget(weak);
-        strong
-    }
-}
-
 #[cfg(not(no_global_oom_handling))]
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T: Default> Default for Rc<T> {
diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs
index f808f3313297b..4f8039fd1f8b8 100644
--- a/library/alloc/src/sync.rs
+++ b/library/alloc/src/sync.rs
@@ -785,6 +785,98 @@ impl<T, A: Allocator> Arc<T, A> {
         }
     }
 
+    /// Constructs a new `Arc<T, A>` in the given allocator while giving you a `Weak<T, A>` to the allocation,
+    /// to allow you to construct a `T` which holds a weak pointer to itself.
+    ///
+    /// Generally, a structure circularly referencing itself, either directly or
+    /// indirectly, should not hold a strong reference to itself to prevent a memory leak.
+    /// Using this function, you get access to the weak pointer during the
+    /// initialization of `T`, before the `Arc<T, A>` is created, such that you can
+    /// clone and store it inside the `T`.
+    ///
+    /// `new_cyclic_in` first allocates the managed allocation for the `Arc<T, A>`,
+    /// then calls your closure, giving it a `Weak<T, A>` to this allocation,
+    /// and only afterwards completes the construction of the `Arc<T, A>` by placing
+    /// the `T` returned from your closure into the allocation.
+    ///
+    /// Since the new `Arc<T, A>` is not fully-constructed until `Arc<T, A>::new_cyclic_in`
+    /// returns, calling [`upgrade`] on the weak reference inside your closure will
+    /// fail and result in a `None` value.
+    ///
+    /// # Panics
+    ///
+    /// If `data_fn` panics, the panic is propagated to the caller, and the
+    /// temporary [`Weak<T, A>`] is dropped normally.
+    ///
+    /// # Example
+    ///
+    /// See [`new_cyclic`]
+    ///
+    /// [`new_cyclic`]: Arc::new_cyclic
+    /// [`upgrade`]: Weak::upgrade
+    #[cfg(not(no_global_oom_handling))]
+    #[inline]
+    #[stable(feature = "arc_new_cyclic", since = "1.60.0")]
+    pub fn new_cyclic_in<F>(data_fn: F, alloc: A) -> Arc<T, A>
+    where
+        F: FnOnce(&Weak<T, A>) -> T,
+    {
+        // Note: these comments and much of the implementation is copied from Arc::new_cyclic.
+
+        // Construct the inner in the "uninitialized" state with a single
+        // weak reference.
+        let (uninit_raw_ptr, alloc) = Box::into_raw_with_allocator(Box::new_in(
+            ArcInner {
+                strong: atomic::AtomicUsize::new(0),
+                weak: atomic::AtomicUsize::new(1),
+                data: mem::MaybeUninit::<T>::uninit(),
+            },
+            alloc,
+        ));
+        let uninit_ptr: NonNull<_> = (unsafe { &mut *uninit_raw_ptr }).into();
+        let init_ptr: NonNull<ArcInner<T>> = uninit_ptr.cast();
+
+        let weak = Weak { ptr: init_ptr, alloc: alloc };
+
+        // It's important we don't give up ownership of the weak pointer, or
+        // else the memory might be freed by the time `data_fn` returns. If
+        // we really wanted to pass ownership, we could create an additional
+        // weak pointer for ourselves, but this would result in additional
+        // updates to the weak reference count which might not be necessary
+        // otherwise.
+        let data = data_fn(&weak);
+
+        // Now we can properly initialize the inner value and turn our weak
+        // reference into a strong reference.
+        let strong = unsafe {
+            let inner = init_ptr.as_ptr();
+            ptr::write(ptr::addr_of_mut!((*inner).data), data);
+
+            // The above write to the data field must be visible to any threads which
+            // observe a non-zero strong count. Therefore we need at least "Release" ordering
+            // in order to synchronize with the `compare_exchange_weak` in `Weak::upgrade`.
+            //
+            // "Acquire" ordering is not required. When considering the possible behaviours
+            // of `data_fn` we only need to look at what it could do with a reference to a
+            // non-upgradeable `Weak`:
+            // - It can *clone* the `Weak`, increasing the weak reference count.
+            // - It can drop those clones, decreasing the weak reference count (but never to zero).
+            //
+            // These side effects do not impact us in any way, and no other side effects are
+            // possible with safe code alone.
+            let prev_value = (*inner).strong.fetch_add(1, Release);
+            debug_assert_eq!(prev_value, 0, "No prior strong references should exist");
+
+            // Strong references should collectively own a shared weak reference,
+            // so don't run the destructor for our old weak reference.
+            let alloc = weak.into_raw_with_allocator().1;
+
+            Arc::from_inner_in(init_ptr, alloc)
+        };
+
+        strong
+    }
+
     /// Constructs a new `Pin<Arc<T, A>>` in the provided allocator. If `T` does not implement `Unpin`,
     /// then `data` will be pinned in memory and unable to be moved.
     #[cfg(not(no_global_oom_handling))]
@@ -1322,99 +1414,6 @@ impl<T, A: Allocator> Arc<[mem::MaybeUninit<T>], A> {
     }
 }
 
-impl<T, A: Allocator + Clone> Arc<T, A> {
-    /// Constructs a new `Arc<T, A>` in the given allocator while giving you a `Weak<T, A>` to the allocation,
-    /// to allow you to construct a `T` which holds a weak pointer to itself.
-    ///
-    /// Generally, a structure circularly referencing itself, either directly or
-    /// indirectly, should not hold a strong reference to itself to prevent a memory leak.
-    /// Using this function, you get access to the weak pointer during the
-    /// initialization of `T`, before the `Arc<T, A>` is created, such that you can
-    /// clone and store it inside the `T`.
-    ///
-    /// `new_cyclic` first allocates the managed allocation for the `Arc<T, A>`,
-    /// then calls your closure, giving it a `Weak<T, A>` to this allocation,
-    /// and only afterwards completes the construction of the `Arc<T, A>` by placing
-    /// the `T` returned from your closure into the allocation.
-    ///
-    /// Since the new `Arc<T, A>` is not fully-constructed until `Arc<T, A>::new_cyclic_in`
-    /// returns, calling [`upgrade`] on the weak reference inside your closure will
-    /// fail and result in a `None` value.
-    ///
-    /// # Panics
-    ///
-    /// If `data_fn` panics, the panic is propagated to the caller, and the
-    /// temporary [`Weak<T, A>`] is dropped normally.
-    ///
-    /// # Example
-    ///
-    /// See [`new_cyclic`]
-    ///
-    /// [`new_cyclic`]: Arc::new_cyclic
-    /// [`upgrade`]: Weak::upgrade
-    #[cfg(not(no_global_oom_handling))]
-    #[inline]
-    #[stable(feature = "arc_new_cyclic", since = "1.60.0")]
-    pub fn new_cyclic_in<F>(data_fn: F, alloc: A) -> Arc<T, A>
-    where
-        F: FnOnce(&Weak<T, A>) -> T,
-    {
-        // Note: these comments and much of the implementation is copied from Arc::new_cyclic.
-
-        // Construct the inner in the "uninitialized" state with a single
-        // weak reference.
-        let uninit_ptr: NonNull<_> = Box::leak(Box::new_in(
-            ArcInner {
-                strong: atomic::AtomicUsize::new(0),
-                weak: atomic::AtomicUsize::new(1),
-                data: mem::MaybeUninit::<T>::uninit(),
-            },
-            alloc.clone(),
-        ))
-        .into();
-        let init_ptr: NonNull<ArcInner<T>> = uninit_ptr.cast();
-
-        let weak = Weak { ptr: init_ptr, alloc: alloc.clone() };
-
-        // It's important we don't give up ownership of the weak pointer, or
-        // else the memory might be freed by the time `data_fn` returns. If
-        // we really wanted to pass ownership, we could create an additional
-        // weak pointer for ourselves, but this would result in additional
-        // updates to the weak reference count which might not be necessary
-        // otherwise.
-        let data = data_fn(&weak);
-
-        // Now we can properly initialize the inner value and turn our weak
-        // reference into a strong reference.
-        let strong = unsafe {
-            let inner = init_ptr.as_ptr();
-            ptr::write(ptr::addr_of_mut!((*inner).data), data);
-
-            // The above write to the data field must be visible to any threads which
-            // observe a non-zero strong count. Therefore we need at least "Release" ordering
-            // in order to synchronize with the `compare_exchange_weak` in `Weak::upgrade`.
-            //
-            // "Acquire" ordering is not required. When considering the possible behaviours
-            // of `data_fn` we only need to look at what it could do with a reference to a
-            // non-upgradeable `Weak`:
-            // - It can *clone* the `Weak`, increasing the weak reference count.
-            // - It can drop those clones, decreasing the weak reference count (but never to zero).
-            //
-            // These side effects do not impact us in any way, and no other side effects are
-            // possible with safe code alone.
-            let prev_value = (*inner).strong.fetch_add(1, Release);
-            debug_assert_eq!(prev_value, 0, "No prior strong references should exist");
-
-            Arc::from_inner_in(init_ptr, alloc)
-        };
-
-        // Strong references should collectively own a shared weak reference,
-        // so don't run the destructor for our old weak reference.
-        mem::forget(weak);
-        strong
-    }
-}
-
 impl<T: ?Sized> Arc<T> {
     /// Constructs an `Arc<T>` from a raw pointer.
     ///
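The key to dropping `A: Clone` is that the allocator is now moved rather than copied: `Box::into_raw_with_allocator` takes it back out of the box, and `Weak::into_raw_with_allocator` later recovers it while also skipping the weak reference's destructor, replacing the old `mem::forget(weak)`. A small round trip illustrates the primitive; this sketch assumes the unstable `Weak::from_raw_in` counterpart for reassembly (nightly only):

```rust
#![feature(allocator_api)]

use std::alloc::Global;
use std::rc::{Rc, Weak};

fn main() {
    let rc = Rc::new_in(5u32, Global);
    let weak = Rc::downgrade(&rc);

    // Disassemble the weak handle without running its destructor: we get
    // back both the raw pointer and the allocator that owned the allocation.
    let (ptr, alloc) = weak.into_raw_with_allocator();

    // Reassemble it; the weak count was never decremented in between.
    let weak = unsafe { Weak::from_raw_in(ptr, alloc) };
    assert_eq!(*weak.upgrade().unwrap(), 5);
}
```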
From 550e55fec5cf3a11cf5b7372fda45465e4e6b4d0 Mon Sep 17 00:00:00 2001
From: Matthew Giordano
Date: Fri, 6 Sep 2024 15:20:41 -0700
Subject: [PATCH 7/8] Remove duplicate impl

---
 library/alloc/src/rc.rs   | 41 +++----------
 library/alloc/src/sync.rs | 53 +++-------------------------------------------
 2 files changed, 6 insertions(+), 88 deletions(-)

diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs
index 664bc5ffc3441..719f154141b56 100644
--- a/library/alloc/src/rc.rs
+++ b/library/alloc/src/rc.rs
@@ -460,42 +460,7 @@ impl<T> Rc<T> {
     where
         F: FnOnce(&Weak<T>) -> T,
     {
-        // Construct the inner in the "uninitialized" state with a single
-        // weak reference.
-        let uninit_ptr: NonNull<_> = Box::leak(Box::new(RcBox {
-            strong: Cell::new(0),
-            weak: Cell::new(1),
-            value: mem::MaybeUninit::<T>::uninit(),
-        }))
-        .into();
-
-        let init_ptr: NonNull<RcBox<T>> = uninit_ptr.cast();
-
-        let weak = Weak { ptr: init_ptr, alloc: Global };
-
-        // It's important we don't give up ownership of the weak pointer, or
-        // else the memory might be freed by the time `data_fn` returns. If
-        // we really wanted to pass ownership, we could create an additional
-        // weak pointer for ourselves, but this would result in additional
-        // updates to the weak reference count which might not be necessary
-        // otherwise.
-        let data = data_fn(&weak);
-
-        let strong = unsafe {
-            let inner = init_ptr.as_ptr();
-            ptr::write(ptr::addr_of_mut!((*inner).value), data);
-
-            let prev_value = (*inner).strong.get();
-            debug_assert_eq!(prev_value, 0, "No prior strong references should exist");
-            (*inner).strong.set(1);
-
-            Rc::from_inner(init_ptr)
-        };
-
-        // Strong references should collectively own a shared weak reference,
-        // so don't run the destructor for our old weak reference.
-        mem::forget(weak);
-        strong
+        Self::new_cyclic_in(data_fn, Global)
     }
 
     /// Constructs a new `Rc` with uninitialized contents.
@@ -801,8 +766,6 @@ impl<T, A: Allocator> Rc<T, A> {
     where
         F: FnOnce(&Weak<T, A>) -> T,
     {
-        // Note: comments and implementation are copied from Rc::new_cyclic.
-
         // Construct the inner in the "uninitialized" state with a single
         // weak reference.
         let (uninit_raw_ptr, alloc) = Box::into_raw_with_allocator(Box::new_in(
@@ -836,6 +799,8 @@ impl<T, A: Allocator> Rc<T, A> {
 
         // Strong references should collectively own a shared weak reference,
         // so don't run the destructor for our old weak reference.
+        // Calling into_raw_with_allocator has the double effect of giving us back the allocator,
+        // and forgetting the weak reference.
         let alloc = weak.into_raw_with_allocator().1;
 
         Rc::from_inner_in(init_ptr, alloc)
diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs
index 4f8039fd1f8b8..496865e303b40 100644
--- a/library/alloc/src/sync.rs
+++ b/library/alloc/src/sync.rs
@@ -450,54 +450,7 @@ impl<T> Arc<T> {
     where
         F: FnOnce(&Weak<T>) -> T,
     {
-        // Construct the inner in the "uninitialized" state with a single
-        // weak reference.
-        let uninit_ptr: NonNull<_> = Box::leak(Box::new(ArcInner {
-            strong: atomic::AtomicUsize::new(0),
-            weak: atomic::AtomicUsize::new(1),
-            data: mem::MaybeUninit::<T>::uninit(),
-        }))
-        .into();
-        let init_ptr: NonNull<ArcInner<T>> = uninit_ptr.cast();
-
-        let weak = Weak { ptr: init_ptr, alloc: Global };
-
-        // It's important we don't give up ownership of the weak pointer, or
-        // else the memory might be freed by the time `data_fn` returns. If
-        // we really wanted to pass ownership, we could create an additional
-        // weak pointer for ourselves, but this would result in additional
-        // updates to the weak reference count which might not be necessary
-        // otherwise.
-        let data = data_fn(&weak);
-
-        // Now we can properly initialize the inner value and turn our weak
-        // reference into a strong reference.
-        let strong = unsafe {
-            let inner = init_ptr.as_ptr();
-            ptr::write(ptr::addr_of_mut!((*inner).data), data);
-
-            // The above write to the data field must be visible to any threads which
-            // observe a non-zero strong count. Therefore we need at least "Release" ordering
-            // in order to synchronize with the `compare_exchange_weak` in `Weak::upgrade`.
-            //
-            // "Acquire" ordering is not required. When considering the possible behaviours
-            // of `data_fn` we only need to look at what it could do with a reference to a
-            // non-upgradeable `Weak`:
-            // - It can *clone* the `Weak`, increasing the weak reference count.
-            // - It can drop those clones, decreasing the weak reference count (but never to zero).
-            //
-            // These side effects do not impact us in any way, and no other side effects are
-            // possible with safe code alone.
-            let prev_value = (*inner).strong.fetch_add(1, Release);
-            debug_assert_eq!(prev_value, 0, "No prior strong references should exist");
-
-            Arc::from_inner(init_ptr)
-        };
-
-        // Strong references should collectively own a shared weak reference,
-        // so don't run the destructor for our old weak reference.
-        mem::forget(weak);
-        strong
+        Self::new_cyclic_in(data_fn, Global)
     }
 
     /// Constructs a new `Arc` with uninitialized contents.
@@ -821,8 +774,6 @@ impl<T, A: Allocator> Arc<T, A> {
     where
         F: FnOnce(&Weak<T, A>) -> T,
     {
-        // Note: these comments and much of the implementation is copied from Arc::new_cyclic.
-
         // Construct the inner in the "uninitialized" state with a single
         // weak reference.
         let (uninit_raw_ptr, alloc) = Box::into_raw_with_allocator(Box::new_in(
@@ -869,6 +820,8 @@ impl<T, A: Allocator> Arc<T, A> {
 
         // Strong references should collectively own a shared weak reference,
         // so don't run the destructor for our old weak reference.
+        // Calling into_raw_with_allocator has the double effect of giving us back the allocator,
+        // and forgetting the weak reference.
         let alloc = weak.into_raw_with_allocator().1;
 
         Arc::from_inner_in(init_ptr, alloc)
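With the duplicated bodies gone, the stable `new_cyclic` constructors are now thin wrappers that forward to the `_in` variants with `Global`. Behavior for existing stable code is unchanged; the classic self-referential pattern still compiles on stable and simply exercises the new code path:

```rust
use std::rc::{Rc, Weak};

struct Gadget {
    me: Weak<Gadget>,
}

impl Gadget {
    fn new() -> Rc<Self> {
        // Still the stable entry point; after this patch it forwards to
        // `Self::new_cyclic_in(data_fn, Global)` internally.
        Rc::new_cyclic(|me| Gadget { me: me.clone() })
    }
}

fn main() {
    let gadget = Gadget::new();
    assert!(gadget.me.upgrade().is_some());
}
```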
From 6750f042ca571407c8050693412d929ea1097b3f Mon Sep 17 00:00:00 2001
From: matthewpipie
Date: Mon, 16 Sep 2024 20:05:15 -0500
Subject: [PATCH 8/8] Update library/alloc/src/sync.rs

Co-authored-by: David Tolnay
---
 library/alloc/src/sync.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs
index 496865e303b40..63e4af34a578c 100644
--- a/library/alloc/src/sync.rs
+++ b/library/alloc/src/sync.rs
@@ -769,7 +769,7 @@ impl<T, A: Allocator> Arc<T, A> {
     /// [`upgrade`]: Weak::upgrade
     #[cfg(not(no_global_oom_handling))]
     #[inline]
-    #[stable(feature = "arc_new_cyclic", since = "1.60.0")]
+    #[unstable(feature = "allocator_api", issue = "32838")]
     pub fn new_cyclic_in<F>(data_fn: F, alloc: A) -> Arc<T, A>
     where
         F: FnOnce(&Weak<T, A>) -> T,
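This final commit corrects a copy-paste of `new_cyclic`'s stability attribute: `Arc::new_cyclic_in`, like the `Rc` version and the other `_in` constructors, belongs behind the `allocator_api` feature gate rather than being marked stable. A crate opting in would look roughly like this (nightly only):

```rust
// On stable, the call below fails to compile as an unstable library feature;
// this attribute (nightly only) opts in.
#![feature(allocator_api)]

use std::alloc::Global;
use std::sync::{Arc, Weak};

fn main() {
    let arc: Arc<u8> = Arc::new_cyclic_in(|_me: &Weak<u8>| 7u8, Global);
    assert_eq!(*arc, 7);
}
```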