// library/alloc/src/rc.rs (scraped copy; web-navigation header removed)
1//! Single-threaded reference-counting pointers. 'Rc' stands for 'Reference
2//! Counted'.
3//!
4//! The type [`Rc<T>`][`Rc`] provides shared ownership of a value of type `T`,
5//! allocated in the heap. Invoking [`clone`][clone] on [`Rc`] produces a new
6//! pointer to the same allocation in the heap. When the last [`Rc`] pointer to a
7//! given allocation is destroyed, the value stored in that allocation (often
8//! referred to as "inner value") is also dropped.
9//!
10//! Shared references in Rust disallow mutation by default, and [`Rc`]
11//! is no exception: you cannot generally obtain a mutable reference to
12//! something inside an [`Rc`]. If you need mutability, put a [`Cell`]
13//! or [`RefCell`] inside the [`Rc`]; see [an example of mutability
14//! inside an `Rc`][mutability].
15//!
16//! [`Rc`] uses non-atomic reference counting. This means that overhead is very
17//! low, but an [`Rc`] cannot be sent between threads, and consequently [`Rc`]
18//! does not implement [`Send`]. As a result, the Rust compiler
19//! will check *at compile time* that you are not sending [`Rc`]s between
20//! threads. If you need multi-threaded, atomic reference counting, use
21//! [`sync::Arc`][arc].
22//!
23//! The [`downgrade`][downgrade] method can be used to create a non-owning
24//! [`Weak`] pointer. A [`Weak`] pointer can be [`upgrade`][upgrade]d
25//! to an [`Rc`], but this will return [`None`] if the value stored in the allocation has
26//! already been dropped. In other words, `Weak` pointers do not keep the value
27//! inside the allocation alive; however, they *do* keep the allocation
28//! (the backing store for the inner value) alive.
29//!
30//! A cycle between [`Rc`] pointers will never be deallocated. For this reason,
31//! [`Weak`] is used to break cycles. For example, a tree could have strong
32//! [`Rc`] pointers from parent nodes to children, and [`Weak`] pointers from
33//! children back to their parents.
34//!
35//! `Rc<T>` automatically dereferences to `T` (via the [`Deref`] trait),
36//! so you can call `T`'s methods on a value of type [`Rc<T>`][`Rc`]. To avoid name
37//! clashes with `T`'s methods, the methods of [`Rc<T>`][`Rc`] itself are associated
38//! functions, called using [fully qualified syntax]:
39//!
40//! ```
41//! use std::rc::Rc;
42//!
43//! let my_rc = Rc::new(());
44//! let my_weak = Rc::downgrade(&my_rc);
45//! ```
46//!
47//! `Rc<T>`'s implementations of traits like `Clone` may also be called using
48//! fully qualified syntax. Some people prefer to use fully qualified syntax,
49//! while others prefer using method-call syntax.
50//!
51//! ```
52//! use std::rc::Rc;
53//!
54//! let rc = Rc::new(());
55//! // Method-call syntax
56//! let rc2 = rc.clone();
57//! // Fully qualified syntax
58//! let rc3 = Rc::clone(&rc);
59//! ```
60//!
61//! [`Weak<T>`][`Weak`] does not auto-dereference to `T`, because the inner value may have
62//! already been dropped.
63//!
64//! # Cloning references
65//!
66//! Creating a new reference to the same allocation as an existing reference counted pointer
67//! is done using the `Clone` trait implemented for [`Rc<T>`][`Rc`] and [`Weak<T>`][`Weak`].
68//!
69//! ```
70//! use std::rc::Rc;
71//!
72//! let foo = Rc::new(vec![1.0, 2.0, 3.0]);
73//! // The two syntaxes below are equivalent.
74//! let a = foo.clone();
75//! let b = Rc::clone(&foo);
76//! // a and b both point to the same memory location as foo.
77//! ```
78//!
79//! The `Rc::clone(&from)` syntax is the most idiomatic because it conveys more explicitly
80//! the meaning of the code. In the example above, this syntax makes it easier to see that
81//! this code is creating a new reference rather than copying the whole content of foo.
82//!
83//! # Examples
84//!
85//! Consider a scenario where a set of `Gadget`s are owned by a given `Owner`.
86//! We want to have our `Gadget`s point to their `Owner`. We can't do this with
87//! unique ownership, because more than one gadget may belong to the same
88//! `Owner`. [`Rc`] allows us to share an `Owner` between multiple `Gadget`s,
89//! and have the `Owner` remain allocated as long as any `Gadget` points at it.
90//!
91//! ```
92//! use std::rc::Rc;
93//!
94//! struct Owner {
95//!     name: String,
96//!     // ...other fields
97//! }
98//!
99//! struct Gadget {
100//!     id: i32,
101//!     owner: Rc<Owner>,
102//!     // ...other fields
103//! }
104//!
105//! fn main() {
106//!     // Create a reference-counted `Owner`.
107//!     let gadget_owner: Rc<Owner> = Rc::new(
108//!         Owner {
109//!             name: "Gadget Man".to_string(),
110//!         }
111//!     );
112//!
113//!     // Create `Gadget`s belonging to `gadget_owner`. Cloning the `Rc<Owner>`
114//!     // gives us a new pointer to the same `Owner` allocation, incrementing
115//!     // the reference count in the process.
116//!     let gadget1 = Gadget {
117//!         id: 1,
118//!         owner: Rc::clone(&gadget_owner),
119//!     };
120//!     let gadget2 = Gadget {
121//!         id: 2,
122//!         owner: Rc::clone(&gadget_owner),
123//!     };
124//!
125//!     // Dispose of our local variable `gadget_owner`.
126//!     drop(gadget_owner);
127//!
128//!     // Despite dropping `gadget_owner`, we're still able to print out the name
129//!     // of the `Owner` of the `Gadget`s. This is because we've only dropped a
130//!     // single `Rc<Owner>`, not the `Owner` it points to. As long as there are
131//!     // other `Rc<Owner>` pointing at the same `Owner` allocation, it will remain
132//!     // live. The field projection `gadget1.owner.name` works because
133//!     // `Rc<Owner>` automatically dereferences to `Owner`.
134//!     println!("Gadget {} owned by {}", gadget1.id, gadget1.owner.name);
135//!     println!("Gadget {} owned by {}", gadget2.id, gadget2.owner.name);
136//!
137//!     // At the end of the function, `gadget1` and `gadget2` are destroyed, and
138//!     // with them the last counted references to our `Owner`. Gadget Man now
139//!     // gets destroyed as well.
140//! }
141//! ```
142//!
143//! If our requirements change, and we also need to be able to traverse from
144//! `Owner` to `Gadget`, we will run into problems. An [`Rc`] pointer from `Owner`
145//! to `Gadget` introduces a cycle. This means that their
146//! reference counts can never reach 0, and the allocation will never be destroyed:
147//! a memory leak. In order to get around this, we can use [`Weak`]
148//! pointers.
149//!
150//! Rust actually makes it somewhat difficult to produce this loop in the first
151//! place. In order to end up with two values that point at each other, one of
152//! them needs to be mutable. This is difficult because [`Rc`] enforces
153//! memory safety by only giving out shared references to the value it wraps,
154//! and these don't allow direct mutation. We need to wrap the part of the
155//! value we wish to mutate in a [`RefCell`], which provides *interior
156//! mutability*: a method to achieve mutability through a shared reference.
157//! [`RefCell`] enforces Rust's borrowing rules at runtime.
158//!
159//! ```
160//! use std::rc::Rc;
161//! use std::rc::Weak;
162//! use std::cell::RefCell;
163//!
164//! struct Owner {
165//!     name: String,
166//!     gadgets: RefCell<Vec<Weak<Gadget>>>,
167//!     // ...other fields
168//! }
169//!
170//! struct Gadget {
171//!     id: i32,
172//!     owner: Rc<Owner>,
173//!     // ...other fields
174//! }
175//!
176//! fn main() {
177//!     // Create a reference-counted `Owner`. Note that we've put the `Owner`'s
178//!     // vector of `Gadget`s inside a `RefCell` so that we can mutate it through
179//!     // a shared reference.
180//!     let gadget_owner: Rc<Owner> = Rc::new(
181//!         Owner {
182//!             name: "Gadget Man".to_string(),
183//!             gadgets: RefCell::new(vec![]),
184//!         }
185//!     );
186//!
187//!     // Create `Gadget`s belonging to `gadget_owner`, as before.
188//!     let gadget1 = Rc::new(
189//!         Gadget {
190//!             id: 1,
191//!             owner: Rc::clone(&gadget_owner),
192//!         }
193//!     );
194//!     let gadget2 = Rc::new(
195//!         Gadget {
196//!             id: 2,
197//!             owner: Rc::clone(&gadget_owner),
198//!         }
199//!     );
200//!
201//!     // Add the `Gadget`s to their `Owner`.
202//!     {
203//!         let mut gadgets = gadget_owner.gadgets.borrow_mut();
204//!         gadgets.push(Rc::downgrade(&gadget1));
205//!         gadgets.push(Rc::downgrade(&gadget2));
206//!
207//!         // `RefCell` dynamic borrow ends here.
208//!     }
209//!
210//!     // Iterate over our `Gadget`s, printing their details out.
211//!     for gadget_weak in gadget_owner.gadgets.borrow().iter() {
212//!
213//!         // `gadget_weak` is a `Weak<Gadget>`. Since `Weak` pointers can't
214//!         // guarantee the allocation still exists, we need to call
215//!         // `upgrade`, which returns an `Option<Rc<Gadget>>`.
216//!         //
217//!         // In this case we know the allocation still exists, so we simply
218//!         // `unwrap` the `Option`. In a more complicated program, you might
219//!         // need graceful error handling for a `None` result.
220//!
221//!         let gadget = gadget_weak.upgrade().unwrap();
222//!         println!("Gadget {} owned by {}", gadget.id, gadget.owner.name);
223//!     }
224//!
225//!     // At the end of the function, `gadget_owner`, `gadget1`, and `gadget2`
226//!     // are destroyed. There are now no strong (`Rc`) pointers to the
227//!     // gadgets, so they are destroyed. This zeroes the reference count on
228//!     // Gadget Man, so he gets destroyed as well.
229//! }
230//! ```
231//!
232//! [clone]: Clone::clone
233//! [`Cell`]: core::cell::Cell
234//! [`RefCell`]: core::cell::RefCell
235//! [arc]: crate::sync::Arc
236//! [`Deref`]: core::ops::Deref
237//! [downgrade]: Rc::downgrade
238//! [upgrade]: Weak::upgrade
239//! [mutability]: core::cell#introducing-mutability-inside-of-something-immutable
240//! [fully qualified syntax]: https://doc.rust-lang.org/book/ch19-03-advanced-traits.html#fully-qualified-syntax-for-disambiguation-calling-methods-with-the-same-name
241
242#![stable(feature = "rust1", since = "1.0.0")]
243
244use core::any::Any;
245use core::cell::{Cell, CloneFromCell};
246#[cfg(not(no_global_oom_handling))]
247use core::clone::TrivialClone;
248use core::clone::{CloneToUninit, UseCloned};
249use core::cmp::Ordering;
250use core::hash::{Hash, Hasher};
251use core::intrinsics::abort;
252#[cfg(not(no_global_oom_handling))]
253use core::iter;
254use core::marker::{PhantomData, Unsize};
255use core::mem::{self, Alignment, ManuallyDrop};
256use core::num::NonZeroUsize;
257use core::ops::{CoerceUnsized, Deref, DerefMut, DerefPure, DispatchFromDyn, LegacyReceiver};
258#[cfg(not(no_global_oom_handling))]
259use core::ops::{Residual, Try};
260use core::panic::{RefUnwindSafe, UnwindSafe};
261#[cfg(not(no_global_oom_handling))]
262use core::pin::Pin;
263use core::pin::PinCoerceUnsized;
264use core::ptr::{self, NonNull, drop_in_place};
265#[cfg(not(no_global_oom_handling))]
266use core::slice::from_raw_parts_mut;
267use core::{borrow, fmt, hint};
268
269#[cfg(not(no_global_oom_handling))]
270use crate::alloc::handle_alloc_error;
271use crate::alloc::{AllocError, Allocator, Global, Layout};
272use crate::borrow::{Cow, ToOwned};
273use crate::boxed::Box;
274#[cfg(not(no_global_oom_handling))]
275use crate::string::String;
276#[cfg(not(no_global_oom_handling))]
277use crate::vec::Vec;
278
// This is repr(C) to future-proof against possible field-reordering, which
// would interfere with otherwise safe [into|from]_raw() of transmutable
// inner types.
// repr(align(2)) (forcing alignment to at least 2) is required because usize
// has 1-byte alignment on AVR.
#[repr(C, align(2))]
struct RcInner<T: ?Sized> {
    // Number of `Rc` pointers to this allocation.
    strong: Cell<usize>,
    // Number of `Weak` pointers, plus the one implicit "strong weak" owned
    // collectively by all strong pointers (see `Rc::new` / `drop_slow`).
    weak: Cell<usize>,
    // The wrapped value. Kept last so the sized `strong`/`weak` prefix has the
    // same layout regardless of `T`, which is what permits `T: ?Sized` here.
    value: T,
}
290
291/// Calculate layout for `RcInner<T>` using the inner value's layout
292fn rc_inner_layout_for_value_layout(layout: Layout) -> Layout {
293    // Calculate layout using the given value layout.
294    // Previously, layout was calculated on the expression
295    // `&*(ptr as *const RcInner<T>)`, but this created a misaligned
296    // reference (see #54908).
297    Layout::new::<RcInner<()>>().extend(layout).unwrap().0.pad_to_align()
298}
299
/// A single-threaded reference-counting pointer. 'Rc' stands for 'Reference
/// Counted'.
///
/// See the [module-level documentation](./index.html) for more details.
///
/// The inherent methods of `Rc` are all associated functions, which means
/// that you have to call them as e.g., [`Rc::get_mut(&mut value)`][get_mut] instead of
/// `value.get_mut()`. This avoids conflicts with methods of the inner type `T`.
///
/// [get_mut]: Rc::get_mut
#[doc(search_unbox)]
#[rustc_diagnostic_item = "Rc"]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_insignificant_dtor]
pub struct Rc<
    T: ?Sized,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
    // Pointer to the shared counts-plus-value allocation; valid for as long as
    // this `Rc` is alive.
    ptr: NonNull<RcInner<T>>,
    // Marks that we logically own an `RcInner<T>` (for drop-check purposes).
    phantom: PhantomData<RcInner<T>>,
    // The allocator that allocated, and will deallocate, the inner allocation.
    alloc: A,
}
322
// `Rc` uses non-atomic reference counts, so sending one across threads would
// race on the counts; hence the explicit `!Send`.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized, A: Allocator> !Send for Rc<T, A> {}

// Note that this negative impl isn't strictly necessary for correctness,
// as `Rc` transitively contains a `Cell`, which is itself `!Sync`.
// However, given how important `Rc`'s `!Sync`-ness is,
// having an explicit negative impl is nice for documentation purposes
// and results in nicer error messages.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized, A: Allocator> !Sync for Rc<T, A> {}

// Unwind-safety is inherited from the pointee and the allocator.
#[stable(feature = "catch_unwind", since = "1.9.0")]
impl<T: RefUnwindSafe + ?Sized, A: Allocator + UnwindSafe> UnwindSafe for Rc<T, A> {}
#[stable(feature = "rc_ref_unwind_safe", since = "1.58.0")]
impl<T: RefUnwindSafe + ?Sized, A: Allocator + UnwindSafe> RefUnwindSafe for Rc<T, A> {}

// Allows unsizing coercions such as `Rc<[T; N]> -> Rc<[T]>` and
// `Rc<T> -> Rc<dyn Trait>`.
#[unstable(feature = "coerce_unsized", issue = "18598")]
impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<Rc<U, A>> for Rc<T, A> {}

// Allows `Rc<Self>` to be used as a method receiver on trait objects.
#[unstable(feature = "dispatch_from_dyn", issue = "none")]
impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Rc<U>> for Rc<T> {}

// SAFETY: `Rc::clone` doesn't access any `Cell`s which could contain the `Rc` being cloned.
#[unstable(feature = "cell_get_cloned", issue = "145329")]
unsafe impl<T: ?Sized> CloneFromCell for Rc<T> {}
348
impl<T: ?Sized> Rc<T> {
    /// Builds an `Rc` around an existing `RcInner` allocation in the `Global`
    /// allocator.
    ///
    /// # Safety
    ///
    /// `ptr` must point to a valid `RcInner<T>` allocated by `Global`, and the
    /// reference counts it carries must account for the `Rc` being created.
    #[inline]
    unsafe fn from_inner(ptr: NonNull<RcInner<T>>) -> Self {
        unsafe { Self::from_inner_in(ptr, Global) }
    }

    /// Raw-pointer variant of [`Rc::from_inner`].
    ///
    /// # Safety
    ///
    /// Same requirements as [`Rc::from_inner`]; in addition, `ptr` must be
    /// non-null.
    #[inline]
    unsafe fn from_ptr(ptr: *mut RcInner<T>) -> Self {
        unsafe { Self::from_inner(NonNull::new_unchecked(ptr)) }
    }
}
360
impl<T: ?Sized, A: Allocator> Rc<T, A> {
    /// Returns a shared reference to the heap block holding the counts and the
    /// value.
    #[inline(always)]
    fn inner(&self) -> &RcInner<T> {
        // This unsafety is ok because while this Rc is alive we're guaranteed
        // that the inner pointer is valid.
        unsafe { self.ptr.as_ref() }
    }

    /// Decomposes `this` into its inner pointer and allocator without running
    /// `Rc`'s destructor (so the reference counts are left untouched).
    #[inline]
    fn into_inner_with_allocator(this: Self) -> (NonNull<RcInner<T>>, A) {
        // `ManuallyDrop` suppresses `Drop for Rc`; the allocator is then moved
        // out bitwise with `ptr::read` since `this` is never dropped.
        let this = mem::ManuallyDrop::new(this);
        (this.ptr, unsafe { ptr::read(&this.alloc) })
    }

    /// Builds an `Rc` from an inner pointer and the allocator that owns it.
    ///
    /// # Safety
    ///
    /// `ptr` must point to a valid `RcInner<T>` allocated by `alloc`, with
    /// reference counts accounting for the `Rc` being created.
    #[inline]
    unsafe fn from_inner_in(ptr: NonNull<RcInner<T>>, alloc: A) -> Self {
        Self { ptr, phantom: PhantomData, alloc }
    }

    /// Raw-pointer variant of [`Rc::from_inner_in`].
    ///
    /// # Safety
    ///
    /// Same requirements as [`Rc::from_inner_in`]; in addition, `ptr` must be
    /// non-null.
    #[inline]
    unsafe fn from_ptr_in(ptr: *mut RcInner<T>, alloc: A) -> Self {
        unsafe { Self::from_inner_in(NonNull::new_unchecked(ptr), alloc) }
    }

    // Non-inlined part of `drop`.
    #[inline(never)]
    unsafe fn drop_slow(&mut self) {
        // Reconstruct the "strong weak" pointer and drop it when this
        // variable goes out of scope. This ensures that the memory is
        // deallocated even if the destructor of `T` panics.
        let _weak = Weak { ptr: self.ptr, alloc: &self.alloc };

        // Destroy the contained object.
        // We cannot use `get_mut_unchecked` here, because `self.alloc` is borrowed.
        unsafe {
            ptr::drop_in_place(&mut (*self.ptr.as_ptr()).value);
        }
    }
}
400
impl<T> Rc<T> {
    /// Constructs a new `Rc<T>`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn new(value: T) -> Rc<T> {
        // There is an implicit weak pointer owned by all the strong
        // pointers, which ensures that the weak destructor never frees
        // the allocation while the strong destructor is running, even
        // if the weak pointer is stored inside the strong one.
        // That is why `weak` starts at 1 here despite no `Weak` existing yet.
        unsafe {
            Self::from_inner(
                Box::leak(Box::new(RcInner { strong: Cell::new(1), weak: Cell::new(1), value }))
                    .into(),
            )
        }
    }

    /// Constructs a new `Rc<T>` while giving you a `Weak<T>` to the allocation,
    /// to allow you to construct a `T` which holds a weak pointer to itself.
    ///
    /// Generally, a structure circularly referencing itself, either directly or
    /// indirectly, should not hold a strong reference to itself to prevent a memory leak.
    /// Using this function, you get access to the weak pointer during the
    /// initialization of `T`, before the `Rc<T>` is created, such that you can
    /// clone and store it inside the `T`.
    ///
    /// `new_cyclic` first allocates the managed allocation for the `Rc<T>`,
    /// then calls your closure, giving it a `Weak<T>` to this allocation,
    /// and only afterwards completes the construction of the `Rc<T>` by placing
    /// the `T` returned from your closure into the allocation.
    ///
    /// Since the new `Rc<T>` is not fully-constructed until `Rc<T>::new_cyclic`
    /// returns, calling [`upgrade`] on the weak reference inside your closure will
    /// fail and result in a `None` value.
    ///
    /// # Panics
    ///
    /// If `data_fn` panics, the panic is propagated to the caller, and the
    /// temporary [`Weak<T>`] is dropped normally.
    ///
    /// # Examples
    ///
    /// ```
    /// # #![allow(dead_code)]
    /// use std::rc::{Rc, Weak};
    ///
    /// struct Gadget {
    ///     me: Weak<Gadget>,
    /// }
    ///
    /// impl Gadget {
    ///     /// Constructs a reference counted Gadget.
    ///     fn new() -> Rc<Self> {
    ///         // `me` is a `Weak<Gadget>` pointing at the new allocation of the
    ///         // `Rc` we're constructing.
    ///         Rc::new_cyclic(|me| {
    ///             // Create the actual struct here.
    ///             Gadget { me: me.clone() }
    ///         })
    ///     }
    ///
    ///     /// Returns a reference counted pointer to Self.
    ///     fn me(&self) -> Rc<Self> {
    ///         self.me.upgrade().unwrap()
    ///     }
    /// }
    /// ```
    /// [`upgrade`]: Weak::upgrade
    #[cfg(not(no_global_oom_handling))]
    #[stable(feature = "arc_new_cyclic", since = "1.60.0")]
    pub fn new_cyclic<F>(data_fn: F) -> Rc<T>
    where
        F: FnOnce(&Weak<T>) -> T,
    {
        Self::new_cyclic_in(data_fn, Global)
    }

    /// Constructs a new `Rc` with uninitialized contents.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let mut five = Rc::<u32>::new_uninit();
    ///
    /// // Deferred initialization:
    /// Rc::get_mut(&mut five).unwrap().write(5);
    ///
    /// let five = unsafe { five.assume_init() };
    ///
    /// assert_eq!(*five, 5)
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[stable(feature = "new_uninit", since = "1.82.0")]
    #[must_use]
    pub fn new_uninit() -> Rc<mem::MaybeUninit<T>> {
        unsafe {
            Rc::from_ptr(Rc::allocate_for_layout(
                Layout::new::<T>(),
                |layout| Global.allocate(layout),
                <*mut u8>::cast,
            ))
        }
    }

    /// Constructs a new `Rc` with uninitialized contents, with the memory
    /// being filled with `0` bytes.
    ///
    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
    /// incorrect usage of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let zero = Rc::<u32>::new_zeroed();
    /// let zero = unsafe { zero.assume_init() };
    ///
    /// assert_eq!(*zero, 0)
    /// ```
    ///
    /// [zeroed]: mem::MaybeUninit::zeroed
    #[cfg(not(no_global_oom_handling))]
    #[stable(feature = "new_zeroed_alloc", since = "1.92.0")]
    #[must_use]
    pub fn new_zeroed() -> Rc<mem::MaybeUninit<T>> {
        unsafe {
            Rc::from_ptr(Rc::allocate_for_layout(
                Layout::new::<T>(),
                |layout| Global.allocate_zeroed(layout),
                <*mut u8>::cast,
            ))
        }
    }

    /// Constructs a new `Rc<T>`, returning an error if the allocation fails
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    /// use std::rc::Rc;
    ///
    /// let five = Rc::try_new(5);
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub fn try_new(value: T) -> Result<Rc<T>, AllocError> {
        // There is an implicit weak pointer owned by all the strong
        // pointers, which ensures that the weak destructor never frees
        // the allocation while the strong destructor is running, even
        // if the weak pointer is stored inside the strong one.
        unsafe {
            Ok(Self::from_inner(
                Box::leak(Box::try_new(RcInner {
                    strong: Cell::new(1),
                    weak: Cell::new(1),
                    value,
                })?)
                .into(),
            ))
        }
    }

    /// Constructs a new `Rc` with uninitialized contents, returning an error if the allocation fails
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    ///
    /// let mut five = Rc::<u32>::try_new_uninit()?;
    ///
    /// // Deferred initialization:
    /// Rc::get_mut(&mut five).unwrap().write(5);
    ///
    /// let five = unsafe { five.assume_init() };
    ///
    /// assert_eq!(*five, 5);
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub fn try_new_uninit() -> Result<Rc<mem::MaybeUninit<T>>, AllocError> {
        unsafe {
            Ok(Rc::from_ptr(Rc::try_allocate_for_layout(
                Layout::new::<T>(),
                |layout| Global.allocate(layout),
                <*mut u8>::cast,
            )?))
        }
    }

    /// Constructs a new `Rc` with uninitialized contents, with the memory
    /// being filled with `0` bytes, returning an error if the allocation fails
    ///
    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
    /// incorrect usage of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    ///
    /// let zero = Rc::<u32>::try_new_zeroed()?;
    /// let zero = unsafe { zero.assume_init() };
    ///
    /// assert_eq!(*zero, 0);
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    ///
    /// [zeroed]: mem::MaybeUninit::zeroed
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub fn try_new_zeroed() -> Result<Rc<mem::MaybeUninit<T>>, AllocError> {
        unsafe {
            Ok(Rc::from_ptr(Rc::try_allocate_for_layout(
                Layout::new::<T>(),
                |layout| Global.allocate_zeroed(layout),
                <*mut u8>::cast,
            )?))
        }
    }

    /// Constructs a new `Pin<Rc<T>>`. If `T` does not implement `Unpin`, then
    /// `value` will be pinned in memory and unable to be moved.
    #[cfg(not(no_global_oom_handling))]
    #[stable(feature = "pin", since = "1.33.0")]
    #[must_use]
    pub fn pin(value: T) -> Pin<Rc<T>> {
        unsafe { Pin::new_unchecked(Rc::new(value)) }
    }

    /// Maps the value in an `Rc`, reusing the allocation if possible.
    ///
    /// `f` is called on a reference to the value in the `Rc`, and the result is returned, also in
    /// an `Rc`.
    ///
    /// Note: this is an associated function, which means that you have
    /// to call it as `Rc::map(r, f)` instead of `r.map(f)`. This
    /// is so that there is no conflict with a method on the inner type.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(smart_pointer_try_map)]
    ///
    /// use std::rc::Rc;
    ///
    /// let r = Rc::new(7);
    /// let new = Rc::map(r, |i| i + 7);
    /// assert_eq!(*new, 14);
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "smart_pointer_try_map", issue = "144419")]
    pub fn map<U>(this: Self, f: impl FnOnce(&T) -> U) -> Rc<U> {
        // Fast path: `T` and `U` have identical size and alignment and we hold
        // the only reference (per `Rc::is_unique`), so the existing allocation
        // can be reused in place instead of allocating a fresh `Rc<U>`.
        if size_of::<T>() == size_of::<U>()
            && align_of::<T>() == align_of::<U>()
            && Rc::is_unique(&this)
        {
            unsafe {
                // Move the value out of the allocation, reinterpret the slot as
                // uninitialized `U`, then write the mapped result back in.
                let ptr = Rc::into_raw(this);
                let value = ptr.read();
                let mut allocation = Rc::from_raw(ptr.cast::<mem::MaybeUninit<U>>());

                Rc::get_mut_unchecked(&mut allocation).write(f(&value));
                allocation.assume_init()
            }
        } else {
            // Slow path: allocate a new `Rc` for the mapped value.
            Rc::new(f(&*this))
        }
    }

    /// Attempts to map the value in an `Rc`, reusing the allocation if possible.
    ///
    /// `f` is called on a reference to the value in the `Rc`, and if the operation succeeds, the
    /// result is returned, also in an `Rc`.
    ///
    /// Note: this is an associated function, which means that you have
    /// to call it as `Rc::try_map(r, f)` instead of `r.try_map(f)`. This
    /// is so that there is no conflict with a method on the inner type.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(smart_pointer_try_map)]
    ///
    /// use std::rc::Rc;
    ///
    /// let b = Rc::new(7);
    /// let new = Rc::try_map(b, |&i| u32::try_from(i)).unwrap();
    /// assert_eq!(*new, 7);
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "smart_pointer_try_map", issue = "144419")]
    pub fn try_map<R>(
        this: Self,
        f: impl FnOnce(&T) -> R,
    ) -> <R::Residual as Residual<Rc<R::Output>>>::TryType
    where
        R: Try,
        R::Residual: Residual<Rc<R::Output>>,
    {
        // Same reuse strategy as `Rc::map`, but `f` is fallible: `?` propagates
        // the failure before anything is written back into the allocation.
        if size_of::<T>() == size_of::<R::Output>()
            && align_of::<T>() == align_of::<R::Output>()
            && Rc::is_unique(&this)
        {
            unsafe {
                let ptr = Rc::into_raw(this);
                let value = ptr.read();
                let mut allocation = Rc::from_raw(ptr.cast::<mem::MaybeUninit<R::Output>>());

                Rc::get_mut_unchecked(&mut allocation).write(f(&value)?);
                try { allocation.assume_init() }
            }
        } else {
            try { Rc::new(f(&*this)?) }
        }
    }
}
732
733impl<T, A: Allocator> Rc<T, A> {
734    /// Constructs a new `Rc` in the provided allocator.
735    ///
736    /// # Examples
737    ///
738    /// ```
739    /// #![feature(allocator_api)]
740    /// use std::rc::Rc;
741    /// use std::alloc::System;
742    ///
743    /// let five = Rc::new_in(5, System);
744    /// ```
745    #[cfg(not(no_global_oom_handling))]
746    #[unstable(feature = "allocator_api", issue = "32838")]
747    #[inline]
748    pub fn new_in(value: T, alloc: A) -> Rc<T, A> {
749        // NOTE: Prefer match over unwrap_or_else since closure sometimes not inlineable.
750        // That would make code size bigger.
751        match Self::try_new_in(value, alloc) {
752            Ok(m) => m,
753            Err(_) => handle_alloc_error(Layout::new::<RcInner<T>>()),
754        }
755    }
756
757    /// Constructs a new `Rc` with uninitialized contents in the provided allocator.
758    ///
759    /// # Examples
760    ///
761    /// ```
762    /// #![feature(get_mut_unchecked)]
763    /// #![feature(allocator_api)]
764    ///
765    /// use std::rc::Rc;
766    /// use std::alloc::System;
767    ///
768    /// let mut five = Rc::<u32, _>::new_uninit_in(System);
769    ///
770    /// let five = unsafe {
771    ///     // Deferred initialization:
772    ///     Rc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);
773    ///
774    ///     five.assume_init()
775    /// };
776    ///
777    /// assert_eq!(*five, 5)
778    /// ```
779    #[cfg(not(no_global_oom_handling))]
780    #[unstable(feature = "allocator_api", issue = "32838")]
781    #[inline]
782    pub fn new_uninit_in(alloc: A) -> Rc<mem::MaybeUninit<T>, A> {
783        unsafe {
784            Rc::from_ptr_in(
785                Rc::allocate_for_layout(
786                    Layout::new::<T>(),
787                    |layout| alloc.allocate(layout),
788                    <*mut u8>::cast,
789                ),
790                alloc,
791            )
792        }
793    }
794
795    /// Constructs a new `Rc` with uninitialized contents, with the memory
796    /// being filled with `0` bytes, in the provided allocator.
797    ///
798    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
799    /// incorrect usage of this method.
800    ///
801    /// # Examples
802    ///
803    /// ```
804    /// #![feature(allocator_api)]
805    ///
806    /// use std::rc::Rc;
807    /// use std::alloc::System;
808    ///
809    /// let zero = Rc::<u32, _>::new_zeroed_in(System);
810    /// let zero = unsafe { zero.assume_init() };
811    ///
812    /// assert_eq!(*zero, 0)
813    /// ```
814    ///
815    /// [zeroed]: mem::MaybeUninit::zeroed
816    #[cfg(not(no_global_oom_handling))]
817    #[unstable(feature = "allocator_api", issue = "32838")]
818    #[inline]
819    pub fn new_zeroed_in(alloc: A) -> Rc<mem::MaybeUninit<T>, A> {
820        unsafe {
821            Rc::from_ptr_in(
822                Rc::allocate_for_layout(
823                    Layout::new::<T>(),
824                    |layout| alloc.allocate_zeroed(layout),
825                    <*mut u8>::cast,
826                ),
827                alloc,
828            )
829        }
830    }
831
832    /// Constructs a new `Rc<T, A>` in the given allocator while giving you a `Weak<T, A>` to the allocation,
833    /// to allow you to construct a `T` which holds a weak pointer to itself.
834    ///
835    /// Generally, a structure circularly referencing itself, either directly or
836    /// indirectly, should not hold a strong reference to itself to prevent a memory leak.
837    /// Using this function, you get access to the weak pointer during the
838    /// initialization of `T`, before the `Rc<T, A>` is created, such that you can
839    /// clone and store it inside the `T`.
840    ///
841    /// `new_cyclic_in` first allocates the managed allocation for the `Rc<T, A>`,
842    /// then calls your closure, giving it a `Weak<T, A>` to this allocation,
843    /// and only afterwards completes the construction of the `Rc<T, A>` by placing
844    /// the `T` returned from your closure into the allocation.
845    ///
846    /// Since the new `Rc<T, A>` is not fully-constructed until `Rc<T, A>::new_cyclic_in`
847    /// returns, calling [`upgrade`] on the weak reference inside your closure will
848    /// fail and result in a `None` value.
849    ///
850    /// # Panics
851    ///
852    /// If `data_fn` panics, the panic is propagated to the caller, and the
853    /// temporary [`Weak<T, A>`] is dropped normally.
854    ///
855    /// # Examples
856    ///
857    /// See [`new_cyclic`].
858    ///
859    /// [`new_cyclic`]: Rc::new_cyclic
860    /// [`upgrade`]: Weak::upgrade
861    #[cfg(not(no_global_oom_handling))]
862    #[unstable(feature = "allocator_api", issue = "32838")]
863    pub fn new_cyclic_in<F>(data_fn: F, alloc: A) -> Rc<T, A>
864    where
865        F: FnOnce(&Weak<T, A>) -> T,
866    {
867        // Construct the inner in the "uninitialized" state with a single
868        // weak reference.
869        let (uninit_raw_ptr, alloc) = Box::into_raw_with_allocator(Box::new_in(
870            RcInner {
871                strong: Cell::new(0),
872                weak: Cell::new(1),
873                value: mem::MaybeUninit::<T>::uninit(),
874            },
875            alloc,
876        ));
877        let uninit_ptr: NonNull<_> = (unsafe { &mut *uninit_raw_ptr }).into();
        // `MaybeUninit<T>` is layout-compatible with `T`, so the two
        // `RcInner` types share a layout and this pointer cast is sound.
878        let init_ptr: NonNull<RcInner<T>> = uninit_ptr.cast();
879
        // `strong` is still 0 here, so `upgrade()` on this Weak (or any
        // clone of it) inside `data_fn` returns `None`.
880        let weak = Weak { ptr: init_ptr, alloc };
881
882        // It's important we don't give up ownership of the weak pointer, or
883        // else the memory might be freed by the time `data_fn` returns. If
884        // we really wanted to pass ownership, we could create an additional
885        // weak pointer for ourselves, but this would result in additional
886        // updates to the weak reference count which might not be necessary
887        // otherwise.
888        let data = data_fn(&weak);
889
890        let strong = unsafe {
891            let inner = init_ptr.as_ptr();
            // Move the closure's result into the allocation; only now is
            // the value actually initialized.
892            ptr::write(&raw mut (*inner).value, data);
893
894            let prev_value = (*inner).strong.get();
895            debug_assert_eq!(prev_value, 0, "No prior strong references should exist");
896            (*inner).strong.set(1);
897
898            // Strong references should collectively own a shared weak reference,
899            // so don't run the destructor for our old weak reference.
900            // Calling into_raw_with_allocator has the double effect of giving us back the allocator,
901            // and forgetting the weak reference.
902            let alloc = weak.into_raw_with_allocator().1;
903
904            Rc::from_inner_in(init_ptr, alloc)
905        };
906
907        strong
908    }
909
910    /// Constructs a new `Rc<T>` in the provided allocator, returning an error if the allocation
911    /// fails
912    ///
913    /// # Examples
914    ///
915    /// ```
916    /// #![feature(allocator_api)]
917    /// use std::rc::Rc;
918    /// use std::alloc::System;
919    ///
920    /// let five = Rc::try_new_in(5, System);
921    /// # Ok::<(), std::alloc::AllocError>(())
922    /// ```
923    #[unstable(feature = "allocator_api", issue = "32838")]
924    #[inline]
925    pub fn try_new_in(value: T, alloc: A) -> Result<Self, AllocError> {
926        // There is an implicit weak pointer owned by all the strong
927        // pointers, which ensures that the weak destructor never frees
928        // the allocation while the strong destructor is running, even
929        // if the weak pointer is stored inside the strong one.
930        let (ptr, alloc) = Box::into_unique(Box::try_new_in(
931            RcInner { strong: Cell::new(1), weak: Cell::new(1), value },
932            alloc,
933        )?);
934        Ok(unsafe { Self::from_inner_in(ptr.into(), alloc) })
935    }
936
937    /// Constructs a new `Rc` with uninitialized contents, in the provided allocator, returning an
938    /// error if the allocation fails
939    ///
940    /// # Examples
941    ///
942    /// ```
943    /// #![feature(allocator_api)]
944    /// #![feature(get_mut_unchecked)]
945    ///
946    /// use std::rc::Rc;
947    /// use std::alloc::System;
948    ///
949    /// let mut five = Rc::<u32, _>::try_new_uninit_in(System)?;
950    ///
951    /// let five = unsafe {
952    ///     // Deferred initialization:
953    ///     Rc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);
954    ///
955    ///     five.assume_init()
956    /// };
957    ///
958    /// assert_eq!(*five, 5);
959    /// # Ok::<(), std::alloc::AllocError>(())
960    /// ```
961    #[unstable(feature = "allocator_api", issue = "32838")]
962    #[inline]
963    pub fn try_new_uninit_in(alloc: A) -> Result<Rc<mem::MaybeUninit<T>, A>, AllocError> {
964        unsafe {
965            Ok(Rc::from_ptr_in(
966                Rc::try_allocate_for_layout(
967                    Layout::new::<T>(),
968                    |layout| alloc.allocate(layout),
969                    <*mut u8>::cast,
970                )?,
971                alloc,
972            ))
973        }
974    }
975
976    /// Constructs a new `Rc` with uninitialized contents, with the memory
977    /// being filled with `0` bytes, in the provided allocator, returning an error if the allocation
978    /// fails
979    ///
980    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
981    /// incorrect usage of this method.
982    ///
983    /// # Examples
984    ///
985    /// ```
986    /// #![feature(allocator_api)]
987    ///
988    /// use std::rc::Rc;
989    /// use std::alloc::System;
990    ///
991    /// let zero = Rc::<u32, _>::try_new_zeroed_in(System)?;
992    /// let zero = unsafe { zero.assume_init() };
993    ///
994    /// assert_eq!(*zero, 0);
995    /// # Ok::<(), std::alloc::AllocError>(())
996    /// ```
997    ///
998    /// [zeroed]: mem::MaybeUninit::zeroed
999    #[unstable(feature = "allocator_api", issue = "32838")]
1000    #[inline]
1001    pub fn try_new_zeroed_in(alloc: A) -> Result<Rc<mem::MaybeUninit<T>, A>, AllocError> {
1002        unsafe {
1003            Ok(Rc::from_ptr_in(
1004                Rc::try_allocate_for_layout(
1005                    Layout::new::<T>(),
1006                    |layout| alloc.allocate_zeroed(layout),
1007                    <*mut u8>::cast,
1008                )?,
1009                alloc,
1010            ))
1011        }
1012    }
1013
1014    /// Constructs a new `Pin<Rc<T>>` in the provided allocator. If `T` does not implement `Unpin`, then
1015    /// `value` will be pinned in memory and unable to be moved.
1016    #[cfg(not(no_global_oom_handling))]
1017    #[unstable(feature = "allocator_api", issue = "32838")]
1018    #[inline]
1019    pub fn pin_in(value: T, alloc: A) -> Pin<Self>
1020    where
1021        A: 'static,
1022    {
1023        unsafe { Pin::new_unchecked(Rc::new_in(value, alloc)) }
1024    }
1025
1026    /// Returns the inner value, if the `Rc` has exactly one strong reference.
1027    ///
1028    /// Otherwise, an [`Err`] is returned with the same `Rc` that was
1029    /// passed in.
1030    ///
1031    /// This will succeed even if there are outstanding weak references.
1032    ///
1033    /// # Examples
1034    ///
1035    /// ```
1036    /// use std::rc::Rc;
1037    ///
1038    /// let x = Rc::new(3);
1039    /// assert_eq!(Rc::try_unwrap(x), Ok(3));
1040    ///
1041    /// let x = Rc::new(4);
1042    /// let _y = Rc::clone(&x);
1043    /// assert_eq!(*Rc::try_unwrap(x).unwrap_err(), 4);
1044    /// ```
1045    #[inline]
1046    #[stable(feature = "rc_unique", since = "1.4.0")]
1047    pub fn try_unwrap(this: Self) -> Result<T, Self> {
1048        if Rc::strong_count(&this) == 1 {
            // Suppress `Drop`: from here on the Rc is dismantled by hand.
1049            let this = ManuallyDrop::new(this);
1050
1051            let val: T = unsafe { ptr::read(&**this) }; // copy the contained object
1052            let alloc: A = unsafe { ptr::read(&this.alloc) }; // copy the allocator
1053
1054            // Indicate to Weaks that they can't be promoted by decrementing
1055            // the strong count, and then remove the implicit "strong weak"
1056            // pointer while also handling drop logic by just crafting a
1057            // fake Weak.
1058            this.inner().dec_strong();
1059            let _weak = Weak { ptr: this.ptr, alloc };
1060            Ok(val)
1061        } else {
1062            Err(this)
1063        }
1064    }
1065
1066    /// Returns the inner value, if the `Rc` has exactly one strong reference.
1067    ///
1068    /// Otherwise, [`None`] is returned and the `Rc` is dropped.
1069    ///
1070    /// This will succeed even if there are outstanding weak references.
1071    ///
1072    /// If `Rc::into_inner` is called on every clone of this `Rc`,
1073    /// it is guaranteed that exactly one of the calls returns the inner value.
1074    /// This means in particular that the inner value is not dropped.
1075    ///
1076    /// [`Rc::try_unwrap`] is conceptually similar to `Rc::into_inner`.
1077    /// And while they are meant for different use-cases, `Rc::into_inner(this)`
1078    /// is in fact equivalent to <code>[Rc::try_unwrap]\(this).[ok][Result::ok]()</code>.
1079    /// (Note that the same kind of equivalence does **not** hold true for
1080    /// [`Arc`](crate::sync::Arc), due to race conditions that do not apply to `Rc`!)
1081    ///
1082    /// # Examples
1083    ///
1084    /// ```
1085    /// use std::rc::Rc;
1086    ///
1087    /// let x = Rc::new(3);
1088    /// assert_eq!(Rc::into_inner(x), Some(3));
1089    ///
1090    /// let x = Rc::new(4);
1091    /// let y = Rc::clone(&x);
1092    ///
1093    /// assert_eq!(Rc::into_inner(y), None);
1094    /// assert_eq!(Rc::into_inner(x), Some(4));
1095    /// ```
1096    #[inline]
1097    #[stable(feature = "rc_into_inner", since = "1.70.0")]
1098    pub fn into_inner(this: Self) -> Option<T> {
1099        Rc::try_unwrap(this).ok()
1100    }
1101}
1102
1103impl<T> Rc<[T]> {
1104    /// Constructs a new reference-counted slice with uninitialized contents.
1105    ///
1106    /// # Examples
1107    ///
1108    /// ```
1109    /// use std::rc::Rc;
1110    ///
1111    /// let mut values = Rc::<[u32]>::new_uninit_slice(3);
1112    ///
1113    /// // Deferred initialization:
1114    /// let data = Rc::get_mut(&mut values).unwrap();
1115    /// data[0].write(1);
1116    /// data[1].write(2);
1117    /// data[2].write(3);
1118    ///
1119    /// let values = unsafe { values.assume_init() };
1120    ///
1121    /// assert_eq!(*values, [1, 2, 3])
1122    /// ```
1123    #[cfg(not(no_global_oom_handling))]
1124    #[stable(feature = "new_uninit", since = "1.82.0")]
1125    #[must_use]
1126    pub fn new_uninit_slice(len: usize) -> Rc<[mem::MaybeUninit<T>]> {
        // Allocate in the global allocator; contents are left uninitialized.
1127        unsafe { Rc::from_ptr(Rc::allocate_for_slice(len)) }
1128    }
1129
1130    /// Constructs a new reference-counted slice with uninitialized contents, with the memory being
1131    /// filled with `0` bytes.
1132    ///
1133    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
1134    /// incorrect usage of this method.
1135    ///
1136    /// # Examples
1137    ///
1138    /// ```
1139    /// use std::rc::Rc;
1140    ///
1141    /// let values = Rc::<[u32]>::new_zeroed_slice(3);
1142    /// let values = unsafe { values.assume_init() };
1143    ///
1144    /// assert_eq!(*values, [0, 0, 0])
1145    /// ```
1146    ///
1147    /// [zeroed]: mem::MaybeUninit::zeroed
1148    #[cfg(not(no_global_oom_handling))]
1149    #[stable(feature = "new_zeroed_alloc", since = "1.92.0")]
1150    #[must_use]
1151    pub fn new_zeroed_slice(len: usize) -> Rc<[mem::MaybeUninit<T>]> {
1152        unsafe {
1153            Rc::from_ptr(Rc::allocate_for_layout(
1154                Layout::array::<T>(len).unwrap(),
1155                |layout| Global.allocate_zeroed(layout),
                // Attach the slice length so the result is a fat
                // `RcInner<[MaybeUninit<T>]>` pointer.
1156                |mem| {
1157                    ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len)
1158                        as *mut RcInner<[mem::MaybeUninit<T>]>
1159                },
1160            ))
1161        }
1162    }
1163
1164    /// Converts the reference-counted slice into a reference-counted array.
1165    ///
1166    /// This operation does not reallocate; the underlying array of the slice is simply reinterpreted as an array type.
1167    ///
1168    /// If `N` is not exactly equal to the length of `self`, then this method returns `None`.
1169    #[unstable(feature = "alloc_slice_into_array", issue = "148082")]
1170    #[inline]
1171    #[must_use]
1172    pub fn into_array<const N: usize>(self) -> Option<Rc<[T; N]>> {
1173        if self.len() == N {
            // Drop the length metadata from the pointer type; the check
            // above guarantees the slice has exactly `N` elements.
1174            let ptr = Self::into_raw(self) as *const [T; N];
1175
1176            // SAFETY: The underlying array of a slice has the exact same layout as an actual array `[T; N]` if `N` is equal to the slice's length.
1177            let me = unsafe { Rc::from_raw(ptr) };
1178            Some(me)
1179        } else {
1180            None
1181        }
1182    }
1183}
1184
1185impl<T, A: Allocator> Rc<[T], A> {
1186    /// Constructs a new reference-counted slice with uninitialized contents.
1187    ///
1188    /// # Examples
1189    ///
1190    /// ```
1191    /// #![feature(get_mut_unchecked)]
1192    /// #![feature(allocator_api)]
1193    ///
1194    /// use std::rc::Rc;
1195    /// use std::alloc::System;
1196    ///
1197    /// let mut values = Rc::<[u32], _>::new_uninit_slice_in(3, System);
1198    ///
1199    /// let values = unsafe {
1200    ///     // Deferred initialization:
1201    ///     Rc::get_mut_unchecked(&mut values)[0].as_mut_ptr().write(1);
1202    ///     Rc::get_mut_unchecked(&mut values)[1].as_mut_ptr().write(2);
1203    ///     Rc::get_mut_unchecked(&mut values)[2].as_mut_ptr().write(3);
1204    ///
1205    ///     values.assume_init()
1206    /// };
1207    ///
1208    /// assert_eq!(*values, [1, 2, 3])
1209    /// ```
1210    #[cfg(not(no_global_oom_handling))]
1211    #[unstable(feature = "allocator_api", issue = "32838")]
1212    #[inline]
1213    pub fn new_uninit_slice_in(len: usize, alloc: A) -> Rc<[mem::MaybeUninit<T>], A> {
        // Allocate room for `len` elements in `alloc`; contents are left uninitialized.
1214        unsafe { Rc::from_ptr_in(Rc::allocate_for_slice_in(len, &alloc), alloc) }
1215    }
1216
1217    /// Constructs a new reference-counted slice with uninitialized contents, with the memory being
1218    /// filled with `0` bytes.
1219    ///
1220    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
1221    /// incorrect usage of this method.
1222    ///
1223    /// # Examples
1224    ///
1225    /// ```
1226    /// #![feature(allocator_api)]
1227    ///
1228    /// use std::rc::Rc;
1229    /// use std::alloc::System;
1230    ///
1231    /// let values = Rc::<[u32], _>::new_zeroed_slice_in(3, System);
1232    /// let values = unsafe { values.assume_init() };
1233    ///
1234    /// assert_eq!(*values, [0, 0, 0])
1235    /// ```
1236    ///
1237    /// [zeroed]: mem::MaybeUninit::zeroed
1238    #[cfg(not(no_global_oom_handling))]
1239    #[unstable(feature = "allocator_api", issue = "32838")]
1240    #[inline]
1241    pub fn new_zeroed_slice_in(len: usize, alloc: A) -> Rc<[mem::MaybeUninit<T>], A> {
1242        unsafe {
1243            Rc::from_ptr_in(
1244                Rc::allocate_for_layout(
1245                    Layout::array::<T>(len).unwrap(),
1246                    |layout| alloc.allocate_zeroed(layout),
                    // Attach the slice length so the result is a fat
                    // `RcInner<[MaybeUninit<T>]>` pointer.
1247                    |mem| {
1248                        ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len)
1249                            as *mut RcInner<[mem::MaybeUninit<T>]>
1250                    },
1251                ),
1252                alloc,
1253            )
1254        }
1255    }
1256}
1257
1258impl<T, A: Allocator> Rc<mem::MaybeUninit<T>, A> {
1259    /// Converts to `Rc<T>`.
1260    ///
1261    /// # Safety
1262    ///
1263    /// As with [`MaybeUninit::assume_init`],
1264    /// it is up to the caller to guarantee that the inner value
1265    /// really is in an initialized state.
1266    /// Calling this when the content is not yet fully initialized
1267    /// causes immediate undefined behavior.
1268    ///
1269    /// [`MaybeUninit::assume_init`]: mem::MaybeUninit::assume_init
1270    ///
1271    /// # Examples
1272    ///
1273    /// ```
1274    /// use std::rc::Rc;
1275    ///
1276    /// let mut five = Rc::<u32>::new_uninit();
1277    ///
1278    /// // Deferred initialization:
1279    /// Rc::get_mut(&mut five).unwrap().write(5);
1280    ///
1281    /// let five = unsafe { five.assume_init() };
1282    ///
1283    /// assert_eq!(*five, 5)
1284    /// ```
1285    #[stable(feature = "new_uninit", since = "1.82.0")]
1286    #[inline]
1287    pub unsafe fn assume_init(self) -> Rc<T, A> {
1288        let (ptr, alloc) = Rc::into_inner_with_allocator(self);
        // Only the pointee type changes (`MaybeUninit<T>` -> `T`); the
        // refcounts and the allocation itself are reused unchanged.
1289        unsafe { Rc::from_inner_in(ptr.cast(), alloc) }
1290    }
1291}
1292
1293impl<T: ?Sized + CloneToUninit> Rc<T> {
1294    /// Constructs a new `Rc<T>` with a clone of `value`.
1295    ///
1296    /// # Examples
1297    ///
1298    /// ```
1299    /// #![feature(clone_from_ref)]
1300    /// use std::rc::Rc;
1301    ///
1302    /// let hello: Rc<str> = Rc::clone_from_ref("hello");
1303    /// ```
1304    #[cfg(not(no_global_oom_handling))]
1305    #[unstable(feature = "clone_from_ref", issue = "149075")]
1306    pub fn clone_from_ref(value: &T) -> Rc<T> {
        // Delegate to the allocator-aware constructor, using the global allocator.
1307        Rc::clone_from_ref_in(value, Global)
1308    }
1309
1310    /// Constructs a new `Rc<T>` with a clone of `value`, returning an error if allocation fails
1311    ///
1312    /// # Examples
1313    ///
1314    /// ```
1315    /// #![feature(clone_from_ref)]
1316    /// #![feature(allocator_api)]
1317    /// use std::rc::Rc;
1318    ///
1319    /// let hello: Rc<str> = Rc::try_clone_from_ref("hello")?;
1320    /// # Ok::<(), std::alloc::AllocError>(())
1321    /// ```
1322    #[unstable(feature = "clone_from_ref", issue = "149075")]
1323    //#[unstable(feature = "allocator_api", issue = "32838")]
1324    pub fn try_clone_from_ref(value: &T) -> Result<Rc<T>, AllocError> {
        // Delegate to the fallible allocator-aware constructor, using the global allocator.
1325        Rc::try_clone_from_ref_in(value, Global)
1326    }
1327}
1328
1329impl<T: ?Sized + CloneToUninit, A: Allocator> Rc<T, A> {
1330    /// Constructs a new `Rc<T>` with a clone of `value` in the provided allocator.
1331    ///
1332    /// # Examples
1333    ///
1334    /// ```
1335    /// #![feature(clone_from_ref)]
1336    /// #![feature(allocator_api)]
1337    /// use std::rc::Rc;
1338    /// use std::alloc::System;
1339    ///
1340    /// let hello: Rc<str, System> = Rc::clone_from_ref_in("hello", System);
1341    /// ```
1342    #[cfg(not(no_global_oom_handling))]
1343    #[unstable(feature = "clone_from_ref", issue = "149075")]
1344    //#[unstable(feature = "allocator_api", issue = "32838")]
1345    pub fn clone_from_ref_in(value: &T, alloc: A) -> Rc<T, A> {
1346        // `in_progress` drops the allocation if we panic before finishing initializing it.
1347        let mut in_progress: UniqueRcUninit<T, A> = UniqueRcUninit::new(value, alloc);
1348
1349        // Initialize with clone of value.
1350        let initialized_clone = unsafe {
1351            // Clone. If the clone panics, `in_progress` will be dropped and clean up.
1352            value.clone_to_uninit(in_progress.data_ptr().cast());
1353            // Cast type of pointer, now that it is initialized.
1354            in_progress.into_rc()
1355        };
1356
        // The allocation is fully initialized at this point; hand it out as a normal Rc.
1357        initialized_clone
1358    }
1359
1360    /// Constructs a new `Rc<T>` with a clone of `value` in the provided allocator, returning an error if allocation fails
1361    ///
1362    /// # Examples
1363    ///
1364    /// ```
1365    /// #![feature(clone_from_ref)]
1366    /// #![feature(allocator_api)]
1367    /// use std::rc::Rc;
1368    /// use std::alloc::System;
1369    ///
1370    /// let hello: Rc<str, System> = Rc::try_clone_from_ref_in("hello", System)?;
1371    /// # Ok::<(), std::alloc::AllocError>(())
1372    /// ```
1373    #[unstable(feature = "clone_from_ref", issue = "149075")]
1374    //#[unstable(feature = "allocator_api", issue = "32838")]
1375    pub fn try_clone_from_ref_in(value: &T, alloc: A) -> Result<Rc<T, A>, AllocError> {
1376        // `in_progress` drops the allocation if we panic before finishing initializing it.
1377        let mut in_progress: UniqueRcUninit<T, A> = UniqueRcUninit::try_new(value, alloc)?;
1378
1379        // Initialize with clone of value.
1380        let initialized_clone = unsafe {
1381            // Clone. If the clone panics, `in_progress` will be dropped and clean up.
1382            value.clone_to_uninit(in_progress.data_ptr().cast());
1383            // Cast type of pointer, now that it is initialized.
1384            in_progress.into_rc()
1385        };
1386
        // The allocation is fully initialized at this point; hand it out as a normal Rc.
1387        Ok(initialized_clone)
1388    }
1389}
1390
1391impl<T, A: Allocator> Rc<[mem::MaybeUninit<T>], A> {
1392    /// Converts to `Rc<[T]>`.
1393    ///
1394    /// # Safety
1395    ///
1396    /// As with [`MaybeUninit::assume_init`],
1397    /// it is up to the caller to guarantee that the inner value
1398    /// really is in an initialized state.
1399    /// Calling this when the content is not yet fully initialized
1400    /// causes immediate undefined behavior.
1401    ///
1402    /// [`MaybeUninit::assume_init`]: mem::MaybeUninit::assume_init
1403    ///
1404    /// # Examples
1405    ///
1406    /// ```
1407    /// use std::rc::Rc;
1408    ///
1409    /// let mut values = Rc::<[u32]>::new_uninit_slice(3);
1410    ///
1411    /// // Deferred initialization:
1412    /// let data = Rc::get_mut(&mut values).unwrap();
1413    /// data[0].write(1);
1414    /// data[1].write(2);
1415    /// data[2].write(3);
1416    ///
1417    /// let values = unsafe { values.assume_init() };
1418    ///
1419    /// assert_eq!(*values, [1, 2, 3])
1420    /// ```
1421    #[stable(feature = "new_uninit", since = "1.82.0")]
1422    #[inline]
1423    pub unsafe fn assume_init(self) -> Rc<[T], A> {
1424        let (ptr, alloc) = Rc::into_inner_with_allocator(self);
        // Only the element type changes; the pointer's length metadata,
        // the refcounts and the allocator carry over unchanged.
1425        unsafe { Rc::from_ptr_in(ptr.as_ptr() as _, alloc) }
1426    }
1427}
1428
1429impl<T: ?Sized> Rc<T> {
1430    /// Constructs an `Rc<T>` from a raw pointer.
1431    ///
1432    /// The raw pointer must have been previously returned by a call to
1433    /// [`Rc<U>::into_raw`][into_raw] or [`Rc<U>::into_raw_with_allocator`][into_raw_with_allocator].
1434    ///
1435    /// # Safety
1436    ///
1437    /// * Creating a `Rc<T>` from a pointer other than one returned from
1438    ///   [`Rc<U>::into_raw`][into_raw] or [`Rc<U>::into_raw_with_allocator`][into_raw_with_allocator]
1439    ///   is undefined behavior.
1440    /// * If `U` is sized, it must have the same size and alignment as `T`. This
1441    ///   is trivially true if `U` is `T`.
1442    /// * If `U` is unsized, its data pointer must have the same size and
1443    ///   alignment as `T`. This is trivially true if `Rc<U>` was constructed
1444    ///   through `Rc<T>` and then converted to `Rc<U>` through an [unsized
1445    ///   coercion].
1446    /// * Note that if `U` or `U`'s data pointer is not `T` but has the same size
1447    ///   and alignment, this is basically like transmuting references of
1448    ///   different types. See [`mem::transmute`][transmute] for more information
1449    ///   on what restrictions apply in this case.
1450    /// * The raw pointer must point to a block of memory allocated by the global allocator
1451    /// * The user of `from_raw` has to make sure a specific value of `T` is only
1452    ///   dropped once.
1453    ///
1454    /// This function is unsafe because improper use may lead to memory unsafety,
1455    /// even if the returned `Rc<T>` is never accessed.
1456    ///
1457    /// [into_raw]: Rc::into_raw
1458    /// [into_raw_with_allocator]: Rc::into_raw_with_allocator
1459    /// [transmute]: core::mem::transmute
1460    /// [unsized coercion]: https://doc.rust-lang.org/reference/type-coercions.html#unsized-coercions
1461    ///
1462    /// # Examples
1463    ///
1464    /// ```
1465    /// use std::rc::Rc;
1466    ///
1467    /// let x = Rc::new("hello".to_owned());
1468    /// let x_ptr = Rc::into_raw(x);
1469    ///
1470    /// unsafe {
1471    ///     // Convert back to an `Rc` to prevent leak.
1472    ///     let x = Rc::from_raw(x_ptr);
1473    ///     assert_eq!(&*x, "hello");
1474    ///
1475    ///     // Further calls to `Rc::from_raw(x_ptr)` would be memory-unsafe.
1476    /// }
1477    ///
1478    /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling!
1479    /// ```
1480    ///
1481    /// Convert a slice back into its original array:
1482    ///
1483    /// ```
1484    /// use std::rc::Rc;
1485    ///
1486    /// let x: Rc<[u32]> = Rc::new([1, 2, 3]);
1487    /// let x_ptr: *const [u32] = Rc::into_raw(x);
1488    ///
1489    /// unsafe {
1490    ///     let x: Rc<[u32; 3]> = Rc::from_raw(x_ptr.cast::<[u32; 3]>());
1491    ///     assert_eq!(&*x, &[1, 2, 3]);
1492    /// }
1493    /// ```
1494    #[inline]
1495    #[stable(feature = "rc_raw", since = "1.17.0")]
1496    pub unsafe fn from_raw(ptr: *const T) -> Self {
        // Delegates to the allocator-aware version with the global allocator.
1497        unsafe { Self::from_raw_in(ptr, Global) }
1498    }
1499
1500    /// Consumes the `Rc`, returning the wrapped pointer.
1501    ///
1502    /// To avoid a memory leak the pointer must be converted back to an `Rc` using
1503    /// [`Rc::from_raw`].
1504    ///
1505    /// # Examples
1506    ///
1507    /// ```
1508    /// use std::rc::Rc;
1509    ///
1510    /// let x = Rc::new("hello".to_owned());
1511    /// let x_ptr = Rc::into_raw(x);
1512    /// assert_eq!(unsafe { &*x_ptr }, "hello");
1513    /// # // Prevent leaks for Miri.
1514    /// # drop(unsafe { Rc::from_raw(x_ptr) });
1515    /// ```
1516    #[must_use = "losing the pointer will leak memory"]
1517    #[stable(feature = "rc_raw", since = "1.17.0")]
1518    #[rustc_never_returns_null_ptr]
1519    pub fn into_raw(this: Self) -> *const T {
        // Suppress `Drop` so the strong count is not decremented; ownership
        // of that strong reference moves into the returned pointer.
1520        let this = ManuallyDrop::new(this);
1521        Self::as_ptr(&*this)
1522    }
1523
1524    /// Increments the strong reference count on the `Rc<T>` associated with the
1525    /// provided pointer by one.
1526    ///
1527    /// # Safety
1528    ///
1529    /// The pointer must have been obtained through `Rc::into_raw` and must satisfy the
1530    /// same layout requirements specified in [`Rc::from_raw_in`][from_raw_in].
1531    /// The associated `Rc` instance must be valid (i.e. the strong count must be at
1532    /// least 1) for the duration of this method, and `ptr` must point to a block of memory
1533    /// allocated by the global allocator.
1534    ///
1535    /// [from_raw_in]: Rc::from_raw_in
1536    ///
1537    /// # Examples
1538    ///
1539    /// ```
1540    /// use std::rc::Rc;
1541    ///
1542    /// let five = Rc::new(5);
1543    ///
1544    /// unsafe {
1545    ///     let ptr = Rc::into_raw(five);
1546    ///     Rc::increment_strong_count(ptr);
1547    ///
1548    ///     let five = Rc::from_raw(ptr);
1549    ///     assert_eq!(2, Rc::strong_count(&five));
1550    /// #   // Prevent leaks for Miri.
1551    /// #   Rc::decrement_strong_count(ptr);
1552    /// }
1553    /// ```
1554    #[inline]
1555    #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")]
1556    pub unsafe fn increment_strong_count(ptr: *const T) {
        // Delegates to the allocator-aware version with the global allocator.
1557        unsafe { Self::increment_strong_count_in(ptr, Global) }
1558    }
1559
1560    /// Decrements the strong reference count on the `Rc<T>` associated with the
1561    /// provided pointer by one.
1562    ///
1563    /// # Safety
1564    ///
1565    /// The pointer must have been obtained through `Rc::into_raw` and must satisfy the
1566    /// same layout requirements specified in [`Rc::from_raw_in`][from_raw_in].
1567    /// The associated `Rc` instance must be valid (i.e. the strong count must be at
1568    /// least 1) when invoking this method, and `ptr` must point to a block of memory
1569    /// allocated by the global allocator. This method can be used to release the final `Rc` and
1570    /// backing storage, but **should not** be called after the final `Rc` has been released.
1571    ///
1572    /// [from_raw_in]: Rc::from_raw_in
1573    ///
1574    /// # Examples
1575    ///
1576    /// ```
1577    /// use std::rc::Rc;
1578    ///
1579    /// let five = Rc::new(5);
1580    ///
1581    /// unsafe {
1582    ///     let ptr = Rc::into_raw(five);
1583    ///     Rc::increment_strong_count(ptr);
1584    ///
1585    ///     let five = Rc::from_raw(ptr);
1586    ///     assert_eq!(2, Rc::strong_count(&five));
1587    ///     Rc::decrement_strong_count(ptr);
1588    ///     assert_eq!(1, Rc::strong_count(&five));
1589    /// }
1590    /// ```
1591    #[inline]
1592    #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")]
1593    pub unsafe fn decrement_strong_count(ptr: *const T) {
        // Delegates to the allocator-aware version with the global allocator.
1594        unsafe { Self::decrement_strong_count_in(ptr, Global) }
1595    }
1596}
1597
1598impl<T: ?Sized, A: Allocator> Rc<T, A> {
1599    /// Returns a reference to the underlying allocator.
1600    ///
1601    /// Note: this is an associated function, which means that you have
1602    /// to call it as `Rc::allocator(&r)` instead of `r.allocator()`. This
1603    /// is so that there is no conflict with a method on the inner type.
1604    #[inline]
1605    #[unstable(feature = "allocator_api", issue = "32838")]
1606    pub fn allocator(this: &Self) -> &A {
        // Plain field access; the reference counts are not touched.
1607        &this.alloc
1608    }
1609
1610    /// Consumes the `Rc`, returning the wrapped pointer and allocator.
1611    ///
1612    /// To avoid a memory leak the pointer must be converted back to an `Rc` using
1613    /// [`Rc::from_raw_in`].
1614    ///
1615    /// # Examples
1616    ///
1617    /// ```
1618    /// #![feature(allocator_api)]
1619    /// use std::rc::Rc;
1620    /// use std::alloc::System;
1621    ///
1622    /// let x = Rc::new_in("hello".to_owned(), System);
1623    /// let (ptr, alloc) = Rc::into_raw_with_allocator(x);
1624    /// assert_eq!(unsafe { &*ptr }, "hello");
1625    /// let x = unsafe { Rc::from_raw_in(ptr, alloc) };
1626    /// assert_eq!(&*x, "hello");
1627    /// ```
1628    #[must_use = "losing the pointer will leak memory"]
1629    #[unstable(feature = "allocator_api", issue = "32838")]
1630    pub fn into_raw_with_allocator(this: Self) -> (*const T, A) {
        // Suppress `Drop`: the strong reference and the allocator are both
        // handed to the caller through the return value.
1631        let this = mem::ManuallyDrop::new(this);
1632        let ptr = Self::as_ptr(&this);
1633        // Safety: `this` is ManuallyDrop so the allocator will not be double-dropped
1634        let alloc = unsafe { ptr::read(&this.alloc) };
1635        (ptr, alloc)
1636    }
1637
1638    /// Provides a raw pointer to the data.
1639    ///
1640    /// The counts are not affected in any way and the `Rc` is not consumed. The pointer is valid
1641    /// for as long as there are strong counts in the `Rc`.
1642    ///
1643    /// # Examples
1644    ///
1645    /// ```
1646    /// use std::rc::Rc;
1647    ///
1648    /// let x = Rc::new(0);
1649    /// let y = Rc::clone(&x);
1650    /// let x_ptr = Rc::as_ptr(&x);
1651    /// assert_eq!(x_ptr, Rc::as_ptr(&y));
1652    /// assert_eq!(unsafe { *x_ptr }, 0);
1653    /// ```
1654    #[stable(feature = "weak_into_raw", since = "1.45.0")]
1655    #[rustc_never_returns_null_ptr]
1656    pub fn as_ptr(this: &Self) -> *const T {
1657        let ptr: *mut RcInner<T> = NonNull::as_ptr(this.ptr);
1658
1659        // SAFETY: This cannot go through Deref::deref or Rc::inner because
1660        // this is required to retain raw/mut provenance such that e.g. `get_mut` can
1661        // write through the pointer after the Rc is recovered through `from_raw`.
        // (`&raw mut` computes the field address without materializing an
        // intermediate reference.)
1662        unsafe { &raw mut (*ptr).value }
1663    }
1664
    /// Constructs an `Rc<T, A>` from a raw pointer in the provided allocator.
    ///
    /// The raw pointer must have been previously returned by a call to [`Rc<U,
    /// A>::into_raw`][into_raw] or [`Rc<U, A>::into_raw_with_allocator`][into_raw_with_allocator].
    ///
    /// # Safety
    ///
    /// * Creating a `Rc<T, A>` from a pointer other than one returned from
    ///   [`Rc<U, A>::into_raw`][into_raw] or [`Rc<U, A>::into_raw_with_allocator`][into_raw_with_allocator]
    ///   is undefined behavior.
    /// * If `U` is sized, it must have the same size and alignment as `T`. This
    ///   is trivially true if `U` is `T`.
    /// * If `U` is unsized, its data pointer must have the same size and
    ///   alignment as `T`. This is trivially true if `Rc<U, A>` was constructed
    ///   through `Rc<T, A>` and then converted to `Rc<U, A>` through an [unsized
    ///   coercion].
    /// * Note that if `U` or `U`'s data pointer is not `T` but has the same size
    ///   and alignment, this is basically like transmuting references of
    ///   different types. See [`mem::transmute`][transmute] for more information
    ///   on what restrictions apply in this case.
    /// * The raw pointer must point to a block of memory allocated by `alloc`
    /// * The user of `from_raw` has to make sure a specific value of `T` is only
    ///   dropped once.
    ///
    /// This function is unsafe because improper use may lead to memory unsafety,
    /// even if the returned `Rc<T, A>` is never accessed.
    ///
    /// [into_raw]: Rc::into_raw
    /// [into_raw_with_allocator]: Rc::into_raw_with_allocator
    /// [transmute]: core::mem::transmute
    /// [unsized coercion]: https://doc.rust-lang.org/reference/type-coercions.html#unsized-coercions
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let x = Rc::new_in("hello".to_owned(), System);
    /// let (x_ptr, _alloc) = Rc::into_raw_with_allocator(x);
    ///
    /// unsafe {
    ///     // Convert back to an `Rc` to prevent leak.
    ///     let x = Rc::from_raw_in(x_ptr, System);
    ///     assert_eq!(&*x, "hello");
    ///
    ///     // Further calls to `Rc::from_raw(x_ptr)` would be memory-unsafe.
    /// }
    ///
    /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling!
    /// ```
    ///
    /// Convert a slice back into its original array:
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let x: Rc<[u32], _> = Rc::new_in([1, 2, 3], System);
    /// let x_ptr: *const [u32] = Rc::into_raw_with_allocator(x).0;
    ///
    /// unsafe {
    ///     let x: Rc<[u32; 3], _> = Rc::from_raw_in(x_ptr.cast::<[u32; 3]>(), System);
    ///     assert_eq!(&*x, &[1, 2, 3]);
    /// }
    /// ```
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub unsafe fn from_raw_in(ptr: *const T, alloc: A) -> Self {
        // How many bytes the value sits past the start of its `RcInner`.
        let offset = unsafe { data_offset(ptr) };

        // Reverse the offset to find the original RcInner.
        let rc_ptr = unsafe { ptr.byte_sub(offset) as *mut RcInner<T> };

        unsafe { Self::from_ptr_in(rc_ptr, alloc) }
    }
1744
    /// Creates a new [`Weak`] pointer to this allocation.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    ///
    /// let weak_five = Rc::downgrade(&five);
    /// ```
    #[must_use = "this returns a new `Weak` pointer, \
                  without modifying the original `Rc`"]
    #[stable(feature = "rc_weak", since = "1.4.0")]
    pub fn downgrade(this: &Self) -> Weak<T, A>
    where
        A: Clone,
    {
        // Account for the new Weak before it is constructed.
        this.inner().inc_weak();
        // Make sure we do not create a dangling Weak
        debug_assert!(!is_dangling(this.ptr.as_ptr()));
        Weak { ptr: this.ptr, alloc: this.alloc.clone() }
    }
1768
    /// Gets the number of [`Weak`] pointers to this allocation.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    /// let _weak_five = Rc::downgrade(&five);
    ///
    /// assert_eq!(1, Rc::weak_count(&five));
    /// ```
    #[inline]
    #[stable(feature = "rc_counts", since = "1.15.0")]
    pub fn weak_count(this: &Self) -> usize {
        // Subtract the implicit weak reference collectively held by the strong
        // pointers (the allocation is created with `weak == 1`).
        this.inner().weak() - 1
    }
1786
    /// Gets the number of strong (`Rc`) pointers to this allocation.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    /// let _also_five = Rc::clone(&five);
    ///
    /// assert_eq!(2, Rc::strong_count(&five));
    /// ```
    #[inline]
    #[stable(feature = "rc_counts", since = "1.15.0")]
    pub fn strong_count(this: &Self) -> usize {
        // Direct read of the strong count; no adjustment needed.
        this.inner().strong()
    }
1804
    /// Increments the strong reference count on the `Rc<T>` associated with the
    /// provided pointer by one.
    ///
    /// # Safety
    ///
    /// The pointer must have been obtained through `Rc::into_raw` and must satisfy the
    /// same layout requirements specified in [`Rc::from_raw_in`][from_raw_in].
    /// The associated `Rc` instance must be valid (i.e. the strong count must be at
    /// least 1) for the duration of this method, and `ptr` must point to a block of memory
    /// allocated by `alloc`.
    ///
    /// [from_raw_in]: Rc::from_raw_in
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let five = Rc::new_in(5, System);
    ///
    /// unsafe {
    ///     let (ptr, _alloc) = Rc::into_raw_with_allocator(five);
    ///     Rc::increment_strong_count_in(ptr, System);
    ///
    ///     let five = Rc::from_raw_in(ptr, System);
    ///     assert_eq!(2, Rc::strong_count(&five));
    /// #   // Prevent leaks for Miri.
    /// #   Rc::decrement_strong_count_in(ptr, System);
    /// }
    /// ```
    #[inline]
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub unsafe fn increment_strong_count_in(ptr: *const T, alloc: A)
    // `A: Clone` because the `rc.clone()` below clones the allocator as well.
    where
        A: Clone,
    {
        // Retain Rc, but don't touch refcount by wrapping in ManuallyDrop
        let rc = unsafe { mem::ManuallyDrop::new(Rc::<T, A>::from_raw_in(ptr, alloc)) };
        // Now increase refcount, but don't drop new refcount either
        let _rc_clone: mem::ManuallyDrop<_> = rc.clone();
    }
1849
    /// Decrements the strong reference count on the `Rc<T>` associated with the
    /// provided pointer by one.
    ///
    /// # Safety
    ///
    /// The pointer must have been obtained through `Rc::into_raw` and must satisfy the
    /// same layout requirements specified in [`Rc::from_raw_in`][from_raw_in].
    /// The associated `Rc` instance must be valid (i.e. the strong count must be at
    /// least 1) when invoking this method, and `ptr` must point to a block of memory
    /// allocated by `alloc`. This method can be used to release the final `Rc` and
    /// backing storage, but **should not** be called after the final `Rc` has been released.
    ///
    /// [from_raw_in]: Rc::from_raw_in
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let five = Rc::new_in(5, System);
    ///
    /// unsafe {
    ///     let (ptr, _alloc) = Rc::into_raw_with_allocator(five);
    ///     Rc::increment_strong_count_in(ptr, System);
    ///
    ///     let five = Rc::from_raw_in(ptr, System);
    ///     assert_eq!(2, Rc::strong_count(&five));
    ///     Rc::decrement_strong_count_in(ptr, System);
    ///     assert_eq!(1, Rc::strong_count(&five));
    /// }
    /// ```
    #[inline]
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub unsafe fn decrement_strong_count_in(ptr: *const T, alloc: A) {
        // Rebuild a temporary `Rc` and let its destructor do the decrement
        // (and, if this was the last strong reference, the cleanup).
        unsafe { drop(Rc::from_raw_in(ptr, alloc)) };
    }
1889
1890    /// Returns `true` if there are no other `Rc` or [`Weak`] pointers to
1891    /// this allocation.
1892    #[inline]
1893    fn is_unique(this: &Self) -> bool {
1894        Rc::weak_count(this) == 0 && Rc::strong_count(this) == 1
1895    }
1896
    /// Returns a mutable reference into the given `Rc`, if there are
    /// no other `Rc` or [`Weak`] pointers to the same allocation.
    ///
    /// Returns [`None`] otherwise, because it is not safe to
    /// mutate a shared value.
    ///
    /// See also [`make_mut`][make_mut], which will [`clone`][clone]
    /// the inner value when there are other `Rc` pointers.
    ///
    /// [make_mut]: Rc::make_mut
    /// [clone]: Clone::clone
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let mut x = Rc::new(3);
    /// *Rc::get_mut(&mut x).unwrap() = 4;
    /// assert_eq!(*x, 4);
    ///
    /// let _y = Rc::clone(&x);
    /// assert!(Rc::get_mut(&mut x).is_none());
    /// ```
    #[inline]
    #[stable(feature = "rc_unique", since = "1.4.0")]
    pub fn get_mut(this: &mut Self) -> Option<&mut T> {
        // SAFETY: uniqueness was just checked, so no other pointer can observe the value.
        if Rc::is_unique(this) { unsafe { Some(Rc::get_mut_unchecked(this)) } } else { None }
    }
1926
    /// Returns a mutable reference into the given `Rc`,
    /// without any check.
    ///
    /// See also [`get_mut`], which is safe and does appropriate checks.
    ///
    /// [`get_mut`]: Rc::get_mut
    ///
    /// # Safety
    ///
    /// If any other `Rc` or [`Weak`] pointers to the same allocation exist, then
    /// they must not be dereferenced or have active borrows for the duration
    /// of the returned borrow, and their inner type must be exactly the same as the
    /// inner type of this Rc (including lifetimes). This is trivially the case if no
    /// such pointers exist, for example immediately after `Rc::new`.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(get_mut_unchecked)]
    ///
    /// use std::rc::Rc;
    ///
    /// let mut x = Rc::new(String::new());
    /// unsafe {
    ///     Rc::get_mut_unchecked(&mut x).push_str("foo")
    /// }
    /// assert_eq!(*x, "foo");
    /// ```
    /// Other `Rc` pointers to the same allocation must be to the same type.
    /// ```no_run
    /// #![feature(get_mut_unchecked)]
    ///
    /// use std::rc::Rc;
    ///
    /// let x: Rc<str> = Rc::from("Hello, world!");
    /// let mut y: Rc<[u8]> = x.clone().into();
    /// unsafe {
    ///     // this is Undefined Behavior, because x's inner type is str, not [u8]
    ///     Rc::get_mut_unchecked(&mut y).fill(0xff); // 0xff is invalid in UTF-8
    /// }
    /// println!("{}", &*x); // Invalid UTF-8 in a str
    /// ```
    /// Other `Rc` pointers to the same allocation must be to the exact same type, including lifetimes.
    /// ```no_run
    /// #![feature(get_mut_unchecked)]
    ///
    /// use std::rc::Rc;
    ///
    /// let x: Rc<&str> = Rc::new("Hello, world!");
    /// {
    ///     let s = String::from("Oh, no!");
    ///     let mut y: Rc<&str> = x.clone();
    ///     unsafe {
    ///         // this is Undefined Behavior, because x's inner type
    ///         // is &'long str, not &'short str
    ///         *Rc::get_mut_unchecked(&mut y) = &s;
    ///     }
    /// }
    /// println!("{}", &*x); // Use-after-free
    /// ```
    #[inline]
    #[unstable(feature = "get_mut_unchecked", issue = "63292")]
    pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T {
        // We are careful to *not* create a reference covering the "count" fields, as
        // this would conflict with accesses to the reference counts (e.g. by `Weak`).
        unsafe { &mut (*this.ptr.as_ptr()).value }
    }
1994
    #[inline]
    #[stable(feature = "ptr_eq", since = "1.17.0")]
    /// Returns `true` if the two `Rc`s point to the same allocation in a vein similar to
    /// [`ptr::eq`]. This function ignores the metadata of `dyn Trait` pointers.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    /// let same_five = Rc::clone(&five);
    /// let other_five = Rc::new(5);
    ///
    /// assert!(Rc::ptr_eq(&five, &same_five));
    /// assert!(!Rc::ptr_eq(&five, &other_five));
    /// ```
    pub fn ptr_eq(this: &Self, other: &Self) -> bool {
        // `addr_eq` compares only the addresses, deliberately ignoring pointer metadata.
        ptr::addr_eq(this.ptr.as_ptr(), other.ptr.as_ptr())
    }
2015}
2016
#[cfg(not(no_global_oom_handling))]
impl<T: ?Sized + CloneToUninit, A: Allocator + Clone> Rc<T, A> {
    /// Makes a mutable reference into the given `Rc`.
    ///
    /// If there are other `Rc` pointers to the same allocation, then `make_mut` will
    /// [`clone`] the inner value to a new allocation to ensure unique ownership.  This is also
    /// referred to as clone-on-write.
    ///
    /// However, if there are no other `Rc` pointers to this allocation, but some [`Weak`]
    /// pointers, then the [`Weak`] pointers will be disassociated and the inner value will not
    /// be cloned.
    ///
    /// See also [`get_mut`], which will fail rather than cloning the inner value
    /// or disassociating [`Weak`] pointers.
    ///
    /// [`clone`]: Clone::clone
    /// [`get_mut`]: Rc::get_mut
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let mut data = Rc::new(5);
    ///
    /// *Rc::make_mut(&mut data) += 1;         // Won't clone anything
    /// let mut other_data = Rc::clone(&data); // Won't clone inner data
    /// *Rc::make_mut(&mut data) += 1;         // Clones inner data
    /// *Rc::make_mut(&mut data) += 1;         // Won't clone anything
    /// *Rc::make_mut(&mut other_data) *= 2;   // Won't clone anything
    ///
    /// // Now `data` and `other_data` point to different allocations.
    /// assert_eq!(*data, 8);
    /// assert_eq!(*other_data, 12);
    /// ```
    ///
    /// [`Weak`] pointers will be disassociated:
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let mut data = Rc::new(75);
    /// let weak = Rc::downgrade(&data);
    ///
    /// assert!(75 == *data);
    /// assert!(75 == *weak.upgrade().unwrap());
    ///
    /// *Rc::make_mut(&mut data) += 1;
    ///
    /// assert!(76 == *data);
    /// assert!(weak.upgrade().is_none());
    /// ```
    #[inline]
    #[stable(feature = "rc_unique", since = "1.4.0")]
    pub fn make_mut(this: &mut Self) -> &mut T {
        let size_of_val = size_of_val::<T>(&**this);

        if Rc::strong_count(this) != 1 {
            // Gotta clone the data, there are other Rcs.
            *this = Rc::clone_from_ref_in(&**this, this.alloc.clone());
        } else if Rc::weak_count(this) != 0 {
            // Can just steal the data, all that's left is Weaks

            // We don't need panic-protection like the above branch does, but we might as well
            // use the same mechanism.
            let mut in_progress: UniqueRcUninit<T, A> =
                UniqueRcUninit::new(&**this, this.alloc.clone());
            unsafe {
                // Initialize `in_progress` with move of **this.
                // We have to express this in terms of bytes because `T: ?Sized`; there is no
                // operation that just copies a value based on its `size_of_val()`.
                ptr::copy_nonoverlapping(
                    ptr::from_ref(&**this).cast::<u8>(),
                    in_progress.data_ptr().cast::<u8>(),
                    size_of_val,
                );

                this.inner().dec_strong();
                // Remove implicit strong-weak ref (no need to craft a fake
                // Weak here -- we know other Weaks can clean up for us)
                this.inner().dec_weak();
                // Replace `this` with newly constructed Rc that has the moved data.
                ptr::write(this, in_progress.into_rc());
            }
        }
        // This unsafety is ok because we're guaranteed that the pointer
        // returned is the *only* pointer that will ever be returned to T. Our
        // reference count is guaranteed to be 1 at this point, and we required
        // the `Rc<T>` itself to be `mut`, so we're returning the only possible
        // reference to the allocation.
        unsafe { &mut this.ptr.as_mut().value }
    }
}
2110
2111impl<T: Clone, A: Allocator> Rc<T, A> {
2112    /// If we have the only reference to `T` then unwrap it. Otherwise, clone `T` and return the
2113    /// clone.
2114    ///
2115    /// Assuming `rc_t` is of type `Rc<T>`, this function is functionally equivalent to
2116    /// `(*rc_t).clone()`, but will avoid cloning the inner value where possible.
2117    ///
2118    /// # Examples
2119    ///
2120    /// ```
2121    /// # use std::{ptr, rc::Rc};
2122    /// let inner = String::from("test");
2123    /// let ptr = inner.as_ptr();
2124    ///
2125    /// let rc = Rc::new(inner);
2126    /// let inner = Rc::unwrap_or_clone(rc);
2127    /// // The inner value was not cloned
2128    /// assert!(ptr::eq(ptr, inner.as_ptr()));
2129    ///
2130    /// let rc = Rc::new(inner);
2131    /// let rc2 = rc.clone();
2132    /// let inner = Rc::unwrap_or_clone(rc);
2133    /// // Because there were 2 references, we had to clone the inner value.
2134    /// assert!(!ptr::eq(ptr, inner.as_ptr()));
2135    /// // `rc2` is the last reference, so when we unwrap it we get back
2136    /// // the original `String`.
2137    /// let inner = Rc::unwrap_or_clone(rc2);
2138    /// assert!(ptr::eq(ptr, inner.as_ptr()));
2139    /// ```
2140    #[inline]
2141    #[stable(feature = "arc_unwrap_or_clone", since = "1.76.0")]
2142    pub fn unwrap_or_clone(this: Self) -> T {
2143        Rc::try_unwrap(this).unwrap_or_else(|rc| (*rc).clone())
2144    }
2145}
2146
impl<A: Allocator> Rc<dyn Any, A> {
    /// Attempts to downcast the `Rc<dyn Any>` to a concrete type.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::any::Any;
    /// use std::rc::Rc;
    ///
    /// fn print_if_string(value: Rc<dyn Any>) {
    ///     if let Ok(string) = value.downcast::<String>() {
    ///         println!("String ({}): {}", string.len(), string);
    ///     }
    /// }
    ///
    /// let my_string = "Hello World".to_string();
    /// print_if_string(Rc::new(my_string));
    /// print_if_string(Rc::new(0i8));
    /// ```
    #[inline]
    #[stable(feature = "rc_downcast", since = "1.29.0")]
    pub fn downcast<T: Any>(self) -> Result<Rc<T, A>, Self> {
        if (*self).is::<T>() {
            unsafe {
                // The type check above guarantees the cast below is sound.
                let (ptr, alloc) = Rc::into_inner_with_allocator(self);
                Ok(Rc::from_inner_in(ptr.cast(), alloc))
            }
        } else {
            Err(self)
        }
    }

    /// Downcasts the `Rc<dyn Any>` to a concrete type.
    ///
    /// For a safe alternative see [`downcast`].
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(downcast_unchecked)]
    ///
    /// use std::any::Any;
    /// use std::rc::Rc;
    ///
    /// let x: Rc<dyn Any> = Rc::new(1_usize);
    ///
    /// unsafe {
    ///     assert_eq!(*x.downcast_unchecked::<usize>(), 1);
    /// }
    /// ```
    ///
    /// # Safety
    ///
    /// The contained value must be of type `T`. Calling this method
    /// with the incorrect type is *undefined behavior*.
    ///
    ///
    /// [`downcast`]: Self::downcast
    #[inline]
    #[unstable(feature = "downcast_unchecked", issue = "90850")]
    pub unsafe fn downcast_unchecked<T: Any>(self) -> Rc<T, A> {
        unsafe {
            // Caller promises the value really is a `T`; no runtime check is done.
            let (ptr, alloc) = Rc::into_inner_with_allocator(self);
            Rc::from_inner_in(ptr.cast(), alloc)
        }
    }
}
2214
impl<T: ?Sized> Rc<T> {
    /// Allocates an `RcInner<T>` with sufficient space for
    /// a possibly-unsized inner value where the value has the layout provided.
    ///
    /// The function `mem_to_rc_inner` is called with the data pointer
    /// and must return back a (potentially fat)-pointer for the `RcInner<T>`.
    #[cfg(not(no_global_oom_handling))]
    unsafe fn allocate_for_layout(
        value_layout: Layout,
        allocate: impl FnOnce(Layout) -> Result<NonNull<[u8]>, AllocError>,
        mem_to_rc_inner: impl FnOnce(*mut u8) -> *mut RcInner<T>,
    ) -> *mut RcInner<T> {
        let layout = rc_inner_layout_for_value_layout(value_layout);
        unsafe {
            // Delegate to the fallible version; allocation failure aborts via
            // `handle_alloc_error` in this (infallible) variant.
            Rc::try_allocate_for_layout(value_layout, allocate, mem_to_rc_inner)
                .unwrap_or_else(|_| handle_alloc_error(layout))
        }
    }

    /// Allocates an `RcInner<T>` with sufficient space for
    /// a possibly-unsized inner value where the value has the layout provided,
    /// returning an error if allocation fails.
    ///
    /// The function `mem_to_rc_inner` is called with the data pointer
    /// and must return back a (potentially fat)-pointer for the `RcInner<T>`.
    #[inline]
    unsafe fn try_allocate_for_layout(
        value_layout: Layout,
        allocate: impl FnOnce(Layout) -> Result<NonNull<[u8]>, AllocError>,
        mem_to_rc_inner: impl FnOnce(*mut u8) -> *mut RcInner<T>,
    ) -> Result<*mut RcInner<T>, AllocError> {
        let layout = rc_inner_layout_for_value_layout(value_layout);

        // Allocate for the layout.
        let ptr = allocate(layout)?;

        // Initialize the RcInner
        let inner = mem_to_rc_inner(ptr.as_non_null_ptr().as_ptr());
        unsafe {
            debug_assert_eq!(Layout::for_value_raw(inner), layout);

            // New allocation: one strong reference plus the implicit weak
            // reference collectively held by the strong pointers.
            (&raw mut (*inner).strong).write(Cell::new(1));
            (&raw mut (*inner).weak).write(Cell::new(1));
        }

        Ok(inner)
    }
}
2263
impl<T: ?Sized, A: Allocator> Rc<T, A> {
    /// Allocates an `RcInner<T>` with sufficient space for an unsized inner value
    #[cfg(not(no_global_oom_handling))]
    unsafe fn allocate_for_ptr_in(ptr: *const T, alloc: &A) -> *mut RcInner<T> {
        // Allocate for the `RcInner<T>` using the given value.
        unsafe {
            Rc::<T>::allocate_for_layout(
                Layout::for_value_raw(ptr),
                |layout| alloc.allocate(layout),
                // Attach `ptr`'s (possibly fat) pointer metadata to the fresh allocation.
                |mem| mem.with_metadata_of(ptr as *const RcInner<T>),
            )
        }
    }

    /// Moves the value out of a `Box` into a freshly allocated `Rc`,
    /// reusing the box's allocator.
    #[cfg(not(no_global_oom_handling))]
    fn from_box_in(src: Box<T, A>) -> Rc<T, A> {
        unsafe {
            let value_size = size_of_val(&*src);
            let ptr = Self::allocate_for_ptr_in(&*src, Box::allocator(&src));

            // Copy value as bytes
            ptr::copy_nonoverlapping(
                (&raw const *src) as *const u8,
                (&raw mut (*ptr).value) as *mut u8,
                value_size,
            );

            // Free the allocation without dropping its contents
            // (the bytes were moved above; `ManuallyDrop` suppresses the value's drop).
            let (bptr, alloc) = Box::into_raw_with_allocator(src);
            let src = Box::from_raw_in(bptr as *mut mem::ManuallyDrop<T>, alloc.by_ref());
            drop(src);

            Self::from_ptr_in(ptr, alloc)
        }
    }
}
2300
impl<T> Rc<[T]> {
    /// Allocates an `RcInner<[T]>` with the given length.
    #[cfg(not(no_global_oom_handling))]
    unsafe fn allocate_for_slice(len: usize) -> *mut RcInner<[T]> {
        unsafe {
            Self::allocate_for_layout(
                Layout::array::<T>(len).unwrap(),
                |layout| Global.allocate(layout),
                // Build the fat pointer: data pointer + slice length metadata.
                |mem| ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len) as *mut RcInner<[T]>,
            )
        }
    }

    /// Copy elements from slice into newly allocated `Rc<[T]>`
    ///
    /// Unsafe because the caller must either take ownership, bind `T: Copy` or
    /// bind `T: TrivialClone`.
    #[cfg(not(no_global_oom_handling))]
    unsafe fn copy_from_slice(v: &[T]) -> Rc<[T]> {
        unsafe {
            let ptr = Self::allocate_for_slice(v.len());
            // Bitwise copy of all elements straight into the new allocation.
            ptr::copy_nonoverlapping(v.as_ptr(), (&raw mut (*ptr).value) as *mut T, v.len());
            Self::from_ptr(ptr)
        }
    }

    /// Constructs an `Rc<[T]>` from an iterator known to be of a certain size.
    ///
    /// Behavior is undefined should the size be wrong.
    #[cfg(not(no_global_oom_handling))]
    unsafe fn from_iter_exact(iter: impl Iterator<Item = T>, len: usize) -> Rc<[T]> {
        // Panic guard while cloning T elements.
        // In the event of a panic, elements that have been written
        // into the new RcInner will be dropped, then the memory freed.
        struct Guard<T> {
            mem: NonNull<u8>,
            elems: *mut T,
            layout: Layout,
            n_elems: usize,
        }

        impl<T> Drop for Guard<T> {
            fn drop(&mut self) {
                unsafe {
                    // Drop only the elements written so far, then free the block.
                    let slice = from_raw_parts_mut(self.elems, self.n_elems);
                    ptr::drop_in_place(slice);

                    Global.deallocate(self.mem, self.layout);
                }
            }
        }

        unsafe {
            let ptr = Self::allocate_for_slice(len);

            let mem = ptr as *mut _ as *mut u8;
            let layout = Layout::for_value_raw(ptr);

            // Pointer to first element
            let elems = (&raw mut (*ptr).value) as *mut T;

            let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems, layout, n_elems: 0 };

            for (i, item) in iter.enumerate() {
                ptr::write(elems.add(i), item);
                // Track progress so the guard drops exactly the initialized prefix.
                guard.n_elems += 1;
            }

            // All clear. Forget the guard so it doesn't free the new RcInner.
            mem::forget(guard);

            Self::from_ptr(ptr)
        }
    }
}
2376
impl<T, A: Allocator> Rc<[T], A> {
    /// Allocates an `RcInner<[T]>` with the given length.
    #[inline]
    #[cfg(not(no_global_oom_handling))]
    unsafe fn allocate_for_slice_in(len: usize, alloc: &A) -> *mut RcInner<[T]> {
        unsafe {
            // Same as `Rc::<[T]>::allocate_for_slice`, but using the provided allocator.
            Rc::<[T]>::allocate_for_layout(
                Layout::array::<T>(len).unwrap(),
                |layout| alloc.allocate(layout),
                |mem| ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len) as *mut RcInner<[T]>,
            )
        }
    }
}
2391
#[cfg(not(no_global_oom_handling))]
/// Specialization trait used for `From<&[T]>`.
trait RcFromSlice<T> {
    /// Builds an `Rc<[T]>` containing a copy/clone of every element of `slice`.
    fn from_slice(slice: &[T]) -> Self;
}
2397
#[cfg(not(no_global_oom_handling))]
impl<T: Clone> RcFromSlice<T> for Rc<[T]> {
    /// Fallback path: clone each element through the iterator machinery.
    #[inline]
    default fn from_slice(v: &[T]) -> Self {
        unsafe { Self::from_iter_exact(v.iter().cloned(), v.len()) }
    }
}
2405
#[cfg(not(no_global_oom_handling))]
impl<T: TrivialClone> RcFromSlice<T> for Rc<[T]> {
    /// Specialized fast path: a plain byte-wise copy of the slice.
    #[inline]
    fn from_slice(v: &[T]) -> Self {
        // SAFETY: `T` implements `TrivialClone`, so this is sound and equivalent
        // to the above.
        unsafe { Rc::copy_from_slice(v) }
    }
}
2415
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized, A: Allocator> Deref for Rc<T, A> {
    type Target = T;

    /// Borrows the inner value; a pure read, no count is touched.
    #[inline(always)]
    fn deref(&self) -> &T {
        &self.inner().value
    }
}
2425
#[unstable(feature = "pin_coerce_unsized_trait", issue = "150112")]
// Marker impl: unsized coercions of pinned `Rc`s are sound.
unsafe impl<T: ?Sized, A: Allocator> PinCoerceUnsized for Rc<T, A> {}
2428
//#[unstable(feature = "unique_rc_arc", issue = "112566")]
#[unstable(feature = "pin_coerce_unsized_trait", issue = "150112")]
// Marker impl: same guarantee as for `Rc`, extended to `UniqueRc`.
unsafe impl<T: ?Sized, A: Allocator> PinCoerceUnsized for UniqueRc<T, A> {}
2432
#[unstable(feature = "deref_pure_trait", issue = "87121")]
// Marker impl: `Rc`'s `Deref` has no side effects (see `deref` above).
unsafe impl<T: ?Sized, A: Allocator> DerefPure for Rc<T, A> {}
2435
//#[unstable(feature = "unique_rc_arc", issue = "112566")]
#[unstable(feature = "deref_pure_trait", issue = "87121")]
// Marker impl: same pure-deref guarantee for `UniqueRc`.
unsafe impl<T: ?Sized, A: Allocator> DerefPure for UniqueRc<T, A> {}
2439
#[unstable(feature = "legacy_receiver_trait", issue = "none")]
// Allows `Rc<T>` to be used as a method receiver (`self: Rc<Self>`).
impl<T: ?Sized> LegacyReceiver for Rc<T> {}
2442
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Rc<T, A> {
    /// Drops the `Rc`.
    ///
    /// This will decrement the strong reference count. If the strong reference
    /// count reaches zero then the only other references (if any) are
    /// [`Weak`], so we `drop` the inner value.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// struct Foo;
    ///
    /// impl Drop for Foo {
    ///     fn drop(&mut self) {
    ///         println!("dropped!");
    ///     }
    /// }
    ///
    /// let foo  = Rc::new(Foo);
    /// let foo2 = Rc::clone(&foo);
    ///
    /// drop(foo);    // Doesn't print anything
    /// drop(foo2);   // Prints "dropped!"
    /// ```
    #[inline]
    fn drop(&mut self) {
        unsafe {
            self.inner().dec_strong();
            if self.inner().strong() == 0 {
                // Last strong reference gone: the slow path drops the value
                // (and frees the allocation when no `Weak`s remain).
                self.drop_slow();
            }
        }
    }
}
2480
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized, A: Allocator + Clone> Clone for Rc<T, A> {
    /// Makes a clone of the `Rc` pointer.
    ///
    /// This creates another pointer to the same allocation, increasing the
    /// strong reference count.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    ///
    /// let _ = Rc::clone(&five);
    /// ```
    #[inline]
    fn clone(&self) -> Self {
        unsafe {
            // Bump the count first, then build a second handle over the same
            // allocation; only the allocator itself is actually cloned.
            self.inner().inc_strong();
            Self::from_inner_in(self.ptr, self.alloc.clone())
        }
    }
}
2505
// Marks `Rc` as suitable for ergonomic (`use`) clones: cloning an `Rc` is
// only a reference-count bump, never a deep copy.
#[unstable(feature = "ergonomic_clones", issue = "132290")]
impl<T: ?Sized, A: Allocator + Clone> UseCloned for Rc<T, A> {}
2508
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Default> Default for Rc<T> {
    /// Creates a new `Rc<T>`, with the `Default` value for `T`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let x: Rc<i32> = Default::default();
    /// assert_eq!(*x, 0);
    /// ```
    #[inline]
    fn default() -> Self {
        unsafe {
            Self::from_inner(
                Box::leak(Box::write(
                    Box::new_uninit(),
                    // strong = 1 for the returned `Rc`; weak = 1 is the
                    // implicit weak reference held collectively by all
                    // strong references.
                    RcInner { strong: Cell::new(1), weak: Cell::new(1), value: T::default() },
                ))
                .into(),
            )
        }
    }
}
2535
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "more_rc_default_impls", since = "1.80.0")]
impl Default for Rc<str> {
    /// Creates an empty `str` inside an `Rc`.
    ///
    /// This may or may not share an allocation with other Rcs on the same thread.
    #[inline]
    fn default() -> Self {
        let rc = Rc::<[u8]>::default();
        // `[u8]` has the same layout as `str`.
        // An empty byte slice is also trivially valid UTF-8, so reinterpreting
        // the allocation as `str` is sound.
        unsafe { Rc::from_raw(Rc::into_raw(rc) as *const str) }
    }
}
2549
2550#[cfg(not(no_global_oom_handling))]
2551#[stable(feature = "more_rc_default_impls", since = "1.80.0")]
2552impl<T> Default for Rc<[T]> {
2553    /// Creates an empty `[T]` inside an `Rc`.
2554    ///
2555    /// This may or may not share an allocation with other Rcs on the same thread.
2556    #[inline]
2557    fn default() -> Self {
2558        let arr: [T; 0] = [];
2559        Rc::from(arr)
2560    }
2561}
2562
2563#[cfg(not(no_global_oom_handling))]
2564#[stable(feature = "pin_default_impls", since = "1.91.0")]
2565impl<T> Default for Pin<Rc<T>>
2566where
2567    T: ?Sized,
2568    Rc<T>: Default,
2569{
2570    #[inline]
2571    fn default() -> Self {
2572        unsafe { Pin::new_unchecked(Rc::<T>::default()) }
2573    }
2574}
2575
// Internal dispatch trait so equality on `Rc` can be specialized for
// `T: Eq` (see the two impls below).
#[stable(feature = "rust1", since = "1.0.0")]
trait RcEqIdent<T: ?Sized + PartialEq, A: Allocator> {
    fn eq(&self, other: &Rc<T, A>) -> bool;
    fn ne(&self, other: &Rc<T, A>) -> bool;
}
2581
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + PartialEq, A: Allocator> RcEqIdent<T, A> for Rc<T, A> {
    // `default fn` marks these methods as specializable; the `MarkerEq`-bound
    // impl below overrides them with a pointer-equality fast path.
    #[inline]
    default fn eq(&self, other: &Rc<T, A>) -> bool {
        **self == **other
    }

    #[inline]
    default fn ne(&self, other: &Rc<T, A>) -> bool {
        **self != **other
    }
}
2594
// Hack to allow specializing on `Eq` even though `Eq` has a method.
// The attribute tells the compiler this trait may be used as a
// specialization bound despite not normally qualifying.
#[rustc_unsafe_specialization_marker]
pub(crate) trait MarkerEq: PartialEq<Self> {}

impl<T: Eq> MarkerEq for T {}
2600
/// We're doing this specialization here, and not as a more general optimization on `&T`, because it
/// would otherwise add a cost to all equality checks on refs. We assume that `Rc`s are used to
/// store large values, that are slow to clone, but also heavy to check for equality, causing this
/// cost to pay off more easily. It's also more likely to have two `Rc` clones, that point to
/// the same value, than two `&T`s.
///
/// We can only do this when `T: Eq` as a `PartialEq` might be deliberately irreflexive.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + MarkerEq, A: Allocator> RcEqIdent<T, A> for Rc<T, A> {
    #[inline]
    fn eq(&self, other: &Rc<T, A>) -> bool {
        // `T: Eq` implies reflexivity, so two `Rc`s to the same allocation
        // must compare equal — skip the value comparison in that case.
        Rc::ptr_eq(self, other) || **self == **other
    }

    #[inline]
    fn ne(&self, other: &Rc<T, A>) -> bool {
        // Dually: two `Rc`s to the same allocation can never be unequal.
        !Rc::ptr_eq(self, other) && **self != **other
    }
}
2620
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + PartialEq, A: Allocator> PartialEq for Rc<T, A> {
    /// Equality for two `Rc`s.
    ///
    /// Two `Rc`s are equal if their inner values are equal, even if they are
    /// stored in different allocations.
    ///
    /// If `T` also implements `Eq` (implying reflexivity of equality),
    /// two `Rc`s that point to the same allocation are
    /// always equal.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    ///
    /// assert!(five == Rc::new(5));
    /// ```
    #[inline]
    fn eq(&self, other: &Rc<T, A>) -> bool {
        // Dispatch through `RcEqIdent` so the specialized `T: Eq`
        // pointer-equality fast path can apply.
        RcEqIdent::eq(self, other)
    }

    /// Inequality for two `Rc`s.
    ///
    /// Two `Rc`s are not equal if their inner values are not equal.
    ///
    /// If `T` also implements `Eq` (implying reflexivity of equality),
    /// two `Rc`s that point to the same allocation are
    /// always equal.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    ///
    /// assert!(five != Rc::new(6));
    /// ```
    #[inline]
    fn ne(&self, other: &Rc<T, A>) -> bool {
        RcEqIdent::ne(self, other)
    }
}
2668
#[stable(feature = "rust1", since = "1.0.0")]
// `Rc` equality delegates to `T`'s, so it is a full equivalence relation
// whenever `T: Eq`.
impl<T: ?Sized + Eq, A: Allocator> Eq for Rc<T, A> {}
2671
2672#[stable(feature = "rust1", since = "1.0.0")]
2673impl<T: ?Sized + PartialOrd, A: Allocator> PartialOrd for Rc<T, A> {
2674    /// Partial comparison for two `Rc`s.
2675    ///
2676    /// The two are compared by calling `partial_cmp()` on their inner values.
2677    ///
2678    /// # Examples
2679    ///
2680    /// ```
2681    /// use std::rc::Rc;
2682    /// use std::cmp::Ordering;
2683    ///
2684    /// let five = Rc::new(5);
2685    ///
2686    /// assert_eq!(Some(Ordering::Less), five.partial_cmp(&Rc::new(6)));
2687    /// ```
2688    #[inline(always)]
2689    fn partial_cmp(&self, other: &Rc<T, A>) -> Option<Ordering> {
2690        (**self).partial_cmp(&**other)
2691    }
2692
2693    /// Less-than comparison for two `Rc`s.
2694    ///
2695    /// The two are compared by calling `<` on their inner values.
2696    ///
2697    /// # Examples
2698    ///
2699    /// ```
2700    /// use std::rc::Rc;
2701    ///
2702    /// let five = Rc::new(5);
2703    ///
2704    /// assert!(five < Rc::new(6));
2705    /// ```
2706    #[inline(always)]
2707    fn lt(&self, other: &Rc<T, A>) -> bool {
2708        **self < **other
2709    }
2710
2711    /// 'Less than or equal to' comparison for two `Rc`s.
2712    ///
2713    /// The two are compared by calling `<=` on their inner values.
2714    ///
2715    /// # Examples
2716    ///
2717    /// ```
2718    /// use std::rc::Rc;
2719    ///
2720    /// let five = Rc::new(5);
2721    ///
2722    /// assert!(five <= Rc::new(5));
2723    /// ```
2724    #[inline(always)]
2725    fn le(&self, other: &Rc<T, A>) -> bool {
2726        **self <= **other
2727    }
2728
2729    /// Greater-than comparison for two `Rc`s.
2730    ///
2731    /// The two are compared by calling `>` on their inner values.
2732    ///
2733    /// # Examples
2734    ///
2735    /// ```
2736    /// use std::rc::Rc;
2737    ///
2738    /// let five = Rc::new(5);
2739    ///
2740    /// assert!(five > Rc::new(4));
2741    /// ```
2742    #[inline(always)]
2743    fn gt(&self, other: &Rc<T, A>) -> bool {
2744        **self > **other
2745    }
2746
2747    /// 'Greater than or equal to' comparison for two `Rc`s.
2748    ///
2749    /// The two are compared by calling `>=` on their inner values.
2750    ///
2751    /// # Examples
2752    ///
2753    /// ```
2754    /// use std::rc::Rc;
2755    ///
2756    /// let five = Rc::new(5);
2757    ///
2758    /// assert!(five >= Rc::new(5));
2759    /// ```
2760    #[inline(always)]
2761    fn ge(&self, other: &Rc<T, A>) -> bool {
2762        **self >= **other
2763    }
2764}
2765
2766#[stable(feature = "rust1", since = "1.0.0")]
2767impl<T: ?Sized + Ord, A: Allocator> Ord for Rc<T, A> {
2768    /// Comparison for two `Rc`s.
2769    ///
2770    /// The two are compared by calling `cmp()` on their inner values.
2771    ///
2772    /// # Examples
2773    ///
2774    /// ```
2775    /// use std::rc::Rc;
2776    /// use std::cmp::Ordering;
2777    ///
2778    /// let five = Rc::new(5);
2779    ///
2780    /// assert_eq!(Ordering::Less, five.cmp(&Rc::new(6)));
2781    /// ```
2782    #[inline]
2783    fn cmp(&self, other: &Rc<T, A>) -> Ordering {
2784        (**self).cmp(&**other)
2785    }
2786}
2787
2788#[stable(feature = "rust1", since = "1.0.0")]
2789impl<T: ?Sized + Hash, A: Allocator> Hash for Rc<T, A> {
2790    fn hash<H: Hasher>(&self, state: &mut H) {
2791        (**self).hash(state);
2792    }
2793}
2794
2795#[stable(feature = "rust1", since = "1.0.0")]
2796impl<T: ?Sized + fmt::Display, A: Allocator> fmt::Display for Rc<T, A> {
2797    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
2798        fmt::Display::fmt(&**self, f)
2799    }
2800}
2801
2802#[stable(feature = "rust1", since = "1.0.0")]
2803impl<T: ?Sized + fmt::Debug, A: Allocator> fmt::Debug for Rc<T, A> {
2804    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
2805        fmt::Debug::fmt(&**self, f)
2806    }
2807}
2808
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized, A: Allocator> fmt::Pointer for Rc<T, A> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Prints the address of the pointed-to value, not the address of the
        // `Rc` handle itself.
        fmt::Pointer::fmt(&(&raw const **self), f)
    }
}
2815
2816#[cfg(not(no_global_oom_handling))]
2817#[stable(feature = "from_for_ptrs", since = "1.6.0")]
2818impl<T> From<T> for Rc<T> {
2819    /// Converts a generic type `T` into an `Rc<T>`
2820    ///
2821    /// The conversion allocates on the heap and moves `t`
2822    /// from the stack into it.
2823    ///
2824    /// # Example
2825    /// ```rust
2826    /// # use std::rc::Rc;
2827    /// let x = 5;
2828    /// let rc = Rc::new(5);
2829    ///
2830    /// assert_eq!(Rc::from(x), rc);
2831    /// ```
2832    fn from(t: T) -> Self {
2833        Rc::new(t)
2834    }
2835}
2836
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "shared_from_array", since = "1.74.0")]
impl<T, const N: usize> From<[T; N]> for Rc<[T]> {
    /// Converts a [`[T; N]`](prim@array) into an `Rc<[T]>`.
    ///
    /// The conversion moves the array into a newly allocated `Rc`.
    ///
    /// # Example
    ///
    /// ```
    /// # use std::rc::Rc;
    /// let original: [i32; 3] = [1, 2, 3];
    /// let shared: Rc<[i32]> = Rc::from(original);
    /// assert_eq!(&[1, 2, 3], &shared[..]);
    /// ```
    #[inline]
    fn from(v: [T; N]) -> Rc<[T]> {
        // Goes through `From<T> for Rc<T>` with `T = [T; N]`; the result is
        // then unsized from `Rc<[T; N]>` to `Rc<[T]>` by the return-type
        // coercion.
        Rc::<[T; N]>::from(v)
    }
}
2857
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "shared_from_slice", since = "1.21.0")]
impl<T: Clone> From<&[T]> for Rc<[T]> {
    /// Allocates a reference-counted slice and fills it by cloning `v`'s items.
    ///
    /// # Example
    ///
    /// ```
    /// # use std::rc::Rc;
    /// let original: &[i32] = &[1, 2, 3];
    /// let shared: Rc<[i32]> = Rc::from(original);
    /// assert_eq!(&[1, 2, 3], &shared[..]);
    /// ```
    #[inline]
    fn from(v: &[T]) -> Rc<[T]> {
        // `RcFromSlice` is an internal helper trait that allows this
        // conversion to be specialized (see its definition in this file).
        <Self as RcFromSlice<T>>::from_slice(v)
    }
}
2876
2877#[cfg(not(no_global_oom_handling))]
2878#[stable(feature = "shared_from_mut_slice", since = "1.84.0")]
2879impl<T: Clone> From<&mut [T]> for Rc<[T]> {
2880    /// Allocates a reference-counted slice and fills it by cloning `v`'s items.
2881    ///
2882    /// # Example
2883    ///
2884    /// ```
2885    /// # use std::rc::Rc;
2886    /// let mut original = [1, 2, 3];
2887    /// let original: &mut [i32] = &mut original;
2888    /// let shared: Rc<[i32]> = Rc::from(original);
2889    /// assert_eq!(&[1, 2, 3], &shared[..]);
2890    /// ```
2891    #[inline]
2892    fn from(v: &mut [T]) -> Rc<[T]> {
2893        Rc::from(&*v)
2894    }
2895}
2896
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "shared_from_slice", since = "1.21.0")]
impl From<&str> for Rc<str> {
    /// Allocates a reference-counted string slice and copies `v` into it.
    ///
    /// # Example
    ///
    /// ```
    /// # use std::rc::Rc;
    /// let shared: Rc<str> = Rc::from("statue");
    /// assert_eq!("statue", &shared[..]);
    /// ```
    #[inline]
    fn from(v: &str) -> Rc<str> {
        let rc = Rc::<[u8]>::from(v.as_bytes());
        // SAFETY: `str` has the same layout as `[u8]`, and `rc` holds the
        // bytes of a valid `&str`, so they are valid UTF-8.
        unsafe { Rc::from_raw(Rc::into_raw(rc) as *const str) }
    }
}
2915
2916#[cfg(not(no_global_oom_handling))]
2917#[stable(feature = "shared_from_mut_slice", since = "1.84.0")]
2918impl From<&mut str> for Rc<str> {
2919    /// Allocates a reference-counted string slice and copies `v` into it.
2920    ///
2921    /// # Example
2922    ///
2923    /// ```
2924    /// # use std::rc::Rc;
2925    /// let mut original = String::from("statue");
2926    /// let original: &mut str = &mut original;
2927    /// let shared: Rc<str> = Rc::from(original);
2928    /// assert_eq!("statue", &shared[..]);
2929    /// ```
2930    #[inline]
2931    fn from(v: &mut str) -> Rc<str> {
2932        Rc::from(&*v)
2933    }
2934}
2935
2936#[cfg(not(no_global_oom_handling))]
2937#[stable(feature = "shared_from_slice", since = "1.21.0")]
2938impl From<String> for Rc<str> {
2939    /// Allocates a reference-counted string slice and copies `v` into it.
2940    ///
2941    /// # Example
2942    ///
2943    /// ```
2944    /// # use std::rc::Rc;
2945    /// let original: String = "statue".to_owned();
2946    /// let shared: Rc<str> = Rc::from(original);
2947    /// assert_eq!("statue", &shared[..]);
2948    /// ```
2949    #[inline]
2950    fn from(v: String) -> Rc<str> {
2951        Rc::from(&v[..])
2952    }
2953}
2954
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "shared_from_slice", since = "1.21.0")]
impl<T: ?Sized, A: Allocator> From<Box<T, A>> for Rc<T, A> {
    /// Move a boxed object to a new, reference counted, allocation.
    ///
    /// # Example
    ///
    /// ```
    /// # use std::rc::Rc;
    /// let original: Box<i32> = Box::new(1);
    /// let shared: Rc<i32> = Rc::from(original);
    /// assert_eq!(1, *shared);
    /// ```
    #[inline]
    fn from(v: Box<T, A>) -> Rc<T, A> {
        // The box's allocator `A` is carried over to the `Rc` (both sides use
        // the same allocator type).
        Rc::from_box_in(v)
    }
}
2973
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "shared_from_slice", since = "1.21.0")]
impl<T, A: Allocator> From<Vec<T, A>> for Rc<[T], A> {
    /// Allocates a reference-counted slice and moves `v`'s items into it.
    ///
    /// # Example
    ///
    /// ```
    /// # use std::rc::Rc;
    /// let unique: Vec<i32> = vec![1, 2, 3];
    /// let shared: Rc<[i32]> = Rc::from(unique);
    /// assert_eq!(&[1, 2, 3], &shared[..]);
    /// ```
    #[inline]
    fn from(v: Vec<T, A>) -> Rc<[T], A> {
        unsafe {
            // Take over the vector's buffer, length, capacity and allocator.
            let (vec_ptr, len, cap, alloc) = v.into_raw_parts_with_alloc();

            let rc_ptr = Self::allocate_for_slice_in(len, &alloc);
            // SAFETY: the fresh `Rc` allocation cannot overlap the vector's
            // buffer, and both sides are valid for `len` elements of `T`.
            ptr::copy_nonoverlapping(vec_ptr, (&raw mut (*rc_ptr).value) as *mut T, len);

            // Create a `Vec<T, &A>` with length 0, to deallocate the buffer
            // without dropping its contents or the allocator
            let _ = Vec::from_raw_parts_in(vec_ptr, 0, cap, &alloc);

            Self::from_ptr_in(rc_ptr, alloc)
        }
    }
}
3003
3004#[stable(feature = "shared_from_cow", since = "1.45.0")]
3005impl<'a, B> From<Cow<'a, B>> for Rc<B>
3006where
3007    B: ToOwned + ?Sized,
3008    Rc<B>: From<&'a B> + From<B::Owned>,
3009{
3010    /// Creates a reference-counted pointer from a clone-on-write pointer by
3011    /// copying its content.
3012    ///
3013    /// # Example
3014    ///
3015    /// ```rust
3016    /// # use std::rc::Rc;
3017    /// # use std::borrow::Cow;
3018    /// let cow: Cow<'_, str> = Cow::Borrowed("eggplant");
3019    /// let shared: Rc<str> = Rc::from(cow);
3020    /// assert_eq!("eggplant", &shared[..]);
3021    /// ```
3022    #[inline]
3023    fn from(cow: Cow<'a, B>) -> Rc<B> {
3024        match cow {
3025            Cow::Borrowed(s) => Rc::from(s),
3026            Cow::Owned(s) => Rc::from(s),
3027        }
3028    }
3029}
3030
#[stable(feature = "shared_from_str", since = "1.62.0")]
impl From<Rc<str>> for Rc<[u8]> {
    /// Converts a reference-counted string slice into a byte slice.
    ///
    /// # Example
    ///
    /// ```
    /// # use std::rc::Rc;
    /// let string: Rc<str> = Rc::from("eggplant");
    /// let bytes: Rc<[u8]> = Rc::from(string);
    /// assert_eq!("eggplant".as_bytes(), bytes.as_ref());
    /// ```
    #[inline]
    fn from(rc: Rc<str>) -> Self {
        // SAFETY: `str` has the same layout as `[u8]`.
        // No data is copied: only the pointee type of the pointer changes.
        unsafe { Rc::from_raw(Rc::into_raw(rc) as *const [u8]) }
    }
}
3049
#[stable(feature = "boxed_slice_try_from", since = "1.43.0")]
impl<T, A: Allocator, const N: usize> TryFrom<Rc<[T], A>> for Rc<[T; N], A> {
    type Error = Rc<[T], A>;

    // Succeeds only when the slice's length is exactly `N`; otherwise the
    // original `Rc` is handed back unchanged as the error.
    fn try_from(boxed_slice: Rc<[T], A>) -> Result<Self, Self::Error> {
        if boxed_slice.len() == N {
            let (ptr, alloc) = Rc::into_inner_with_allocator(boxed_slice);
            // SAFETY: the length was checked to be `N` above, and a `[T]` of
            // length `N` has the same layout as `[T; N]`.
            Ok(unsafe { Rc::from_inner_in(ptr.cast(), alloc) })
        } else {
            Err(boxed_slice)
        }
    }
}
3063
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "shared_from_iter", since = "1.37.0")]
impl<T> FromIterator<T> for Rc<[T]> {
    /// Takes each element in the `Iterator` and collects it into an `Rc<[T]>`.
    ///
    /// # Performance characteristics
    ///
    /// ## The general case
    ///
    /// In the general case, collecting into `Rc<[T]>` is done by first
    /// collecting into a `Vec<T>`. That is, when writing the following:
    ///
    /// ```rust
    /// # use std::rc::Rc;
    /// let evens: Rc<[u8]> = (0..10).filter(|&x| x % 2 == 0).collect();
    /// # assert_eq!(&*evens, &[0, 2, 4, 6, 8]);
    /// ```
    ///
    /// this behaves as if we wrote:
    ///
    /// ```rust
    /// # use std::rc::Rc;
    /// let evens: Rc<[u8]> = (0..10).filter(|&x| x % 2 == 0)
    ///     .collect::<Vec<_>>() // The first set of allocations happens here.
    ///     .into(); // A second allocation for `Rc<[T]>` happens here.
    /// # assert_eq!(&*evens, &[0, 2, 4, 6, 8]);
    /// ```
    ///
    /// This will allocate as many times as needed for constructing the `Vec<T>`
    /// and then it will allocate once for turning the `Vec<T>` into the `Rc<[T]>`.
    ///
    /// ## Iterators of known length
    ///
    /// When your `Iterator` implements `TrustedLen` and is of an exact size,
    /// a single allocation will be made for the `Rc<[T]>`. For example:
    ///
    /// ```rust
    /// # use std::rc::Rc;
    /// let evens: Rc<[u8]> = (0..10).collect(); // Just a single allocation happens here.
    /// # assert_eq!(&*evens, &*(0..10).collect::<Vec<_>>());
    /// ```
    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
        // `ToRcSlice` is a specialization helper (defined below): `TrustedLen`
        // iterators take a single-allocation path, everything else buffers
        // through a `Vec`.
        ToRcSlice::to_rc_slice(iter.into_iter())
    }
}
3109
/// Specialization trait used for collecting into `Rc<[T]>`.
#[cfg(not(no_global_oom_handling))]
trait ToRcSlice<T>: Iterator<Item = T> + Sized {
    // Consumes the iterator and collects every item into a new `Rc<[T]>`.
    fn to_rc_slice(self) -> Rc<[T]>;
}
3115
3116#[cfg(not(no_global_oom_handling))]
3117impl<T, I: Iterator<Item = T>> ToRcSlice<T> for I {
3118    default fn to_rc_slice(self) -> Rc<[T]> {
3119        self.collect::<Vec<T>>().into()
3120    }
3121}
3122
#[cfg(not(no_global_oom_handling))]
impl<T, I: iter::TrustedLen<Item = T>> ToRcSlice<T> for I {
    fn to_rc_slice(self) -> Rc<[T]> {
        // This is the case for a `TrustedLen` iterator.
        let (low, high) = self.size_hint();
        if let Some(high) = high {
            // `TrustedLen` promises an exact size hint; double-check in
            // debug builds before trusting `low` as the exact length.
            debug_assert_eq!(
                low,
                high,
                "TrustedLen iterator's size hint is not exact: {:?}",
                (low, high)
            );

            unsafe {
                // SAFETY: We need to ensure that the iterator has an exact length and we have.
                Rc::from_iter_exact(self, low)
            }
        } else {
            // TrustedLen contract guarantees that `upper_bound == None` implies an iterator
            // length exceeding `usize::MAX`.
            // The default implementation would collect into a vec which would panic.
            // Thus we panic here immediately without invoking `Vec` code.
            panic!("capacity overflow");
        }
    }
}
3149
/// `Weak` is a version of [`Rc`] that holds a non-owning reference to the
/// managed allocation.
///
/// The allocation is accessed by calling [`upgrade`] on the `Weak`
/// pointer, which returns an <code>[Option]<[Rc]\<T>></code>.
///
/// Since a `Weak` reference does not count towards ownership, it will not
/// prevent the value stored in the allocation from being dropped, and `Weak` itself makes no
/// guarantees about the value still being present. Thus it may return [`None`]
/// when [`upgrade`]d. Note however that a `Weak` reference *does* prevent the allocation
/// itself (the backing store) from being deallocated.
///
/// A `Weak` pointer is useful for keeping a temporary reference to the allocation
/// managed by [`Rc`] without preventing its inner value from being dropped. It is also used to
/// prevent circular references between [`Rc`] pointers, since mutual owning references
/// would never allow either [`Rc`] to be dropped. For example, a tree could
/// have strong [`Rc`] pointers from parent nodes to children, and `Weak`
/// pointers from children back to their parents.
///
/// The typical way to obtain a `Weak` pointer is to call [`Rc::downgrade`].
///
/// [`upgrade`]: Weak::upgrade
#[stable(feature = "rc_weak", since = "1.4.0")]
#[rustc_diagnostic_item = "RcWeak"]
pub struct Weak<
    T: ?Sized,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
    // This is a `NonNull` to allow optimizing the size of this type in enums,
    // but it is not necessarily a valid pointer.
    // `Weak::new` sets this to `usize::MAX` so that it doesn’t need
    // to allocate space on the heap. That's not a value a real pointer
    // will ever have because RcInner has alignment at least 2.
    ptr: NonNull<RcInner<T>>,
    // The allocator this `Weak` was created with.
    alloc: A,
}
3186
#[stable(feature = "rc_weak", since = "1.4.0")]
// `Weak` shares `Rc`'s non-atomic reference counts, so it can be neither
// sent nor shared across threads.
impl<T: ?Sized, A: Allocator> !Send for Weak<T, A> {}
#[stable(feature = "rc_weak", since = "1.4.0")]
impl<T: ?Sized, A: Allocator> !Sync for Weak<T, A> {}

// Allows coercing `Weak<T>` to `Weak<U>` when `T` unsizes to `U`
// (e.g. a sized type to a slice or trait object).
#[unstable(feature = "coerce_unsized", issue = "18598")]
impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<Weak<U, A>> for Weak<T, A> {}

// Allows `Weak<Self>` to work as a dynamically-dispatchable receiver type.
#[unstable(feature = "dispatch_from_dyn", issue = "none")]
impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Weak<U>> for Weak<T> {}

// SAFETY: `Weak::clone` doesn't access any `Cell`s which could contain the `Weak` being cloned.
#[unstable(feature = "cell_get_cloned", issue = "145329")]
unsafe impl<T: ?Sized> CloneFromCell for Weak<T> {}
3201
impl<T> Weak<T> {
    /// Constructs a new `Weak<T>`, without allocating any memory.
    /// Calling [`upgrade`] on the return value always gives [`None`].
    ///
    /// [`upgrade`]: Weak::upgrade
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Weak;
    ///
    /// let empty: Weak<i64> = Weak::new();
    /// assert!(empty.upgrade().is_none());
    /// ```
    #[inline]
    #[stable(feature = "downgraded_weak", since = "1.10.0")]
    #[rustc_const_stable(feature = "const_weak_new", since = "1.73.0")]
    #[must_use]
    pub const fn new() -> Weak<T> {
        // `usize::MAX` is the dangling sentinel recognized by `is_dangling`;
        // no heap allocation is performed.
        Weak { ptr: NonNull::without_provenance(NonZeroUsize::MAX), alloc: Global }
    }
}
3224
impl<T, A: Allocator> Weak<T, A> {
    /// Constructs a new `Weak<T>`, without allocating any memory, technically in the provided
    /// allocator.
    /// Calling [`upgrade`] on the return value always gives [`None`].
    ///
    /// [`upgrade`]: Weak::upgrade
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Weak;
    ///
    /// let empty: Weak<i64> = Weak::new();
    /// assert!(empty.upgrade().is_none());
    /// ```
    #[inline]
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub fn new_in(alloc: A) -> Weak<T, A> {
        // Same dangling sentinel as `Weak::new`; only the allocator differs.
        Weak { ptr: NonNull::without_provenance(NonZeroUsize::MAX), alloc }
    }
}
3246
/// Returns `true` if `ptr` holds the sentinel address (`usize::MAX`) used by
/// dangling `Weak`s created via `Weak::new`, rather than a real allocation.
pub(crate) fn is_dangling<T: ?Sized>(ptr: *const T) -> bool {
    // Strip any pointee metadata, then compare the raw address against the
    // sentinel. A real `RcInner` can never sit at `usize::MAX` because it
    // has alignment of at least 2.
    let address = ptr.cast::<()>().addr();
    address == usize::MAX
}
3250
/// Helper type to allow accessing the reference counts without
/// making any assertions about the data field.
struct WeakInner<'a> {
    // Weak reference count (including the implicit weak reference held
    // collectively by the strong references — see `Default for Rc<T>`).
    weak: &'a Cell<usize>,
    // Strong (`Rc`) reference count.
    strong: &'a Cell<usize>,
}
3257
impl<T: ?Sized> Weak<T> {
    /// Converts a raw pointer previously created by [`into_raw`] back into `Weak<T>`.
    ///
    /// This can be used to safely get a strong reference (by calling [`upgrade`]
    /// later) or to deallocate the weak count by dropping the `Weak<T>`.
    ///
    /// It takes ownership of one weak reference (with the exception of pointers created by [`new`],
    /// as these don't own anything; the method still works on them).
    ///
    /// # Safety
    ///
    /// The pointer must have originated from the [`into_raw`] and must still own its potential
    /// weak reference, and `ptr` must point to a block of memory allocated by the global allocator.
    ///
    /// It is allowed for the strong count to be 0 at the time of calling this. Nevertheless, this
    /// takes ownership of one weak reference currently represented as a raw pointer (the weak
    /// count is not modified by this operation) and therefore it must be paired with a previous
    /// call to [`into_raw`].
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::{Rc, Weak};
    ///
    /// let strong = Rc::new("hello".to_owned());
    ///
    /// let raw_1 = Rc::downgrade(&strong).into_raw();
    /// let raw_2 = Rc::downgrade(&strong).into_raw();
    ///
    /// assert_eq!(2, Rc::weak_count(&strong));
    ///
    /// assert_eq!("hello", &*unsafe { Weak::from_raw(raw_1) }.upgrade().unwrap());
    /// assert_eq!(1, Rc::weak_count(&strong));
    ///
    /// drop(strong);
    ///
    /// // Decrement the last weak count.
    /// assert!(unsafe { Weak::from_raw(raw_2) }.upgrade().is_none());
    /// ```
    ///
    /// [`into_raw`]: Weak::into_raw
    /// [`upgrade`]: Weak::upgrade
    /// [`new`]: Weak::new
    #[inline]
    #[stable(feature = "weak_into_raw", since = "1.45.0")]
    pub unsafe fn from_raw(ptr: *const T) -> Self {
        // SAFETY: forwarded to the caller's contract; `Global` matches this
        // type's `A = Global` allocator parameter.
        unsafe { Self::from_raw_in(ptr, Global) }
    }

    /// Consumes the `Weak<T>` and turns it into a raw pointer.
    ///
    /// This converts the weak pointer into a raw pointer, while still preserving the ownership of
    /// one weak reference (the weak count is not modified by this operation). It can be turned
    /// back into the `Weak<T>` with [`from_raw`].
    ///
    /// The same restrictions of accessing the target of the pointer as with
    /// [`as_ptr`] apply.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::{Rc, Weak};
    ///
    /// let strong = Rc::new("hello".to_owned());
    /// let weak = Rc::downgrade(&strong);
    /// let raw = weak.into_raw();
    ///
    /// assert_eq!(1, Rc::weak_count(&strong));
    /// assert_eq!("hello", unsafe { &*raw });
    ///
    /// drop(unsafe { Weak::from_raw(raw) });
    /// assert_eq!(0, Rc::weak_count(&strong));
    /// ```
    ///
    /// [`from_raw`]: Weak::from_raw
    /// [`as_ptr`]: Weak::as_ptr
    #[must_use = "losing the pointer will leak memory"]
    #[stable(feature = "weak_into_raw", since = "1.45.0")]
    pub fn into_raw(self) -> *const T {
        // `ManuallyDrop` suppresses this `Weak`'s destructor so the weak
        // count stays untouched while the payload pointer is handed out.
        mem::ManuallyDrop::new(self).as_ptr()
    }
}
3340
3341impl<T: ?Sized, A: Allocator> Weak<T, A> {
    /// Returns a reference to the underlying allocator.
    #[inline]
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub fn allocator(&self) -> &A {
        // Borrowed for the lifetime of `self`; the allocator is never cloned.
        &self.alloc
    }
3348
    /// Returns a raw pointer to the object `T` pointed to by this `Weak<T>`.
    ///
    /// The pointer is valid only if there are some strong references. The pointer may be dangling,
    /// unaligned or even [`null`] otherwise.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    /// use std::ptr;
    ///
    /// let strong = Rc::new("hello".to_owned());
    /// let weak = Rc::downgrade(&strong);
    /// // Both point to the same object
    /// assert!(ptr::eq(&*strong, weak.as_ptr()));
    /// // The strong here keeps it alive, so we can still access the object.
    /// assert_eq!("hello", unsafe { &*weak.as_ptr() });
    ///
    /// drop(strong);
    /// // But not any more. We can do weak.as_ptr(), but accessing the pointer would lead to
    /// // undefined behavior.
    /// // assert_eq!("hello", unsafe { &*weak.as_ptr() });
    /// ```
    ///
    /// [`null`]: ptr::null
    #[must_use]
    #[stable(feature = "rc_as_ptr", since = "1.45.0")]
    pub fn as_ptr(&self) -> *const T {
        let ptr: *mut RcInner<T> = NonNull::as_ptr(self.ptr);

        if is_dangling(ptr) {
            // If the pointer is dangling, we return the sentinel directly. This cannot be
            // a valid payload address, as the payload is at least as aligned as RcInner (usize).
            ptr as *const T
        } else {
            // SAFETY: if is_dangling returns false, then the pointer is dereferenceable.
            // The payload may be dropped at this point, and we have to maintain provenance,
            // so use raw pointer manipulation.
            // (`&raw` avoids creating an intermediate reference to `value`.)
            unsafe { &raw mut (*ptr).value }
        }
    }
3390
    /// Consumes the `Weak<T>`, returning the wrapped pointer and allocator.
    ///
    /// This converts the weak pointer into a raw pointer, while still preserving the ownership of
    /// one weak reference (the weak count is not modified by this operation). It can be turned
    /// back into the `Weak<T>` with [`from_raw_in`].
    ///
    /// The same restrictions of accessing the target of the pointer as with
    /// [`as_ptr`] apply.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    /// use std::rc::{Rc, Weak};
    /// use std::alloc::System;
    ///
    /// let strong = Rc::new_in("hello".to_owned(), System);
    /// let weak = Rc::downgrade(&strong);
    /// let (raw, alloc) = weak.into_raw_with_allocator();
    ///
    /// assert_eq!(1, Rc::weak_count(&strong));
    /// assert_eq!("hello", unsafe { &*raw });
    ///
    /// drop(unsafe { Weak::from_raw_in(raw, alloc) });
    /// assert_eq!(0, Rc::weak_count(&strong));
    /// ```
    ///
    /// [`from_raw_in`]: Weak::from_raw_in
    /// [`as_ptr`]: Weak::as_ptr
    #[must_use = "losing the pointer will leak memory"]
    #[inline]
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub fn into_raw_with_allocator(self) -> (*const T, A) {
        // Suppress `Drop` so the weak count is not decremented here.
        let this = mem::ManuallyDrop::new(self);
        let result = this.as_ptr();
        // Safety: `this` is ManuallyDrop so the allocator will not be double-dropped
        let alloc = unsafe { ptr::read(&this.alloc) };
        (result, alloc)
    }
3430
    /// Converts a raw pointer previously created by [`into_raw`] back into `Weak<T>`.
    ///
    /// This can be used to safely get a strong reference (by calling [`upgrade`]
    /// later) or to deallocate the weak count by dropping the `Weak<T>`.
    ///
    /// It takes ownership of one weak reference (with the exception of pointers created by [`new`],
    /// as these don't own anything; the method still works on them).
    ///
    /// # Safety
    ///
    /// The pointer must have originated from the [`into_raw`] and must still own its potential
    /// weak reference, and `ptr` must point to a block of memory allocated by `alloc`.
    ///
    /// It is allowed for the strong count to be 0 at the time of calling this. Nevertheless, this
    /// takes ownership of one weak reference currently represented as a raw pointer (the weak
    /// count is not modified by this operation) and therefore it must be paired with a previous
    /// call to [`into_raw`].
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::{Rc, Weak};
    ///
    /// let strong = Rc::new("hello".to_owned());
    ///
    /// let raw_1 = Rc::downgrade(&strong).into_raw();
    /// let raw_2 = Rc::downgrade(&strong).into_raw();
    ///
    /// assert_eq!(2, Rc::weak_count(&strong));
    ///
    /// assert_eq!("hello", &*unsafe { Weak::from_raw(raw_1) }.upgrade().unwrap());
    /// assert_eq!(1, Rc::weak_count(&strong));
    ///
    /// drop(strong);
    ///
    /// // Decrement the last weak count.
    /// assert!(unsafe { Weak::from_raw(raw_2) }.upgrade().is_none());
    /// ```
    ///
    /// [`into_raw`]: Weak::into_raw
    /// [`upgrade`]: Weak::upgrade
    /// [`new`]: Weak::new
    #[inline]
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub unsafe fn from_raw_in(ptr: *const T, alloc: A) -> Self {
        // See Weak::as_ptr for context on how the input pointer is derived.

        let ptr = if is_dangling(ptr) {
            // This is a dangling Weak. The sentinel address round-trips unchanged.
            ptr as *mut RcInner<T>
        } else {
            // Otherwise, we're guaranteed the pointer came from a nondangling Weak.
            // SAFETY: data_offset is safe to call, as ptr references a real (potentially dropped) T.
            let offset = unsafe { data_offset(ptr) };
            // Thus, we reverse the offset to get the whole RcInner.
            // SAFETY: the pointer originated from a Weak, so this offset is safe.
            unsafe { ptr.byte_sub(offset) as *mut RcInner<T> }
        };

        // SAFETY: we now have recovered the original Weak pointer, so can create the Weak.
        Weak { ptr: unsafe { NonNull::new_unchecked(ptr) }, alloc }
    }
3493
3494    /// Attempts to upgrade the `Weak` pointer to an [`Rc`], delaying
3495    /// dropping of the inner value if successful.
3496    ///
3497    /// Returns [`None`] if the inner value has since been dropped.
3498    ///
3499    /// # Examples
3500    ///
3501    /// ```
3502    /// use std::rc::Rc;
3503    ///
3504    /// let five = Rc::new(5);
3505    ///
3506    /// let weak_five = Rc::downgrade(&five);
3507    ///
3508    /// let strong_five: Option<Rc<_>> = weak_five.upgrade();
3509    /// assert!(strong_five.is_some());
3510    ///
3511    /// // Destroy all strong pointers.
3512    /// drop(strong_five);
3513    /// drop(five);
3514    ///
3515    /// assert!(weak_five.upgrade().is_none());
3516    /// ```
3517    #[must_use = "this returns a new `Rc`, \
3518                  without modifying the original weak pointer"]
3519    #[stable(feature = "rc_weak", since = "1.4.0")]
3520    pub fn upgrade(&self) -> Option<Rc<T, A>>
3521    where
3522        A: Clone,
3523    {
3524        let inner = self.inner()?;
3525
3526        if inner.strong() == 0 {
3527            None
3528        } else {
3529            unsafe {
3530                inner.inc_strong();
3531                Some(Rc::from_inner_in(self.ptr, self.alloc.clone()))
3532            }
3533        }
3534    }
3535
3536    /// Gets the number of strong (`Rc`) pointers pointing to this allocation.
3537    ///
3538    /// If `self` was created using [`Weak::new`], this will return 0.
3539    #[must_use]
3540    #[stable(feature = "weak_counts", since = "1.41.0")]
3541    pub fn strong_count(&self) -> usize {
3542        if let Some(inner) = self.inner() { inner.strong() } else { 0 }
3543    }
3544
3545    /// Gets the number of `Weak` pointers pointing to this allocation.
3546    ///
3547    /// If no strong pointers remain, this will return zero.
3548    #[must_use]
3549    #[stable(feature = "weak_counts", since = "1.41.0")]
3550    pub fn weak_count(&self) -> usize {
3551        if let Some(inner) = self.inner() {
3552            if inner.strong() > 0 {
3553                inner.weak() - 1 // subtract the implicit weak ptr
3554            } else {
3555                0
3556            }
3557        } else {
3558            0
3559        }
3560    }
3561
    /// Returns `None` when the pointer is dangling and there is no allocated `RcInner`,
    /// (i.e., when this `Weak` was created by `Weak::new`).
    #[inline]
    fn inner(&self) -> Option<WeakInner<'_>> {
        if is_dangling(self.ptr.as_ptr()) {
            None
        } else {
            // We are careful to *not* create a reference covering the "data" field, as
            // the field may be mutated concurrently (for example, if the last `Rc`
            // is dropped, the data field will be dropped in-place).
            Some(unsafe {
                let ptr = self.ptr.as_ptr();
                // Only the two count fields are borrowed; `value` is never touched here.
                WeakInner { strong: &(*ptr).strong, weak: &(*ptr).weak }
            })
        }
    }
3578
    /// Returns `true` if the two `Weak`s point to the same allocation similar to [`ptr::eq`], or if
    /// both don't point to any allocation (because they were created with `Weak::new()`). However,
    /// this function ignores the metadata of `dyn Trait` pointers.
    ///
    /// # Notes
    ///
    /// Since this compares pointers it means that `Weak::new()` will equal each
    /// other, even though they don't point to any allocation.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let first_rc = Rc::new(5);
    /// let first = Rc::downgrade(&first_rc);
    /// let second = Rc::downgrade(&first_rc);
    ///
    /// assert!(first.ptr_eq(&second));
    ///
    /// let third_rc = Rc::new(5);
    /// let third = Rc::downgrade(&third_rc);
    ///
    /// assert!(!first.ptr_eq(&third));
    /// ```
    ///
    /// Comparing `Weak::new`.
    ///
    /// ```
    /// use std::rc::{Rc, Weak};
    ///
    /// let first = Weak::new();
    /// let second = Weak::new();
    /// assert!(first.ptr_eq(&second));
    ///
    /// let third_rc = Rc::new(());
    /// let third = Rc::downgrade(&third_rc);
    /// assert!(!first.ptr_eq(&third));
    /// ```
    #[inline]
    #[must_use]
    #[stable(feature = "weak_ptr_eq", since = "1.39.0")]
    pub fn ptr_eq(&self, other: &Self) -> bool {
        // addr_eq compares addresses only, discarding any wide-pointer metadata.
        ptr::addr_eq(self.ptr.as_ptr(), other.ptr.as_ptr())
    }
3624}
3625
#[stable(feature = "rc_weak", since = "1.4.0")]
unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Weak<T, A> {
    /// Drops the `Weak` pointer.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::{Rc, Weak};
    ///
    /// struct Foo;
    ///
    /// impl Drop for Foo {
    ///     fn drop(&mut self) {
    ///         println!("dropped!");
    ///     }
    /// }
    ///
    /// let foo = Rc::new(Foo);
    /// let weak_foo = Rc::downgrade(&foo);
    /// let other_weak_foo = Weak::clone(&weak_foo);
    ///
    /// drop(weak_foo);   // Doesn't print anything
    /// drop(foo);        // Prints "dropped!"
    ///
    /// assert!(other_weak_foo.upgrade().is_none());
    /// ```
    fn drop(&mut self) {
        // Dangling weaks (from `Weak::new`) own nothing; there is nothing to release.
        let inner = if let Some(inner) = self.inner() { inner } else { return };

        inner.dec_weak();
        // the weak count starts at 1, and will only go to zero if all
        // the strong pointers have disappeared.
        if inner.weak() == 0 {
            // SAFETY: this was the last reference of any kind, so the RcInner
            // allocation can be returned to the allocator it came from.
            unsafe {
                self.alloc.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr()));
            }
        }
    }
}
3665
3666#[stable(feature = "rc_weak", since = "1.4.0")]
3667impl<T: ?Sized, A: Allocator + Clone> Clone for Weak<T, A> {
3668    /// Makes a clone of the `Weak` pointer that points to the same allocation.
3669    ///
3670    /// # Examples
3671    ///
3672    /// ```
3673    /// use std::rc::{Rc, Weak};
3674    ///
3675    /// let weak_five = Rc::downgrade(&Rc::new(5));
3676    ///
3677    /// let _ = Weak::clone(&weak_five);
3678    /// ```
3679    #[inline]
3680    fn clone(&self) -> Weak<T, A> {
3681        if let Some(inner) = self.inner() {
3682            inner.inc_weak()
3683        }
3684        Weak { ptr: self.ptr, alloc: self.alloc.clone() }
3685    }
3686}
3687
// Marker opting `Weak` into the ergonomic-clone (`use`) syntax; cloning a Weak
// is a cheap pointer copy plus a count bump (see `Clone` above).
#[unstable(feature = "ergonomic_clones", issue = "132290")]
impl<T: ?Sized, A: Allocator + Clone> UseCloned for Weak<T, A> {}
3690
3691#[stable(feature = "rc_weak", since = "1.4.0")]
3692impl<T: ?Sized, A: Allocator> fmt::Debug for Weak<T, A> {
3693    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3694        write!(f, "(Weak)")
3695    }
3696}
3697
#[stable(feature = "downgraded_weak", since = "1.10.0")]
impl<T> Default for Weak<T> {
    /// Constructs a new `Weak<T>`, without allocating any memory.
    /// Calling [`upgrade`] on the return value always gives [`None`].
    ///
    /// [`upgrade`]: Weak::upgrade
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Weak;
    ///
    /// let empty: Weak<i64> = Default::default();
    /// assert!(empty.upgrade().is_none());
    /// ```
    fn default() -> Weak<T> {
        Weak::new()
    }
}
3717
3718// NOTE: If you mem::forget Rcs (or Weaks), drop is skipped and the ref-count
3719// is not decremented, meaning the ref-count can overflow, and then you can
3720// free the allocation while outstanding Rcs (or Weaks) exist, which would be
3721// unsound. We abort because this is such a degenerate scenario that we don't
3722// care about what happens -- no real program should ever experience this.
3723//
3724// This should have negligible overhead since you don't actually need to
3725// clone these much in Rust thanks to ownership and move-semantics.
3726
// Shared count-manipulation logic for `RcInner` and `WeakInner`.
#[doc(hidden)]
trait RcInnerPtr {
    fn weak_ref(&self) -> &Cell<usize>;
    fn strong_ref(&self) -> &Cell<usize>;

    /// Current strong count.
    #[inline]
    fn strong(&self) -> usize {
        self.strong_ref().get()
    }

    /// Increments the strong count, aborting the process on overflow
    /// (see the NOTE above about `mem::forget` making overflow reachable).
    #[inline]
    fn inc_strong(&self) {
        let strong = self.strong();

        // We insert an `assume` here to hint LLVM at an otherwise
        // missed optimization.
        // SAFETY: The reference count will never be zero when this is
        // called.
        unsafe {
            hint::assert_unchecked(strong != 0);
        }

        // wrapping_add so overflow becomes 0, detected by the check below
        // instead of a debug-mode panic.
        let strong = strong.wrapping_add(1);
        self.strong_ref().set(strong);

        // We want to abort on overflow instead of dropping the value.
        // Checking for overflow after the store instead of before
        // allows for slightly better code generation.
        if core::intrinsics::unlikely(strong == 0) {
            abort();
        }
    }

    /// Decrements the strong count. Callers guarantee it is non-zero
    /// (a 0 - 1 here would panic in debug builds and wrap in release).
    #[inline]
    fn dec_strong(&self) {
        self.strong_ref().set(self.strong() - 1);
    }

    /// Current weak count (includes the implicit weak held by strong pointers).
    #[inline]
    fn weak(&self) -> usize {
        self.weak_ref().get()
    }

    /// Increments the weak count, aborting the process on overflow.
    #[inline]
    fn inc_weak(&self) {
        let weak = self.weak();

        // We insert an `assume` here to hint LLVM at an otherwise
        // missed optimization.
        // SAFETY: The reference count will never be zero when this is
        // called.
        unsafe {
            hint::assert_unchecked(weak != 0);
        }

        let weak = weak.wrapping_add(1);
        self.weak_ref().set(weak);

        // We want to abort on overflow instead of dropping the value.
        // Checking for overflow after the store instead of before
        // allows for slightly better code generation.
        if core::intrinsics::unlikely(weak == 0) {
            abort();
        }
    }

    /// Decrements the weak count. Callers guarantee it is non-zero.
    #[inline]
    fn dec_weak(&self) {
        self.weak_ref().set(self.weak() - 1);
    }
}
3798
// `RcInner` owns its count cells directly.
impl<T: ?Sized> RcInnerPtr for RcInner<T> {
    #[inline(always)]
    fn weak_ref(&self) -> &Cell<usize> {
        &self.weak
    }

    #[inline(always)]
    fn strong_ref(&self) -> &Cell<usize> {
        &self.strong
    }
}
3810
// `WeakInner` borrows the count cells (see `Weak::inner`, which deliberately
// avoids covering the payload with a reference).
impl<'a> RcInnerPtr for WeakInner<'a> {
    #[inline(always)]
    fn weak_ref(&self) -> &Cell<usize> {
        self.weak
    }

    #[inline(always)]
    fn strong_ref(&self) -> &Cell<usize> {
        self.strong
    }
}
3822
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized, A: Allocator> borrow::Borrow<T> for Rc<T, A> {
    // Delegates through `Deref` to the inner value.
    fn borrow(&self) -> &T {
        &**self
    }
}
3829
#[stable(since = "1.5.0", feature = "smart_ptr_as_ref")]
impl<T: ?Sized, A: Allocator> AsRef<T> for Rc<T, A> {
    // Delegates through `Deref` to the inner value.
    fn as_ref(&self) -> &T {
        &**self
    }
}
3836
// Moving the `Rc` handle never moves the heap allocation it points to,
// so `Rc` is unconditionally `Unpin`.
#[stable(feature = "pin", since = "1.33.0")]
impl<T: ?Sized, A: Allocator> Unpin for Rc<T, A> {}
3839
/// Gets the offset within an `RcInner` for the payload behind a pointer.
///
/// # Safety
///
/// The pointer must point to (and have valid metadata for) a previously
/// valid instance of T, but the T is allowed to be dropped.
unsafe fn data_offset<T: ?Sized>(ptr: *const T) -> usize {
    // Align the unsized value to the end of the RcInner.
    // Because RcInner is repr(C), it will always be the last field in memory.
    // SAFETY: since the only unsized types possible are slices, trait objects,
    // and extern types, the input safety requirement is currently enough to
    // satisfy the requirements of Alignment::of_val_raw; this is an implementation
    // detail of the language that must not be relied upon outside of std.
    unsafe { data_offset_alignment(Alignment::of_val_raw(ptr)) }
}
3855
3856#[inline]
3857fn data_offset_alignment(alignment: Alignment) -> usize {
3858    let layout = Layout::new::<RcInner<()>>();
3859    layout.size() + layout.padding_needed_for(alignment)
3860}
3861
/// A uniquely owned [`Rc`].
///
/// This represents an `Rc` that is known to be uniquely owned -- that is, have exactly one strong
/// reference. Multiple weak pointers can be created, but attempts to upgrade those to strong
/// references will fail unless the `UniqueRc` they point to has been converted into a regular `Rc`.
///
/// Because they are uniquely owned, the contents of a `UniqueRc` can be freely mutated. A common
/// use case is to have an object be mutable during its initialization phase but then have it become
/// immutable and converted to a normal `Rc`.
///
/// This can be used as a flexible way to create cyclic data structures, as in the example below.
///
/// ```
/// #![feature(unique_rc_arc)]
/// use std::rc::{Rc, Weak, UniqueRc};
///
/// struct Gadget {
///     #[allow(dead_code)]
///     me: Weak<Gadget>,
/// }
///
/// fn create_gadget() -> Option<Rc<Gadget>> {
///     let mut rc = UniqueRc::new(Gadget {
///         me: Weak::new(),
///     });
///     rc.me = UniqueRc::downgrade(&rc);
///     Some(UniqueRc::into_rc(rc))
/// }
///
/// create_gadget().unwrap();
/// ```
///
/// An advantage of using `UniqueRc` over [`Rc::new_cyclic`] to build cyclic data structures is that
/// [`Rc::new_cyclic`]'s `data_fn` parameter cannot be async or return a [`Result`]. As shown in the
/// previous example, `UniqueRc` allows for more flexibility in the construction of cyclic data,
/// including fallible or async constructors.
#[unstable(feature = "unique_rc_arc", issue = "112566")]
pub struct UniqueRc<
    T: ?Sized,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
    ptr: NonNull<RcInner<T>>,
    // Define the ownership of `RcInner<T>` for drop-check
    _marker: PhantomData<RcInner<T>>,
    // Invariance is necessary for soundness: once other `Weak`
    // references exist, we already have a form of shared mutability!
    _marker2: PhantomData<*mut T>,
    alloc: A,
}
3911
// Not necessary for correctness since `UniqueRc` contains `NonNull`,
// but having an explicit negative impl is nice for documentation purposes
// and results in nicer error messages.
#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T: ?Sized, A: Allocator> !Send for UniqueRc<T, A> {}
3917
// Not necessary for correctness since `UniqueRc` contains `NonNull`,
// but having an explicit negative impl is nice for documentation purposes
// and results in nicer error messages.
#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T: ?Sized, A: Allocator> !Sync for UniqueRc<T, A> {}
3923
// Allows unsizing coercions such as `UniqueRc<[T; N]> -> UniqueRc<[T]>`
// and `UniqueRc<T> -> UniqueRc<dyn Trait>`, mirroring `Rc`.
#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<UniqueRc<U, A>>
    for UniqueRc<T, A>
{
}
3929
// Lets `UniqueRc<Self>` work as a method receiver on trait objects.
//#[unstable(feature = "unique_rc_arc", issue = "112566")]
#[unstable(feature = "dispatch_from_dyn", issue = "none")]
impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<UniqueRc<U>> for UniqueRc<T> {}
3933
#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T: ?Sized + fmt::Display, A: Allocator> fmt::Display for UniqueRc<T, A> {
    // UFCS call disambiguates from a possible `Debug::fmt` on `T`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(&**self, f)
    }
}
3940
#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T: ?Sized + fmt::Debug, A: Allocator> fmt::Debug for UniqueRc<T, A> {
    // UFCS call disambiguates from a possible `Display::fmt` on `T`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(&**self, f)
    }
}
3947
#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T: ?Sized, A: Allocator> fmt::Pointer for UniqueRc<T, A> {
    // Prints the address of the payload (not of the UniqueRc handle itself).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Pointer::fmt(&(&raw const **self), f)
    }
}
3954
#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T: ?Sized, A: Allocator> borrow::Borrow<T> for UniqueRc<T, A> {
    // Delegates through `Deref` to the inner value.
    fn borrow(&self) -> &T {
        &**self
    }
}
3961
#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T: ?Sized, A: Allocator> borrow::BorrowMut<T> for UniqueRc<T, A> {
    // Mutable access is sound because a `UniqueRc` holds the only strong reference.
    fn borrow_mut(&mut self) -> &mut T {
        &mut **self
    }
}
3968
#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T: ?Sized, A: Allocator> AsRef<T> for UniqueRc<T, A> {
    // Delegates through `Deref` to the inner value.
    fn as_ref(&self) -> &T {
        &**self
    }
}
3975
#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T: ?Sized, A: Allocator> AsMut<T> for UniqueRc<T, A> {
    // Mutable access is sound because a `UniqueRc` holds the only strong reference.
    fn as_mut(&mut self) -> &mut T {
        &mut **self
    }
}
3982
// Moving the handle never moves the heap allocation, as with `Rc`.
#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T: ?Sized, A: Allocator> Unpin for UniqueRc<T, A> {}
3985
#[cfg(not(no_global_oom_handling))]
#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T> From<T> for UniqueRc<T> {
    /// Moves `value` into a freshly allocated `UniqueRc` (same as [`UniqueRc::new`]).
    #[inline(always)]
    fn from(value: T) -> Self {
        Self::new(value)
    }
}
3994
3995#[unstable(feature = "unique_rc_arc", issue = "112566")]
3996impl<T: ?Sized + PartialEq, A: Allocator> PartialEq for UniqueRc<T, A> {
3997    /// Equality for two `UniqueRc`s.
3998    ///
3999    /// Two `UniqueRc`s are equal if their inner values are equal.
4000    ///
4001    /// # Examples
4002    ///
4003    /// ```
4004    /// #![feature(unique_rc_arc)]
4005    /// use std::rc::UniqueRc;
4006    ///
4007    /// let five = UniqueRc::new(5);
4008    ///
4009    /// assert!(five == UniqueRc::new(5));
4010    /// ```
4011    #[inline]
4012    fn eq(&self, other: &Self) -> bool {
4013        PartialEq::eq(&**self, &**other)
4014    }
4015
4016    /// Inequality for two `UniqueRc`s.
4017    ///
4018    /// Two `UniqueRc`s are not equal if their inner values are not equal.
4019    ///
4020    /// # Examples
4021    ///
4022    /// ```
4023    /// #![feature(unique_rc_arc)]
4024    /// use std::rc::UniqueRc;
4025    ///
4026    /// let five = UniqueRc::new(5);
4027    ///
4028    /// assert!(five != UniqueRc::new(6));
4029    /// ```
4030    #[inline]
4031    fn ne(&self, other: &Self) -> bool {
4032        PartialEq::ne(&**self, &**other)
4033    }
4034}
4035
4036#[unstable(feature = "unique_rc_arc", issue = "112566")]
4037impl<T: ?Sized + PartialOrd, A: Allocator> PartialOrd for UniqueRc<T, A> {
4038    /// Partial comparison for two `UniqueRc`s.
4039    ///
4040    /// The two are compared by calling `partial_cmp()` on their inner values.
4041    ///
4042    /// # Examples
4043    ///
4044    /// ```
4045    /// #![feature(unique_rc_arc)]
4046    /// use std::rc::UniqueRc;
4047    /// use std::cmp::Ordering;
4048    ///
4049    /// let five = UniqueRc::new(5);
4050    ///
4051    /// assert_eq!(Some(Ordering::Less), five.partial_cmp(&UniqueRc::new(6)));
4052    /// ```
4053    #[inline(always)]
4054    fn partial_cmp(&self, other: &UniqueRc<T, A>) -> Option<Ordering> {
4055        (**self).partial_cmp(&**other)
4056    }
4057
4058    /// Less-than comparison for two `UniqueRc`s.
4059    ///
4060    /// The two are compared by calling `<` on their inner values.
4061    ///
4062    /// # Examples
4063    ///
4064    /// ```
4065    /// #![feature(unique_rc_arc)]
4066    /// use std::rc::UniqueRc;
4067    ///
4068    /// let five = UniqueRc::new(5);
4069    ///
4070    /// assert!(five < UniqueRc::new(6));
4071    /// ```
4072    #[inline(always)]
4073    fn lt(&self, other: &UniqueRc<T, A>) -> bool {
4074        **self < **other
4075    }
4076
4077    /// 'Less than or equal to' comparison for two `UniqueRc`s.
4078    ///
4079    /// The two are compared by calling `<=` on their inner values.
4080    ///
4081    /// # Examples
4082    ///
4083    /// ```
4084    /// #![feature(unique_rc_arc)]
4085    /// use std::rc::UniqueRc;
4086    ///
4087    /// let five = UniqueRc::new(5);
4088    ///
4089    /// assert!(five <= UniqueRc::new(5));
4090    /// ```
4091    #[inline(always)]
4092    fn le(&self, other: &UniqueRc<T, A>) -> bool {
4093        **self <= **other
4094    }
4095
4096    /// Greater-than comparison for two `UniqueRc`s.
4097    ///
4098    /// The two are compared by calling `>` on their inner values.
4099    ///
4100    /// # Examples
4101    ///
4102    /// ```
4103    /// #![feature(unique_rc_arc)]
4104    /// use std::rc::UniqueRc;
4105    ///
4106    /// let five = UniqueRc::new(5);
4107    ///
4108    /// assert!(five > UniqueRc::new(4));
4109    /// ```
4110    #[inline(always)]
4111    fn gt(&self, other: &UniqueRc<T, A>) -> bool {
4112        **self > **other
4113    }
4114
4115    /// 'Greater than or equal to' comparison for two `UniqueRc`s.
4116    ///
4117    /// The two are compared by calling `>=` on their inner values.
4118    ///
4119    /// # Examples
4120    ///
4121    /// ```
4122    /// #![feature(unique_rc_arc)]
4123    /// use std::rc::UniqueRc;
4124    ///
4125    /// let five = UniqueRc::new(5);
4126    ///
4127    /// assert!(five >= UniqueRc::new(5));
4128    /// ```
4129    #[inline(always)]
4130    fn ge(&self, other: &UniqueRc<T, A>) -> bool {
4131        **self >= **other
4132    }
4133}
4134
4135#[unstable(feature = "unique_rc_arc", issue = "112566")]
4136impl<T: ?Sized + Ord, A: Allocator> Ord for UniqueRc<T, A> {
4137    /// Comparison for two `UniqueRc`s.
4138    ///
4139    /// The two are compared by calling `cmp()` on their inner values.
4140    ///
4141    /// # Examples
4142    ///
4143    /// ```
4144    /// #![feature(unique_rc_arc)]
4145    /// use std::rc::UniqueRc;
4146    /// use std::cmp::Ordering;
4147    ///
4148    /// let five = UniqueRc::new(5);
4149    ///
4150    /// assert_eq!(Ordering::Less, five.cmp(&UniqueRc::new(6)));
4151    /// ```
4152    #[inline]
4153    fn cmp(&self, other: &UniqueRc<T, A>) -> Ordering {
4154        (**self).cmp(&**other)
4155    }
4156}
4157
// `Eq` holds whenever the inner type's equality is total.
#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T: ?Sized + Eq, A: Allocator> Eq for UniqueRc<T, A> {}
4160
4161#[unstable(feature = "unique_rc_arc", issue = "112566")]
4162impl<T: ?Sized + Hash, A: Allocator> Hash for UniqueRc<T, A> {
4163    fn hash<H: Hasher>(&self, state: &mut H) {
4164        (**self).hash(state);
4165    }
4166}
4167
// Depends on A = Global
impl<T> UniqueRc<T> {
    /// Creates a new `UniqueRc`.
    ///
    /// Weak references to this `UniqueRc` can be created with [`UniqueRc::downgrade`]. Upgrading
    /// these weak references will fail before the `UniqueRc` has been converted into an [`Rc`].
    /// After converting the `UniqueRc` into an [`Rc`], any weak references created beforehand will
    /// point to the new [`Rc`].
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "unique_rc_arc", issue = "112566")]
    pub fn new(value: T) -> Self {
        Self::new_in(value, Global)
    }

    /// Maps the value in a `UniqueRc`, reusing the allocation if possible.
    ///
    /// `f` is called on a reference to the value in the `UniqueRc`, and the result is returned,
    /// also in a `UniqueRc`.
    ///
    /// Note: this is an associated function, which means that you have
    /// to call it as `UniqueRc::map(u, f)` instead of `u.map(f)`. This
    /// is so that there is no conflict with a method on the inner type.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(smart_pointer_try_map)]
    /// #![feature(unique_rc_arc)]
    ///
    /// use std::rc::UniqueRc;
    ///
    /// let r = UniqueRc::new(7);
    /// let new = UniqueRc::map(r, |i| i + 7);
    /// assert_eq!(*new, 14);
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "smart_pointer_try_map", issue = "144419")]
    pub fn map<U>(this: Self, f: impl FnOnce(T) -> U) -> UniqueRc<U> {
        // The allocation can only be reused when the layout is unchanged and no
        // `Weak`s observe it: a weak pointer would otherwise see the payload
        // change type underneath it.
        if size_of::<T>() == size_of::<U>()
            && align_of::<T>() == align_of::<U>()
            && UniqueRc::weak_count(&this) == 0
        {
            // SAFETY: `ptr` came from `into_raw` on a live UniqueRc, so it is
            // valid for reads; ownership of the value is moved out exactly once,
            // and the allocation is immediately re-adopted as uninitialized
            // storage of the same size and alignment before being re-filled.
            unsafe {
                let ptr = UniqueRc::into_raw(this);
                let value = ptr.read();
                let mut allocation = UniqueRc::from_raw(ptr.cast::<mem::MaybeUninit<U>>());

                allocation.write(f(value));
                allocation.assume_init()
            }
        } else {
            // Layout differs (or weaks exist): move the value out and allocate afresh.
            UniqueRc::new(f(UniqueRc::unwrap(this)))
        }
    }

    /// Attempts to map the value in a `UniqueRc`, reusing the allocation if possible.
    ///
    /// `f` is called on a reference to the value in the `UniqueRc`, and if the operation succeeds,
    /// the result is returned, also in a `UniqueRc`.
    ///
    /// Note: this is an associated function, which means that you have
    /// to call it as `UniqueRc::try_map(u, f)` instead of `u.try_map(f)`. This
    /// is so that there is no conflict with a method on the inner type.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(smart_pointer_try_map)]
    /// #![feature(unique_rc_arc)]
    ///
    /// use std::rc::UniqueRc;
    ///
    /// let b = UniqueRc::new(7);
    /// let new = UniqueRc::try_map(b, u32::try_from).unwrap();
    /// assert_eq!(*new, 7);
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "smart_pointer_try_map", issue = "144419")]
    pub fn try_map<R>(
        this: Self,
        f: impl FnOnce(T) -> R,
    ) -> <R::Residual as Residual<UniqueRc<R::Output>>>::TryType
    where
        R: Try,
        R::Residual: Residual<UniqueRc<R::Output>>,
    {
        // Same reuse conditions as `map`: unchanged layout and no outstanding weaks.
        if size_of::<T>() == size_of::<R::Output>()
            && align_of::<T>() == align_of::<R::Output>()
            && UniqueRc::weak_count(&this) == 0
        {
            // SAFETY: as in `map`. Note that if `f(value)?` propagates the failure,
            // `allocation` (a UniqueRc of MaybeUninit) is dropped without assuming
            // initialization, so the moved-out value is not dropped twice.
            unsafe {
                let ptr = UniqueRc::into_raw(this);
                let value = ptr.read();
                let mut allocation = UniqueRc::from_raw(ptr.cast::<mem::MaybeUninit<R::Output>>());

                allocation.write(f(value)?);
                try { allocation.assume_init() }
            }
        } else {
            try { UniqueRc::new(f(UniqueRc::unwrap(this))?) }
        }
    }

    /// Moves the value out of the `UniqueRc`, releasing the allocation.
    #[cfg(not(no_global_oom_handling))]
    fn unwrap(this: Self) -> T {
        // Suppress UniqueRc's own drop; we dismantle it manually below.
        let this = ManuallyDrop::new(this);
        // SAFETY: a UniqueRc holds the sole strong reference, so the value can
        // be moved out by a raw read; the binding below ensures the allocation
        // itself is still released.
        let val: T = unsafe { ptr::read(&**this) };

        // Hand the implicit weak reference to a `Weak`, whose Drop decrements the
        // weak count and frees the allocation once no weak pointers remain.
        let _weak = Weak { ptr: this.ptr, alloc: Global };

        val
    }
}
4281
impl<T: ?Sized> UniqueRc<T> {
    /// Reconstructs a `UniqueRc` from a payload pointer produced by [`UniqueRc::into_raw`].
    ///
    /// # Safety
    ///
    /// `ptr` must have come from `UniqueRc::into_raw` and must still own the
    /// unique reference it represents (used by `map`/`try_map` above).
    #[cfg(not(no_global_oom_handling))]
    unsafe fn from_raw(ptr: *const T) -> Self {
        // SAFETY (per caller contract): ptr references a real (possibly dropped) T.
        let offset = unsafe { data_offset(ptr) };

        // Reverse the offset to find the original RcInner.
        let rc_ptr = unsafe { ptr.byte_sub(offset) as *mut RcInner<T> };

        Self {
            ptr: unsafe { NonNull::new_unchecked(rc_ptr) },
            _marker: PhantomData,
            _marker2: PhantomData,
            alloc: Global,
        }
    }

    /// Consumes the `UniqueRc` and returns a raw pointer to its payload,
    /// without touching the reference counts (Drop is suppressed).
    #[cfg(not(no_global_oom_handling))]
    fn into_raw(this: Self) -> *const T {
        let this = ManuallyDrop::new(this);
        Self::as_ptr(&*this)
    }
}
4304
4305impl<T, A: Allocator> UniqueRc<T, A> {
4306    /// Creates a new `UniqueRc` in the provided allocator.
4307    ///
4308    /// Weak references to this `UniqueRc` can be created with [`UniqueRc::downgrade`]. Upgrading
4309    /// these weak references will fail before the `UniqueRc` has been converted into an [`Rc`].
4310    /// After converting the `UniqueRc` into an [`Rc`], any weak references created beforehand will
4311    /// point to the new [`Rc`].
4312    #[cfg(not(no_global_oom_handling))]
4313    #[unstable(feature = "unique_rc_arc", issue = "112566")]
4314    pub fn new_in(value: T, alloc: A) -> Self {
4315        let (ptr, alloc) = Box::into_unique(Box::new_in(
4316            RcInner {
4317                strong: Cell::new(0),
4318                // keep one weak reference so if all the weak pointers that are created are dropped
4319                // the UniqueRc still stays valid.
4320                weak: Cell::new(1),
4321                value,
4322            },
4323            alloc,
4324        ));
4325        Self { ptr: ptr.into(), _marker: PhantomData, _marker2: PhantomData, alloc }
4326    }
4327}
4328
impl<T: ?Sized, A: Allocator> UniqueRc<T, A> {
    /// Converts the `UniqueRc` into a regular [`Rc`].
    ///
    /// This consumes the `UniqueRc` and returns a regular [`Rc`] that contains the `value` that
    /// is passed to `into_rc`.
    ///
    /// Any weak references created before this method is called can now be upgraded to strong
    /// references.
    #[unstable(feature = "unique_rc_arc", issue = "112566")]
    pub fn into_rc(this: Self) -> Rc<T, A> {
        // Suppress `UniqueRc`'s destructor: ownership of the allocation (and of
        // the implicit weak reference) transfers to the returned `Rc`.
        let mut this = ManuallyDrop::new(this);

        // Move the allocator out.
        // SAFETY: `this.alloc` will not be accessed again, nor dropped because it is in
        // a `ManuallyDrop`.
        let alloc: A = unsafe { ptr::read(&this.alloc) };

        // SAFETY: This pointer was allocated at creation time so we know it is valid.
        unsafe {
            // Convert our weak reference into a strong reference
            this.ptr.as_mut().strong.set(1);
            Rc::from_inner_in(this.ptr, alloc)
        }
    }

    /// Returns the number of weak references created via [`UniqueRc::downgrade`],
    /// excluding the implicit weak reference that every `UniqueRc` holds itself.
    #[cfg(not(no_global_oom_handling))]
    fn weak_count(this: &Self) -> usize {
        this.inner().weak() - 1
    }

    /// Borrows the shared `RcInner` (reference counts plus value).
    #[cfg(not(no_global_oom_handling))]
    fn inner(&self) -> &RcInner<T> {
        // SAFETY: while this UniqueRc is alive we're guaranteed that the inner pointer is valid.
        unsafe { self.ptr.as_ref() }
    }

    /// Returns a raw pointer to the contained value without consuming `this`.
    #[cfg(not(no_global_oom_handling))]
    fn as_ptr(this: &Self) -> *const T {
        let ptr: *mut RcInner<T> = NonNull::as_ptr(this.ptr);

        // SAFETY: This cannot go through Deref::deref or UniqueRc::inner because
        // this is required to retain raw/mut provenance such that e.g. `get_mut` can
        // write through the pointer after the Rc is recovered through `from_raw`.
        unsafe { &raw mut (*ptr).value }
    }

    /// Decomposes `this` into its inner pointer and allocator without running
    /// the destructor.
    #[inline]
    #[cfg(not(no_global_oom_handling))]
    fn into_inner_with_allocator(this: Self) -> (NonNull<RcInner<T>>, A) {
        let this = mem::ManuallyDrop::new(this);
        // SAFETY: `this` is wrapped in `ManuallyDrop`, so `alloc` is neither
        // accessed again nor dropped after being moved out here.
        (this.ptr, unsafe { ptr::read(&this.alloc) })
    }

    /// Rebuilds a `UniqueRc` from an inner pointer and allocator.
    ///
    /// # Safety
    ///
    /// `ptr` must point to a live `RcInner` of which the new `UniqueRc` may
    /// take unique ownership (e.g. one obtained from
    /// [`Self::into_inner_with_allocator`]).
    #[inline]
    #[cfg(not(no_global_oom_handling))]
    unsafe fn from_inner_in(ptr: NonNull<RcInner<T>>, alloc: A) -> Self {
        Self { ptr, _marker: PhantomData, _marker2: PhantomData, alloc }
    }
}
4388
impl<T: ?Sized, A: Allocator + Clone> UniqueRc<T, A> {
    /// Creates a new weak reference to the `UniqueRc`.
    ///
    /// Attempting to upgrade this weak reference will fail before the `UniqueRc` has been converted
    /// to a [`Rc`] using [`UniqueRc::into_rc`].
    #[unstable(feature = "unique_rc_arc", issue = "112566")]
    pub fn downgrade(this: &Self) -> Weak<T, A> {
        // SAFETY: This pointer was allocated at creation time and we guarantee that we only have
        // one strong reference before converting to a regular Rc.
        unsafe {
            this.ptr.as_ref().inc_weak();
        }
        // The new `Weak` shares the allocation; upgrading fails while `strong`
        // is still 0, i.e. until `into_rc` has been called.
        Weak { ptr: this.ptr, alloc: this.alloc.clone() }
    }
}
4404
#[cfg(not(no_global_oom_handling))]
impl<T, A: Allocator> UniqueRc<mem::MaybeUninit<T>, A> {
    /// Converts `UniqueRc<MaybeUninit<T>>` into `UniqueRc<T>`, reusing the same
    /// allocation.
    ///
    /// # Safety
    ///
    /// The contained `MaybeUninit<T>` must have been fully initialized.
    unsafe fn assume_init(self) -> UniqueRc<T, A> {
        let (ptr, alloc) = UniqueRc::into_inner_with_allocator(self);
        // SAFETY: `MaybeUninit<T>` is guaranteed to have the same layout as
        // `T`, and the caller guarantees the value is initialized, so the
        // pointer cast is sound.
        unsafe { UniqueRc::from_inner_in(ptr.cast(), alloc) }
    }
}
4412
4413#[unstable(feature = "unique_rc_arc", issue = "112566")]
4414impl<T: ?Sized, A: Allocator> Deref for UniqueRc<T, A> {
4415    type Target = T;
4416
4417    fn deref(&self) -> &T {
4418        // SAFETY: This pointer was allocated at creation time so we know it is valid.
4419        unsafe { &self.ptr.as_ref().value }
4420    }
4421}
4422
#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T: ?Sized, A: Allocator> DerefMut for UniqueRc<T, A> {
    fn deref_mut(&mut self) -> &mut T {
        // SAFETY: This pointer was allocated at creation time so we know it is valid. We know we
        // have unique ownership and therefore it's safe to make a mutable reference because
        // `UniqueRc` owns the only strong reference to itself.
        // (Weak pointers may exist, but they cannot be upgraded while `strong`
        // is 0, so no other live reference to the value can be produced.)
        unsafe { &mut (*self.ptr.as_ptr()).value }
    }
}
4432
#[unstable(feature = "unique_rc_arc", issue = "112566")]
// `#[may_dangle]` (dropck eyepatch) asserts that this destructor only drops `T`
// and never otherwise inspects it, which relaxes the drop-check so a
// `UniqueRc<T>` may be dropped even when `T` contains references that are
// already expired.
unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for UniqueRc<T, A> {
    fn drop(&mut self) {
        unsafe {
            // destroy the contained object
            drop_in_place(DerefMut::deref_mut(self));

            // remove the implicit "strong weak" pointer now that we've destroyed the contents.
            self.ptr.as_ref().dec_weak();

            // If no other `Weak` pointers remain, free the allocation itself.
            if self.ptr.as_ref().weak() == 0 {
                self.alloc.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr()));
            }
        }
    }
}
4449
/// A unique owning pointer to a [`RcInner`] **that does not imply the contents are initialized,**
/// but will deallocate it (without dropping the value) when dropped.
///
/// This is a helper for [`Rc::make_mut()`] to ensure correct cleanup on panic.
/// It is nearly a duplicate of `UniqueRc<MaybeUninit<T>, A>` except that it allows `T: !Sized`,
/// which `MaybeUninit` does not.
struct UniqueRcUninit<T: ?Sized, A: Allocator> {
    // Pointer to the (possibly uninitialized) `RcInner` allocation.
    ptr: NonNull<RcInner<T>>,
    // Layout of the value, remembered so the allocation layout can be
    // recomputed on drop even though `T: ?Sized`.
    layout_for_value: Layout,
    // `Some` until consumed: `into_rc()` takes it (and suppresses `Drop`),
    // otherwise `Drop` takes it to deallocate — so it is used exactly once.
    alloc: Option<A>,
}
4461
impl<T: ?Sized, A: Allocator> UniqueRcUninit<T, A> {
    /// Allocates a RcInner with layout suitable to contain `for_value` or a clone of it.
    #[cfg(not(no_global_oom_handling))]
    fn new(for_value: &T, alloc: A) -> UniqueRcUninit<T, A> {
        let layout = Layout::for_value(for_value);
        let ptr = unsafe {
            Rc::allocate_for_layout(
                layout,
                |layout_for_rc_inner| alloc.allocate(layout_for_rc_inner),
                // Copy the pointer metadata (slice length / vtable) from
                // `for_value` so the result is a valid `*mut RcInner<T>`.
                |mem| mem.with_metadata_of(ptr::from_ref(for_value) as *const RcInner<T>),
            )
        };
        Self { ptr: NonNull::new(ptr).unwrap(), layout_for_value: layout, alloc: Some(alloc) }
    }

    /// Allocates a RcInner with layout suitable to contain `for_value` or a clone of it,
    /// returning an error if allocation fails.
    fn try_new(for_value: &T, alloc: A) -> Result<UniqueRcUninit<T, A>, AllocError> {
        let layout = Layout::for_value(for_value);
        let ptr = unsafe {
            // Same as `new` above, but the allocation failure is propagated to
            // the caller instead of being handled globally.
            Rc::try_allocate_for_layout(
                layout,
                |layout_for_rc_inner| alloc.allocate(layout_for_rc_inner),
                |mem| mem.with_metadata_of(ptr::from_ref(for_value) as *const RcInner<T>),
            )?
        };
        Ok(Self { ptr: NonNull::new(ptr).unwrap(), layout_for_value: layout, alloc: Some(alloc) })
    }

    /// Returns the pointer to be written into to initialize the [`Rc`].
    fn data_ptr(&mut self) -> *mut T {
        // Offset of the value field inside the `RcInner`, determined by the
        // value's alignment.
        let offset = data_offset_alignment(self.layout_for_value.alignment());
        unsafe { self.ptr.as_ptr().byte_add(offset) as *mut T }
    }

    /// Upgrade this into a normal [`Rc`].
    ///
    /// # Safety
    ///
    /// The data must have been initialized (by writing to [`Self::data_ptr()`]).
    unsafe fn into_rc(self) -> Rc<T, A> {
        // `ManuallyDrop` prevents our own `Drop` impl from deallocating the
        // memory that the returned `Rc` now owns.
        let mut this = ManuallyDrop::new(self);
        let ptr = this.ptr;
        let alloc = this.alloc.take().unwrap();

        // SAFETY: The pointer is valid as per `UniqueRcUninit::new`, and the caller is responsible
        // for having initialized the data.
        unsafe { Rc::from_ptr_in(ptr.as_ptr(), alloc) }
    }
}
4512
impl<T: ?Sized, A: Allocator> Drop for UniqueRcUninit<T, A> {
    fn drop(&mut self) {
        // Deallocate without dropping the (possibly uninitialized) value.
        // SAFETY:
        // * new() produced a pointer safe to deallocate.
        // * We own the pointer unless into_rc() was called, which forgets us.
        unsafe {
            self.alloc.take().unwrap().deallocate(
                self.ptr.cast(),
                rc_inner_layout_for_value_layout(self.layout_for_value),
            );
        }
    }
}
4526
4527#[unstable(feature = "allocator_api", issue = "32838")]
4528unsafe impl<T: ?Sized + Allocator, A: Allocator> Allocator for Rc<T, A> {
4529    #[inline]
4530    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
4531        (**self).allocate(layout)
4532    }
4533
4534    #[inline]
4535    fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
4536        (**self).allocate_zeroed(layout)
4537    }
4538
4539    #[inline]
4540    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
4541        // SAFETY: the safety contract must be upheld by the caller
4542        unsafe { (**self).deallocate(ptr, layout) }
4543    }
4544
4545    #[inline]
4546    unsafe fn grow(
4547        &self,
4548        ptr: NonNull<u8>,
4549        old_layout: Layout,
4550        new_layout: Layout,
4551    ) -> Result<NonNull<[u8]>, AllocError> {
4552        // SAFETY: the safety contract must be upheld by the caller
4553        unsafe { (**self).grow(ptr, old_layout, new_layout) }
4554    }
4555
4556    #[inline]
4557    unsafe fn grow_zeroed(
4558        &self,
4559        ptr: NonNull<u8>,
4560        old_layout: Layout,
4561        new_layout: Layout,
4562    ) -> Result<NonNull<[u8]>, AllocError> {
4563        // SAFETY: the safety contract must be upheld by the caller
4564        unsafe { (**self).grow_zeroed(ptr, old_layout, new_layout) }
4565    }
4566
4567    #[inline]
4568    unsafe fn shrink(
4569        &self,
4570        ptr: NonNull<u8>,
4571        old_layout: Layout,
4572        new_layout: Layout,
4573    ) -> Result<NonNull<[u8]>, AllocError> {
4574        // SAFETY: the safety contract must be upheld by the caller
4575        unsafe { (**self).shrink(ptr, old_layout, new_layout) }
4576    }
4577}