alloc/rc.rs
//! Single-threaded reference-counting pointers. 'Rc' stands for 'Reference
//! Counted'.
//!
//! The type [`Rc<T>`][`Rc`] provides shared ownership of a value of type `T`,
//! allocated in the heap. Invoking [`clone`][clone] on [`Rc`] produces a new
//! pointer to the same allocation in the heap. When the last [`Rc`] pointer to a
//! given allocation is destroyed, the value stored in that allocation (often
//! referred to as "inner value") is also dropped.
//!
//! Shared references in Rust disallow mutation by default, and [`Rc`]
//! is no exception: you cannot generally obtain a mutable reference to
//! something inside an [`Rc`]. If you need mutability, put a [`Cell`]
//! or [`RefCell`] inside the [`Rc`]; see [an example of mutability
//! inside an `Rc`][mutability].
//!
//! [`Rc`] uses non-atomic reference counting. This means that overhead is very
//! low, but an [`Rc`] cannot be sent between threads, and consequently [`Rc`]
//! does not implement [`Send`]. As a result, the Rust compiler
//! will check *at compile time* that you are not sending [`Rc`]s between
//! threads. If you need multi-threaded, atomic reference counting, use
//! [`sync::Arc`][arc].
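//!
//! For example, moving an [`Rc`] into another thread is rejected at compile time:
//!
//! ```compile_fail
//! use std::rc::Rc;
//! use std::thread;
//!
//! let rc = Rc::new(5);
//! // Error: `Rc<i32>` cannot be sent between threads safely.
//! thread::spawn(move || println!("{rc}"));
//! ```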
//!
//! The [`downgrade`][downgrade] method can be used to create a non-owning
//! [`Weak`] pointer. A [`Weak`] pointer can be [`upgrade`][upgrade]d
//! to an [`Rc`], but this will return [`None`] if the value stored in the allocation has
//! already been dropped. In other words, `Weak` pointers do not keep the value
//! inside the allocation alive; however, they *do* keep the allocation
//! (the backing store for the inner value) alive.
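//!
//! A short illustration of these semantics:
//!
//! ```
//! use std::rc::Rc;
//!
//! let strong = Rc::new(5);
//! let weak = Rc::downgrade(&strong);
//! assert_eq!(*weak.upgrade().unwrap(), 5);
//!
//! // Dropping the last strong pointer drops the value, so upgrading now fails.
//! drop(strong);
//! assert!(weak.upgrade().is_none());
//! ```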
//!
//! A cycle between [`Rc`] pointers will never be deallocated. For this reason,
//! [`Weak`] is used to break cycles. For example, a tree could have strong
//! [`Rc`] pointers from parent nodes to children, and [`Weak`] pointers from
//! children back to their parents.
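//!
//! A minimal sketch of such a cycle (the `Node` type here is purely illustrative):
//!
//! ```
//! use std::cell::RefCell;
//! use std::rc::Rc;
//!
//! struct Node {
//!     next: RefCell<Option<Rc<Node>>>,
//! }
//!
//! let a = Rc::new(Node { next: RefCell::new(None) });
//! let b = Rc::new(Node { next: RefCell::new(Some(Rc::clone(&a))) });
//! *a.next.borrow_mut() = Some(Rc::clone(&b));
//!
//! // `a` and `b` now keep each other's strong counts above zero, so if the
//! // cycle were left in place, neither allocation would ever be freed.
//! // Storing `Weak<Node>` in `next` instead would avoid that; here we just
//! // break the cycle by hand so this example does not leak.
//! *a.next.borrow_mut() = None;
//! ```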
//!
//! `Rc<T>` automatically dereferences to `T` (via the [`Deref`] trait),
//! so you can call `T`'s methods on a value of type [`Rc<T>`][`Rc`]. To avoid name
//! clashes with `T`'s methods, the methods of [`Rc<T>`][`Rc`] itself are associated
//! functions, called using [fully qualified syntax]:
//!
//! ```
//! use std::rc::Rc;
//!
//! let my_rc = Rc::new(());
//! let my_weak = Rc::downgrade(&my_rc);
//! ```
//!
//! `Rc<T>`'s implementations of traits like `Clone` may also be called using
//! fully qualified syntax. Some people prefer to use fully qualified syntax,
//! while others prefer using method-call syntax.
//!
//! ```
//! use std::rc::Rc;
//!
//! let rc = Rc::new(());
//! // Method-call syntax
//! let rc2 = rc.clone();
//! // Fully qualified syntax
//! let rc3 = Rc::clone(&rc);
//! ```
//!
//! [`Weak<T>`][`Weak`] does not auto-dereference to `T`, because the inner value may have
//! already been dropped.
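//!
//! Instead, the value behind a [`Weak`] pointer must first be recovered with
//! [`upgrade`][upgrade]:
//!
//! ```
//! use std::rc::Rc;
//!
//! let five = Rc::new(5);
//! let weak_five = Rc::downgrade(&five);
//! // No `Deref` on `Weak`; go through `upgrade` to reach the value.
//! assert_eq!(*weak_five.upgrade().unwrap(), 5);
//! ```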
//!
//! # Cloning references
//!
//! Creating a new reference to the same allocation as an existing reference counted pointer
//! is done using the `Clone` trait implemented for [`Rc<T>`][`Rc`] and [`Weak<T>`][`Weak`].
//!
//! ```
//! use std::rc::Rc;
//!
//! let foo = Rc::new(vec![1.0, 2.0, 3.0]);
//! // The two syntaxes below are equivalent.
//! let a = foo.clone();
//! let b = Rc::clone(&foo);
//! // a and b both point to the same memory location as foo.
//! ```
//!
//! The `Rc::clone(&from)` syntax is the most idiomatic because it conveys more explicitly
//! the meaning of the code. In the example above, this syntax makes it easier to see that
//! this code is creating a new reference rather than copying the whole content of `foo`.
//!
//! # Examples
//!
//! Consider a scenario where a set of `Gadget`s are owned by a given `Owner`.
//! We want to have our `Gadget`s point to their `Owner`. We can't do this with
//! unique ownership, because more than one gadget may belong to the same
//! `Owner`. [`Rc`] allows us to share an `Owner` between multiple `Gadget`s,
//! and have the `Owner` remain allocated as long as any `Gadget` points at it.
//!
//! ```
//! use std::rc::Rc;
//!
//! struct Owner {
//!     name: String,
//!     // ...other fields
//! }
//!
//! struct Gadget {
//!     id: i32,
//!     owner: Rc<Owner>,
//!     // ...other fields
//! }
//!
//! fn main() {
//!     // Create a reference-counted `Owner`.
//!     let gadget_owner: Rc<Owner> = Rc::new(
//!         Owner {
//!             name: "Gadget Man".to_string(),
//!         }
//!     );
//!
//!     // Create `Gadget`s belonging to `gadget_owner`. Cloning the `Rc<Owner>`
//!     // gives us a new pointer to the same `Owner` allocation, incrementing
//!     // the reference count in the process.
//!     let gadget1 = Gadget {
//!         id: 1,
//!         owner: Rc::clone(&gadget_owner),
//!     };
//!     let gadget2 = Gadget {
//!         id: 2,
//!         owner: Rc::clone(&gadget_owner),
//!     };
//!
//!     // Dispose of our local variable `gadget_owner`.
//!     drop(gadget_owner);
//!
//!     // Despite dropping `gadget_owner`, we're still able to print out the name
//!     // of the `Owner` of the `Gadget`s. This is because we've only dropped a
//!     // single `Rc<Owner>`, not the `Owner` it points to. As long as there are
//!     // other `Rc<Owner>` pointing at the same `Owner` allocation, it will remain
//!     // live. The field projection `gadget1.owner.name` works because
//!     // `Rc<Owner>` automatically dereferences to `Owner`.
//!     println!("Gadget {} owned by {}", gadget1.id, gadget1.owner.name);
//!     println!("Gadget {} owned by {}", gadget2.id, gadget2.owner.name);
//!
//!     // At the end of the function, `gadget1` and `gadget2` are destroyed, and
//!     // with them the last counted references to our `Owner`. Gadget Man now
//!     // gets destroyed as well.
//! }
//! ```
//!
//! If our requirements change, and we also need to be able to traverse from
//! `Owner` to `Gadget`, we will run into problems. An [`Rc`] pointer from `Owner`
//! to `Gadget` introduces a cycle. This means that their
//! reference counts can never reach 0, and the allocation will never be destroyed:
//! a memory leak. In order to get around this, we can use [`Weak`]
//! pointers.
//!
//! Rust actually makes it somewhat difficult to produce this loop in the first
//! place. In order to end up with two values that point at each other, one of
//! them needs to be mutable. This is difficult because [`Rc`] enforces
//! memory safety by only giving out shared references to the value it wraps,
//! and these don't allow direct mutation. We need to wrap the part of the
//! value we wish to mutate in a [`RefCell`], which provides *interior
//! mutability*: a method to achieve mutability through a shared reference.
//! [`RefCell`] enforces Rust's borrowing rules at runtime.
//!
//! ```
//! use std::rc::Rc;
//! use std::rc::Weak;
//! use std::cell::RefCell;
//!
//! struct Owner {
//!     name: String,
//!     gadgets: RefCell<Vec<Weak<Gadget>>>,
//!     // ...other fields
//! }
//!
//! struct Gadget {
//!     id: i32,
//!     owner: Rc<Owner>,
//!     // ...other fields
//! }
//!
//! fn main() {
//!     // Create a reference-counted `Owner`. Note that we've put the `Owner`'s
//!     // vector of `Gadget`s inside a `RefCell` so that we can mutate it through
//!     // a shared reference.
//!     let gadget_owner: Rc<Owner> = Rc::new(
//!         Owner {
//!             name: "Gadget Man".to_string(),
//!             gadgets: RefCell::new(vec![]),
//!         }
//!     );
//!
//!     // Create `Gadget`s belonging to `gadget_owner`, as before.
//!     let gadget1 = Rc::new(
//!         Gadget {
//!             id: 1,
//!             owner: Rc::clone(&gadget_owner),
//!         }
//!     );
//!     let gadget2 = Rc::new(
//!         Gadget {
//!             id: 2,
//!             owner: Rc::clone(&gadget_owner),
//!         }
//!     );
//!
//!     // Add the `Gadget`s to their `Owner`.
//!     {
//!         let mut gadgets = gadget_owner.gadgets.borrow_mut();
//!         gadgets.push(Rc::downgrade(&gadget1));
//!         gadgets.push(Rc::downgrade(&gadget2));
//!
//!         // `RefCell` dynamic borrow ends here.
//!     }
//!
//!     // Iterate over our `Gadget`s, printing their details out.
//!     for gadget_weak in gadget_owner.gadgets.borrow().iter() {
//!
//!         // `gadget_weak` is a `Weak<Gadget>`. Since `Weak` pointers can't
//!         // guarantee the allocation still exists, we need to call
//!         // `upgrade`, which returns an `Option<Rc<Gadget>>`.
//!         //
//!         // In this case we know the allocation still exists, so we simply
//!         // `unwrap` the `Option`. In a more complicated program, you might
//!         // need graceful error handling for a `None` result.
//!
//!         let gadget = gadget_weak.upgrade().unwrap();
//!         println!("Gadget {} owned by {}", gadget.id, gadget.owner.name);
//!     }
//!
//!     // At the end of the function, `gadget_owner`, `gadget1`, and `gadget2`
//!     // are destroyed. There are now no strong (`Rc`) pointers to the
//!     // gadgets, so they are destroyed. This zeroes the reference count on
//!     // Gadget Man, so he gets destroyed as well.
//! }
//! ```
//!
//! [clone]: Clone::clone
//! [`Cell`]: core::cell::Cell
//! [`RefCell`]: core::cell::RefCell
//! [arc]: crate::sync::Arc
//! [`Deref`]: core::ops::Deref
//! [downgrade]: Rc::downgrade
//! [upgrade]: Weak::upgrade
//! [mutability]: core::cell#introducing-mutability-inside-of-something-immutable
//! [fully qualified syntax]: https://doc.rust-lang.org/book/ch19-03-advanced-traits.html#fully-qualified-syntax-for-disambiguation-calling-methods-with-the-same-name

#![stable(feature = "rust1", since = "1.0.0")]

use core::any::Any;
use core::cell::Cell;
#[cfg(not(no_global_oom_handling))]
use core::clone::CloneToUninit;
use core::cmp::Ordering;
use core::hash::{Hash, Hasher};
use core::intrinsics::abort;
#[cfg(not(no_global_oom_handling))]
use core::iter;
use core::marker::{PhantomData, Unsize};
use core::mem::{self, ManuallyDrop, align_of_val_raw};
use core::num::NonZeroUsize;
use core::ops::{CoerceUnsized, Deref, DerefMut, DerefPure, DispatchFromDyn, LegacyReceiver};
use core::panic::{RefUnwindSafe, UnwindSafe};
#[cfg(not(no_global_oom_handling))]
use core::pin::Pin;
use core::pin::PinCoerceUnsized;
use core::ptr::{self, NonNull, drop_in_place};
#[cfg(not(no_global_oom_handling))]
use core::slice::from_raw_parts_mut;
use core::{borrow, fmt, hint};
#[cfg(test)]
use std::boxed::Box;

#[cfg(not(no_global_oom_handling))]
use crate::alloc::handle_alloc_error;
use crate::alloc::{AllocError, Allocator, Global, Layout};
use crate::borrow::{Cow, ToOwned};
#[cfg(not(test))]
use crate::boxed::Box;
#[cfg(not(no_global_oom_handling))]
use crate::string::String;
#[cfg(not(no_global_oom_handling))]
use crate::vec::Vec;

#[cfg(test)]
mod tests;

// This is repr(C) to future-proof against possible field-reordering, which
// would interfere with otherwise safe [into|from]_raw() of transmutable
// inner types.
#[repr(C)]
struct RcInner<T: ?Sized> {
    strong: Cell<usize>,
    weak: Cell<usize>,
    value: T,
}

/// Calculate layout for `RcInner<T>` using the inner value's layout
fn rc_inner_layout_for_value_layout(layout: Layout) -> Layout {
    // Calculate layout using the given value layout.
    // Previously, layout was calculated on the expression
    // `&*(ptr as *const RcInner<T>)`, but this created a misaligned
    // reference (see #54908).
    Layout::new::<RcInner<()>>().extend(layout).unwrap().0.pad_to_align()
}

/// A single-threaded reference-counting pointer. 'Rc' stands for 'Reference
/// Counted'.
///
/// See the [module-level documentation](./index.html) for more details.
///
/// The inherent methods of `Rc` are all associated functions, which means
/// that you have to call them as e.g., [`Rc::get_mut(&mut value)`][get_mut] instead of
/// `value.get_mut()`. This avoids conflicts with methods of the inner type `T`.
///
/// [get_mut]: Rc::get_mut
#[doc(search_unbox)]
#[cfg_attr(not(test), rustc_diagnostic_item = "Rc")]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_insignificant_dtor]
pub struct Rc<
    T: ?Sized,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
    ptr: NonNull<RcInner<T>>,
    phantom: PhantomData<RcInner<T>>,
    alloc: A,
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized, A: Allocator> !Send for Rc<T, A> {}

// Note that this negative impl isn't strictly necessary for correctness,
// as `Rc` transitively contains a `Cell`, which is itself `!Sync`.
// However, given how important `Rc`'s `!Sync`-ness is,
// having an explicit negative impl is nice for documentation purposes
// and results in nicer error messages.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized, A: Allocator> !Sync for Rc<T, A> {}

#[stable(feature = "catch_unwind", since = "1.9.0")]
impl<T: RefUnwindSafe + ?Sized, A: Allocator + UnwindSafe> UnwindSafe for Rc<T, A> {}
#[stable(feature = "rc_ref_unwind_safe", since = "1.58.0")]
impl<T: RefUnwindSafe + ?Sized, A: Allocator + UnwindSafe> RefUnwindSafe for Rc<T, A> {}

#[unstable(feature = "coerce_unsized", issue = "18598")]
impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<Rc<U, A>> for Rc<T, A> {}

#[unstable(feature = "dispatch_from_dyn", issue = "none")]
impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Rc<U>> for Rc<T> {}

impl<T: ?Sized> Rc<T> {
    #[inline]
    unsafe fn from_inner(ptr: NonNull<RcInner<T>>) -> Self {
        unsafe { Self::from_inner_in(ptr, Global) }
    }

    #[inline]
    unsafe fn from_ptr(ptr: *mut RcInner<T>) -> Self {
        unsafe { Self::from_inner(NonNull::new_unchecked(ptr)) }
    }
}

impl<T: ?Sized, A: Allocator> Rc<T, A> {
    #[inline(always)]
    fn inner(&self) -> &RcInner<T> {
        // This unsafety is ok because while this Rc is alive we're guaranteed
        // that the inner pointer is valid.
        unsafe { self.ptr.as_ref() }
    }

    #[inline]
    fn into_inner_with_allocator(this: Self) -> (NonNull<RcInner<T>>, A) {
        let this = mem::ManuallyDrop::new(this);
        (this.ptr, unsafe { ptr::read(&this.alloc) })
    }

    #[inline]
    unsafe fn from_inner_in(ptr: NonNull<RcInner<T>>, alloc: A) -> Self {
        Self { ptr, phantom: PhantomData, alloc }
    }

    #[inline]
    unsafe fn from_ptr_in(ptr: *mut RcInner<T>, alloc: A) -> Self {
        unsafe { Self::from_inner_in(NonNull::new_unchecked(ptr), alloc) }
    }

    // Non-inlined part of `drop`.
    #[inline(never)]
    unsafe fn drop_slow(&mut self) {
        // Reconstruct the "strong weak" pointer and drop it when this
        // variable goes out of scope. This ensures that the memory is
        // deallocated even if the destructor of `T` panics.
        let _weak = Weak { ptr: self.ptr, alloc: &self.alloc };

        // Destroy the contained object.
        // We cannot use `get_mut_unchecked` here, because `self.alloc` is borrowed.
        unsafe {
            ptr::drop_in_place(&mut (*self.ptr.as_ptr()).value);
        }
    }
}

impl<T> Rc<T> {
    /// Constructs a new `Rc<T>`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn new(value: T) -> Rc<T> {
        // There is an implicit weak pointer owned by all the strong
        // pointers, which ensures that the weak destructor never frees
        // the allocation while the strong destructor is running, even
        // if the weak pointer is stored inside the strong one.
        unsafe {
            Self::from_inner(
                Box::leak(Box::new(RcInner { strong: Cell::new(1), weak: Cell::new(1), value }))
                    .into(),
            )
        }
    }

    /// Constructs a new `Rc<T>` while giving you a `Weak<T>` to the allocation,
    /// to allow you to construct a `T` which holds a weak pointer to itself.
    ///
    /// Generally, a structure circularly referencing itself, either directly or
    /// indirectly, should not hold a strong reference to itself to prevent a memory leak.
    /// Using this function, you get access to the weak pointer during the
    /// initialization of `T`, before the `Rc<T>` is created, such that you can
    /// clone and store it inside the `T`.
    ///
    /// `new_cyclic` first allocates the managed allocation for the `Rc<T>`,
    /// then calls your closure, giving it a `Weak<T>` to this allocation,
    /// and only afterwards completes the construction of the `Rc<T>` by placing
    /// the `T` returned from your closure into the allocation.
    ///
    /// Since the new `Rc<T>` is not fully-constructed until `Rc<T>::new_cyclic`
    /// returns, calling [`upgrade`] on the weak reference inside your closure will
    /// fail and result in a `None` value.
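    ///
    /// A minimal illustration of this, separate from the larger example below:
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let rc = Rc::new_cyclic(|weak| {
    ///     // The strong count is still zero here, so upgrading fails.
    ///     assert!(weak.upgrade().is_none());
    ///     5
    /// });
    /// assert_eq!(*rc, 5);
    /// ```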
    ///
    /// # Panics
    ///
    /// If `data_fn` panics, the panic is propagated to the caller, and the
    /// temporary [`Weak<T>`] is dropped normally.
    ///
    /// # Examples
    ///
    /// ```
    /// # #![allow(dead_code)]
    /// use std::rc::{Rc, Weak};
    ///
    /// struct Gadget {
    ///     me: Weak<Gadget>,
    /// }
    ///
    /// impl Gadget {
    ///     /// Constructs a reference counted Gadget.
    ///     fn new() -> Rc<Self> {
    ///         // `me` is a `Weak<Gadget>` pointing at the new allocation of the
    ///         // `Rc` we're constructing.
    ///         Rc::new_cyclic(|me| {
    ///             // Create the actual struct here.
    ///             Gadget { me: me.clone() }
    ///         })
    ///     }
    ///
    ///     /// Returns a reference counted pointer to Self.
    ///     fn me(&self) -> Rc<Self> {
    ///         self.me.upgrade().unwrap()
    ///     }
    /// }
    /// ```
    /// [`upgrade`]: Weak::upgrade
    #[cfg(not(no_global_oom_handling))]
    #[stable(feature = "arc_new_cyclic", since = "1.60.0")]
    pub fn new_cyclic<F>(data_fn: F) -> Rc<T>
    where
        F: FnOnce(&Weak<T>) -> T,
    {
        Self::new_cyclic_in(data_fn, Global)
    }

    /// Constructs a new `Rc` with uninitialized contents.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(get_mut_unchecked)]
    ///
    /// use std::rc::Rc;
    ///
    /// let mut five = Rc::<u32>::new_uninit();
    ///
    /// // Deferred initialization:
    /// Rc::get_mut(&mut five).unwrap().write(5);
    ///
    /// let five = unsafe { five.assume_init() };
    ///
    /// assert_eq!(*five, 5)
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[stable(feature = "new_uninit", since = "1.82.0")]
    #[must_use]
    pub fn new_uninit() -> Rc<mem::MaybeUninit<T>> {
        unsafe {
            Rc::from_ptr(Rc::allocate_for_layout(
                Layout::new::<T>(),
                |layout| Global.allocate(layout),
                <*mut u8>::cast,
            ))
        }
    }

    /// Constructs a new `Rc` with uninitialized contents, with the memory
    /// being filled with `0` bytes.
    ///
    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
    /// incorrect usage of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(new_zeroed_alloc)]
    ///
    /// use std::rc::Rc;
    ///
    /// let zero = Rc::<u32>::new_zeroed();
    /// let zero = unsafe { zero.assume_init() };
    ///
    /// assert_eq!(*zero, 0)
    /// ```
    ///
    /// [zeroed]: mem::MaybeUninit::zeroed
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "new_zeroed_alloc", issue = "129396")]
    #[must_use]
    pub fn new_zeroed() -> Rc<mem::MaybeUninit<T>> {
        unsafe {
            Rc::from_ptr(Rc::allocate_for_layout(
                Layout::new::<T>(),
                |layout| Global.allocate_zeroed(layout),
                <*mut u8>::cast,
            ))
        }
    }

    /// Constructs a new `Rc<T>`, returning an error if the allocation fails
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    /// use std::rc::Rc;
    ///
    /// let five = Rc::try_new(5);
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub fn try_new(value: T) -> Result<Rc<T>, AllocError> {
        // There is an implicit weak pointer owned by all the strong
        // pointers, which ensures that the weak destructor never frees
        // the allocation while the strong destructor is running, even
        // if the weak pointer is stored inside the strong one.
        unsafe {
            Ok(Self::from_inner(
                Box::leak(Box::try_new(RcInner {
                    strong: Cell::new(1),
                    weak: Cell::new(1),
                    value,
                })?)
                .into(),
            ))
        }
    }

    /// Constructs a new `Rc` with uninitialized contents, returning an error if the allocation fails
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    /// #![feature(get_mut_unchecked)]
    ///
    /// use std::rc::Rc;
    ///
    /// let mut five = Rc::<u32>::try_new_uninit()?;
    ///
    /// // Deferred initialization:
    /// Rc::get_mut(&mut five).unwrap().write(5);
    ///
    /// let five = unsafe { five.assume_init() };
    ///
    /// assert_eq!(*five, 5);
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    #[unstable(feature = "allocator_api", issue = "32838")]
    // #[unstable(feature = "new_uninit", issue = "63291")]
    pub fn try_new_uninit() -> Result<Rc<mem::MaybeUninit<T>>, AllocError> {
        unsafe {
            Ok(Rc::from_ptr(Rc::try_allocate_for_layout(
                Layout::new::<T>(),
                |layout| Global.allocate(layout),
                <*mut u8>::cast,
            )?))
        }
    }

    /// Constructs a new `Rc` with uninitialized contents, with the memory
    /// being filled with `0` bytes, returning an error if the allocation fails
    ///
    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
    /// incorrect usage of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    ///
    /// let zero = Rc::<u32>::try_new_zeroed()?;
    /// let zero = unsafe { zero.assume_init() };
    ///
    /// assert_eq!(*zero, 0);
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    ///
    /// [zeroed]: mem::MaybeUninit::zeroed
    #[unstable(feature = "allocator_api", issue = "32838")]
    //#[unstable(feature = "new_uninit", issue = "63291")]
    pub fn try_new_zeroed() -> Result<Rc<mem::MaybeUninit<T>>, AllocError> {
        unsafe {
            Ok(Rc::from_ptr(Rc::try_allocate_for_layout(
                Layout::new::<T>(),
                |layout| Global.allocate_zeroed(layout),
                <*mut u8>::cast,
            )?))
        }
    }

    /// Constructs a new `Pin<Rc<T>>`. If `T` does not implement `Unpin`, then
    /// `value` will be pinned in memory and unable to be moved.
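    ///
    /// # Examples
    ///
    /// A small sketch of pinning a value with `Rc::pin`:
    ///
    /// ```
    /// use std::pin::Pin;
    /// use std::rc::Rc;
    ///
    /// let pinned: Pin<Rc<u32>> = Rc::pin(5);
    /// // The value can still be read through the pin.
    /// assert_eq!(*pinned, 5);
    /// ```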
    #[cfg(not(no_global_oom_handling))]
    #[stable(feature = "pin", since = "1.33.0")]
    #[must_use]
    pub fn pin(value: T) -> Pin<Rc<T>> {
        unsafe { Pin::new_unchecked(Rc::new(value)) }
    }
}

impl<T, A: Allocator> Rc<T, A> {
    /// Constructs a new `Rc` in the provided allocator.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let five = Rc::new_in(5, System);
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn new_in(value: T, alloc: A) -> Rc<T, A> {
        // NOTE: Prefer match over unwrap_or_else since closure sometimes not inlineable.
        // That would make code size bigger.
        match Self::try_new_in(value, alloc) {
            Ok(m) => m,
            Err(_) => handle_alloc_error(Layout::new::<RcInner<T>>()),
        }
    }

    /// Constructs a new `Rc` with uninitialized contents in the provided allocator.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(get_mut_unchecked)]
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let mut five = Rc::<u32, _>::new_uninit_in(System);
    ///
    /// let five = unsafe {
    ///     // Deferred initialization:
    ///     Rc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);
    ///
    ///     five.assume_init()
    /// };
    ///
    /// assert_eq!(*five, 5)
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "allocator_api", issue = "32838")]
    // #[unstable(feature = "new_uninit", issue = "63291")]
    #[inline]
    pub fn new_uninit_in(alloc: A) -> Rc<mem::MaybeUninit<T>, A> {
        unsafe {
            Rc::from_ptr_in(
                Rc::allocate_for_layout(
                    Layout::new::<T>(),
                    |layout| alloc.allocate(layout),
                    <*mut u8>::cast,
                ),
                alloc,
            )
        }
    }

    /// Constructs a new `Rc` with uninitialized contents, with the memory
    /// being filled with `0` bytes, in the provided allocator.
    ///
    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
    /// incorrect usage of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let zero = Rc::<u32, _>::new_zeroed_in(System);
    /// let zero = unsafe { zero.assume_init() };
    ///
    /// assert_eq!(*zero, 0)
    /// ```
    ///
    /// [zeroed]: mem::MaybeUninit::zeroed
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "allocator_api", issue = "32838")]
    // #[unstable(feature = "new_uninit", issue = "63291")]
    #[inline]
    pub fn new_zeroed_in(alloc: A) -> Rc<mem::MaybeUninit<T>, A> {
        unsafe {
            Rc::from_ptr_in(
                Rc::allocate_for_layout(
                    Layout::new::<T>(),
                    |layout| alloc.allocate_zeroed(layout),
                    <*mut u8>::cast,
                ),
                alloc,
            )
        }
    }

    /// Constructs a new `Rc<T, A>` in the given allocator while giving you a `Weak<T, A>` to the allocation,
    /// to allow you to construct a `T` which holds a weak pointer to itself.
    ///
    /// Generally, a structure circularly referencing itself, either directly or
    /// indirectly, should not hold a strong reference to itself to prevent a memory leak.
    /// Using this function, you get access to the weak pointer during the
    /// initialization of `T`, before the `Rc<T, A>` is created, such that you can
    /// clone and store it inside the `T`.
    ///
    /// `new_cyclic_in` first allocates the managed allocation for the `Rc<T, A>`,
    /// then calls your closure, giving it a `Weak<T, A>` to this allocation,
    /// and only afterwards completes the construction of the `Rc<T, A>` by placing
    /// the `T` returned from your closure into the allocation.
    ///
    /// Since the new `Rc<T, A>` is not fully-constructed until `Rc<T, A>::new_cyclic_in`
    /// returns, calling [`upgrade`] on the weak reference inside your closure will
    /// fail and result in a `None` value.
    ///
    /// # Panics
    ///
    /// If `data_fn` panics, the panic is propagated to the caller, and the
    /// temporary [`Weak<T, A>`] is dropped normally.
    ///
    /// # Examples
    ///
    /// See [`new_cyclic`].
    ///
    /// [`new_cyclic`]: Rc::new_cyclic
    /// [`upgrade`]: Weak::upgrade
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub fn new_cyclic_in<F>(data_fn: F, alloc: A) -> Rc<T, A>
    where
        F: FnOnce(&Weak<T, A>) -> T,
    {
        // Construct the inner in the "uninitialized" state with a single
        // weak reference.
        let (uninit_raw_ptr, alloc) = Box::into_raw_with_allocator(Box::new_in(
            RcInner {
                strong: Cell::new(0),
                weak: Cell::new(1),
                value: mem::MaybeUninit::<T>::uninit(),
            },
            alloc,
        ));
        let uninit_ptr: NonNull<_> = (unsafe { &mut *uninit_raw_ptr }).into();
        let init_ptr: NonNull<RcInner<T>> = uninit_ptr.cast();

        let weak = Weak { ptr: init_ptr, alloc };

        // It's important we don't give up ownership of the weak pointer, or
        // else the memory might be freed by the time `data_fn` returns. If
        // we really wanted to pass ownership, we could create an additional
        // weak pointer for ourselves, but this would result in additional
        // updates to the weak reference count which might not be necessary
        // otherwise.
        let data = data_fn(&weak);

        let strong = unsafe {
            let inner = init_ptr.as_ptr();
            ptr::write(&raw mut (*inner).value, data);

            let prev_value = (*inner).strong.get();
            debug_assert_eq!(prev_value, 0, "No prior strong references should exist");
            (*inner).strong.set(1);

            // Strong references should collectively own a shared weak reference,
            // so don't run the destructor for our old weak reference.
            // Calling into_raw_with_allocator has the double effect of giving us back the allocator,
            // and forgetting the weak reference.
            let alloc = weak.into_raw_with_allocator().1;

            Rc::from_inner_in(init_ptr, alloc)
        };

        strong
    }

    /// Constructs a new `Rc<T>` in the provided allocator, returning an error if the allocation
    /// fails
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let five = Rc::try_new_in(5, System);
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn try_new_in(value: T, alloc: A) -> Result<Self, AllocError> {
        // There is an implicit weak pointer owned by all the strong
        // pointers, which ensures that the weak destructor never frees
        // the allocation while the strong destructor is running, even
        // if the weak pointer is stored inside the strong one.
        let (ptr, alloc) = Box::into_unique(Box::try_new_in(
            RcInner { strong: Cell::new(1), weak: Cell::new(1), value },
            alloc,
        )?);
        Ok(unsafe { Self::from_inner_in(ptr.into(), alloc) })
    }

    /// Constructs a new `Rc` with uninitialized contents, in the provided allocator, returning an
    /// error if the allocation fails
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    /// #![feature(get_mut_unchecked)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let mut five = Rc::<u32, _>::try_new_uninit_in(System)?;
    ///
    /// let five = unsafe {
    ///     // Deferred initialization:
    ///     Rc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);
    ///
    ///     five.assume_init()
    /// };
    ///
    /// assert_eq!(*five, 5);
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    #[unstable(feature = "allocator_api", issue = "32838")]
    // #[unstable(feature = "new_uninit", issue = "63291")]
    #[inline]
    pub fn try_new_uninit_in(alloc: A) -> Result<Rc<mem::MaybeUninit<T>, A>, AllocError> {
        unsafe {
            Ok(Rc::from_ptr_in(
                Rc::try_allocate_for_layout(
                    Layout::new::<T>(),
                    |layout| alloc.allocate(layout),
                    <*mut u8>::cast,
                )?,
                alloc,
            ))
        }
    }

    /// Constructs a new `Rc` with uninitialized contents, with the memory
    /// being filled with `0` bytes, in the provided allocator, returning an error if the allocation
    /// fails
    ///
    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
    /// incorrect usage of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let zero = Rc::<u32, _>::try_new_zeroed_in(System)?;
    /// let zero = unsafe { zero.assume_init() };
    ///
    /// assert_eq!(*zero, 0);
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    ///
    /// [zeroed]: mem::MaybeUninit::zeroed
    #[unstable(feature = "allocator_api", issue = "32838")]
    //#[unstable(feature = "new_uninit", issue = "63291")]
    #[inline]
    pub fn try_new_zeroed_in(alloc: A) -> Result<Rc<mem::MaybeUninit<T>, A>, AllocError> {
        unsafe {
            Ok(Rc::from_ptr_in(
                Rc::try_allocate_for_layout(
                    Layout::new::<T>(),
                    |layout| alloc.allocate_zeroed(layout),
                    <*mut u8>::cast,
                )?,
                alloc,
            ))
        }
    }

    /// Constructs a new `Pin<Rc<T>>` in the provided allocator. If `T` does not implement `Unpin`, then
    /// `value` will be pinned in memory and unable to be moved.
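    ///
    /// # Examples
    ///
    /// A small sketch using the `System` allocator:
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let pinned = Rc::pin_in(5, System);
    /// assert_eq!(*pinned, 5);
    /// ```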
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn pin_in(value: T, alloc: A) -> Pin<Self>
    where
        A: 'static,
    {
        unsafe { Pin::new_unchecked(Rc::new_in(value, alloc)) }
    }

    /// Returns the inner value, if the `Rc` has exactly one strong reference.
    ///
    /// Otherwise, an [`Err`] is returned with the same `Rc` that was
    /// passed in.
    ///
    /// This will succeed even if there are outstanding weak references.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let x = Rc::new(3);
    /// assert_eq!(Rc::try_unwrap(x), Ok(3));
    ///
    /// let x = Rc::new(4);
    /// let _y = Rc::clone(&x);
    /// assert_eq!(*Rc::try_unwrap(x).unwrap_err(), 4);
    /// ```
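    ///
    /// An outstanding [`Weak`] pointer does not prevent unwrapping; afterwards
    /// it can no longer be upgraded:
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let x = Rc::new(5);
    /// let weak = Rc::downgrade(&x);
    /// assert_eq!(Rc::try_unwrap(x), Ok(5));
    /// assert!(weak.upgrade().is_none());
    /// ```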
    #[inline]
    #[stable(feature = "rc_unique", since = "1.4.0")]
    pub fn try_unwrap(this: Self) -> Result<T, Self> {
        if Rc::strong_count(&this) == 1 {
            let this = ManuallyDrop::new(this);

            let val: T = unsafe { ptr::read(&**this) }; // copy the contained object
            let alloc: A = unsafe { ptr::read(&this.alloc) }; // copy the allocator

            // Indicate to Weaks that they can't be promoted by decrementing
            // the strong count, and then remove the implicit "strong weak"
            // pointer while also handling drop logic by just crafting a
            // fake Weak.
            this.inner().dec_strong();
            let _weak = Weak { ptr: this.ptr, alloc };
            Ok(val)
        } else {
            Err(this)
        }
    }

    /// Returns the inner value, if the `Rc` has exactly one strong reference.
    ///
    /// Otherwise, [`None`] is returned and the `Rc` is dropped.
    ///
    /// This will succeed even if there are outstanding weak references.
    ///
    /// If `Rc::into_inner` is called on every clone of this `Rc`,
    /// it is guaranteed that exactly one of the calls returns the inner value.
    /// This means in particular that the inner value is not dropped.
    ///
    /// [`Rc::try_unwrap`] is conceptually similar to `Rc::into_inner`.
    /// And while they are meant for different use-cases, `Rc::into_inner(this)`
    /// is in fact equivalent to <code>[Rc::try_unwrap]\(this).[ok][Result::ok]()</code>.
    /// (Note that the same kind of equivalence does **not** hold true for
    /// [`Arc`](crate::sync::Arc), due to race conditions that do not apply to `Rc`!)
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let x = Rc::new(3);
    /// assert_eq!(Rc::into_inner(x), Some(3));
    ///
    /// let x = Rc::new(4);
    /// let y = Rc::clone(&x);
    ///
    /// assert_eq!(Rc::into_inner(y), None);
    /// assert_eq!(Rc::into_inner(x), Some(4));
    /// ```
    #[inline]
    #[stable(feature = "rc_into_inner", since = "1.70.0")]
    pub fn into_inner(this: Self) -> Option<T> {
        Rc::try_unwrap(this).ok()
    }
}

impl<T> Rc<[T]> {
    /// Constructs a new reference-counted slice with uninitialized contents.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(get_mut_unchecked)]
    ///
    /// use std::rc::Rc;
    ///
    /// let mut values = Rc::<[u32]>::new_uninit_slice(3);
    ///
    /// // Deferred initialization:
    /// let data = Rc::get_mut(&mut values).unwrap();
    /// data[0].write(1);
    /// data[1].write(2);
    /// data[2].write(3);
    ///
    /// let values = unsafe { values.assume_init() };
    ///
    /// assert_eq!(*values, [1, 2, 3])
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[stable(feature = "new_uninit", since = "1.82.0")]
    #[must_use]
    pub fn new_uninit_slice(len: usize) -> Rc<[mem::MaybeUninit<T>]> {
        unsafe { Rc::from_ptr(Rc::allocate_for_slice(len)) }
    }

    /// Constructs a new reference-counted slice with uninitialized contents, with the memory being
    /// filled with `0` bytes.
    ///
    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
    /// incorrect usage of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(new_zeroed_alloc)]
    ///
    /// use std::rc::Rc;
    ///
    /// let values = Rc::<[u32]>::new_zeroed_slice(3);
    /// let values = unsafe { values.assume_init() };
    ///
    /// assert_eq!(*values, [0, 0, 0])
    /// ```
    ///
    /// [zeroed]: mem::MaybeUninit::zeroed
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "new_zeroed_alloc", issue = "129396")]
    #[must_use]
    pub fn new_zeroed_slice(len: usize) -> Rc<[mem::MaybeUninit<T>]> {
        unsafe {
            Rc::from_ptr(Rc::allocate_for_layout(
                Layout::array::<T>(len).unwrap(),
                |layout| Global.allocate_zeroed(layout),
                |mem| {
                    ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len)
                        as *mut RcInner<[mem::MaybeUninit<T>]>
                },
            ))
        }
    }

    /// Converts the reference-counted slice into a reference-counted array.
    ///
    /// This operation does not reallocate; the underlying array of the slice is simply reinterpreted as an array type.
    ///
    /// If `N` is not exactly equal to the length of `self`, then this method returns `None`.
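    ///
    /// # Examples
    ///
    /// A minimal sketch of both the matching and the mismatching case:
    ///
    /// ```
    /// #![feature(slice_as_array)]
    ///
    /// use std::rc::Rc;
    ///
    /// let slice: Rc<[u32]> = Rc::from([1, 2, 3]);
    /// let array: Rc<[u32; 3]> = slice.into_array().unwrap();
    /// assert_eq!(*array, [1, 2, 3]);
    ///
    /// let slice: Rc<[u32]> = Rc::from([1, 2, 3]);
    /// // The length does not match, so no `Rc<[u32; 4]>` can be produced.
    /// assert!(slice.into_array::<4>().is_none());
    /// ```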
    #[unstable(feature = "slice_as_array", issue = "133508")]
    #[inline]
    #[must_use]
    pub fn into_array<const N: usize>(self) -> Option<Rc<[T; N]>> {
        if self.len() == N {
            let ptr = Self::into_raw(self) as *const [T; N];

            // SAFETY: The underlying array of a slice has the exact same layout as an actual array `[T; N]` if `N` is equal to the slice's length.
            let me = unsafe { Rc::from_raw(ptr) };
            Some(me)
        } else {
            None
        }
    }
}

impl<T, A: Allocator> Rc<[T], A> {
    /// Constructs a new reference-counted slice with uninitialized contents.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(get_mut_unchecked)]
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let mut values = Rc::<[u32], _>::new_uninit_slice_in(3, System);
    ///
    /// let values = unsafe {
    ///     // Deferred initialization:
    ///     Rc::get_mut_unchecked(&mut values)[0].as_mut_ptr().write(1);
    ///     Rc::get_mut_unchecked(&mut values)[1].as_mut_ptr().write(2);
    ///     Rc::get_mut_unchecked(&mut values)[2].as_mut_ptr().write(3);
    ///
    ///     values.assume_init()
    /// };
    ///
    /// assert_eq!(*values, [1, 2, 3])
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "allocator_api", issue = "32838")]
    // #[unstable(feature = "new_uninit", issue = "63291")]
    #[inline]
    pub fn new_uninit_slice_in(len: usize, alloc: A) -> Rc<[mem::MaybeUninit<T>], A> {
        unsafe { Rc::from_ptr_in(Rc::allocate_for_slice_in(len, &alloc), alloc) }
    }

    /// Constructs a new reference-counted slice with uninitialized contents, with the memory being
    /// filled with `0` bytes.
    ///
    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
    /// incorrect usage of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let values = Rc::<[u32], _>::new_zeroed_slice_in(3, System);
    /// let values = unsafe { values.assume_init() };
    ///
    /// assert_eq!(*values, [0, 0, 0])
    /// ```
    ///
    /// [zeroed]: mem::MaybeUninit::zeroed
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "allocator_api", issue = "32838")]
    // #[unstable(feature = "new_uninit", issue = "63291")]
    #[inline]
    pub fn new_zeroed_slice_in(len: usize, alloc: A) -> Rc<[mem::MaybeUninit<T>], A> {
        unsafe {
            Rc::from_ptr_in(
                Rc::allocate_for_layout(
                    Layout::array::<T>(len).unwrap(),
                    |layout| alloc.allocate_zeroed(layout),
                    |mem| {
                        ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len)
                            as *mut RcInner<[mem::MaybeUninit<T>]>
                    },
                ),
                alloc,
            )
        }
    }
}

impl<T, A: Allocator> Rc<mem::MaybeUninit<T>, A> {
    /// Converts to `Rc<T>`.
    ///
    /// # Safety
    ///
    /// As with [`MaybeUninit::assume_init`],
    /// it is up to the caller to guarantee that the inner value
    /// really is in an initialized state.
    /// Calling this when the content is not yet fully initialized
    /// causes immediate undefined behavior.
    ///
    /// [`MaybeUninit::assume_init`]: mem::MaybeUninit::assume_init
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(get_mut_unchecked)]
    ///
    /// use std::rc::Rc;
    ///
    /// let mut five = Rc::<u32>::new_uninit();
    ///
    /// // Deferred initialization:
    /// Rc::get_mut(&mut five).unwrap().write(5);
    ///
    /// let five = unsafe { five.assume_init() };
    ///
    /// assert_eq!(*five, 5)
    /// ```
    #[stable(feature = "new_uninit", since = "1.82.0")]
    #[inline]
    pub unsafe fn assume_init(self) -> Rc<T, A> {
        let (ptr, alloc) = Rc::into_inner_with_allocator(self);
        unsafe { Rc::from_inner_in(ptr.cast(), alloc) }
    }
}

impl<T, A: Allocator> Rc<[mem::MaybeUninit<T>], A> {
    /// Converts to `Rc<[T]>`.
    ///
    /// # Safety
    ///
    /// As with [`MaybeUninit::assume_init`],
    /// it is up to the caller to guarantee that the inner value
    /// really is in an initialized state.
    /// Calling this when the content is not yet fully initialized
    /// causes immediate undefined behavior.
    ///
    /// [`MaybeUninit::assume_init`]: mem::MaybeUninit::assume_init
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(get_mut_unchecked)]
    ///
    /// use std::rc::Rc;
    ///
    /// let mut values = Rc::<[u32]>::new_uninit_slice(3);
    ///
    /// // Deferred initialization:
    /// let data = Rc::get_mut(&mut values).unwrap();
    /// data[0].write(1);
    /// data[1].write(2);
    /// data[2].write(3);
    ///
    /// let values = unsafe { values.assume_init() };
    ///
    /// assert_eq!(*values, [1, 2, 3])
    /// ```
    #[stable(feature = "new_uninit", since = "1.82.0")]
    #[inline]
    pub unsafe fn assume_init(self) -> Rc<[T], A> {
        let (ptr, alloc) = Rc::into_inner_with_allocator(self);
        unsafe { Rc::from_ptr_in(ptr.as_ptr() as _, alloc) }
    }
}

impl<T: ?Sized> Rc<T> {
    /// Constructs an `Rc<T>` from a raw pointer.
    ///
    /// The raw pointer must have been previously returned by a call to
    /// [`Rc<U>::into_raw`][into_raw] with the following requirements:
    ///
    /// * If `U` is sized, it must have the same size and alignment as `T`. This
    ///   is trivially true if `U` is `T`.
    /// * If `U` is unsized, its data pointer must have the same size and
    ///   alignment as `T`. This is trivially true if `Rc<U>` was constructed
    ///   through `Rc<T>` and then converted to `Rc<U>` through an [unsized
    ///   coercion].
    ///
    /// Note that if `U` or `U`'s data pointer is not `T` but has the same size
    /// and alignment, this is basically like transmuting references of
    /// different types. See [`mem::transmute`][transmute] for more information
    /// on what restrictions apply in this case.
    ///
    /// The raw pointer must point to a block of memory allocated by the global allocator
    ///
    /// The user of `from_raw` has to make sure a specific value of `T` is only
    /// dropped once.
    ///
    /// This function is unsafe because improper use may lead to memory unsafety,
    /// even if the returned `Rc<T>` is never accessed.
    ///
    /// [into_raw]: Rc::into_raw
    /// [transmute]: core::mem::transmute
    /// [unsized coercion]: https://doc.rust-lang.org/reference/type-coercions.html#unsized-coercions
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let x = Rc::new("hello".to_owned());
    /// let x_ptr = Rc::into_raw(x);
    ///
    /// unsafe {
    ///     // Convert back to an `Rc` to prevent leak.
    ///     let x = Rc::from_raw(x_ptr);
    ///     assert_eq!(&*x, "hello");
    ///
    ///     // Further calls to `Rc::from_raw(x_ptr)` would be memory-unsafe.
    /// }
    ///
    /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling!
    /// ```
    ///
    /// Convert a slice back into its original array:
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let x: Rc<[u32]> = Rc::new([1, 2, 3]);
    /// let x_ptr: *const [u32] = Rc::into_raw(x);
    ///
    /// unsafe {
    ///     let x: Rc<[u32; 3]> = Rc::from_raw(x_ptr.cast::<[u32; 3]>());
    ///     assert_eq!(&*x, &[1, 2, 3]);
    /// }
    /// ```
    #[inline]
    #[stable(feature = "rc_raw", since = "1.17.0")]
    pub unsafe fn from_raw(ptr: *const T) -> Self {
        unsafe { Self::from_raw_in(ptr, Global) }
    }

    /// Increments the strong reference count on the `Rc<T>` associated with the
    /// provided pointer by one.
    ///
    /// # Safety
    ///
    /// The pointer must have been obtained through `Rc::into_raw`, the
    /// associated `Rc` instance must be valid (i.e. the strong count must be at
    /// least 1) for the duration of this method, and `ptr` must point to a block of memory
    /// allocated by the global allocator.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    ///
    /// unsafe {
    ///     let ptr = Rc::into_raw(five);
    ///     Rc::increment_strong_count(ptr);
    ///
    ///     let five = Rc::from_raw(ptr);
    ///     assert_eq!(2, Rc::strong_count(&five));
    ///     # // Prevent leaks for Miri.
    ///     # Rc::decrement_strong_count(ptr);
    /// }
    /// ```
    #[inline]
    #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")]
    pub unsafe fn increment_strong_count(ptr: *const T) {
        unsafe { Self::increment_strong_count_in(ptr, Global) }
    }

    /// Decrements the strong reference count on the `Rc<T>` associated with the
    /// provided pointer by one.
    ///
    /// # Safety
    ///
    /// The pointer must have been obtained through `Rc::into_raw`, the
    /// associated `Rc` instance must be valid (i.e. the strong count must be at
    /// least 1) when invoking this method, and `ptr` must point to a block of memory
    /// allocated by the global allocator. This method can be used to release the final `Rc` and
    /// backing storage, but **should not** be called after the final `Rc` has been released.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    ///
    /// unsafe {
    ///     let ptr = Rc::into_raw(five);
    ///     Rc::increment_strong_count(ptr);
    ///
    ///     let five = Rc::from_raw(ptr);
    ///     assert_eq!(2, Rc::strong_count(&five));
    ///     Rc::decrement_strong_count(ptr);
    ///     assert_eq!(1, Rc::strong_count(&five));
    /// }
    /// ```
    #[inline]
    #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")]
    pub unsafe fn decrement_strong_count(ptr: *const T) {
        unsafe { Self::decrement_strong_count_in(ptr, Global) }
    }
}

impl<T: ?Sized, A: Allocator> Rc<T, A> {
    /// Returns a reference to the underlying allocator.
    ///
    /// Note: this is an associated function, which means that you have
    /// to call it as `Rc::allocator(&r)` instead of `r.allocator()`. This
    /// is so that there is no conflict with a method on the inner type.
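    ///
    /// # Examples
    ///
    /// A minimal sketch using the `System` allocator:
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let rc = Rc::new_in(5, System);
    /// let _alloc: &System = Rc::allocator(&rc);
    /// ```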
    #[inline]
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub fn allocator(this: &Self) -> &A {
        &this.alloc
    }

    /// Consumes the `Rc`, returning the wrapped pointer.
    ///
    /// To avoid a memory leak the pointer must be converted back to an `Rc` using
    /// [`Rc::from_raw`].
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let x = Rc::new("hello".to_owned());
    /// let x_ptr = Rc::into_raw(x);
    /// assert_eq!(unsafe { &*x_ptr }, "hello");
    /// # // Prevent leaks for Miri.
    /// # drop(unsafe { Rc::from_raw(x_ptr) });
    /// ```
    #[must_use = "losing the pointer will leak memory"]
    #[stable(feature = "rc_raw", since = "1.17.0")]
    #[rustc_never_returns_null_ptr]
    pub fn into_raw(this: Self) -> *const T {
        let this = ManuallyDrop::new(this);
        Self::as_ptr(&*this)
    }

    /// Consumes the `Rc`, returning the wrapped pointer and allocator.
    ///
    /// To avoid a memory leak the pointer must be converted back to an `Rc` using
    /// [`Rc::from_raw_in`].
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let x = Rc::new_in("hello".to_owned(), System);
    /// let (ptr, alloc) = Rc::into_raw_with_allocator(x);
    /// assert_eq!(unsafe { &*ptr }, "hello");
    /// let x = unsafe { Rc::from_raw_in(ptr, alloc) };
    /// assert_eq!(&*x, "hello");
    /// ```
    #[must_use = "losing the pointer will leak memory"]
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub fn into_raw_with_allocator(this: Self) -> (*const T, A) {
        let this = mem::ManuallyDrop::new(this);
        let ptr = Self::as_ptr(&this);
        // Safety: `this` is ManuallyDrop so the allocator will not be double-dropped
        let alloc = unsafe { ptr::read(&this.alloc) };
        (ptr, alloc)
    }

    /// Provides a raw pointer to the data.
    ///
    /// The counts are not affected in any way and the `Rc` is not consumed. The pointer is valid
    /// for as long as there are strong counts in the `Rc`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let x = Rc::new(0);
    /// let y = Rc::clone(&x);
    /// let x_ptr = Rc::as_ptr(&x);
    /// assert_eq!(x_ptr, Rc::as_ptr(&y));
    /// assert_eq!(unsafe { *x_ptr }, 0);
    /// ```
    #[stable(feature = "weak_into_raw", since = "1.45.0")]
    #[rustc_never_returns_null_ptr]
    pub fn as_ptr(this: &Self) -> *const T {
        let ptr: *mut RcInner<T> = NonNull::as_ptr(this.ptr);

        // SAFETY: This cannot go through Deref::deref or Rc::inner because
        // this is required to retain raw/mut provenance such that e.g. `get_mut` can
        // write through the pointer after the Rc is recovered through `from_raw`.
        unsafe { &raw mut (*ptr).value }
    }

    /// Constructs an `Rc<T, A>` from a raw pointer in the provided allocator.
    ///
    /// The raw pointer must have been previously returned by a call to [`Rc<U,
    /// A>::into_raw`][into_raw] with the following requirements:
    ///
    /// * If `U` is sized, it must have the same size and alignment as `T`. This
    ///   is trivially true if `U` is `T`.
    /// * If `U` is unsized, its data pointer must have the same size and
    ///   alignment as `T`. This is trivially true if `Rc<U>` was constructed
    ///   through `Rc<T>` and then converted to `Rc<U>` through an [unsized
    ///   coercion].
    ///
    /// Note that if `U` or `U`'s data pointer is not `T` but has the same size
    /// and alignment, this is basically like transmuting references of
    /// different types. See [`mem::transmute`][transmute] for more information
    /// on what restrictions apply in this case.
    ///
    /// The raw pointer must point to a block of memory allocated by `alloc`
    ///
    /// The user of `from_raw` has to make sure a specific value of `T` is only
    /// dropped once.
    ///
    /// This function is unsafe because improper use may lead to memory unsafety,
    /// even if the returned `Rc<T>` is never accessed.
    ///
    /// [into_raw]: Rc::into_raw
    /// [transmute]: core::mem::transmute
    /// [unsized coercion]: https://doc.rust-lang.org/reference/type-coercions.html#unsized-coercions
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let x = Rc::new_in("hello".to_owned(), System);
    /// let x_ptr = Rc::into_raw(x);
    ///
    /// unsafe {
    ///     // Convert back to an `Rc` to prevent leak.
    ///     let x = Rc::from_raw_in(x_ptr, System);
    ///     assert_eq!(&*x, "hello");
    ///
    ///     // Further calls to `Rc::from_raw(x_ptr)` would be memory-unsafe.
    /// }
    ///
    /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling!
    /// ```
    ///
    /// Convert a slice back into its original array:
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let x: Rc<[u32], _> = Rc::new_in([1, 2, 3], System);
    /// let x_ptr: *const [u32] = Rc::into_raw(x);
    ///
    /// unsafe {
    ///     let x: Rc<[u32; 3], _> = Rc::from_raw_in(x_ptr.cast::<[u32; 3]>(), System);
    ///     assert_eq!(&*x, &[1, 2, 3]);
    /// }
    /// ```
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub unsafe fn from_raw_in(ptr: *const T, alloc: A) -> Self {
        let offset = unsafe { data_offset(ptr) };

        // Reverse the offset to find the original RcInner.
        let rc_ptr = unsafe { ptr.byte_sub(offset) as *mut RcInner<T> };

        unsafe { Self::from_ptr_in(rc_ptr, alloc) }
    }

    /// Creates a new [`Weak`] pointer to this allocation.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    ///
    /// let weak_five = Rc::downgrade(&five);
    /// ```
    #[must_use = "this returns a new `Weak` pointer, \
                  without modifying the original `Rc`"]
    #[stable(feature = "rc_weak", since = "1.4.0")]
    pub fn downgrade(this: &Self) -> Weak<T, A>
    where
        A: Clone,
    {
        this.inner().inc_weak();
        // Make sure we do not create a dangling Weak
        debug_assert!(!is_dangling(this.ptr.as_ptr()));
        Weak { ptr: this.ptr, alloc: this.alloc.clone() }
    }

    /// Gets the number of [`Weak`] pointers to this allocation.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    /// let _weak_five = Rc::downgrade(&five);
    ///
    /// assert_eq!(1, Rc::weak_count(&five));
    /// ```
    #[inline]
    #[stable(feature = "rc_counts", since = "1.15.0")]
    pub fn weak_count(this: &Self) -> usize {
        this.inner().weak() - 1
    }

    /// Gets the number of strong (`Rc`) pointers to this allocation.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    /// let _also_five = Rc::clone(&five);
    ///
    /// assert_eq!(2, Rc::strong_count(&five));
    /// ```
    #[inline]
    #[stable(feature = "rc_counts", since = "1.15.0")]
    pub fn strong_count(this: &Self) -> usize {
        this.inner().strong()
    }

    /// Increments the strong reference count on the `Rc<T>` associated with the
    /// provided pointer by one.
    ///
    /// # Safety
    ///
    /// The pointer must have been obtained through `Rc::into_raw`, the
    /// associated `Rc` instance must be valid (i.e. the strong count must be at
    /// least 1) for the duration of this method, and `ptr` must point to a block of memory
    /// allocated by `alloc`
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let five = Rc::new_in(5, System);
    ///
    /// unsafe {
    ///     let ptr = Rc::into_raw(five);
    ///     Rc::increment_strong_count_in(ptr, System);
    ///
    ///     let five = Rc::from_raw_in(ptr, System);
    ///     assert_eq!(2, Rc::strong_count(&five));
    ///     # // Prevent leaks for Miri.
    ///     # Rc::decrement_strong_count_in(ptr, System);
    /// }
    /// ```
    #[inline]
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub unsafe fn increment_strong_count_in(ptr: *const T, alloc: A)
    where
        A: Clone,
    {
        // Retain Rc, but don't touch refcount by wrapping in ManuallyDrop
        let rc = unsafe { mem::ManuallyDrop::new(Rc::<T, A>::from_raw_in(ptr, alloc)) };
        // Now increase refcount, but don't drop new refcount either
        let _rc_clone: mem::ManuallyDrop<_> = rc.clone();
    }

    /// Decrements the strong reference count on the `Rc<T>` associated with the
    /// provided pointer by one.
    ///
    /// # Safety
    ///
    /// The pointer must have been obtained through `Rc::into_raw`, the
    /// associated `Rc` instance must be valid (i.e. the strong count must be at
    /// least 1) when invoking this method, and `ptr` must point to a block of memory
    /// allocated by `alloc`. This method can be used to release the final `Rc` and backing storage,
    /// but **should not** be called after the final `Rc` has been released.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let five = Rc::new_in(5, System);
    ///
    /// unsafe {
    ///     let ptr = Rc::into_raw(five);
    ///     Rc::increment_strong_count_in(ptr, System);
    ///
    ///     let five = Rc::from_raw_in(ptr, System);
    ///     assert_eq!(2, Rc::strong_count(&five));
    ///     Rc::decrement_strong_count_in(ptr, System);
    ///     assert_eq!(1, Rc::strong_count(&five));
    /// }
    /// ```
    #[inline]
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub unsafe fn decrement_strong_count_in(ptr: *const T, alloc: A) {
        unsafe { drop(Rc::from_raw_in(ptr, alloc)) };
    }

    /// Returns `true` if there are no other `Rc` or [`Weak`] pointers to
    /// this allocation.
    #[inline]
    fn is_unique(this: &Self) -> bool {
        Rc::weak_count(this) == 0 && Rc::strong_count(this) == 1
    }
1711
1712 /// Returns a mutable reference into the given `Rc`, if there are
1713 /// no other `Rc` or [`Weak`] pointers to the same allocation.
1714 ///
1715 /// Returns [`None`] otherwise, because it is not safe to
1716 /// mutate a shared value.
1717 ///
1718 /// See also [`make_mut`][make_mut], which will [`clone`][clone]
1719 /// the inner value when there are other `Rc` pointers.
1720 ///
1721 /// [make_mut]: Rc::make_mut
1722 /// [clone]: Clone::clone
1723 ///
1724 /// # Examples
1725 ///
1726 /// ```
1727 /// use std::rc::Rc;
1728 ///
1729 /// let mut x = Rc::new(3);
1730 /// *Rc::get_mut(&mut x).unwrap() = 4;
1731 /// assert_eq!(*x, 4);
1732 ///
1733 /// let _y = Rc::clone(&x);
1734 /// assert!(Rc::get_mut(&mut x).is_none());
1735 /// ```
1736 #[inline]
1737 #[stable(feature = "rc_unique", since = "1.4.0")]
1738 pub fn get_mut(this: &mut Self) -> Option<&mut T> {
1739 if Rc::is_unique(this) { unsafe { Some(Rc::get_mut_unchecked(this)) } } else { None }
1740 }
1741
1742 /// Returns a mutable reference into the given `Rc`,
1743 /// without any check.
1744 ///
1745 /// See also [`get_mut`], which is safe and does appropriate checks.
1746 ///
1747 /// [`get_mut`]: Rc::get_mut
1748 ///
1749 /// # Safety
1750 ///
1751 /// If any other `Rc` or [`Weak`] pointers to the same allocation exist, then
1752 /// they must not be dereferenced or have active borrows for the duration
1753 /// of the returned borrow, and their inner type must be exactly the same as the
    /// inner type of this `Rc` (including lifetimes). This is trivially the case if no
1755 /// such pointers exist, for example immediately after `Rc::new`.
1756 ///
1757 /// # Examples
1758 ///
1759 /// ```
1760 /// #![feature(get_mut_unchecked)]
1761 ///
1762 /// use std::rc::Rc;
1763 ///
1764 /// let mut x = Rc::new(String::new());
1765 /// unsafe {
1766 /// Rc::get_mut_unchecked(&mut x).push_str("foo")
1767 /// }
1768 /// assert_eq!(*x, "foo");
1769 /// ```
1770 /// Other `Rc` pointers to the same allocation must be to the same type.
1771 /// ```no_run
1772 /// #![feature(get_mut_unchecked)]
1773 ///
1774 /// use std::rc::Rc;
1775 ///
1776 /// let x: Rc<str> = Rc::from("Hello, world!");
1777 /// let mut y: Rc<[u8]> = x.clone().into();
1778 /// unsafe {
1779 /// // this is Undefined Behavior, because x's inner type is str, not [u8]
1780 /// Rc::get_mut_unchecked(&mut y).fill(0xff); // 0xff is invalid in UTF-8
1781 /// }
1782 /// println!("{}", &*x); // Invalid UTF-8 in a str
1783 /// ```
1784 /// Other `Rc` pointers to the same allocation must be to the exact same type, including lifetimes.
1785 /// ```no_run
1786 /// #![feature(get_mut_unchecked)]
1787 ///
1788 /// use std::rc::Rc;
1789 ///
1790 /// let x: Rc<&str> = Rc::new("Hello, world!");
1791 /// {
1792 /// let s = String::from("Oh, no!");
1793 /// let mut y: Rc<&str> = x.clone();
1794 /// unsafe {
1795 /// // this is Undefined Behavior, because x's inner type
1796 /// // is &'long str, not &'short str
1797 /// *Rc::get_mut_unchecked(&mut y) = &s;
1798 /// }
1799 /// }
1800 /// println!("{}", &*x); // Use-after-free
1801 /// ```
1802 #[inline]
1803 #[unstable(feature = "get_mut_unchecked", issue = "63292")]
1804 pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T {
1805 // We are careful to *not* create a reference covering the "count" fields, as
1806 // this would conflict with accesses to the reference counts (e.g. by `Weak`).
1807 unsafe { &mut (*this.ptr.as_ptr()).value }
1808 }
1809
1810 #[inline]
1811 #[stable(feature = "ptr_eq", since = "1.17.0")]
1812 /// Returns `true` if the two `Rc`s point to the same allocation in a vein similar to
1813 /// [`ptr::eq`]. This function ignores the metadata of `dyn Trait` pointers.
1814 ///
1815 /// # Examples
1816 ///
1817 /// ```
1818 /// use std::rc::Rc;
1819 ///
1820 /// let five = Rc::new(5);
1821 /// let same_five = Rc::clone(&five);
1822 /// let other_five = Rc::new(5);
1823 ///
1824 /// assert!(Rc::ptr_eq(&five, &same_five));
1825 /// assert!(!Rc::ptr_eq(&five, &other_five));
1826 /// ```
1827 pub fn ptr_eq(this: &Self, other: &Self) -> bool {
1828 ptr::addr_eq(this.ptr.as_ptr(), other.ptr.as_ptr())
1829 }
1830}
1831
1832#[cfg(not(no_global_oom_handling))]
1833impl<T: ?Sized + CloneToUninit, A: Allocator + Clone> Rc<T, A> {
1834 /// Makes a mutable reference into the given `Rc`.
1835 ///
1836 /// If there are other `Rc` pointers to the same allocation, then `make_mut` will
1837 /// [`clone`] the inner value to a new allocation to ensure unique ownership. This is also
1838 /// referred to as clone-on-write.
1839 ///
1840 /// However, if there are no other `Rc` pointers to this allocation, but some [`Weak`]
1841 /// pointers, then the [`Weak`] pointers will be disassociated and the inner value will not
1842 /// be cloned.
1843 ///
1844 /// See also [`get_mut`], which will fail rather than cloning the inner value
1845 /// or disassociating [`Weak`] pointers.
1846 ///
1847 /// [`clone`]: Clone::clone
1848 /// [`get_mut`]: Rc::get_mut
1849 ///
1850 /// # Examples
1851 ///
1852 /// ```
1853 /// use std::rc::Rc;
1854 ///
1855 /// let mut data = Rc::new(5);
1856 ///
1857 /// *Rc::make_mut(&mut data) += 1; // Won't clone anything
1858 /// let mut other_data = Rc::clone(&data); // Won't clone inner data
1859 /// *Rc::make_mut(&mut data) += 1; // Clones inner data
1860 /// *Rc::make_mut(&mut data) += 1; // Won't clone anything
1861 /// *Rc::make_mut(&mut other_data) *= 2; // Won't clone anything
1862 ///
1863 /// // Now `data` and `other_data` point to different allocations.
1864 /// assert_eq!(*data, 8);
1865 /// assert_eq!(*other_data, 12);
1866 /// ```
1867 ///
1868 /// [`Weak`] pointers will be disassociated:
1869 ///
1870 /// ```
1871 /// use std::rc::Rc;
1872 ///
1873 /// let mut data = Rc::new(75);
1874 /// let weak = Rc::downgrade(&data);
1875 ///
1876 /// assert!(75 == *data);
1877 /// assert!(75 == *weak.upgrade().unwrap());
1878 ///
1879 /// *Rc::make_mut(&mut data) += 1;
1880 ///
1881 /// assert!(76 == *data);
1882 /// assert!(weak.upgrade().is_none());
1883 /// ```
1884 #[inline]
1885 #[stable(feature = "rc_unique", since = "1.4.0")]
1886 pub fn make_mut(this: &mut Self) -> &mut T {
1887 let size_of_val = size_of_val::<T>(&**this);
1888
1889 if Rc::strong_count(this) != 1 {
1890 // Gotta clone the data, there are other Rcs.
1891
1892 let this_data_ref: &T = &**this;
1893 // `in_progress` drops the allocation if we panic before finishing initializing it.
1894 let mut in_progress: UniqueRcUninit<T, A> =
1895 UniqueRcUninit::new(this_data_ref, this.alloc.clone());
1896
1897 // Initialize with clone of this.
1898 let initialized_clone = unsafe {
1899 // Clone. If the clone panics, `in_progress` will be dropped and clean up.
1900 this_data_ref.clone_to_uninit(in_progress.data_ptr().cast());
1901 // Cast type of pointer, now that it is initialized.
1902 in_progress.into_rc()
1903 };
1904
1905 // Replace `this` with newly constructed Rc.
1906 *this = initialized_clone;
1907 } else if Rc::weak_count(this) != 0 {
1908 // Can just steal the data, all that's left is Weaks
1909
1910 // We don't need panic-protection like the above branch does, but we might as well
1911 // use the same mechanism.
1912 let mut in_progress: UniqueRcUninit<T, A> =
1913 UniqueRcUninit::new(&**this, this.alloc.clone());
1914 unsafe {
1915 // Initialize `in_progress` with move of **this.
1916 // We have to express this in terms of bytes because `T: ?Sized`; there is no
1917 // operation that just copies a value based on its `size_of_val()`.
1918 ptr::copy_nonoverlapping(
1919 ptr::from_ref(&**this).cast::<u8>(),
1920 in_progress.data_ptr().cast::<u8>(),
1921 size_of_val,
1922 );
1923
1924 this.inner().dec_strong();
1925 // Remove implicit strong-weak ref (no need to craft a fake
1926 // Weak here -- we know other Weaks can clean up for us)
1927 this.inner().dec_weak();
1928 // Replace `this` with newly constructed Rc that has the moved data.
1929 ptr::write(this, in_progress.into_rc());
1930 }
1931 }
1932 // This unsafety is ok because we're guaranteed that the pointer
1933 // returned is the *only* pointer that will ever be returned to T. Our
1934 // reference count is guaranteed to be 1 at this point, and we required
1935 // the `Rc<T>` itself to be `mut`, so we're returning the only possible
1936 // reference to the allocation.
1937 unsafe { &mut this.ptr.as_mut().value }
1938 }
1939}
1940
1941impl<T: Clone, A: Allocator> Rc<T, A> {
1942 /// If we have the only reference to `T` then unwrap it. Otherwise, clone `T` and return the
1943 /// clone.
1944 ///
1945 /// Assuming `rc_t` is of type `Rc<T>`, this function is functionally equivalent to
1946 /// `(*rc_t).clone()`, but will avoid cloning the inner value where possible.
1947 ///
1948 /// # Examples
1949 ///
1950 /// ```
1951 /// # use std::{ptr, rc::Rc};
1952 /// let inner = String::from("test");
1953 /// let ptr = inner.as_ptr();
1954 ///
1955 /// let rc = Rc::new(inner);
1956 /// let inner = Rc::unwrap_or_clone(rc);
1957 /// // The inner value was not cloned
1958 /// assert!(ptr::eq(ptr, inner.as_ptr()));
1959 ///
1960 /// let rc = Rc::new(inner);
1961 /// let rc2 = rc.clone();
1962 /// let inner = Rc::unwrap_or_clone(rc);
1963 /// // Because there were 2 references, we had to clone the inner value.
1964 /// assert!(!ptr::eq(ptr, inner.as_ptr()));
1965 /// // `rc2` is the last reference, so when we unwrap it we get back
1966 /// // the original `String`.
1967 /// let inner = Rc::unwrap_or_clone(rc2);
1968 /// assert!(ptr::eq(ptr, inner.as_ptr()));
1969 /// ```
1970 #[inline]
1971 #[stable(feature = "arc_unwrap_or_clone", since = "1.76.0")]
1972 pub fn unwrap_or_clone(this: Self) -> T {
1973 Rc::try_unwrap(this).unwrap_or_else(|rc| (*rc).clone())
1974 }
1975}
1976
1977impl<A: Allocator> Rc<dyn Any, A> {
1978 /// Attempts to downcast the `Rc<dyn Any>` to a concrete type.
1979 ///
1980 /// # Examples
1981 ///
1982 /// ```
1983 /// use std::any::Any;
1984 /// use std::rc::Rc;
1985 ///
1986 /// fn print_if_string(value: Rc<dyn Any>) {
1987 /// if let Ok(string) = value.downcast::<String>() {
1988 /// println!("String ({}): {}", string.len(), string);
1989 /// }
1990 /// }
1991 ///
1992 /// let my_string = "Hello World".to_string();
1993 /// print_if_string(Rc::new(my_string));
1994 /// print_if_string(Rc::new(0i8));
1995 /// ```
1996 #[inline]
1997 #[stable(feature = "rc_downcast", since = "1.29.0")]
1998 pub fn downcast<T: Any>(self) -> Result<Rc<T, A>, Self> {
1999 if (*self).is::<T>() {
2000 unsafe {
2001 let (ptr, alloc) = Rc::into_inner_with_allocator(self);
2002 Ok(Rc::from_inner_in(ptr.cast(), alloc))
2003 }
2004 } else {
2005 Err(self)
2006 }
2007 }
2008
2009 /// Downcasts the `Rc<dyn Any>` to a concrete type.
2010 ///
2011 /// For a safe alternative see [`downcast`].
2012 ///
2013 /// # Examples
2014 ///
2015 /// ```
2016 /// #![feature(downcast_unchecked)]
2017 ///
2018 /// use std::any::Any;
2019 /// use std::rc::Rc;
2020 ///
2021 /// let x: Rc<dyn Any> = Rc::new(1_usize);
2022 ///
2023 /// unsafe {
2024 /// assert_eq!(*x.downcast_unchecked::<usize>(), 1);
2025 /// }
2026 /// ```
2027 ///
2028 /// # Safety
2029 ///
2030 /// The contained value must be of type `T`. Calling this method
2031 /// with the incorrect type is *undefined behavior*.
    ///
2034 /// [`downcast`]: Self::downcast
2035 #[inline]
2036 #[unstable(feature = "downcast_unchecked", issue = "90850")]
2037 pub unsafe fn downcast_unchecked<T: Any>(self) -> Rc<T, A> {
2038 unsafe {
2039 let (ptr, alloc) = Rc::into_inner_with_allocator(self);
2040 Rc::from_inner_in(ptr.cast(), alloc)
2041 }
2042 }
2043}
2044
2045impl<T: ?Sized> Rc<T> {
2046 /// Allocates an `RcInner<T>` with sufficient space for
2047 /// a possibly-unsized inner value where the value has the layout provided.
2048 ///
2049 /// The function `mem_to_rc_inner` is called with the data pointer
    /// and must return a (potentially fat) pointer to the `RcInner<T>`.
2051 #[cfg(not(no_global_oom_handling))]
2052 unsafe fn allocate_for_layout(
2053 value_layout: Layout,
2054 allocate: impl FnOnce(Layout) -> Result<NonNull<[u8]>, AllocError>,
2055 mem_to_rc_inner: impl FnOnce(*mut u8) -> *mut RcInner<T>,
2056 ) -> *mut RcInner<T> {
2057 let layout = rc_inner_layout_for_value_layout(value_layout);
2058 unsafe {
2059 Rc::try_allocate_for_layout(value_layout, allocate, mem_to_rc_inner)
2060 .unwrap_or_else(|_| handle_alloc_error(layout))
2061 }
2062 }
2063
2064 /// Allocates an `RcInner<T>` with sufficient space for
2065 /// a possibly-unsized inner value where the value has the layout provided,
2066 /// returning an error if allocation fails.
2067 ///
2068 /// The function `mem_to_rc_inner` is called with the data pointer
    /// and must return a (potentially fat) pointer to the `RcInner<T>`.
2070 #[inline]
2071 unsafe fn try_allocate_for_layout(
2072 value_layout: Layout,
2073 allocate: impl FnOnce(Layout) -> Result<NonNull<[u8]>, AllocError>,
2074 mem_to_rc_inner: impl FnOnce(*mut u8) -> *mut RcInner<T>,
2075 ) -> Result<*mut RcInner<T>, AllocError> {
2076 let layout = rc_inner_layout_for_value_layout(value_layout);
2077
2078 // Allocate for the layout.
2079 let ptr = allocate(layout)?;
2080
2081 // Initialize the RcInner
2082 let inner = mem_to_rc_inner(ptr.as_non_null_ptr().as_ptr());
2083 unsafe {
2084 debug_assert_eq!(Layout::for_value_raw(inner), layout);
2085
2086 (&raw mut (*inner).strong).write(Cell::new(1));
2087 (&raw mut (*inner).weak).write(Cell::new(1));
2088 }
2089
2090 Ok(inner)
2091 }
2092}
2093
2094impl<T: ?Sized, A: Allocator> Rc<T, A> {
2095 /// Allocates an `RcInner<T>` with sufficient space for an unsized inner value
2096 #[cfg(not(no_global_oom_handling))]
2097 unsafe fn allocate_for_ptr_in(ptr: *const T, alloc: &A) -> *mut RcInner<T> {
2098 // Allocate for the `RcInner<T>` using the given value.
2099 unsafe {
2100 Rc::<T>::allocate_for_layout(
2101 Layout::for_value_raw(ptr),
2102 |layout| alloc.allocate(layout),
2103 |mem| mem.with_metadata_of(ptr as *const RcInner<T>),
2104 )
2105 }
2106 }
2107
2108 #[cfg(not(no_global_oom_handling))]
2109 fn from_box_in(src: Box<T, A>) -> Rc<T, A> {
2110 unsafe {
2111 let value_size = size_of_val(&*src);
2112 let ptr = Self::allocate_for_ptr_in(&*src, Box::allocator(&src));
2113
2114 // Copy value as bytes
2115 ptr::copy_nonoverlapping(
2116 (&raw const *src) as *const u8,
2117 (&raw mut (*ptr).value) as *mut u8,
2118 value_size,
2119 );
2120
2121 // Free the allocation without dropping its contents
2122 let (bptr, alloc) = Box::into_raw_with_allocator(src);
2123 let src = Box::from_raw_in(bptr as *mut mem::ManuallyDrop<T>, alloc.by_ref());
2124 drop(src);
2125
2126 Self::from_ptr_in(ptr, alloc)
2127 }
2128 }
2129}
2130
2131impl<T> Rc<[T]> {
2132 /// Allocates an `RcInner<[T]>` with the given length.
2133 #[cfg(not(no_global_oom_handling))]
2134 unsafe fn allocate_for_slice(len: usize) -> *mut RcInner<[T]> {
2135 unsafe {
2136 Self::allocate_for_layout(
2137 Layout::array::<T>(len).unwrap(),
2138 |layout| Global.allocate(layout),
2139 |mem| ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len) as *mut RcInner<[T]>,
2140 )
2141 }
2142 }
2143
2144 /// Copy elements from slice into newly allocated `Rc<[T]>`
2145 ///
2146 /// Unsafe because the caller must either take ownership or bind `T: Copy`
2147 #[cfg(not(no_global_oom_handling))]
2148 unsafe fn copy_from_slice(v: &[T]) -> Rc<[T]> {
2149 unsafe {
2150 let ptr = Self::allocate_for_slice(v.len());
2151 ptr::copy_nonoverlapping(v.as_ptr(), (&raw mut (*ptr).value) as *mut T, v.len());
2152 Self::from_ptr(ptr)
2153 }
2154 }
2155
2156 /// Constructs an `Rc<[T]>` from an iterator known to be of a certain size.
2157 ///
2158 /// Behavior is undefined should the size be wrong.
2159 #[cfg(not(no_global_oom_handling))]
2160 unsafe fn from_iter_exact(iter: impl Iterator<Item = T>, len: usize) -> Rc<[T]> {
2161 // Panic guard while cloning T elements.
2162 // In the event of a panic, elements that have been written
2163 // into the new RcInner will be dropped, then the memory freed.
2164 struct Guard<T> {
2165 mem: NonNull<u8>,
2166 elems: *mut T,
2167 layout: Layout,
2168 n_elems: usize,
2169 }
2170
2171 impl<T> Drop for Guard<T> {
2172 fn drop(&mut self) {
2173 unsafe {
2174 let slice = from_raw_parts_mut(self.elems, self.n_elems);
2175 ptr::drop_in_place(slice);
2176
2177 Global.deallocate(self.mem, self.layout);
2178 }
2179 }
2180 }
2181
2182 unsafe {
2183 let ptr = Self::allocate_for_slice(len);
2184
2185 let mem = ptr as *mut _ as *mut u8;
2186 let layout = Layout::for_value_raw(ptr);
2187
2188 // Pointer to first element
2189 let elems = (&raw mut (*ptr).value) as *mut T;
2190
2191 let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems, layout, n_elems: 0 };
2192
2193 for (i, item) in iter.enumerate() {
2194 ptr::write(elems.add(i), item);
2195 guard.n_elems += 1;
2196 }
2197
2198 // All clear. Forget the guard so it doesn't free the new RcInner.
2199 mem::forget(guard);
2200
2201 Self::from_ptr(ptr)
2202 }
2203 }
2204}
2205
2206impl<T, A: Allocator> Rc<[T], A> {
2207 /// Allocates an `RcInner<[T]>` with the given length.
2208 #[inline]
2209 #[cfg(not(no_global_oom_handling))]
2210 unsafe fn allocate_for_slice_in(len: usize, alloc: &A) -> *mut RcInner<[T]> {
2211 unsafe {
2212 Rc::<[T]>::allocate_for_layout(
2213 Layout::array::<T>(len).unwrap(),
2214 |layout| alloc.allocate(layout),
2215 |mem| ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len) as *mut RcInner<[T]>,
2216 )
2217 }
2218 }
2219}
2220
2221#[cfg(not(no_global_oom_handling))]
2222/// Specialization trait used for `From<&[T]>`.
2223trait RcFromSlice<T> {
2224 fn from_slice(slice: &[T]) -> Self;
2225}
2226
2227#[cfg(not(no_global_oom_handling))]
2228impl<T: Clone> RcFromSlice<T> for Rc<[T]> {
2229 #[inline]
2230 default fn from_slice(v: &[T]) -> Self {
2231 unsafe { Self::from_iter_exact(v.iter().cloned(), v.len()) }
2232 }
2233}
2234
2235#[cfg(not(no_global_oom_handling))]
2236impl<T: Copy> RcFromSlice<T> for Rc<[T]> {
2237 #[inline]
2238 fn from_slice(v: &[T]) -> Self {
2239 unsafe { Rc::copy_from_slice(v) }
2240 }
2241}
2242
2243#[stable(feature = "rust1", since = "1.0.0")]
2244impl<T: ?Sized, A: Allocator> Deref for Rc<T, A> {
2245 type Target = T;
2246
2247 #[inline(always)]
2248 fn deref(&self) -> &T {
2249 &self.inner().value
2250 }
2251}
2252
2253#[unstable(feature = "pin_coerce_unsized_trait", issue = "123430")]
2254unsafe impl<T: ?Sized, A: Allocator> PinCoerceUnsized for Rc<T, A> {}
2255
2256//#[unstable(feature = "unique_rc_arc", issue = "112566")]
2257#[unstable(feature = "pin_coerce_unsized_trait", issue = "123430")]
2258unsafe impl<T: ?Sized, A: Allocator> PinCoerceUnsized for UniqueRc<T, A> {}
2259
2260#[unstable(feature = "pin_coerce_unsized_trait", issue = "123430")]
2261unsafe impl<T: ?Sized, A: Allocator> PinCoerceUnsized for Weak<T, A> {}
2262
2263#[unstable(feature = "deref_pure_trait", issue = "87121")]
2264unsafe impl<T: ?Sized, A: Allocator> DerefPure for Rc<T, A> {}
2265
2266//#[unstable(feature = "unique_rc_arc", issue = "112566")]
2267#[unstable(feature = "deref_pure_trait", issue = "87121")]
2268unsafe impl<T: ?Sized, A: Allocator> DerefPure for UniqueRc<T, A> {}
2269
2270#[unstable(feature = "legacy_receiver_trait", issue = "none")]
2271impl<T: ?Sized> LegacyReceiver for Rc<T> {}
2272
2273#[stable(feature = "rust1", since = "1.0.0")]
2274unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Rc<T, A> {
2275 /// Drops the `Rc`.
2276 ///
2277 /// This will decrement the strong reference count. If the strong reference
2278 /// count reaches zero then the only other references (if any) are
2279 /// [`Weak`], so we `drop` the inner value.
2280 ///
2281 /// # Examples
2282 ///
2283 /// ```
2284 /// use std::rc::Rc;
2285 ///
2286 /// struct Foo;
2287 ///
2288 /// impl Drop for Foo {
2289 /// fn drop(&mut self) {
2290 /// println!("dropped!");
2291 /// }
2292 /// }
2293 ///
2294 /// let foo = Rc::new(Foo);
2295 /// let foo2 = Rc::clone(&foo);
2296 ///
2297 /// drop(foo); // Doesn't print anything
2298 /// drop(foo2); // Prints "dropped!"
2299 /// ```
2300 #[inline]
2301 fn drop(&mut self) {
2302 unsafe {
2303 self.inner().dec_strong();
2304 if self.inner().strong() == 0 {
2305 self.drop_slow();
2306 }
2307 }
2308 }
2309}
2310
2311#[stable(feature = "rust1", since = "1.0.0")]
2312impl<T: ?Sized, A: Allocator + Clone> Clone for Rc<T, A> {
2313 /// Makes a clone of the `Rc` pointer.
2314 ///
2315 /// This creates another pointer to the same allocation, increasing the
2316 /// strong reference count.
2317 ///
2318 /// # Examples
2319 ///
2320 /// ```
2321 /// use std::rc::Rc;
2322 ///
2323 /// let five = Rc::new(5);
2324 ///
2325 /// let _ = Rc::clone(&five);
2326 /// ```
2327 #[inline]
2328 fn clone(&self) -> Self {
2329 unsafe {
2330 self.inner().inc_strong();
2331 Self::from_inner_in(self.ptr, self.alloc.clone())
2332 }
2333 }
2334}
2335
2336#[cfg(not(no_global_oom_handling))]
2337#[stable(feature = "rust1", since = "1.0.0")]
2338impl<T: Default> Default for Rc<T> {
2339 /// Creates a new `Rc<T>`, with the `Default` value for `T`.
2340 ///
2341 /// # Examples
2342 ///
2343 /// ```
2344 /// use std::rc::Rc;
2345 ///
2346 /// let x: Rc<i32> = Default::default();
2347 /// assert_eq!(*x, 0);
2348 /// ```
2349 #[inline]
2350 fn default() -> Rc<T> {
2351 unsafe {
2352 Self::from_inner(
2353 Box::leak(Box::write(
2354 Box::new_uninit(),
2355 RcInner { strong: Cell::new(1), weak: Cell::new(1), value: T::default() },
2356 ))
2357 .into(),
2358 )
2359 }
2360 }
2361}
2362
2363#[cfg(not(no_global_oom_handling))]
2364#[stable(feature = "more_rc_default_impls", since = "1.80.0")]
2365impl Default for Rc<str> {
    /// Creates an empty `str` inside an `Rc`.
    ///
    /// This may or may not share an allocation with other `Rc`s on the same thread.
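    ///
    /// # Examples
    ///
    /// The returned `Rc<str>` dereferences to the empty string:
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let s: Rc<str> = Default::default();
    /// assert_eq!("", &s[..]);
    /// ```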
2369 #[inline]
2370 fn default() -> Self {
2371 let rc = Rc::<[u8]>::default();
2372 // `[u8]` has the same layout as `str`.
2373 unsafe { Rc::from_raw(Rc::into_raw(rc) as *const str) }
2374 }
2375}
2376
2377#[cfg(not(no_global_oom_handling))]
2378#[stable(feature = "more_rc_default_impls", since = "1.80.0")]
2379impl<T> Default for Rc<[T]> {
    /// Creates an empty `[T]` inside an `Rc`.
    ///
    /// This may or may not share an allocation with other `Rc`s on the same thread.
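    ///
    /// # Examples
    ///
    /// The returned `Rc<[T]>` dereferences to an empty slice:
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let xs: Rc<[i32]> = Default::default();
    /// assert!(xs.is_empty());
    /// ```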
2383 #[inline]
2384 fn default() -> Self {
2385 let arr: [T; 0] = [];
2386 Rc::from(arr)
2387 }
2388}
2389
2390#[stable(feature = "rust1", since = "1.0.0")]
2391trait RcEqIdent<T: ?Sized + PartialEq, A: Allocator> {
2392 fn eq(&self, other: &Rc<T, A>) -> bool;
2393 fn ne(&self, other: &Rc<T, A>) -> bool;
2394}
2395
2396#[stable(feature = "rust1", since = "1.0.0")]
2397impl<T: ?Sized + PartialEq, A: Allocator> RcEqIdent<T, A> for Rc<T, A> {
2398 #[inline]
2399 default fn eq(&self, other: &Rc<T, A>) -> bool {
2400 **self == **other
2401 }
2402
2403 #[inline]
2404 default fn ne(&self, other: &Rc<T, A>) -> bool {
2405 **self != **other
2406 }
2407}
2408
2409// Hack to allow specializing on `Eq` even though `Eq` has a method.
2410#[rustc_unsafe_specialization_marker]
2411pub(crate) trait MarkerEq: PartialEq<Self> {}
2412
2413impl<T: Eq> MarkerEq for T {}
2414
/// We're doing this specialization here, and not as a more general optimization on `&T`, because it
/// would otherwise add a cost to all equality checks on refs. We assume that `Rc`s are used to
/// store large values that are slow to clone and also expensive to compare for equality, so this
/// cost pays off more easily. It's also more likely for two `Rc` clones to point to
/// the same value than for two `&T`s to do so.
///
/// We can only do this when `T: Eq`, as a `PartialEq` might be deliberately irreflexive.
2422#[stable(feature = "rust1", since = "1.0.0")]
2423impl<T: ?Sized + MarkerEq, A: Allocator> RcEqIdent<T, A> for Rc<T, A> {
2424 #[inline]
2425 fn eq(&self, other: &Rc<T, A>) -> bool {
2426 Rc::ptr_eq(self, other) || **self == **other
2427 }
2428
2429 #[inline]
2430 fn ne(&self, other: &Rc<T, A>) -> bool {
2431 !Rc::ptr_eq(self, other) && **self != **other
2432 }
2433}
2434
2435#[stable(feature = "rust1", since = "1.0.0")]
2436impl<T: ?Sized + PartialEq, A: Allocator> PartialEq for Rc<T, A> {
2437 /// Equality for two `Rc`s.
2438 ///
2439 /// Two `Rc`s are equal if their inner values are equal, even if they are
    /// stored in different allocations.
2441 ///
2442 /// If `T` also implements `Eq` (implying reflexivity of equality),
2443 /// two `Rc`s that point to the same allocation are
2444 /// always equal.
2445 ///
2446 /// # Examples
2447 ///
2448 /// ```
2449 /// use std::rc::Rc;
2450 ///
2451 /// let five = Rc::new(5);
2452 ///
2453 /// assert!(five == Rc::new(5));
2454 /// ```
2455 #[inline]
2456 fn eq(&self, other: &Rc<T, A>) -> bool {
2457 RcEqIdent::eq(self, other)
2458 }
2459
2460 /// Inequality for two `Rc`s.
2461 ///
2462 /// Two `Rc`s are not equal if their inner values are not equal.
2463 ///
2464 /// If `T` also implements `Eq` (implying reflexivity of equality),
2465 /// two `Rc`s that point to the same allocation are
    /// never unequal.
2467 ///
2468 /// # Examples
2469 ///
2470 /// ```
2471 /// use std::rc::Rc;
2472 ///
2473 /// let five = Rc::new(5);
2474 ///
2475 /// assert!(five != Rc::new(6));
2476 /// ```
2477 #[inline]
2478 fn ne(&self, other: &Rc<T, A>) -> bool {
2479 RcEqIdent::ne(self, other)
2480 }
2481}
2482
2483#[stable(feature = "rust1", since = "1.0.0")]
2484impl<T: ?Sized + Eq, A: Allocator> Eq for Rc<T, A> {}
2485
2486#[stable(feature = "rust1", since = "1.0.0")]
2487impl<T: ?Sized + PartialOrd, A: Allocator> PartialOrd for Rc<T, A> {
2488 /// Partial comparison for two `Rc`s.
2489 ///
2490 /// The two are compared by calling `partial_cmp()` on their inner values.
2491 ///
2492 /// # Examples
2493 ///
2494 /// ```
2495 /// use std::rc::Rc;
2496 /// use std::cmp::Ordering;
2497 ///
2498 /// let five = Rc::new(5);
2499 ///
2500 /// assert_eq!(Some(Ordering::Less), five.partial_cmp(&Rc::new(6)));
2501 /// ```
2502 #[inline(always)]
2503 fn partial_cmp(&self, other: &Rc<T, A>) -> Option<Ordering> {
2504 (**self).partial_cmp(&**other)
2505 }
2506
2507 /// Less-than comparison for two `Rc`s.
2508 ///
2509 /// The two are compared by calling `<` on their inner values.
2510 ///
2511 /// # Examples
2512 ///
2513 /// ```
2514 /// use std::rc::Rc;
2515 ///
2516 /// let five = Rc::new(5);
2517 ///
2518 /// assert!(five < Rc::new(6));
2519 /// ```
2520 #[inline(always)]
2521 fn lt(&self, other: &Rc<T, A>) -> bool {
2522 **self < **other
2523 }
2524
2525 /// 'Less than or equal to' comparison for two `Rc`s.
2526 ///
2527 /// The two are compared by calling `<=` on their inner values.
2528 ///
2529 /// # Examples
2530 ///
2531 /// ```
2532 /// use std::rc::Rc;
2533 ///
2534 /// let five = Rc::new(5);
2535 ///
2536 /// assert!(five <= Rc::new(5));
2537 /// ```
2538 #[inline(always)]
2539 fn le(&self, other: &Rc<T, A>) -> bool {
2540 **self <= **other
2541 }
2542
2543 /// Greater-than comparison for two `Rc`s.
2544 ///
2545 /// The two are compared by calling `>` on their inner values.
2546 ///
2547 /// # Examples
2548 ///
2549 /// ```
2550 /// use std::rc::Rc;
2551 ///
2552 /// let five = Rc::new(5);
2553 ///
2554 /// assert!(five > Rc::new(4));
2555 /// ```
2556 #[inline(always)]
2557 fn gt(&self, other: &Rc<T, A>) -> bool {
2558 **self > **other
2559 }
2560
2561 /// 'Greater than or equal to' comparison for two `Rc`s.
2562 ///
2563 /// The two are compared by calling `>=` on their inner values.
2564 ///
2565 /// # Examples
2566 ///
2567 /// ```
2568 /// use std::rc::Rc;
2569 ///
2570 /// let five = Rc::new(5);
2571 ///
2572 /// assert!(five >= Rc::new(5));
2573 /// ```
2574 #[inline(always)]
2575 fn ge(&self, other: &Rc<T, A>) -> bool {
2576 **self >= **other
2577 }
2578}
2579
2580#[stable(feature = "rust1", since = "1.0.0")]
2581impl<T: ?Sized + Ord, A: Allocator> Ord for Rc<T, A> {
2582 /// Comparison for two `Rc`s.
2583 ///
2584 /// The two are compared by calling `cmp()` on their inner values.
2585 ///
2586 /// # Examples
2587 ///
2588 /// ```
2589 /// use std::rc::Rc;
2590 /// use std::cmp::Ordering;
2591 ///
2592 /// let five = Rc::new(5);
2593 ///
2594 /// assert_eq!(Ordering::Less, five.cmp(&Rc::new(6)));
2595 /// ```
2596 #[inline]
2597 fn cmp(&self, other: &Rc<T, A>) -> Ordering {
2598 (**self).cmp(&**other)
2599 }
2600}
2601
2602#[stable(feature = "rust1", since = "1.0.0")]
2603impl<T: ?Sized + Hash, A: Allocator> Hash for Rc<T, A> {
2604 fn hash<H: Hasher>(&self, state: &mut H) {
2605 (**self).hash(state);
2606 }
2607}
2608
2609#[stable(feature = "rust1", since = "1.0.0")]
2610impl<T: ?Sized + fmt::Display, A: Allocator> fmt::Display for Rc<T, A> {
2611 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
2612 fmt::Display::fmt(&**self, f)
2613 }
2614}
2615
2616#[stable(feature = "rust1", since = "1.0.0")]
2617impl<T: ?Sized + fmt::Debug, A: Allocator> fmt::Debug for Rc<T, A> {
2618 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
2619 fmt::Debug::fmt(&**self, f)
2620 }
2621}
2622
2623#[stable(feature = "rust1", since = "1.0.0")]
2624impl<T: ?Sized, A: Allocator> fmt::Pointer for Rc<T, A> {
2625 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
2626 fmt::Pointer::fmt(&(&raw const **self), f)
2627 }
2628}
2629
2630#[cfg(not(no_global_oom_handling))]
2631#[stable(feature = "from_for_ptrs", since = "1.6.0")]
2632impl<T> From<T> for Rc<T> {
    /// Converts a generic type `T` into an `Rc<T>`.
2634 ///
2635 /// The conversion allocates on the heap and moves `t`
2636 /// from the stack into it.
2637 ///
2638 /// # Example
2639 /// ```rust
2640 /// # use std::rc::Rc;
2641 /// let x = 5;
2642 /// let rc = Rc::new(5);
2643 ///
2644 /// assert_eq!(Rc::from(x), rc);
2645 /// ```
2646 fn from(t: T) -> Self {
2647 Rc::new(t)
2648 }
2649}
2650
2651#[cfg(not(no_global_oom_handling))]
2652#[stable(feature = "shared_from_array", since = "1.74.0")]
2653impl<T, const N: usize> From<[T; N]> for Rc<[T]> {
2654 /// Converts a [`[T; N]`](prim@array) into an `Rc<[T]>`.
2655 ///
2656 /// The conversion moves the array into a newly allocated `Rc`.
2657 ///
2658 /// # Example
2659 ///
2660 /// ```
2661 /// # use std::rc::Rc;
2662 /// let original: [i32; 3] = [1, 2, 3];
2663 /// let shared: Rc<[i32]> = Rc::from(original);
2664 /// assert_eq!(&[1, 2, 3], &shared[..]);
2665 /// ```
2666 #[inline]
2667 fn from(v: [T; N]) -> Rc<[T]> {
2668 Rc::<[T; N]>::from(v)
2669 }
2670}
2671
2672#[cfg(not(no_global_oom_handling))]
2673#[stable(feature = "shared_from_slice", since = "1.21.0")]
2674impl<T: Clone> From<&[T]> for Rc<[T]> {
2675 /// Allocates a reference-counted slice and fills it by cloning `v`'s items.
2676 ///
2677 /// # Example
2678 ///
2679 /// ```
2680 /// # use std::rc::Rc;
2681 /// let original: &[i32] = &[1, 2, 3];
2682 /// let shared: Rc<[i32]> = Rc::from(original);
2683 /// assert_eq!(&[1, 2, 3], &shared[..]);
2684 /// ```
2685 #[inline]
2686 fn from(v: &[T]) -> Rc<[T]> {
2687 <Self as RcFromSlice<T>>::from_slice(v)
2688 }
2689}
2690
2691#[cfg(not(no_global_oom_handling))]
2692#[stable(feature = "shared_from_mut_slice", since = "1.84.0")]
2693impl<T: Clone> From<&mut [T]> for Rc<[T]> {
2694 /// Allocates a reference-counted slice and fills it by cloning `v`'s items.
2695 ///
2696 /// # Example
2697 ///
2698 /// ```
2699 /// # use std::rc::Rc;
2700 /// let mut original = [1, 2, 3];
2701 /// let original: &mut [i32] = &mut original;
2702 /// let shared: Rc<[i32]> = Rc::from(original);
2703 /// assert_eq!(&[1, 2, 3], &shared[..]);
2704 /// ```
2705 #[inline]
2706 fn from(v: &mut [T]) -> Rc<[T]> {
2707 Rc::from(&*v)
2708 }
2709}
2710
2711#[cfg(not(no_global_oom_handling))]
2712#[stable(feature = "shared_from_slice", since = "1.21.0")]
2713impl From<&str> for Rc<str> {
2714 /// Allocates a reference-counted string slice and copies `v` into it.
2715 ///
2716 /// # Example
2717 ///
2718 /// ```
2719 /// # use std::rc::Rc;
2720 /// let shared: Rc<str> = Rc::from("statue");
2721 /// assert_eq!("statue", &shared[..]);
2722 /// ```
2723 #[inline]
2724 fn from(v: &str) -> Rc<str> {
2725 let rc = Rc::<[u8]>::from(v.as_bytes());
2726 unsafe { Rc::from_raw(Rc::into_raw(rc) as *const str) }
2727 }
2728}
2729
2730#[cfg(not(no_global_oom_handling))]
2731#[stable(feature = "shared_from_mut_slice", since = "1.84.0")]
2732impl From<&mut str> for Rc<str> {
2733 /// Allocates a reference-counted string slice and copies `v` into it.
2734 ///
2735 /// # Example
2736 ///
2737 /// ```
2738 /// # use std::rc::Rc;
2739 /// let mut original = String::from("statue");
2740 /// let original: &mut str = &mut original;
2741 /// let shared: Rc<str> = Rc::from(original);
2742 /// assert_eq!("statue", &shared[..]);
2743 /// ```
2744 #[inline]
2745 fn from(v: &mut str) -> Rc<str> {
2746 Rc::from(&*v)
2747 }
2748}
2749
2750#[cfg(not(no_global_oom_handling))]
2751#[stable(feature = "shared_from_slice", since = "1.21.0")]
2752impl From<String> for Rc<str> {
2753 /// Allocates a reference-counted string slice and copies `v` into it.
2754 ///
2755 /// # Example
2756 ///
2757 /// ```
2758 /// # use std::rc::Rc;
2759 /// let original: String = "statue".to_owned();
2760 /// let shared: Rc<str> = Rc::from(original);
2761 /// assert_eq!("statue", &shared[..]);
2762 /// ```
2763 #[inline]
2764 fn from(v: String) -> Rc<str> {
2765 Rc::from(&v[..])
2766 }
2767}
2768
2769#[cfg(not(no_global_oom_handling))]
2770#[stable(feature = "shared_from_slice", since = "1.21.0")]
2771impl<T: ?Sized, A: Allocator> From<Box<T, A>> for Rc<T, A> {
    /// Moves a boxed object to a new, reference-counted allocation.
2773 ///
2774 /// # Example
2775 ///
2776 /// ```
2777 /// # use std::rc::Rc;
2778 /// let original: Box<i32> = Box::new(1);
2779 /// let shared: Rc<i32> = Rc::from(original);
2780 /// assert_eq!(1, *shared);
2781 /// ```
2782 #[inline]
2783 fn from(v: Box<T, A>) -> Rc<T, A> {
2784 Rc::from_box_in(v)
2785 }
2786}
2787
2788#[cfg(not(no_global_oom_handling))]
2789#[stable(feature = "shared_from_slice", since = "1.21.0")]
2790impl<T, A: Allocator> From<Vec<T, A>> for Rc<[T], A> {
2791 /// Allocates a reference-counted slice and moves `v`'s items into it.
2792 ///
2793 /// # Example
2794 ///
2795 /// ```
2796 /// # use std::rc::Rc;
2797 /// let unique: Vec<i32> = vec![1, 2, 3];
2798 /// let shared: Rc<[i32]> = Rc::from(unique);
2799 /// assert_eq!(&[1, 2, 3], &shared[..]);
2800 /// ```
2801 #[inline]
2802 fn from(v: Vec<T, A>) -> Rc<[T], A> {
2803 unsafe {
2804 let (vec_ptr, len, cap, alloc) = v.into_raw_parts_with_alloc();
2805
2806 let rc_ptr = Self::allocate_for_slice_in(len, &alloc);
2807 ptr::copy_nonoverlapping(vec_ptr, (&raw mut (*rc_ptr).value) as *mut T, len);
2808
2809 // Create a `Vec<T, &A>` with length 0, to deallocate the buffer
2810 // without dropping its contents or the allocator
2811 let _ = Vec::from_raw_parts_in(vec_ptr, 0, cap, &alloc);
2812
2813 Self::from_ptr_in(rc_ptr, alloc)
2814 }
2815 }
2816}
2817
2818#[stable(feature = "shared_from_cow", since = "1.45.0")]
2819impl<'a, B> From<Cow<'a, B>> for Rc<B>
2820where
2821 B: ToOwned + ?Sized,
2822 Rc<B>: From<&'a B> + From<B::Owned>,
2823{
2824 /// Creates a reference-counted pointer from a clone-on-write pointer by
2825 /// copying its content.
2826 ///
2827 /// # Example
2828 ///
2829 /// ```rust
2830 /// # use std::rc::Rc;
2831 /// # use std::borrow::Cow;
2832 /// let cow: Cow<'_, str> = Cow::Borrowed("eggplant");
2833 /// let shared: Rc<str> = Rc::from(cow);
2834 /// assert_eq!("eggplant", &shared[..]);
2835 /// ```
2836 #[inline]
2837 fn from(cow: Cow<'a, B>) -> Rc<B> {
2838 match cow {
2839 Cow::Borrowed(s) => Rc::from(s),
2840 Cow::Owned(s) => Rc::from(s),
2841 }
2842 }
2843}
2844
2845#[stable(feature = "shared_from_str", since = "1.62.0")]
2846impl From<Rc<str>> for Rc<[u8]> {
2847 /// Converts a reference-counted string slice into a byte slice.
2848 ///
2849 /// # Example
2850 ///
2851 /// ```
2852 /// # use std::rc::Rc;
2853 /// let string: Rc<str> = Rc::from("eggplant");
2854 /// let bytes: Rc<[u8]> = Rc::from(string);
2855 /// assert_eq!("eggplant".as_bytes(), bytes.as_ref());
2856 /// ```
2857 #[inline]
2858 fn from(rc: Rc<str>) -> Self {
2859 // SAFETY: `str` has the same layout as `[u8]`.
2860 unsafe { Rc::from_raw(Rc::into_raw(rc) as *const [u8]) }
2861 }
2862}
2863
2864#[stable(feature = "boxed_slice_try_from", since = "1.43.0")]
2865impl<T, A: Allocator, const N: usize> TryFrom<Rc<[T], A>> for Rc<[T; N], A> {
2866 type Error = Rc<[T], A>;
2867
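    /// Attempts to convert a reference-counted slice into a reference-counted array
    /// of length `N`, returning the original `Rc<[T], A>` if the lengths don't match.
    ///
    /// # Examples
    ///
    /// A sketch of the succeeding and failing cases:
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let slice: Rc<[i32]> = Rc::from([1, 2, 3]);
    /// let array = Rc::<[i32; 3]>::try_from(slice).unwrap();
    /// assert_eq!(*array, [1, 2, 3]);
    ///
    /// // Length mismatch: the original slice is handed back in the `Err`.
    /// let slice: Rc<[i32]> = Rc::from([1, 2, 3]);
    /// assert!(Rc::<[i32; 4]>::try_from(slice).is_err());
    /// ```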
2868 fn try_from(boxed_slice: Rc<[T], A>) -> Result<Self, Self::Error> {
2869 if boxed_slice.len() == N {
2870 let (ptr, alloc) = Rc::into_inner_with_allocator(boxed_slice);
2871 Ok(unsafe { Rc::from_inner_in(ptr.cast(), alloc) })
2872 } else {
2873 Err(boxed_slice)
2874 }
2875 }
2876}
2877
2878#[cfg(not(no_global_oom_handling))]
2879#[stable(feature = "shared_from_iter", since = "1.37.0")]
2880impl<T> FromIterator<T> for Rc<[T]> {
2881 /// Takes each element in the `Iterator` and collects it into an `Rc<[T]>`.
2882 ///
2883 /// # Performance characteristics
2884 ///
2885 /// ## The general case
2886 ///
2887 /// In the general case, collecting into `Rc<[T]>` is done by first
2888 /// collecting into a `Vec<T>`. That is, when writing the following:
2889 ///
2890 /// ```rust
2891 /// # use std::rc::Rc;
2892 /// let evens: Rc<[u8]> = (0..10).filter(|&x| x % 2 == 0).collect();
2893 /// # assert_eq!(&*evens, &[0, 2, 4, 6, 8]);
2894 /// ```
2895 ///
2896 /// this behaves as if we wrote:
2897 ///
2898 /// ```rust
2899 /// # use std::rc::Rc;
2900 /// let evens: Rc<[u8]> = (0..10).filter(|&x| x % 2 == 0)
2901 /// .collect::<Vec<_>>() // The first set of allocations happens here.
2902 /// .into(); // A second allocation for `Rc<[T]>` happens here.
2903 /// # assert_eq!(&*evens, &[0, 2, 4, 6, 8]);
2904 /// ```
2905 ///
2906 /// This will allocate as many times as needed for constructing the `Vec<T>`
2907 /// and then it will allocate once for turning the `Vec<T>` into the `Rc<[T]>`.
2908 ///
2909 /// ## Iterators of known length
2910 ///
2911 /// When your `Iterator` implements `TrustedLen` and is of an exact size,
2912 /// a single allocation will be made for the `Rc<[T]>`. For example:
2913 ///
2914 /// ```rust
2915 /// # use std::rc::Rc;
2916 /// let evens: Rc<[u8]> = (0..10).collect(); // Just a single allocation happens here.
2917 /// # assert_eq!(&*evens, &*(0..10).collect::<Vec<_>>());
2918 /// ```
2919 fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
2920 ToRcSlice::to_rc_slice(iter.into_iter())
2921 }
2922}
2923
2924/// Specialization trait used for collecting into `Rc<[T]>`.
2925#[cfg(not(no_global_oom_handling))]
2926trait ToRcSlice<T>: Iterator<Item = T> + Sized {
2927 fn to_rc_slice(self) -> Rc<[T]>;
2928}
2929
2930#[cfg(not(no_global_oom_handling))]
2931impl<T, I: Iterator<Item = T>> ToRcSlice<T> for I {
2932 default fn to_rc_slice(self) -> Rc<[T]> {
2933 self.collect::<Vec<T>>().into()
2934 }
2935}
2936
2937#[cfg(not(no_global_oom_handling))]
2938impl<T, I: iter::TrustedLen<Item = T>> ToRcSlice<T> for I {
2939 fn to_rc_slice(self) -> Rc<[T]> {
2940 // This is the case for a `TrustedLen` iterator.
2941 let (low, high) = self.size_hint();
2942 if let Some(high) = high {
2943 debug_assert_eq!(
2944 low,
2945 high,
2946 "TrustedLen iterator's size hint is not exact: {:?}",
2947 (low, high)
2948 );
2949
2950 unsafe {
                // SAFETY: `TrustedLen` guarantees the size hint is exact (checked by the
                // `debug_assert_eq!` above), so the iterator yields exactly `low` elements.
2952 Rc::from_iter_exact(self, low)
2953 }
2954 } else {
2955 // TrustedLen contract guarantees that `upper_bound == None` implies an iterator
2956 // length exceeding `usize::MAX`.
2957 // The default implementation would collect into a vec which would panic.
2958 // Thus we panic here immediately without invoking `Vec` code.
2959 panic!("capacity overflow");
2960 }
2961 }
2962}
2963
2964/// `Weak` is a version of [`Rc`] that holds a non-owning reference to the
2965/// managed allocation.
2966///
2967/// The allocation is accessed by calling [`upgrade`] on the `Weak`
2968/// pointer, which returns an <code>[Option]<[Rc]\<T>></code>.
2969///
2970/// Since a `Weak` reference does not count towards ownership, it will not
2971/// prevent the value stored in the allocation from being dropped, and `Weak` itself makes no
2972/// guarantees about the value still being present. Thus it may return [`None`]
2973/// when [`upgrade`]d. Note however that a `Weak` reference *does* prevent the allocation
2974/// itself (the backing store) from being deallocated.
2975///
2976/// A `Weak` pointer is useful for keeping a temporary reference to the allocation
2977/// managed by [`Rc`] without preventing its inner value from being dropped. It is also used to
2978/// prevent circular references between [`Rc`] pointers, since mutual owning references
2979/// would never allow either [`Rc`] to be dropped. For example, a tree could
2980/// have strong [`Rc`] pointers from parent nodes to children, and `Weak`
2981/// pointers from children back to their parents.
2982///
2983/// The typical way to obtain a `Weak` pointer is to call [`Rc::downgrade`].
2984///
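/// # Examples
///
/// A minimal example of obtaining a `Weak` and watching it expire once the last
/// strong pointer is dropped:
///
/// ```
/// use std::rc::Rc;
///
/// let strong = Rc::new("hello".to_owned());
/// let weak = Rc::downgrade(&strong);
///
/// // A strong pointer still exists, so the upgrade succeeds.
/// assert!(weak.upgrade().is_some());
///
/// drop(strong);
/// // The inner value has been dropped; the upgrade now fails.
/// assert!(weak.upgrade().is_none());
/// ```
///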
2985/// [`upgrade`]: Weak::upgrade
2986#[stable(feature = "rc_weak", since = "1.4.0")]
2987#[cfg_attr(not(test), rustc_diagnostic_item = "RcWeak")]
2988pub struct Weak<
2989 T: ?Sized,
2990 #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
2991> {
2992 // This is a `NonNull` to allow optimizing the size of this type in enums,
2993 // but it is not necessarily a valid pointer.
2994 // `Weak::new` sets this to `usize::MAX` so that it doesn’t need
2995 // to allocate space on the heap. That's not a value a real pointer
2996 // will ever have because RcInner has alignment at least 2.
2997 // This is only possible when `T: Sized`; unsized `T` never dangle.
2998 ptr: NonNull<RcInner<T>>,
2999 alloc: A,
3000}
3001
3002#[stable(feature = "rc_weak", since = "1.4.0")]
3003impl<T: ?Sized, A: Allocator> !Send for Weak<T, A> {}
3004#[stable(feature = "rc_weak", since = "1.4.0")]
3005impl<T: ?Sized, A: Allocator> !Sync for Weak<T, A> {}
3006
3007#[unstable(feature = "coerce_unsized", issue = "18598")]
3008impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<Weak<U, A>> for Weak<T, A> {}
3009
3010#[unstable(feature = "dispatch_from_dyn", issue = "none")]
3011impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Weak<U>> for Weak<T> {}
3012
3013impl<T> Weak<T> {
3014 /// Constructs a new `Weak<T>`, without allocating any memory.
3015 /// Calling [`upgrade`] on the return value always gives [`None`].
3016 ///
3017 /// [`upgrade`]: Weak::upgrade
3018 ///
3019 /// # Examples
3020 ///
3021 /// ```
3022 /// use std::rc::Weak;
3023 ///
3024 /// let empty: Weak<i64> = Weak::new();
3025 /// assert!(empty.upgrade().is_none());
3026 /// ```
3027 #[inline]
3028 #[stable(feature = "downgraded_weak", since = "1.10.0")]
3029 #[rustc_const_stable(feature = "const_weak_new", since = "1.73.0")]
3030 #[must_use]
3031 pub const fn new() -> Weak<T> {
3032 Weak { ptr: NonNull::without_provenance(NonZeroUsize::MAX), alloc: Global }
3033 }
3034}
3035
3036impl<T, A: Allocator> Weak<T, A> {
    /// Constructs a new `Weak<T, A>` in the provided allocator, without allocating
    /// any memory.
    /// Calling [`upgrade`] on the return value always gives [`None`].
    ///
    /// [`upgrade`]: Weak::upgrade
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::alloc::System;
    /// use std::rc::Weak;
    ///
    /// let empty: Weak<i64, _> = Weak::new_in(System);
    /// assert!(empty.upgrade().is_none());
    /// ```
3051 #[inline]
3052 #[unstable(feature = "allocator_api", issue = "32838")]
3053 pub fn new_in(alloc: A) -> Weak<T, A> {
3054 Weak { ptr: NonNull::without_provenance(NonZeroUsize::MAX), alloc }
3055 }
3056}
3057
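/// Returns `true` if `ptr` carries the `usize::MAX` sentinel address used by
/// `Weak::new`, i.e. it is dangling and does not point at an allocated `RcInner`.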
3058pub(crate) fn is_dangling<T: ?Sized>(ptr: *const T) -> bool {
3059 (ptr.cast::<()>()).addr() == usize::MAX
3060}
3061
3062/// Helper type to allow accessing the reference counts without
3063/// making any assertions about the data field.
3064struct WeakInner<'a> {
3065 weak: &'a Cell<usize>,
3066 strong: &'a Cell<usize>,
3067}
3068
3069impl<T: ?Sized> Weak<T> {
3070 /// Converts a raw pointer previously created by [`into_raw`] back into `Weak<T>`.
3071 ///
3072 /// This can be used to safely get a strong reference (by calling [`upgrade`]
    /// later) or to release the weak reference by dropping the `Weak<T>`.
3074 ///
3075 /// It takes ownership of one weak reference (with the exception of pointers created by [`new`],
3076 /// as these don't own anything; the method still works on them).
3077 ///
3078 /// # Safety
3079 ///
    /// The pointer must have originated from [`into_raw`] and must still own its potential
3081 /// weak reference, and `ptr` must point to a block of memory allocated by the global allocator.
3082 ///
3083 /// It is allowed for the strong count to be 0 at the time of calling this. Nevertheless, this
3084 /// takes ownership of one weak reference currently represented as a raw pointer (the weak
3085 /// count is not modified by this operation) and therefore it must be paired with a previous
3086 /// call to [`into_raw`].
3087 ///
3088 /// # Examples
3089 ///
3090 /// ```
3091 /// use std::rc::{Rc, Weak};
3092 ///
3093 /// let strong = Rc::new("hello".to_owned());
3094 ///
3095 /// let raw_1 = Rc::downgrade(&strong).into_raw();
3096 /// let raw_2 = Rc::downgrade(&strong).into_raw();
3097 ///
3098 /// assert_eq!(2, Rc::weak_count(&strong));
3099 ///
3100 /// assert_eq!("hello", &*unsafe { Weak::from_raw(raw_1) }.upgrade().unwrap());
3101 /// assert_eq!(1, Rc::weak_count(&strong));
3102 ///
3103 /// drop(strong);
3104 ///
3105 /// // Decrement the last weak count.
3106 /// assert!(unsafe { Weak::from_raw(raw_2) }.upgrade().is_none());
3107 /// ```
3108 ///
3109 /// [`into_raw`]: Weak::into_raw
3110 /// [`upgrade`]: Weak::upgrade
3111 /// [`new`]: Weak::new
3112 #[inline]
3113 #[stable(feature = "weak_into_raw", since = "1.45.0")]
3114 pub unsafe fn from_raw(ptr: *const T) -> Self {
3115 unsafe { Self::from_raw_in(ptr, Global) }
3116 }
3117}
3118
3119impl<T: ?Sized, A: Allocator> Weak<T, A> {
3120 /// Returns a reference to the underlying allocator.
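    ///
    /// # Examples
    ///
    /// A brief sketch using the `System` allocator:
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::alloc::System;
    /// use std::rc::Rc;
    ///
    /// let strong = Rc::new_in(5, System);
    /// let weak = Rc::downgrade(&strong);
    /// let _alloc: &System = weak.allocator();
    /// ```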
3121 #[inline]
3122 #[unstable(feature = "allocator_api", issue = "32838")]
3123 pub fn allocator(&self) -> &A {
3124 &self.alloc
3125 }
3126
3127 /// Returns a raw pointer to the object `T` pointed to by this `Weak<T>`.
3128 ///
3129 /// The pointer is valid only if there are some strong references. The pointer may be dangling,
3130 /// unaligned or even [`null`] otherwise.
3131 ///
3132 /// # Examples
3133 ///
3134 /// ```
3135 /// use std::rc::Rc;
3136 /// use std::ptr;
3137 ///
3138 /// let strong = Rc::new("hello".to_owned());
3139 /// let weak = Rc::downgrade(&strong);
3140 /// // Both point to the same object
3141 /// assert!(ptr::eq(&*strong, weak.as_ptr()));
3142 /// // The strong here keeps it alive, so we can still access the object.
3143 /// assert_eq!("hello", unsafe { &*weak.as_ptr() });
3144 ///
3145 /// drop(strong);
3146 /// // But not any more. We can do weak.as_ptr(), but accessing the pointer would lead to
3147 /// // undefined behavior.
3148 /// // assert_eq!("hello", unsafe { &*weak.as_ptr() });
3149 /// ```
3150 ///
3151 /// [`null`]: ptr::null
3152 #[must_use]
3153 #[stable(feature = "rc_as_ptr", since = "1.45.0")]
3154 pub fn as_ptr(&self) -> *const T {
3155 let ptr: *mut RcInner<T> = NonNull::as_ptr(self.ptr);
3156
3157 if is_dangling(ptr) {
3158 // If the pointer is dangling, we return the sentinel directly. This cannot be
3159 // a valid payload address, as the payload is at least as aligned as RcInner (usize).
3160 ptr as *const T
3161 } else {
3162 // SAFETY: if is_dangling returns false, then the pointer is dereferenceable.
3163 // The payload may be dropped at this point, and we have to maintain provenance,
3164 // so use raw pointer manipulation.
3165 unsafe { &raw mut (*ptr).value }
3166 }
3167 }
3168
3169 /// Consumes the `Weak<T>` and turns it into a raw pointer.
3170 ///
3171 /// This converts the weak pointer into a raw pointer, while still preserving the ownership of
3172 /// one weak reference (the weak count is not modified by this operation). It can be turned
3173 /// back into the `Weak<T>` with [`from_raw`].
3174 ///
3175 /// The same restrictions of accessing the target of the pointer as with
3176 /// [`as_ptr`] apply.
3177 ///
3178 /// # Examples
3179 ///
3180 /// ```
3181 /// use std::rc::{Rc, Weak};
3182 ///
3183 /// let strong = Rc::new("hello".to_owned());
3184 /// let weak = Rc::downgrade(&strong);
3185 /// let raw = weak.into_raw();
3186 ///
3187 /// assert_eq!(1, Rc::weak_count(&strong));
3188 /// assert_eq!("hello", unsafe { &*raw });
3189 ///
3190 /// drop(unsafe { Weak::from_raw(raw) });
3191 /// assert_eq!(0, Rc::weak_count(&strong));
3192 /// ```
3193 ///
3194 /// [`from_raw`]: Weak::from_raw
3195 /// [`as_ptr`]: Weak::as_ptr
3196 #[must_use = "losing the pointer will leak memory"]
3197 #[stable(feature = "weak_into_raw", since = "1.45.0")]
3198 pub fn into_raw(self) -> *const T {
3199 mem::ManuallyDrop::new(self).as_ptr()
3200 }
3201
3202 /// Consumes the `Weak<T>`, returning the wrapped pointer and allocator.
3203 ///
3204 /// This converts the weak pointer into a raw pointer, while still preserving the ownership of
3205 /// one weak reference (the weak count is not modified by this operation). It can be turned
3206 /// back into the `Weak<T>` with [`from_raw_in`].
3207 ///
3208 /// The same restrictions of accessing the target of the pointer as with
3209 /// [`as_ptr`] apply.
3210 ///
3211 /// # Examples
3212 ///
3213 /// ```
3214 /// #![feature(allocator_api)]
3215 /// use std::rc::{Rc, Weak};
3216 /// use std::alloc::System;
3217 ///
3218 /// let strong = Rc::new_in("hello".to_owned(), System);
3219 /// let weak = Rc::downgrade(&strong);
3220 /// let (raw, alloc) = weak.into_raw_with_allocator();
3221 ///
3222 /// assert_eq!(1, Rc::weak_count(&strong));
3223 /// assert_eq!("hello", unsafe { &*raw });
3224 ///
3225 /// drop(unsafe { Weak::from_raw_in(raw, alloc) });
3226 /// assert_eq!(0, Rc::weak_count(&strong));
3227 /// ```
3228 ///
3229 /// [`from_raw_in`]: Weak::from_raw_in
3230 /// [`as_ptr`]: Weak::as_ptr
3231 #[must_use = "losing the pointer will leak memory"]
3232 #[inline]
3233 #[unstable(feature = "allocator_api", issue = "32838")]
3234 pub fn into_raw_with_allocator(self) -> (*const T, A) {
3235 let this = mem::ManuallyDrop::new(self);
3236 let result = this.as_ptr();
3237 // Safety: `this` is ManuallyDrop so the allocator will not be double-dropped
3238 let alloc = unsafe { ptr::read(&this.alloc) };
3239 (result, alloc)
3240 }
3241
3242 /// Converts a raw pointer previously created by [`into_raw`] back into `Weak<T>`.
3243 ///
3244 /// This can be used to safely get a strong reference (by calling [`upgrade`]
    /// later) or to release the weak reference by dropping the `Weak<T>`.
3246 ///
3247 /// It takes ownership of one weak reference (with the exception of pointers created by [`new`],
3248 /// as these don't own anything; the method still works on them).
3249 ///
3250 /// # Safety
3251 ///
    /// The pointer must have originated from [`into_raw`] and must still own its potential
3253 /// weak reference, and `ptr` must point to a block of memory allocated by `alloc`.
3254 ///
3255 /// It is allowed for the strong count to be 0 at the time of calling this. Nevertheless, this
3256 /// takes ownership of one weak reference currently represented as a raw pointer (the weak
3257 /// count is not modified by this operation) and therefore it must be paired with a previous
3258 /// call to [`into_raw`].
3259 ///
3260 /// # Examples
3261 ///
3262 /// ```
3263 /// use std::rc::{Rc, Weak};
3264 ///
3265 /// let strong = Rc::new("hello".to_owned());
3266 ///
3267 /// let raw_1 = Rc::downgrade(&strong).into_raw();
3268 /// let raw_2 = Rc::downgrade(&strong).into_raw();
3269 ///
3270 /// assert_eq!(2, Rc::weak_count(&strong));
3271 ///
3272 /// assert_eq!("hello", &*unsafe { Weak::from_raw(raw_1) }.upgrade().unwrap());
3273 /// assert_eq!(1, Rc::weak_count(&strong));
3274 ///
3275 /// drop(strong);
3276 ///
3277 /// // Decrement the last weak count.
3278 /// assert!(unsafe { Weak::from_raw(raw_2) }.upgrade().is_none());
3279 /// ```
3280 ///
3281 /// [`into_raw`]: Weak::into_raw
3282 /// [`upgrade`]: Weak::upgrade
3283 /// [`new`]: Weak::new
3284 #[inline]
3285 #[unstable(feature = "allocator_api", issue = "32838")]
3286 pub unsafe fn from_raw_in(ptr: *const T, alloc: A) -> Self {
3287 // See Weak::as_ptr for context on how the input pointer is derived.
3288
3289 let ptr = if is_dangling(ptr) {
3290 // This is a dangling Weak.
3291 ptr as *mut RcInner<T>
3292 } else {
3293 // Otherwise, we're guaranteed the pointer came from a nondangling Weak.
3294 // SAFETY: data_offset is safe to call, as ptr references a real (potentially dropped) T.
3295 let offset = unsafe { data_offset(ptr) };
3296 // Thus, we reverse the offset to get the whole RcInner.
3297 // SAFETY: the pointer originated from a Weak, so this offset is safe.
3298 unsafe { ptr.byte_sub(offset) as *mut RcInner<T> }
3299 };
3300
3301 // SAFETY: we now have recovered the original Weak pointer, so can create the Weak.
3302 Weak { ptr: unsafe { NonNull::new_unchecked(ptr) }, alloc }
3303 }
3304
3305 /// Attempts to upgrade the `Weak` pointer to an [`Rc`], delaying
3306 /// dropping of the inner value if successful.
3307 ///
3308 /// Returns [`None`] if the inner value has since been dropped.
3309 ///
3310 /// # Examples
3311 ///
3312 /// ```
3313 /// use std::rc::Rc;
3314 ///
3315 /// let five = Rc::new(5);
3316 ///
3317 /// let weak_five = Rc::downgrade(&five);
3318 ///
3319 /// let strong_five: Option<Rc<_>> = weak_five.upgrade();
3320 /// assert!(strong_five.is_some());
3321 ///
3322 /// // Destroy all strong pointers.
3323 /// drop(strong_five);
3324 /// drop(five);
3325 ///
3326 /// assert!(weak_five.upgrade().is_none());
3327 /// ```
3328 #[must_use = "this returns a new `Rc`, \
3329 without modifying the original weak pointer"]
3330 #[stable(feature = "rc_weak", since = "1.4.0")]
3331 pub fn upgrade(&self) -> Option<Rc<T, A>>
3332 where
3333 A: Clone,
3334 {
3335 let inner = self.inner()?;
3336
3337 if inner.strong() == 0 {
3338 None
3339 } else {
3340 unsafe {
3341 inner.inc_strong();
3342 Some(Rc::from_inner_in(self.ptr, self.alloc.clone()))
3343 }
3344 }
3345 }
3346
3347 /// Gets the number of strong (`Rc`) pointers pointing to this allocation.
3348 ///
3349 /// If `self` was created using [`Weak::new`], this will return 0.
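    ///
    /// # Examples
    ///
    /// Counting the strong pointers before and after the last `Rc` is dropped:
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    /// let weak_five = Rc::downgrade(&five);
    /// assert_eq!(1, weak_five.strong_count());
    ///
    /// drop(five);
    /// assert_eq!(0, weak_five.strong_count());
    /// ```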
3350 #[must_use]
3351 #[stable(feature = "weak_counts", since = "1.41.0")]
3352 pub fn strong_count(&self) -> usize {
3353 if let Some(inner) = self.inner() { inner.strong() } else { 0 }
3354 }
3355
3356 /// Gets the number of `Weak` pointers pointing to this allocation.
3357 ///
3358 /// If no strong pointers remain, this will return zero.
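    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    /// let weak_five = Rc::downgrade(&five);
    /// assert_eq!(1, weak_five.weak_count());
    ///
    /// drop(five);
    /// // No strong pointers remain, so the count reports zero.
    /// assert_eq!(0, weak_five.weak_count());
    /// ```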
    #[must_use]
    #[stable(feature = "weak_counts", since = "1.41.0")]
    pub fn weak_count(&self) -> usize {
        if let Some(inner) = self.inner() {
            if inner.strong() > 0 {
                inner.weak() - 1 // subtract the implicit weak ptr
            } else {
                0
            }
        } else {
            0
        }
    }

    /// Returns `None` when the pointer is dangling and there is no allocated `RcInner`
    /// (i.e., when this `Weak` was created by `Weak::new`).
    #[inline]
    fn inner(&self) -> Option<WeakInner<'_>> {
        if is_dangling(self.ptr.as_ptr()) {
            None
        } else {
            // We are careful to *not* create a reference covering the "data" field, as
            // the field may be mutated concurrently (for example, if the last `Rc`
            // is dropped, the data field will be dropped in-place).
            Some(unsafe {
                let ptr = self.ptr.as_ptr();
                WeakInner { strong: &(*ptr).strong, weak: &(*ptr).weak }
            })
        }
    }

    /// Returns `true` if the two `Weak`s point to the same allocation (similar to [`ptr::eq`]),
    /// or if both don't point to any allocation (because they were created with `Weak::new()`).
    /// However, this function ignores the metadata of `dyn Trait` pointers.
    ///
    /// # Notes
    ///
    /// Since this compares pointers, two `Weak`s created with `Weak::new()` will compare
    /// equal to each other, even though they don't point to any allocation.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let first_rc = Rc::new(5);
    /// let first = Rc::downgrade(&first_rc);
    /// let second = Rc::downgrade(&first_rc);
    ///
    /// assert!(first.ptr_eq(&second));
    ///
    /// let third_rc = Rc::new(5);
    /// let third = Rc::downgrade(&third_rc);
    ///
    /// assert!(!first.ptr_eq(&third));
    /// ```
    ///
    /// Comparing `Weak`s created with `Weak::new`:
    ///
    /// ```
    /// use std::rc::{Rc, Weak};
    ///
    /// let first = Weak::new();
    /// let second = Weak::new();
    /// assert!(first.ptr_eq(&second));
    ///
    /// let third_rc = Rc::new(());
    /// let third = Rc::downgrade(&third_rc);
    /// assert!(!first.ptr_eq(&third));
    /// ```
    #[inline]
    #[must_use]
    #[stable(feature = "weak_ptr_eq", since = "1.39.0")]
    pub fn ptr_eq(&self, other: &Self) -> bool {
        ptr::addr_eq(self.ptr.as_ptr(), other.ptr.as_ptr())
    }
}

#[stable(feature = "rc_weak", since = "1.4.0")]
unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Weak<T, A> {
    /// Drops the `Weak` pointer.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::{Rc, Weak};
    ///
    /// struct Foo;
    ///
    /// impl Drop for Foo {
    ///     fn drop(&mut self) {
    ///         println!("dropped!");
    ///     }
    /// }
    ///
    /// let foo = Rc::new(Foo);
    /// let weak_foo = Rc::downgrade(&foo);
    /// let other_weak_foo = Weak::clone(&weak_foo);
    ///
    /// drop(weak_foo);   // Doesn't print anything
    /// drop(foo);        // Prints "dropped!"
    ///
    /// assert!(other_weak_foo.upgrade().is_none());
    /// ```
    fn drop(&mut self) {
        let inner = if let Some(inner) = self.inner() { inner } else { return };

        inner.dec_weak();
        // the weak count starts at 1, and will only go to zero if all
        // the strong pointers have disappeared.
        if inner.weak() == 0 {
            unsafe {
                self.alloc.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr()));
            }
        }
    }
}

#[stable(feature = "rc_weak", since = "1.4.0")]
impl<T: ?Sized, A: Allocator + Clone> Clone for Weak<T, A> {
    /// Makes a clone of the `Weak` pointer that points to the same allocation.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::{Rc, Weak};
    ///
    /// let weak_five = Rc::downgrade(&Rc::new(5));
    ///
    /// let _ = Weak::clone(&weak_five);
    /// ```
    #[inline]
    fn clone(&self) -> Weak<T, A> {
        if let Some(inner) = self.inner() {
            inner.inc_weak()
        }
        Weak { ptr: self.ptr, alloc: self.alloc.clone() }
    }
}

#[stable(feature = "rc_weak", since = "1.4.0")]
impl<T: ?Sized, A: Allocator> fmt::Debug for Weak<T, A> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "(Weak)")
    }
}

#[stable(feature = "downgraded_weak", since = "1.10.0")]
impl<T> Default for Weak<T> {
    /// Constructs a new `Weak<T>`, without allocating any memory.
    /// Calling [`upgrade`] on the return value always gives [`None`].
    ///
    /// [`upgrade`]: Weak::upgrade
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Weak;
    ///
    /// let empty: Weak<i64> = Default::default();
    /// assert!(empty.upgrade().is_none());
    /// ```
    fn default() -> Weak<T> {
        Weak::new()
    }
}

3526// NOTE: We checked_add here to deal with mem::forget safely. In particular
3527// if you mem::forget Rcs (or Weaks), the ref-count can overflow, and then
3528// you can free the allocation while outstanding Rcs (or Weaks) exist.
3529// We abort because this is such a degenerate scenario that we don't care about
3530// what happens -- no real program should ever experience this.
3531//
3532// This should have negligible overhead since you don't actually need to
3533// clone these much in Rust thanks to ownership and move-semantics.

#[doc(hidden)]
trait RcInnerPtr {
    fn weak_ref(&self) -> &Cell<usize>;
    fn strong_ref(&self) -> &Cell<usize>;

    #[inline]
    fn strong(&self) -> usize {
        self.strong_ref().get()
    }

    #[inline]
    fn inc_strong(&self) {
        let strong = self.strong();

        // We insert an `assume` here to give LLVM a hint about an otherwise
        // missed optimization.
        // SAFETY: The reference count will never be zero when this is
        // called.
        unsafe {
            hint::assert_unchecked(strong != 0);
        }

        let strong = strong.wrapping_add(1);
        self.strong_ref().set(strong);

        // We want to abort on overflow instead of dropping the value.
        // Checking for overflow after the store instead of before
        // allows for slightly better code generation.
        if core::intrinsics::unlikely(strong == 0) {
            abort();
        }
    }

    #[inline]
    fn dec_strong(&self) {
        self.strong_ref().set(self.strong() - 1);
    }

    #[inline]
    fn weak(&self) -> usize {
        self.weak_ref().get()
    }

    #[inline]
    fn inc_weak(&self) {
        let weak = self.weak();

        // We insert an `assume` here to give LLVM a hint about an otherwise
        // missed optimization.
        // SAFETY: The reference count will never be zero when this is
        // called.
        unsafe {
            hint::assert_unchecked(weak != 0);
        }

        let weak = weak.wrapping_add(1);
        self.weak_ref().set(weak);

        // We want to abort on overflow instead of dropping the value.
        // Checking for overflow after the store instead of before
        // allows for slightly better code generation.
        if core::intrinsics::unlikely(weak == 0) {
            abort();
        }
    }

    #[inline]
    fn dec_weak(&self) {
        self.weak_ref().set(self.weak() - 1);
    }
}

impl<T: ?Sized> RcInnerPtr for RcInner<T> {
    #[inline(always)]
    fn weak_ref(&self) -> &Cell<usize> {
        &self.weak
    }

    #[inline(always)]
    fn strong_ref(&self) -> &Cell<usize> {
        &self.strong
    }
}

impl<'a> RcInnerPtr for WeakInner<'a> {
    #[inline(always)]
    fn weak_ref(&self) -> &Cell<usize> {
        self.weak
    }

    #[inline(always)]
    fn strong_ref(&self) -> &Cell<usize> {
        self.strong
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized, A: Allocator> borrow::Borrow<T> for Rc<T, A> {
    fn borrow(&self) -> &T {
        &**self
    }
}

#[stable(since = "1.5.0", feature = "smart_ptr_as_ref")]
impl<T: ?Sized, A: Allocator> AsRef<T> for Rc<T, A> {
    fn as_ref(&self) -> &T {
        &**self
    }
}

#[stable(feature = "pin", since = "1.33.0")]
impl<T: ?Sized, A: Allocator> Unpin for Rc<T, A> {}

/// Gets the offset within an `RcInner` for the payload behind a pointer.
///
/// # Safety
///
/// The pointer must point to (and have valid metadata for) a previously
/// valid instance of T, but the T is allowed to be dropped.
unsafe fn data_offset<T: ?Sized>(ptr: *const T) -> usize {
    // Align the unsized value to the end of the RcInner.
    // Because RcInner is repr(C), the value is always its last field in memory.
    // SAFETY: since the only unsized types possible are slices, trait objects,
    // and extern types, the input safety requirement is currently enough to
    // satisfy the requirements of align_of_val_raw; this is an implementation
    // detail of the language that must not be relied upon outside of std.
    unsafe { data_offset_align(align_of_val_raw(ptr)) }
}

#[inline]
fn data_offset_align(align: usize) -> usize {
    let layout = Layout::new::<RcInner<()>>();
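    // For illustration (sizes here are typical, not guaranteed): on a 64-bit
    // target, `RcInner<()>` is just the two `usize` reference counts, i.e.
    // 16 bytes, so for any value alignment of 8 or less no padding is needed
    // and the payload offset is 16.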
    layout.size() + layout.padding_needed_for(align)
}

/// A uniquely owned [`Rc`].
///
/// This represents an `Rc` that is known to be uniquely owned -- that is, have exactly one strong
/// reference. Multiple weak pointers can be created, but attempts to upgrade those to strong
/// references will fail unless the `UniqueRc` they point to has been converted into a regular `Rc`.
///
/// Because it is uniquely owned, the contents of a `UniqueRc` can be freely mutated. A common
/// use case is to have an object be mutable during its initialization phase and then make it
/// immutable by converting it to a normal `Rc`.
///
/// This can be used as a flexible way to create cyclic data structures, as in the example below.
///
/// ```
/// #![feature(unique_rc_arc)]
/// use std::rc::{Rc, Weak, UniqueRc};
///
/// struct Gadget {
///     #[allow(dead_code)]
///     me: Weak<Gadget>,
/// }
///
/// fn create_gadget() -> Option<Rc<Gadget>> {
///     let mut rc = UniqueRc::new(Gadget {
///         me: Weak::new(),
///     });
///     rc.me = UniqueRc::downgrade(&rc);
///     Some(UniqueRc::into_rc(rc))
/// }
///
/// create_gadget().unwrap();
/// ```
///
/// An advantage of using `UniqueRc` over [`Rc::new_cyclic`] to build cyclic data structures is that
/// [`Rc::new_cyclic`]'s `data_fn` parameter cannot be async or return a [`Result`]. As shown in the
/// previous example, `UniqueRc` allows for more flexibility in the construction of cyclic data,
/// including fallible or async constructors.
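///
/// For instance, construction can be made fallible. The sketch below is illustrative (the
/// `checked_new` constructor and its validation rule are hypothetical, not part of this API):
///
/// ```
/// #![feature(unique_rc_arc)]
/// use std::rc::{Rc, Weak, UniqueRc};
///
/// struct Gadget {
///     #[allow(dead_code)]
///     me: Weak<Gadget>,
///     id: u32,
/// }
///
/// // Hypothetical fallible constructor: rejects a reserved id.
/// fn checked_new(id: u32) -> Result<Rc<Gadget>, &'static str> {
///     if id == 0 {
///         return Err("id 0 is reserved");
///     }
///     let mut rc = UniqueRc::new(Gadget { me: Weak::new(), id });
///     rc.me = UniqueRc::downgrade(&rc);
///     Ok(UniqueRc::into_rc(rc))
/// }
///
/// assert!(checked_new(0).is_err());
/// assert_eq!(checked_new(7).unwrap().id, 7);
/// ```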
#[unstable(feature = "unique_rc_arc", issue = "112566")]
pub struct UniqueRc<
    T: ?Sized,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
    ptr: NonNull<RcInner<T>>,
    // Define the ownership of `RcInner<T>` for drop-check
    _marker: PhantomData<RcInner<T>>,
    // Invariance is necessary for soundness: once other `Weak`
    // references exist, we already have a form of shared mutability!
    _marker2: PhantomData<*mut T>,
    alloc: A,
}

// Not necessary for correctness since `UniqueRc` contains `NonNull`,
// but having an explicit negative impl is nice for documentation purposes
// and results in nicer error messages.
#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T: ?Sized, A: Allocator> !Send for UniqueRc<T, A> {}

// Not necessary for correctness since `UniqueRc` contains `NonNull`,
// but having an explicit negative impl is nice for documentation purposes
// and results in nicer error messages.
#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T: ?Sized, A: Allocator> !Sync for UniqueRc<T, A> {}

#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<UniqueRc<U, A>>
    for UniqueRc<T, A>
{
}

//#[unstable(feature = "unique_rc_arc", issue = "112566")]
#[unstable(feature = "dispatch_from_dyn", issue = "none")]
impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<UniqueRc<U>> for UniqueRc<T> {}

#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T: ?Sized + fmt::Display, A: Allocator> fmt::Display for UniqueRc<T, A> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(&**self, f)
    }
}

#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T: ?Sized + fmt::Debug, A: Allocator> fmt::Debug for UniqueRc<T, A> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(&**self, f)
    }
}

#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T: ?Sized, A: Allocator> fmt::Pointer for UniqueRc<T, A> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Pointer::fmt(&(&raw const **self), f)
    }
}

#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T: ?Sized, A: Allocator> borrow::Borrow<T> for UniqueRc<T, A> {
    fn borrow(&self) -> &T {
        &**self
    }
}

#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T: ?Sized, A: Allocator> borrow::BorrowMut<T> for UniqueRc<T, A> {
    fn borrow_mut(&mut self) -> &mut T {
        &mut **self
    }
}

#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T: ?Sized, A: Allocator> AsRef<T> for UniqueRc<T, A> {
    fn as_ref(&self) -> &T {
        &**self
    }
}

#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T: ?Sized, A: Allocator> AsMut<T> for UniqueRc<T, A> {
    fn as_mut(&mut self) -> &mut T {
        &mut **self
    }
}

#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T: ?Sized, A: Allocator> Unpin for UniqueRc<T, A> {}

#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T: ?Sized + PartialEq, A: Allocator> PartialEq for UniqueRc<T, A> {
    /// Equality for two `UniqueRc`s.
    ///
    /// Two `UniqueRc`s are equal if their inner values are equal.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(unique_rc_arc)]
    /// use std::rc::UniqueRc;
    ///
    /// let five = UniqueRc::new(5);
    ///
    /// assert!(five == UniqueRc::new(5));
    /// ```
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        PartialEq::eq(&**self, &**other)
    }

    /// Inequality for two `UniqueRc`s.
    ///
    /// Two `UniqueRc`s are not equal if their inner values are not equal.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(unique_rc_arc)]
    /// use std::rc::UniqueRc;
    ///
    /// let five = UniqueRc::new(5);
    ///
    /// assert!(five != UniqueRc::new(6));
    /// ```
    #[inline]
    fn ne(&self, other: &Self) -> bool {
        PartialEq::ne(&**self, &**other)
    }
}

#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T: ?Sized + PartialOrd, A: Allocator> PartialOrd for UniqueRc<T, A> {
    /// Partial comparison for two `UniqueRc`s.
    ///
    /// The two are compared by calling `partial_cmp()` on their inner values.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(unique_rc_arc)]
    /// use std::rc::UniqueRc;
    /// use std::cmp::Ordering;
    ///
    /// let five = UniqueRc::new(5);
    ///
    /// assert_eq!(Some(Ordering::Less), five.partial_cmp(&UniqueRc::new(6)));
    /// ```
    #[inline(always)]
    fn partial_cmp(&self, other: &UniqueRc<T, A>) -> Option<Ordering> {
        (**self).partial_cmp(&**other)
    }

    /// Less-than comparison for two `UniqueRc`s.
    ///
    /// The two are compared by calling `<` on their inner values.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(unique_rc_arc)]
    /// use std::rc::UniqueRc;
    ///
    /// let five = UniqueRc::new(5);
    ///
    /// assert!(five < UniqueRc::new(6));
    /// ```
    #[inline(always)]
    fn lt(&self, other: &UniqueRc<T, A>) -> bool {
        **self < **other
    }

    /// 'Less than or equal to' comparison for two `UniqueRc`s.
    ///
    /// The two are compared by calling `<=` on their inner values.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(unique_rc_arc)]
    /// use std::rc::UniqueRc;
    ///
    /// let five = UniqueRc::new(5);
    ///
    /// assert!(five <= UniqueRc::new(5));
    /// ```
    #[inline(always)]
    fn le(&self, other: &UniqueRc<T, A>) -> bool {
        **self <= **other
    }

    /// Greater-than comparison for two `UniqueRc`s.
    ///
    /// The two are compared by calling `>` on their inner values.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(unique_rc_arc)]
    /// use std::rc::UniqueRc;
    ///
    /// let five = UniqueRc::new(5);
    ///
    /// assert!(five > UniqueRc::new(4));
    /// ```
    #[inline(always)]
    fn gt(&self, other: &UniqueRc<T, A>) -> bool {
        **self > **other
    }

    /// 'Greater than or equal to' comparison for two `UniqueRc`s.
    ///
    /// The two are compared by calling `>=` on their inner values.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(unique_rc_arc)]
    /// use std::rc::UniqueRc;
    ///
    /// let five = UniqueRc::new(5);
    ///
    /// assert!(five >= UniqueRc::new(5));
    /// ```
    #[inline(always)]
    fn ge(&self, other: &UniqueRc<T, A>) -> bool {
        **self >= **other
    }
}

#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T: ?Sized + Ord, A: Allocator> Ord for UniqueRc<T, A> {
    /// Comparison for two `UniqueRc`s.
    ///
    /// The two are compared by calling `cmp()` on their inner values.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(unique_rc_arc)]
    /// use std::rc::UniqueRc;
    /// use std::cmp::Ordering;
    ///
    /// let five = UniqueRc::new(5);
    ///
    /// assert_eq!(Ordering::Less, five.cmp(&UniqueRc::new(6)));
    /// ```
    #[inline]
    fn cmp(&self, other: &UniqueRc<T, A>) -> Ordering {
        (**self).cmp(&**other)
    }
}

#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T: ?Sized + Eq, A: Allocator> Eq for UniqueRc<T, A> {}

#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T: ?Sized + Hash, A: Allocator> Hash for UniqueRc<T, A> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        (**self).hash(state);
    }
}

// Depends on A = Global
impl<T> UniqueRc<T> {
    /// Creates a new `UniqueRc`.
    ///
    /// Weak references to this `UniqueRc` can be created with [`UniqueRc::downgrade`]. Upgrading
    /// these weak references will fail until the `UniqueRc` has been converted into an [`Rc`].
    /// After converting the `UniqueRc` into an [`Rc`], any weak references created beforehand will
    /// point to the new [`Rc`].
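    ///
    /// # Examples
    ///
    /// A minimal sketch of the intended initialize-then-freeze pattern:
    ///
    /// ```
    /// #![feature(unique_rc_arc)]
    /// use std::rc::{Rc, UniqueRc};
    ///
    /// let mut unique = UniqueRc::new(5);
    /// // The value can be mutated freely while it is uniquely owned.
    /// *unique += 1;
    /// let rc: Rc<i32> = UniqueRc::into_rc(unique);
    /// assert_eq!(*rc, 6);
    /// ```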
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "unique_rc_arc", issue = "112566")]
    pub fn new(value: T) -> Self {
        Self::new_in(value, Global)
    }
}

impl<T, A: Allocator> UniqueRc<T, A> {
    /// Creates a new `UniqueRc` in the provided allocator.
    ///
    /// Weak references to this `UniqueRc` can be created with [`UniqueRc::downgrade`]. Upgrading
    /// these weak references will fail until the `UniqueRc` has been converted into an [`Rc`].
    /// After converting the `UniqueRc` into an [`Rc`], any weak references created beforehand will
    /// point to the new [`Rc`].
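    ///
    /// # Examples
    ///
    /// A sketch that passes the global allocator explicitly (this also requires the unstable
    /// `allocator_api` feature):
    ///
    /// ```
    /// #![feature(unique_rc_arc, allocator_api)]
    /// use std::alloc::Global;
    /// use std::rc::{Rc, UniqueRc};
    ///
    /// let unique = UniqueRc::new_in(5, Global);
    /// let rc: Rc<i32> = UniqueRc::into_rc(unique);
    /// assert_eq!(*rc, 5);
    /// ```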
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "unique_rc_arc", issue = "112566")]
    pub fn new_in(value: T, alloc: A) -> Self {
        let (ptr, alloc) = Box::into_unique(Box::new_in(
            RcInner {
                strong: Cell::new(0),
                // Keep one implicit weak reference so that the allocation stays
                // valid even if all explicitly created weak pointers are dropped.
                weak: Cell::new(1),
                value,
            },
            alloc,
        ));
        Self { ptr: ptr.into(), _marker: PhantomData, _marker2: PhantomData, alloc }
    }
}

impl<T: ?Sized, A: Allocator> UniqueRc<T, A> {
    /// Converts the `UniqueRc` into a regular [`Rc`].
    ///
    /// This consumes the `UniqueRc` and returns a regular [`Rc`] that owns the contained value.
    ///
    /// Any weak references created before this method is called can be upgraded to strong
    /// references afterwards.
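    ///
    /// # Examples
    ///
    /// A short sketch showing that a pre-existing weak reference becomes upgradeable once the
    /// conversion has happened:
    ///
    /// ```
    /// #![feature(unique_rc_arc)]
    /// use std::rc::UniqueRc;
    ///
    /// let unique = UniqueRc::new(5);
    /// let weak = UniqueRc::downgrade(&unique);
    /// assert!(weak.upgrade().is_none());
    ///
    /// let rc = UniqueRc::into_rc(unique);
    /// assert_eq!(*weak.upgrade().unwrap(), 5);
    /// ```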
    #[unstable(feature = "unique_rc_arc", issue = "112566")]
    pub fn into_rc(this: Self) -> Rc<T, A> {
        let mut this = ManuallyDrop::new(this);

        // Move the allocator out.
        // SAFETY: `this.alloc` will not be accessed again, nor dropped because it is in
        // a `ManuallyDrop`.
        let alloc: A = unsafe { ptr::read(&this.alloc) };

        // SAFETY: This pointer was allocated at creation time so we know it is valid.
        unsafe {
            // Convert our weak reference into a strong reference
            this.ptr.as_mut().strong.set(1);
            Rc::from_inner_in(this.ptr, alloc)
        }
    }
}

impl<T: ?Sized, A: Allocator + Clone> UniqueRc<T, A> {
    /// Creates a new weak reference to the `UniqueRc`.
    ///
    /// Attempting to upgrade this weak reference will fail until the `UniqueRc` has been
    /// converted to an [`Rc`] using [`UniqueRc::into_rc`].
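    ///
    /// # Examples
    ///
    /// A minimal sketch:
    ///
    /// ```
    /// #![feature(unique_rc_arc)]
    /// use std::rc::UniqueRc;
    ///
    /// let unique = UniqueRc::new("hello".to_owned());
    /// let weak = UniqueRc::downgrade(&unique);
    /// // Upgrading fails while the value is still uniquely owned.
    /// assert!(weak.upgrade().is_none());
    /// ```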
    #[unstable(feature = "unique_rc_arc", issue = "112566")]
    pub fn downgrade(this: &Self) -> Weak<T, A> {
        // SAFETY: This pointer was allocated at creation time and we guarantee that we only have
        // one strong reference before converting to a regular Rc.
        unsafe {
            this.ptr.as_ref().inc_weak();
        }
        Weak { ptr: this.ptr, alloc: this.alloc.clone() }
    }
}

#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T: ?Sized, A: Allocator> Deref for UniqueRc<T, A> {
    type Target = T;

    fn deref(&self) -> &T {
        // SAFETY: This pointer was allocated at creation time so we know it is valid.
        unsafe { &self.ptr.as_ref().value }
    }
}

#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T: ?Sized, A: Allocator> DerefMut for UniqueRc<T, A> {
    fn deref_mut(&mut self) -> &mut T {
        // SAFETY: This pointer was allocated at creation time so we know it is valid. Creating
        // a mutable reference is safe because `UniqueRc` holds the only strong reference,
        // giving us exclusive access to the value.
        unsafe { &mut (*self.ptr.as_ptr()).value }
    }
}

#[unstable(feature = "unique_rc_arc", issue = "112566")]
unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for UniqueRc<T, A> {
    fn drop(&mut self) {
        unsafe {
            // destroy the contained object
            drop_in_place(DerefMut::deref_mut(self));

            // remove the implicit "strong weak" pointer now that we've destroyed the contents.
            self.ptr.as_ref().dec_weak();

            if self.ptr.as_ref().weak() == 0 {
                self.alloc.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr()));
            }
        }
    }
}

/// A unique owning pointer to a [`RcInner`] **that does not imply the contents are initialized,**
/// but will deallocate it (without dropping the value) when dropped.
///
/// This is a helper for [`Rc::make_mut()`] to ensure correct cleanup on panic.
/// It is nearly a duplicate of `UniqueRc<MaybeUninit<T>, A>` except that it allows `T: !Sized`,
/// which `MaybeUninit` does not.
#[cfg(not(no_global_oom_handling))]
struct UniqueRcUninit<T: ?Sized, A: Allocator> {
    ptr: NonNull<RcInner<T>>,
    layout_for_value: Layout,
    alloc: Option<A>,
}

#[cfg(not(no_global_oom_handling))]
impl<T: ?Sized, A: Allocator> UniqueRcUninit<T, A> {
    /// Allocates an `RcInner` with a layout suitable to contain `for_value` or a clone of it.
    fn new(for_value: &T, alloc: A) -> UniqueRcUninit<T, A> {
        let layout = Layout::for_value(for_value);
        let ptr = unsafe {
            Rc::allocate_for_layout(
                layout,
                |layout_for_rc_inner| alloc.allocate(layout_for_rc_inner),
                |mem| mem.with_metadata_of(ptr::from_ref(for_value) as *const RcInner<T>),
            )
        };
        Self { ptr: NonNull::new(ptr).unwrap(), layout_for_value: layout, alloc: Some(alloc) }
    }

    /// Returns the pointer that must be written to in order to initialize the [`Rc`].
    fn data_ptr(&mut self) -> *mut T {
        let offset = data_offset_align(self.layout_for_value.align());
        unsafe { self.ptr.as_ptr().byte_add(offset) as *mut T }
    }

    /// Upgrades this into a normal [`Rc`].
    ///
    /// # Safety
    ///
    /// The data must have been initialized (by writing to [`Self::data_ptr()`]).
    unsafe fn into_rc(self) -> Rc<T, A> {
        let mut this = ManuallyDrop::new(self);
        let ptr = this.ptr;
        let alloc = this.alloc.take().unwrap();

        // SAFETY: The pointer is valid as per `UniqueRcUninit::new`, and the caller is responsible
        // for having initialized the data.
        unsafe { Rc::from_ptr_in(ptr.as_ptr(), alloc) }
    }
}

#[cfg(not(no_global_oom_handling))]
impl<T: ?Sized, A: Allocator> Drop for UniqueRcUninit<T, A> {
    fn drop(&mut self) {
        // SAFETY:
        // * new() produced a pointer safe to deallocate.
        // * We own the pointer unless into_rc() was called, which forgets us.
        unsafe {
            self.alloc.take().unwrap().deallocate(
                self.ptr.cast(),
                rc_inner_layout_for_value_layout(self.layout_for_value),
            );
        }
    }
}