alloc/rc.rs
//! Single-threaded reference-counting pointers. 'Rc' stands for 'Reference
//! Counted'.
//!
//! The type [`Rc<T>`][`Rc`] provides shared ownership of a value of type `T`,
//! allocated in the heap. Invoking [`clone`][clone] on [`Rc`] produces a new
//! pointer to the same allocation in the heap. When the last [`Rc`] pointer to a
//! given allocation is destroyed, the value stored in that allocation (often
//! referred to as "inner value") is also dropped.
//!
//! Shared references in Rust disallow mutation by default, and [`Rc`]
//! is no exception: you cannot generally obtain a mutable reference to
//! something inside an [`Rc`]. If you need mutability, put a [`Cell`]
//! or [`RefCell`] inside the [`Rc`]; see [an example of mutability
//! inside an `Rc`][mutability].
//!
//! [`Rc`] uses non-atomic reference counting. This means that overhead is very
//! low, but an [`Rc`] cannot be sent between threads, and consequently [`Rc`]
//! does not implement [`Send`]. As a result, the Rust compiler
//! will check *at compile time* that you are not sending [`Rc`]s between
//! threads. If you need multi-threaded, atomic reference counting, use
//! [`sync::Arc`][arc].
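//!
//! For instance, a sketch along these lines is rejected at compile time,
//! because the closure passed to `spawn` must capture only [`Send`] values:
//!
//! ```compile_fail,E0277
//! use std::rc::Rc;
//! use std::thread;
//!
//! // `Rc<i32>` is `!Send`, so it cannot be moved into another thread.
//! let five = Rc::new(5);
//! thread::spawn(move || println!("{five}"));
//! ```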
//!
//! The [`downgrade`][downgrade] method can be used to create a non-owning
//! [`Weak`] pointer. A [`Weak`] pointer can be [`upgrade`][upgrade]d
//! to an [`Rc`], but this will return [`None`] if the value stored in the allocation has
//! already been dropped. In other words, `Weak` pointers do not keep the value
//! inside the allocation alive; however, they *do* keep the allocation
//! (the backing store for the inner value) alive.
//!
//! A cycle between [`Rc`] pointers will never be deallocated. For this reason,
//! [`Weak`] is used to break cycles. For example, a tree could have strong
//! [`Rc`] pointers from parent nodes to children, and [`Weak`] pointers from
//! children back to their parents.
//!
//! `Rc<T>` automatically dereferences to `T` (via the [`Deref`] trait),
//! so you can call `T`'s methods on a value of type [`Rc<T>`][`Rc`]. To avoid name
//! clashes with `T`'s methods, the methods of [`Rc<T>`][`Rc`] itself are associated
//! functions, called using [fully qualified syntax]:
//!
//! ```
//! use std::rc::Rc;
//!
//! let my_rc = Rc::new(());
//! let my_weak = Rc::downgrade(&my_rc);
//! ```
//!
//! `Rc<T>`'s implementations of traits like `Clone` may also be called using
//! fully qualified syntax. Some people prefer to use fully qualified syntax,
//! while others prefer using method-call syntax.
//!
//! ```
//! use std::rc::Rc;
//!
//! let rc = Rc::new(());
//! // Method-call syntax
//! let rc2 = rc.clone();
//! // Fully qualified syntax
//! let rc3 = Rc::clone(&rc);
//! ```
//!
//! [`Weak<T>`][`Weak`] does not auto-dereference to `T`, because the inner value may have
//! already been dropped.
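//!
//! To reach the inner value behind a [`Weak`], go through
//! [`upgrade`][upgrade] instead (a small sketch):
//!
//! ```
//! use std::rc::Rc;
//!
//! let strong = Rc::new(5);
//! let weak = Rc::downgrade(&strong);
//! // `Weak` has no `Deref` impl; `upgrade` returns an `Option<Rc<T>>`.
//! assert_eq!(*weak.upgrade().unwrap(), 5);
//! ```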
//!
//! # Cloning references
//!
//! Creating a new reference to the same allocation as an existing reference counted pointer
//! is done using the `Clone` trait implemented for [`Rc<T>`][`Rc`] and [`Weak<T>`][`Weak`].
//!
//! ```
//! use std::rc::Rc;
//!
//! let foo = Rc::new(vec![1.0, 2.0, 3.0]);
//! // The two syntaxes below are equivalent.
//! let a = foo.clone();
//! let b = Rc::clone(&foo);
//! // a and b both point to the same memory location as foo.
//! ```
//!
//! The `Rc::clone(&from)` syntax is the most idiomatic because it conveys more explicitly
//! the meaning of the code. In the example above, this syntax makes it easier to see that
//! this code is creating a new reference rather than copying the whole content of `foo`.
//!
//! # Examples
//!
//! Consider a scenario where a set of `Gadget`s are owned by a given `Owner`.
//! We want to have our `Gadget`s point to their `Owner`. We can't do this with
//! unique ownership, because more than one gadget may belong to the same
//! `Owner`. [`Rc`] allows us to share an `Owner` between multiple `Gadget`s,
//! and have the `Owner` remain allocated as long as any `Gadget` points at it.
//!
//! ```
//! use std::rc::Rc;
//!
//! struct Owner {
//!     name: String,
//!     // ...other fields
//! }
//!
//! struct Gadget {
//!     id: i32,
//!     owner: Rc<Owner>,
//!     // ...other fields
//! }
//!
//! fn main() {
//!     // Create a reference-counted `Owner`.
//!     let gadget_owner: Rc<Owner> = Rc::new(
//!         Owner {
//!             name: "Gadget Man".to_string(),
//!         }
//!     );
//!
//!     // Create `Gadget`s belonging to `gadget_owner`. Cloning the `Rc<Owner>`
//!     // gives us a new pointer to the same `Owner` allocation, incrementing
//!     // the reference count in the process.
//!     let gadget1 = Gadget {
//!         id: 1,
//!         owner: Rc::clone(&gadget_owner),
//!     };
//!     let gadget2 = Gadget {
//!         id: 2,
//!         owner: Rc::clone(&gadget_owner),
//!     };
//!
//!     // Dispose of our local variable `gadget_owner`.
//!     drop(gadget_owner);
//!
//!     // Despite dropping `gadget_owner`, we're still able to print out the name
//!     // of the `Owner` of the `Gadget`s. This is because we've only dropped a
//!     // single `Rc<Owner>`, not the `Owner` it points to. As long as there are
//!     // other `Rc<Owner>` pointing at the same `Owner` allocation, it will remain
//!     // live. The field projection `gadget1.owner.name` works because
//!     // `Rc<Owner>` automatically dereferences to `Owner`.
//!     println!("Gadget {} owned by {}", gadget1.id, gadget1.owner.name);
//!     println!("Gadget {} owned by {}", gadget2.id, gadget2.owner.name);
//!
//!     // At the end of the function, `gadget1` and `gadget2` are destroyed, and
//!     // with them the last counted references to our `Owner`. Gadget Man now
//!     // gets destroyed as well.
//! }
//! ```
//!
//! If our requirements change, and we also need to be able to traverse from
//! `Owner` to `Gadget`, we will run into problems. An [`Rc`] pointer from `Owner`
//! to `Gadget` introduces a cycle. This means that their
//! reference counts can never reach 0, and the allocation will never be destroyed:
//! a memory leak. In order to get around this, we can use [`Weak`]
//! pointers.
//!
//! Rust actually makes it somewhat difficult to produce this loop in the first
//! place. In order to end up with two values that point at each other, one of
//! them needs to be mutable. This is difficult because [`Rc`] enforces
//! memory safety by only giving out shared references to the value it wraps,
//! and these don't allow direct mutation. We need to wrap the part of the
//! value we wish to mutate in a [`RefCell`], which provides *interior
//! mutability*: a method to achieve mutability through a shared reference.
//! [`RefCell`] enforces Rust's borrowing rules at runtime.
//!
//! ```
//! use std::rc::Rc;
//! use std::rc::Weak;
//! use std::cell::RefCell;
//!
//! struct Owner {
//!     name: String,
//!     gadgets: RefCell<Vec<Weak<Gadget>>>,
//!     // ...other fields
//! }
//!
//! struct Gadget {
//!     id: i32,
//!     owner: Rc<Owner>,
//!     // ...other fields
//! }
//!
//! fn main() {
//!     // Create a reference-counted `Owner`. Note that we've put the `Owner`'s
//!     // vector of `Gadget`s inside a `RefCell` so that we can mutate it through
//!     // a shared reference.
//!     let gadget_owner: Rc<Owner> = Rc::new(
//!         Owner {
//!             name: "Gadget Man".to_string(),
//!             gadgets: RefCell::new(vec![]),
//!         }
//!     );
//!
//!     // Create `Gadget`s belonging to `gadget_owner`, as before.
//!     let gadget1 = Rc::new(
//!         Gadget {
//!             id: 1,
//!             owner: Rc::clone(&gadget_owner),
//!         }
//!     );
//!     let gadget2 = Rc::new(
//!         Gadget {
//!             id: 2,
//!             owner: Rc::clone(&gadget_owner),
//!         }
//!     );
//!
//!     // Add the `Gadget`s to their `Owner`.
//!     {
//!         let mut gadgets = gadget_owner.gadgets.borrow_mut();
//!         gadgets.push(Rc::downgrade(&gadget1));
//!         gadgets.push(Rc::downgrade(&gadget2));
//!
//!         // `RefCell` dynamic borrow ends here.
//!     }
//!
//!     // Iterate over our `Gadget`s, printing their details out.
//!     for gadget_weak in gadget_owner.gadgets.borrow().iter() {
//!
//!         // `gadget_weak` is a `Weak<Gadget>`. Since `Weak` pointers can't
//!         // guarantee the allocation still exists, we need to call
//!         // `upgrade`, which returns an `Option<Rc<Gadget>>`.
//!         //
//!         // In this case we know the allocation still exists, so we simply
//!         // `unwrap` the `Option`. In a more complicated program, you might
//!         // need graceful error handling for a `None` result.
//!
//!         let gadget = gadget_weak.upgrade().unwrap();
//!         println!("Gadget {} owned by {}", gadget.id, gadget.owner.name);
//!     }
//!
//!     // At the end of the function, `gadget_owner`, `gadget1`, and `gadget2`
//!     // are destroyed. There are now no strong (`Rc`) pointers to the
//!     // gadgets, so they are destroyed. This zeroes the reference count on
//!     // Gadget Man, so he gets destroyed as well.
//! }
//! ```
//!
//! [clone]: Clone::clone
//! [`Cell`]: core::cell::Cell
//! [`RefCell`]: core::cell::RefCell
//! [arc]: crate::sync::Arc
//! [`Deref`]: core::ops::Deref
//! [downgrade]: Rc::downgrade
//! [upgrade]: Weak::upgrade
//! [mutability]: core::cell#introducing-mutability-inside-of-something-immutable
//! [fully qualified syntax]: https://doc.rust-lang.org/book/ch19-03-advanced-traits.html#fully-qualified-syntax-for-disambiguation-calling-methods-with-the-same-name

#![stable(feature = "rust1", since = "1.0.0")]

use core::any::Any;
use core::cell::Cell;
#[cfg(not(no_global_oom_handling))]
use core::clone::CloneToUninit;
use core::clone::UseCloned;
use core::cmp::Ordering;
use core::hash::{Hash, Hasher};
use core::intrinsics::abort;
#[cfg(not(no_global_oom_handling))]
use core::iter;
use core::marker::{PhantomData, Unsize};
use core::mem::{self, ManuallyDrop, align_of_val_raw};
use core::num::NonZeroUsize;
use core::ops::{CoerceUnsized, Deref, DerefMut, DerefPure, DispatchFromDyn, LegacyReceiver};
use core::panic::{RefUnwindSafe, UnwindSafe};
#[cfg(not(no_global_oom_handling))]
use core::pin::Pin;
use core::pin::PinCoerceUnsized;
use core::ptr::{self, NonNull, drop_in_place};
#[cfg(not(no_global_oom_handling))]
use core::slice::from_raw_parts_mut;
use core::{borrow, fmt, hint};

#[cfg(not(no_global_oom_handling))]
use crate::alloc::handle_alloc_error;
use crate::alloc::{AllocError, Allocator, Global, Layout};
use crate::borrow::{Cow, ToOwned};
use crate::boxed::Box;
#[cfg(not(no_global_oom_handling))]
use crate::string::String;
#[cfg(not(no_global_oom_handling))]
use crate::vec::Vec;

// This is repr(C) to future-proof against possible field-reordering, which
// would interfere with otherwise safe [into|from]_raw() of transmutable
// inner types.
#[repr(C)]
struct RcInner<T: ?Sized> {
    strong: Cell<usize>,
    weak: Cell<usize>,
    value: T,
}

/// Calculates the layout for `RcInner<T>` using the inner value's layout.
fn rc_inner_layout_for_value_layout(layout: Layout) -> Layout {
    // Calculate layout using the given value layout.
    // Previously, layout was calculated on the expression
    // `&*(ptr as *const RcInner<T>)`, but this created a misaligned
    // reference (see #54908).
    Layout::new::<RcInner<()>>().extend(layout).unwrap().0.pad_to_align()
}

/// A single-threaded reference-counting pointer. 'Rc' stands for 'Reference
/// Counted'.
///
/// See the [module-level documentation](./index.html) for more details.
///
/// The inherent methods of `Rc` are all associated functions, which means
/// that you have to call them as e.g., [`Rc::get_mut(&mut value)`][get_mut] instead of
/// `value.get_mut()`. This avoids conflicts with methods of the inner type `T`.
///
/// [get_mut]: Rc::get_mut
#[doc(search_unbox)]
#[rustc_diagnostic_item = "Rc"]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_insignificant_dtor]
pub struct Rc<
    T: ?Sized,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
    ptr: NonNull<RcInner<T>>,
    phantom: PhantomData<RcInner<T>>,
    alloc: A,
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized, A: Allocator> !Send for Rc<T, A> {}

// Note that this negative impl isn't strictly necessary for correctness,
// as `Rc` transitively contains a `Cell`, which is itself `!Sync`.
// However, given how important `Rc`'s `!Sync`-ness is,
// having an explicit negative impl is nice for documentation purposes
// and results in nicer error messages.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized, A: Allocator> !Sync for Rc<T, A> {}

#[stable(feature = "catch_unwind", since = "1.9.0")]
impl<T: RefUnwindSafe + ?Sized, A: Allocator + UnwindSafe> UnwindSafe for Rc<T, A> {}
#[stable(feature = "rc_ref_unwind_safe", since = "1.58.0")]
impl<T: RefUnwindSafe + ?Sized, A: Allocator + UnwindSafe> RefUnwindSafe for Rc<T, A> {}

#[unstable(feature = "coerce_unsized", issue = "18598")]
impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<Rc<U, A>> for Rc<T, A> {}

#[unstable(feature = "dispatch_from_dyn", issue = "none")]
impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Rc<U>> for Rc<T> {}

impl<T: ?Sized> Rc<T> {
    #[inline]
    unsafe fn from_inner(ptr: NonNull<RcInner<T>>) -> Self {
        unsafe { Self::from_inner_in(ptr, Global) }
    }

    #[inline]
    unsafe fn from_ptr(ptr: *mut RcInner<T>) -> Self {
        unsafe { Self::from_inner(NonNull::new_unchecked(ptr)) }
    }
}

impl<T: ?Sized, A: Allocator> Rc<T, A> {
    #[inline(always)]
    fn inner(&self) -> &RcInner<T> {
        // This unsafety is ok because while this Rc is alive we're guaranteed
        // that the inner pointer is valid.
        unsafe { self.ptr.as_ref() }
    }

    #[inline]
    fn into_inner_with_allocator(this: Self) -> (NonNull<RcInner<T>>, A) {
        let this = mem::ManuallyDrop::new(this);
        (this.ptr, unsafe { ptr::read(&this.alloc) })
    }

    #[inline]
    unsafe fn from_inner_in(ptr: NonNull<RcInner<T>>, alloc: A) -> Self {
        Self { ptr, phantom: PhantomData, alloc }
    }

    #[inline]
    unsafe fn from_ptr_in(ptr: *mut RcInner<T>, alloc: A) -> Self {
        unsafe { Self::from_inner_in(NonNull::new_unchecked(ptr), alloc) }
    }

    // Non-inlined part of `drop`.
    #[inline(never)]
    unsafe fn drop_slow(&mut self) {
        // Reconstruct the "strong weak" pointer and drop it when this
        // variable goes out of scope. This ensures that the memory is
        // deallocated even if the destructor of `T` panics.
        let _weak = Weak { ptr: self.ptr, alloc: &self.alloc };

        // Destroy the contained object.
        // We cannot use `get_mut_unchecked` here, because `self.alloc` is borrowed.
        unsafe {
            ptr::drop_in_place(&mut (*self.ptr.as_ptr()).value);
        }
    }
}

impl<T> Rc<T> {
    /// Constructs a new `Rc<T>`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn new(value: T) -> Rc<T> {
        // There is an implicit weak pointer owned by all the strong
        // pointers, which ensures that the weak destructor never frees
        // the allocation while the strong destructor is running, even
        // if the weak pointer is stored inside the strong one.
        unsafe {
            Self::from_inner(
                Box::leak(Box::new(RcInner { strong: Cell::new(1), weak: Cell::new(1), value }))
                    .into(),
            )
        }
    }

    /// Constructs a new `Rc<T>` while giving you a `Weak<T>` to the allocation,
    /// to allow you to construct a `T` which holds a weak pointer to itself.
    ///
    /// Generally, a structure circularly referencing itself, either directly or
    /// indirectly, should not hold a strong reference to itself to prevent a memory leak.
    /// Using this function, you get access to the weak pointer during the
    /// initialization of `T`, before the `Rc<T>` is created, such that you can
    /// clone and store it inside the `T`.
    ///
    /// `new_cyclic` first allocates the managed allocation for the `Rc<T>`,
    /// then calls your closure, giving it a `Weak<T>` to this allocation,
    /// and only afterwards completes the construction of the `Rc<T>` by placing
    /// the `T` returned from your closure into the allocation.
    ///
    /// Since the new `Rc<T>` is not fully-constructed until `Rc<T>::new_cyclic`
    /// returns, calling [`upgrade`] on the weak reference inside your closure will
    /// fail and result in a `None` value.
    ///
    /// # Panics
    ///
    /// If `data_fn` panics, the panic is propagated to the caller, and the
    /// temporary [`Weak<T>`] is dropped normally.
    ///
    /// # Examples
    ///
    /// ```
    /// # #![allow(dead_code)]
    /// use std::rc::{Rc, Weak};
    ///
    /// struct Gadget {
    ///     me: Weak<Gadget>,
    /// }
    ///
    /// impl Gadget {
    ///     /// Constructs a reference counted Gadget.
    ///     fn new() -> Rc<Self> {
    ///         // `me` is a `Weak<Gadget>` pointing at the new allocation of the
    ///         // `Rc` we're constructing.
    ///         Rc::new_cyclic(|me| {
    ///             // Create the actual struct here.
    ///             Gadget { me: me.clone() }
    ///         })
    ///     }
    ///
    ///     /// Returns a reference counted pointer to Self.
    ///     fn me(&self) -> Rc<Self> {
    ///         self.me.upgrade().unwrap()
    ///     }
    /// }
    /// ```
    /// [`upgrade`]: Weak::upgrade
    #[cfg(not(no_global_oom_handling))]
    #[stable(feature = "arc_new_cyclic", since = "1.60.0")]
    pub fn new_cyclic<F>(data_fn: F) -> Rc<T>
    where
        F: FnOnce(&Weak<T>) -> T,
    {
        Self::new_cyclic_in(data_fn, Global)
    }

    /// Constructs a new `Rc` with uninitialized contents.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(get_mut_unchecked)]
    ///
    /// use std::rc::Rc;
    ///
    /// let mut five = Rc::<u32>::new_uninit();
    ///
    /// // Deferred initialization:
    /// Rc::get_mut(&mut five).unwrap().write(5);
    ///
    /// let five = unsafe { five.assume_init() };
    ///
    /// assert_eq!(*five, 5)
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[stable(feature = "new_uninit", since = "1.82.0")]
    #[must_use]
    pub fn new_uninit() -> Rc<mem::MaybeUninit<T>> {
        unsafe {
            Rc::from_ptr(Rc::allocate_for_layout(
                Layout::new::<T>(),
                |layout| Global.allocate(layout),
                <*mut u8>::cast,
            ))
        }
    }

    /// Constructs a new `Rc` with uninitialized contents, with the memory
    /// being filled with `0` bytes.
    ///
    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
    /// incorrect usage of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(new_zeroed_alloc)]
    ///
    /// use std::rc::Rc;
    ///
    /// let zero = Rc::<u32>::new_zeroed();
    /// let zero = unsafe { zero.assume_init() };
    ///
    /// assert_eq!(*zero, 0)
    /// ```
    ///
    /// [zeroed]: mem::MaybeUninit::zeroed
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "new_zeroed_alloc", issue = "129396")]
    #[must_use]
    pub fn new_zeroed() -> Rc<mem::MaybeUninit<T>> {
        unsafe {
            Rc::from_ptr(Rc::allocate_for_layout(
                Layout::new::<T>(),
                |layout| Global.allocate_zeroed(layout),
                <*mut u8>::cast,
            ))
        }
    }

    /// Constructs a new `Rc<T>`, returning an error if the allocation fails.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    /// use std::rc::Rc;
    ///
    /// let five = Rc::try_new(5);
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub fn try_new(value: T) -> Result<Rc<T>, AllocError> {
        // There is an implicit weak pointer owned by all the strong
        // pointers, which ensures that the weak destructor never frees
        // the allocation while the strong destructor is running, even
        // if the weak pointer is stored inside the strong one.
        unsafe {
            Ok(Self::from_inner(
                Box::leak(Box::try_new(RcInner {
                    strong: Cell::new(1),
                    weak: Cell::new(1),
                    value,
                })?)
                .into(),
            ))
        }
    }

    /// Constructs a new `Rc` with uninitialized contents, returning an error if the allocation fails.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    /// #![feature(get_mut_unchecked)]
    ///
    /// use std::rc::Rc;
    ///
    /// let mut five = Rc::<u32>::try_new_uninit()?;
    ///
    /// // Deferred initialization:
    /// Rc::get_mut(&mut five).unwrap().write(5);
    ///
    /// let five = unsafe { five.assume_init() };
    ///
    /// assert_eq!(*five, 5);
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    #[unstable(feature = "allocator_api", issue = "32838")]
    // #[unstable(feature = "new_uninit", issue = "63291")]
    pub fn try_new_uninit() -> Result<Rc<mem::MaybeUninit<T>>, AllocError> {
        unsafe {
            Ok(Rc::from_ptr(Rc::try_allocate_for_layout(
                Layout::new::<T>(),
                |layout| Global.allocate(layout),
                <*mut u8>::cast,
            )?))
        }
    }

    /// Constructs a new `Rc` with uninitialized contents, with the memory
    /// being filled with `0` bytes, returning an error if the allocation fails.
    ///
    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
    /// incorrect usage of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    ///
    /// let zero = Rc::<u32>::try_new_zeroed()?;
    /// let zero = unsafe { zero.assume_init() };
    ///
    /// assert_eq!(*zero, 0);
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    ///
    /// [zeroed]: mem::MaybeUninit::zeroed
    #[unstable(feature = "allocator_api", issue = "32838")]
    //#[unstable(feature = "new_uninit", issue = "63291")]
    pub fn try_new_zeroed() -> Result<Rc<mem::MaybeUninit<T>>, AllocError> {
        unsafe {
            Ok(Rc::from_ptr(Rc::try_allocate_for_layout(
                Layout::new::<T>(),
                |layout| Global.allocate_zeroed(layout),
                <*mut u8>::cast,
            )?))
        }
    }

    /// Constructs a new `Pin<Rc<T>>`. If `T` does not implement `Unpin`, then
    /// `value` will be pinned in memory and unable to be moved.
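    ///
    /// # Examples
    ///
    /// A minimal sketch: the value can still be read through the `Pin<Rc<T>>`,
    /// it just can never be moved out of its allocation again.
    ///
    /// ```
    /// use std::rc::Rc;
    /// use std::pin::Pin;
    ///
    /// let pinned: Pin<Rc<u32>> = Rc::pin(5);
    /// assert_eq!(*pinned, 5);
    /// ```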
    #[cfg(not(no_global_oom_handling))]
    #[stable(feature = "pin", since = "1.33.0")]
    #[must_use]
    pub fn pin(value: T) -> Pin<Rc<T>> {
        unsafe { Pin::new_unchecked(Rc::new(value)) }
    }
}

impl<T, A: Allocator> Rc<T, A> {
    /// Constructs a new `Rc` in the provided allocator.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let five = Rc::new_in(5, System);
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn new_in(value: T, alloc: A) -> Rc<T, A> {
        // NOTE: Prefer match over unwrap_or_else since closure sometimes not inlineable.
        // That would make code size bigger.
        match Self::try_new_in(value, alloc) {
            Ok(m) => m,
            Err(_) => handle_alloc_error(Layout::new::<RcInner<T>>()),
        }
    }

    /// Constructs a new `Rc` with uninitialized contents in the provided allocator.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(get_mut_unchecked)]
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let mut five = Rc::<u32, _>::new_uninit_in(System);
    ///
    /// let five = unsafe {
    ///     // Deferred initialization:
    ///     Rc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);
    ///
    ///     five.assume_init()
    /// };
    ///
    /// assert_eq!(*five, 5)
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "allocator_api", issue = "32838")]
    // #[unstable(feature = "new_uninit", issue = "63291")]
    #[inline]
    pub fn new_uninit_in(alloc: A) -> Rc<mem::MaybeUninit<T>, A> {
        unsafe {
            Rc::from_ptr_in(
                Rc::allocate_for_layout(
                    Layout::new::<T>(),
                    |layout| alloc.allocate(layout),
                    <*mut u8>::cast,
                ),
                alloc,
            )
        }
    }

    /// Constructs a new `Rc` with uninitialized contents, with the memory
    /// being filled with `0` bytes, in the provided allocator.
    ///
    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
    /// incorrect usage of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let zero = Rc::<u32, _>::new_zeroed_in(System);
    /// let zero = unsafe { zero.assume_init() };
    ///
    /// assert_eq!(*zero, 0)
    /// ```
    ///
    /// [zeroed]: mem::MaybeUninit::zeroed
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "allocator_api", issue = "32838")]
    // #[unstable(feature = "new_uninit", issue = "63291")]
    #[inline]
    pub fn new_zeroed_in(alloc: A) -> Rc<mem::MaybeUninit<T>, A> {
        unsafe {
            Rc::from_ptr_in(
                Rc::allocate_for_layout(
                    Layout::new::<T>(),
                    |layout| alloc.allocate_zeroed(layout),
                    <*mut u8>::cast,
                ),
                alloc,
            )
        }
    }

    /// Constructs a new `Rc<T, A>` in the given allocator while giving you a `Weak<T, A>` to the allocation,
    /// to allow you to construct a `T` which holds a weak pointer to itself.
    ///
    /// Generally, a structure circularly referencing itself, either directly or
    /// indirectly, should not hold a strong reference to itself to prevent a memory leak.
    /// Using this function, you get access to the weak pointer during the
    /// initialization of `T`, before the `Rc<T, A>` is created, such that you can
    /// clone and store it inside the `T`.
    ///
    /// `new_cyclic_in` first allocates the managed allocation for the `Rc<T, A>`,
    /// then calls your closure, giving it a `Weak<T, A>` to this allocation,
    /// and only afterwards completes the construction of the `Rc<T, A>` by placing
    /// the `T` returned from your closure into the allocation.
    ///
    /// Since the new `Rc<T, A>` is not fully-constructed until `Rc<T, A>::new_cyclic_in`
    /// returns, calling [`upgrade`] on the weak reference inside your closure will
    /// fail and result in a `None` value.
    ///
    /// # Panics
    ///
    /// If `data_fn` panics, the panic is propagated to the caller, and the
    /// temporary [`Weak<T, A>`] is dropped normally.
    ///
    /// # Examples
    ///
    /// See [`new_cyclic`].
    ///
    /// [`new_cyclic`]: Rc::new_cyclic
    /// [`upgrade`]: Weak::upgrade
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub fn new_cyclic_in<F>(data_fn: F, alloc: A) -> Rc<T, A>
    where
        F: FnOnce(&Weak<T, A>) -> T,
    {
        // Construct the inner in the "uninitialized" state with a single
        // weak reference.
        let (uninit_raw_ptr, alloc) = Box::into_raw_with_allocator(Box::new_in(
            RcInner {
                strong: Cell::new(0),
                weak: Cell::new(1),
                value: mem::MaybeUninit::<T>::uninit(),
            },
            alloc,
        ));
        let uninit_ptr: NonNull<_> = (unsafe { &mut *uninit_raw_ptr }).into();
        let init_ptr: NonNull<RcInner<T>> = uninit_ptr.cast();

        let weak = Weak { ptr: init_ptr, alloc };

        // It's important we don't give up ownership of the weak pointer, or
        // else the memory might be freed by the time `data_fn` returns. If
        // we really wanted to pass ownership, we could create an additional
        // weak pointer for ourselves, but this would result in additional
        // updates to the weak reference count which might not be necessary
        // otherwise.
        let data = data_fn(&weak);

        let strong = unsafe {
            let inner = init_ptr.as_ptr();
            ptr::write(&raw mut (*inner).value, data);

            let prev_value = (*inner).strong.get();
            debug_assert_eq!(prev_value, 0, "No prior strong references should exist");
            (*inner).strong.set(1);

            // Strong references should collectively own a shared weak reference,
            // so don't run the destructor for our old weak reference.
            // Calling into_raw_with_allocator has the double effect of giving us back the allocator,
            // and forgetting the weak reference.
            let alloc = weak.into_raw_with_allocator().1;

            Rc::from_inner_in(init_ptr, alloc)
        };

        strong
    }

    /// Constructs a new `Rc<T>` in the provided allocator, returning an error if the allocation
    /// fails.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let five = Rc::try_new_in(5, System);
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn try_new_in(value: T, alloc: A) -> Result<Self, AllocError> {
        // There is an implicit weak pointer owned by all the strong
        // pointers, which ensures that the weak destructor never frees
        // the allocation while the strong destructor is running, even
        // if the weak pointer is stored inside the strong one.
        let (ptr, alloc) = Box::into_unique(Box::try_new_in(
            RcInner { strong: Cell::new(1), weak: Cell::new(1), value },
            alloc,
        )?);
        Ok(unsafe { Self::from_inner_in(ptr.into(), alloc) })
    }

    /// Constructs a new `Rc` with uninitialized contents, in the provided allocator, returning an
    /// error if the allocation fails.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    /// #![feature(get_mut_unchecked)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let mut five = Rc::<u32, _>::try_new_uninit_in(System)?;
    ///
    /// let five = unsafe {
    ///     // Deferred initialization:
    ///     Rc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);
    ///
    ///     five.assume_init()
    /// };
    ///
    /// assert_eq!(*five, 5);
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    #[unstable(feature = "allocator_api", issue = "32838")]
    // #[unstable(feature = "new_uninit", issue = "63291")]
    #[inline]
    pub fn try_new_uninit_in(alloc: A) -> Result<Rc<mem::MaybeUninit<T>, A>, AllocError> {
        unsafe {
            Ok(Rc::from_ptr_in(
                Rc::try_allocate_for_layout(
                    Layout::new::<T>(),
                    |layout| alloc.allocate(layout),
                    <*mut u8>::cast,
                )?,
                alloc,
            ))
        }
    }

    /// Constructs a new `Rc` with uninitialized contents, with the memory
    /// being filled with `0` bytes, in the provided allocator, returning an error if the allocation
    /// fails.
    ///
    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
    /// incorrect usage of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let zero = Rc::<u32, _>::try_new_zeroed_in(System)?;
    /// let zero = unsafe { zero.assume_init() };
    ///
    /// assert_eq!(*zero, 0);
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    ///
    /// [zeroed]: mem::MaybeUninit::zeroed
    #[unstable(feature = "allocator_api", issue = "32838")]
    //#[unstable(feature = "new_uninit", issue = "63291")]
    #[inline]
    pub fn try_new_zeroed_in(alloc: A) -> Result<Rc<mem::MaybeUninit<T>, A>, AllocError> {
        unsafe {
            Ok(Rc::from_ptr_in(
                Rc::try_allocate_for_layout(
                    Layout::new::<T>(),
                    |layout| alloc.allocate_zeroed(layout),
                    <*mut u8>::cast,
                )?,
                alloc,
            ))
        }
    }

    /// Constructs a new `Pin<Rc<T>>` in the provided allocator. If `T` does not implement `Unpin`, then
    /// `value` will be pinned in memory and unable to be moved.
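    ///
    /// # Examples
    ///
    /// A minimal sketch using the `System` allocator (any other allocator
    /// works the same way):
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let pinned = Rc::pin_in(5, System);
    /// assert_eq!(*pinned, 5);
    /// ```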
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn pin_in(value: T, alloc: A) -> Pin<Self>
    where
        A: 'static,
    {
        unsafe { Pin::new_unchecked(Rc::new_in(value, alloc)) }
    }

    /// Returns the inner value, if the `Rc` has exactly one strong reference.
    ///
    /// Otherwise, an [`Err`] is returned with the same `Rc` that was
    /// passed in.
    ///
    /// This will succeed even if there are outstanding weak references.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let x = Rc::new(3);
    /// assert_eq!(Rc::try_unwrap(x), Ok(3));
    ///
    /// let x = Rc::new(4);
    /// let _y = Rc::clone(&x);
    /// assert_eq!(*Rc::try_unwrap(x).unwrap_err(), 4);
    /// ```
    #[inline]
    #[stable(feature = "rc_unique", since = "1.4.0")]
    pub fn try_unwrap(this: Self) -> Result<T, Self> {
        if Rc::strong_count(&this) == 1 {
            let this = ManuallyDrop::new(this);

            let val: T = unsafe { ptr::read(&**this) }; // copy the contained object
            let alloc: A = unsafe { ptr::read(&this.alloc) }; // copy the allocator

            // Indicate to Weaks that they can't be promoted by decrementing
            // the strong count, and then remove the implicit "strong weak"
            // pointer while also handling drop logic by just crafting a
            // fake Weak.
            this.inner().dec_strong();
            let _weak = Weak { ptr: this.ptr, alloc };
            Ok(val)
        } else {
            Err(this)
        }
    }

    /// Returns the inner value, if the `Rc` has exactly one strong reference.
    ///
    /// Otherwise, [`None`] is returned and the `Rc` is dropped.
    ///
    /// This will succeed even if there are outstanding weak references.
    ///
    /// If `Rc::into_inner` is called on every clone of this `Rc`,
    /// it is guaranteed that exactly one of the calls returns the inner value.
    /// This means in particular that the inner value is not dropped.
    ///
    /// [`Rc::try_unwrap`] is conceptually similar to `Rc::into_inner`.
    /// And while they are meant for different use-cases, `Rc::into_inner(this)`
    /// is in fact equivalent to <code>[Rc::try_unwrap]\(this).[ok][Result::ok]()</code>.
    /// (Note that the same kind of equivalence does **not** hold true for
    /// [`Arc`](crate::sync::Arc), due to race conditions that do not apply to `Rc`!)
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let x = Rc::new(3);
    /// assert_eq!(Rc::into_inner(x), Some(3));
    ///
    /// let x = Rc::new(4);
    /// let y = Rc::clone(&x);
    ///
    /// assert_eq!(Rc::into_inner(y), None);
    /// assert_eq!(Rc::into_inner(x), Some(4));
    /// ```
    #[inline]
    #[stable(feature = "rc_into_inner", since = "1.70.0")]
    pub fn into_inner(this: Self) -> Option<T> {
        Rc::try_unwrap(this).ok()
    }
}

impl<T> Rc<[T]> {
    /// Constructs a new reference-counted slice with uninitialized contents.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(get_mut_unchecked)]
    ///
    /// use std::rc::Rc;
    ///
    /// let mut values = Rc::<[u32]>::new_uninit_slice(3);
    ///
    /// // Deferred initialization:
    /// let data = Rc::get_mut(&mut values).unwrap();
    /// data[0].write(1);
    /// data[1].write(2);
    /// data[2].write(3);
    ///
    /// let values = unsafe { values.assume_init() };
    ///
    /// assert_eq!(*values, [1, 2, 3])
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[stable(feature = "new_uninit", since = "1.82.0")]
    #[must_use]
    pub fn new_uninit_slice(len: usize) -> Rc<[mem::MaybeUninit<T>]> {
        unsafe { Rc::from_ptr(Rc::allocate_for_slice(len)) }
    }

    /// Constructs a new reference-counted slice with uninitialized contents, with the memory being
    /// filled with `0` bytes.
    ///
    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
    /// incorrect usage of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(new_zeroed_alloc)]
    ///
    /// use std::rc::Rc;
    ///
    /// let values = Rc::<[u32]>::new_zeroed_slice(3);
    /// let values = unsafe { values.assume_init() };
    ///
    /// assert_eq!(*values, [0, 0, 0])
    /// ```
    ///
    /// [zeroed]: mem::MaybeUninit::zeroed
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "new_zeroed_alloc", issue = "129396")]
    #[must_use]
    pub fn new_zeroed_slice(len: usize) -> Rc<[mem::MaybeUninit<T>]> {
        unsafe {
            Rc::from_ptr(Rc::allocate_for_layout(
                Layout::array::<T>(len).unwrap(),
                |layout| Global.allocate_zeroed(layout),
                |mem| {
                    ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len)
                        as *mut RcInner<[mem::MaybeUninit<T>]>
                },
            ))
        }
    }

    /// Converts the reference-counted slice into a reference-counted array.
    ///
    /// This operation does not reallocate; the underlying array of the slice is simply reinterpreted as an array type.
    ///
    /// If `N` is not exactly equal to the length of `self`, then this method returns `None`.
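    ///
    /// # Examples
    ///
    /// A small sketch of the round-trip (requires the unstable
    /// `slice_as_array` feature):
    ///
    /// ```
    /// #![feature(slice_as_array)]
    ///
    /// use std::rc::Rc;
    ///
    /// let slice: Rc<[u32]> = Rc::new([1, 2, 3]);
    /// let array: Rc<[u32; 3]> = slice.into_array().unwrap();
    /// assert_eq!(*array, [1, 2, 3]);
    ///
    /// // A mismatched length yields `None`.
    /// let slice: Rc<[u32]> = Rc::new([1, 2, 3]);
    /// assert!(slice.into_array::<4>().is_none());
    /// ```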
    #[unstable(feature = "slice_as_array", issue = "133508")]
    #[inline]
    #[must_use]
    pub fn into_array<const N: usize>(self) -> Option<Rc<[T; N]>> {
        if self.len() == N {
            let ptr = Self::into_raw(self) as *const [T; N];

            // SAFETY: The underlying array of a slice has the exact same layout as an actual array `[T; N]` if `N` is equal to the slice's length.
            let me = unsafe { Rc::from_raw(ptr) };
            Some(me)
        } else {
            None
        }
    }
}

impl<T, A: Allocator> Rc<[T], A> {
    /// Constructs a new reference-counted slice with uninitialized contents.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(get_mut_unchecked)]
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let mut values = Rc::<[u32], _>::new_uninit_slice_in(3, System);
    ///
    /// let values = unsafe {
    ///     // Deferred initialization:
    ///     Rc::get_mut_unchecked(&mut values)[0].as_mut_ptr().write(1);
    ///     Rc::get_mut_unchecked(&mut values)[1].as_mut_ptr().write(2);
    ///     Rc::get_mut_unchecked(&mut values)[2].as_mut_ptr().write(3);
    ///
    ///     values.assume_init()
    /// };
    ///
    /// assert_eq!(*values, [1, 2, 3])
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "allocator_api", issue = "32838")]
    // #[unstable(feature = "new_uninit", issue = "63291")]
    #[inline]
    pub fn new_uninit_slice_in(len: usize, alloc: A) -> Rc<[mem::MaybeUninit<T>], A> {
        unsafe { Rc::from_ptr_in(Rc::allocate_for_slice_in(len, &alloc), alloc) }
    }

    /// Constructs a new reference-counted slice with uninitialized contents, with the memory being
    /// filled with `0` bytes.
    ///
    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
    /// incorrect usage of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let values = Rc::<[u32], _>::new_zeroed_slice_in(3, System);
    /// let values = unsafe { values.assume_init() };
    ///
    /// assert_eq!(*values, [0, 0, 0])
    /// ```
    ///
    /// [zeroed]: mem::MaybeUninit::zeroed
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "allocator_api", issue = "32838")]
    // #[unstable(feature = "new_uninit", issue = "63291")]
    #[inline]
    pub fn new_zeroed_slice_in(len: usize, alloc: A) -> Rc<[mem::MaybeUninit<T>], A> {
        unsafe {
            Rc::from_ptr_in(
                Rc::allocate_for_layout(
                    Layout::array::<T>(len).unwrap(),
                    |layout| alloc.allocate_zeroed(layout),
                    |mem| {
                        ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len)
                            as *mut RcInner<[mem::MaybeUninit<T>]>
                    },
                ),
                alloc,
            )
        }
    }
}

impl<T, A: Allocator> Rc<mem::MaybeUninit<T>, A> {
    /// Converts to `Rc<T>`.
    ///
    /// # Safety
    ///
    /// As with [`MaybeUninit::assume_init`],
    /// it is up to the caller to guarantee that the inner value
    /// really is in an initialized state.
    /// Calling this when the content is not yet fully initialized
    /// causes immediate undefined behavior.
    ///
    /// [`MaybeUninit::assume_init`]: mem::MaybeUninit::assume_init
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(get_mut_unchecked)]
    ///
    /// use std::rc::Rc;
    ///
    /// let mut five = Rc::<u32>::new_uninit();
    ///
    /// // Deferred initialization:
    /// Rc::get_mut(&mut five).unwrap().write(5);
    ///
    /// let five = unsafe { five.assume_init() };
    ///
    /// assert_eq!(*five, 5)
    /// ```
    #[stable(feature = "new_uninit", since = "1.82.0")]
    #[inline]
    pub unsafe fn assume_init(self) -> Rc<T, A> {
        let (ptr, alloc) = Rc::into_inner_with_allocator(self);
        unsafe { Rc::from_inner_in(ptr.cast(), alloc) }
    }
}

impl<T, A: Allocator> Rc<[mem::MaybeUninit<T>], A> {
    /// Converts to `Rc<[T]>`.
    ///
    /// # Safety
    ///
    /// As with [`MaybeUninit::assume_init`],
    /// it is up to the caller to guarantee that the inner value
    /// really is in an initialized state.
    /// Calling this when the content is not yet fully initialized
    /// causes immediate undefined behavior.
    ///
    /// [`MaybeUninit::assume_init`]: mem::MaybeUninit::assume_init
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(get_mut_unchecked)]
    ///
    /// use std::rc::Rc;
    ///
    /// let mut values = Rc::<[u32]>::new_uninit_slice(3);
    ///
    /// // Deferred initialization:
    /// let data = Rc::get_mut(&mut values).unwrap();
    /// data[0].write(1);
    /// data[1].write(2);
    /// data[2].write(3);
    ///
    /// let values = unsafe { values.assume_init() };
    ///
    /// assert_eq!(*values, [1, 2, 3])
    /// ```
    #[stable(feature = "new_uninit", since = "1.82.0")]
    #[inline]
    pub unsafe fn assume_init(self) -> Rc<[T], A> {
        let (ptr, alloc) = Rc::into_inner_with_allocator(self);
        unsafe { Rc::from_ptr_in(ptr.as_ptr() as _, alloc) }
    }
}

impl<T: ?Sized> Rc<T> {
    /// Constructs an `Rc<T>` from a raw pointer.
    ///
    /// The raw pointer must have been previously returned by a call to
    /// [`Rc<U>::into_raw`][into_raw] with the following requirements:
    ///
    /// * If `U` is sized, it must have the same size and alignment as `T`. This
    ///   is trivially true if `U` is `T`.
    /// * If `U` is unsized, its data pointer must have the same size and
    ///   alignment as `T`. This is trivially true if `Rc<U>` was constructed
    ///   through `Rc<T>` and then converted to `Rc<U>` through an [unsized
    ///   coercion].
    ///
    /// Note that if `U` or `U`'s data pointer is not `T` but has the same size
    /// and alignment, this is basically like transmuting references of
    /// different types. See [`mem::transmute`][transmute] for more information
    /// on what restrictions apply in this case.
    ///
    /// The raw pointer must point to a block of memory allocated by the global allocator.
    ///
    /// The user of `from_raw` has to make sure a specific value of `T` is only
    /// dropped once.
    ///
    /// This function is unsafe because improper use may lead to memory unsafety,
    /// even if the returned `Rc<T>` is never accessed.
    ///
    /// [into_raw]: Rc::into_raw
    /// [transmute]: core::mem::transmute
    /// [unsized coercion]: https://doc.rust-lang.org/reference/type-coercions.html#unsized-coercions
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let x = Rc::new("hello".to_owned());
    /// let x_ptr = Rc::into_raw(x);
    ///
    /// unsafe {
    ///     // Convert back to an `Rc` to prevent leak.
    ///     let x = Rc::from_raw(x_ptr);
    ///     assert_eq!(&*x, "hello");
    ///
    ///     // Further calls to `Rc::from_raw(x_ptr)` would be memory-unsafe.
    /// }
    ///
    /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling!
    /// ```
    ///
    /// Convert a slice back into its original array:
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let x: Rc<[u32]> = Rc::new([1, 2, 3]);
    /// let x_ptr: *const [u32] = Rc::into_raw(x);
    ///
    /// unsafe {
    ///     let x: Rc<[u32; 3]> = Rc::from_raw(x_ptr.cast::<[u32; 3]>());
    ///     assert_eq!(&*x, &[1, 2, 3]);
    /// }
    /// ```
    #[inline]
    #[stable(feature = "rc_raw", since = "1.17.0")]
    pub unsafe fn from_raw(ptr: *const T) -> Self {
        unsafe { Self::from_raw_in(ptr, Global) }
    }

    /// Increments the strong reference count on the `Rc<T>` associated with the
    /// provided pointer by one.
    ///
    /// # Safety
    ///
    /// The pointer must have been obtained through `Rc::into_raw`, the
    /// associated `Rc` instance must be valid (i.e. the strong count must be at
    /// least 1) for the duration of this method, and `ptr` must point to a block of memory
    /// allocated by the global allocator.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    ///
    /// unsafe {
    ///     let ptr = Rc::into_raw(five);
    ///     Rc::increment_strong_count(ptr);
    ///
    ///     let five = Rc::from_raw(ptr);
    ///     assert_eq!(2, Rc::strong_count(&five));
    /// # // Prevent leaks for Miri.
    /// # Rc::decrement_strong_count(ptr);
    /// }
    /// ```
    #[inline]
    #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")]
    pub unsafe fn increment_strong_count(ptr: *const T) {
        unsafe { Self::increment_strong_count_in(ptr, Global) }
    }

    /// Decrements the strong reference count on the `Rc<T>` associated with the
    /// provided pointer by one.
    ///
    /// # Safety
    ///
    /// The pointer must have been obtained through `Rc::into_raw`, the
    /// associated `Rc` instance must be valid (i.e. the strong count must be at
    /// least 1) when invoking this method, and `ptr` must point to a block of memory
    /// allocated by the global allocator. This method can be used to release the final `Rc` and
    /// backing storage, but **should not** be called after the final `Rc` has been released.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    ///
    /// unsafe {
    ///     let ptr = Rc::into_raw(five);
    ///     Rc::increment_strong_count(ptr);
    ///
    ///     let five = Rc::from_raw(ptr);
    ///     assert_eq!(2, Rc::strong_count(&five));
    ///     Rc::decrement_strong_count(ptr);
    ///     assert_eq!(1, Rc::strong_count(&five));
    /// }
    /// ```
    #[inline]
    #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")]
    pub unsafe fn decrement_strong_count(ptr: *const T) {
        unsafe { Self::decrement_strong_count_in(ptr, Global) }
    }
}

impl<T: ?Sized, A: Allocator> Rc<T, A> {
    /// Returns a reference to the underlying allocator.
    ///
    /// Note: this is an associated function, which means that you have
    /// to call it as `Rc::allocator(&r)` instead of `r.allocator()`. This
    /// is so that there is no conflict with a method on the inner type.
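    ///
    /// # Examples
    ///
    /// A minimal sketch with the `System` allocator:
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let rc = Rc::new_in(5, System);
    /// let _alloc: &System = Rc::allocator(&rc);
    /// ```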
    #[inline]
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub fn allocator(this: &Self) -> &A {
        &this.alloc
    }

    /// Consumes the `Rc`, returning the wrapped pointer.
    ///
    /// To avoid a memory leak the pointer must be converted back to an `Rc` using
    /// [`Rc::from_raw`].
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let x = Rc::new("hello".to_owned());
    /// let x_ptr = Rc::into_raw(x);
    /// assert_eq!(unsafe { &*x_ptr }, "hello");
    /// # // Prevent leaks for Miri.
    /// # drop(unsafe { Rc::from_raw(x_ptr) });
    /// ```
    #[must_use = "losing the pointer will leak memory"]
    #[stable(feature = "rc_raw", since = "1.17.0")]
    #[rustc_never_returns_null_ptr]
    pub fn into_raw(this: Self) -> *const T {
        let this = ManuallyDrop::new(this);
        Self::as_ptr(&*this)
    }

    /// Consumes the `Rc`, returning the wrapped pointer and allocator.
    ///
    /// To avoid a memory leak the pointer must be converted back to an `Rc` using
    /// [`Rc::from_raw_in`].
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let x = Rc::new_in("hello".to_owned(), System);
    /// let (ptr, alloc) = Rc::into_raw_with_allocator(x);
    /// assert_eq!(unsafe { &*ptr }, "hello");
    /// let x = unsafe { Rc::from_raw_in(ptr, alloc) };
    /// assert_eq!(&*x, "hello");
    /// ```
    #[must_use = "losing the pointer will leak memory"]
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub fn into_raw_with_allocator(this: Self) -> (*const T, A) {
        let this = mem::ManuallyDrop::new(this);
        let ptr = Self::as_ptr(&this);
        // Safety: `this` is ManuallyDrop so the allocator will not be double-dropped
        let alloc = unsafe { ptr::read(&this.alloc) };
        (ptr, alloc)
    }

    /// Provides a raw pointer to the data.
    ///
    /// The counts are not affected in any way and the `Rc` is not consumed. The pointer is valid
    /// for as long as there are strong counts in the `Rc`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let x = Rc::new(0);
    /// let y = Rc::clone(&x);
    /// let x_ptr = Rc::as_ptr(&x);
    /// assert_eq!(x_ptr, Rc::as_ptr(&y));
    /// assert_eq!(unsafe { *x_ptr }, 0);
    /// ```
    #[stable(feature = "weak_into_raw", since = "1.45.0")]
    #[rustc_never_returns_null_ptr]
    pub fn as_ptr(this: &Self) -> *const T {
        let ptr: *mut RcInner<T> = NonNull::as_ptr(this.ptr);

        // SAFETY: This cannot go through Deref::deref or Rc::inner because
        // this is required to retain raw/mut provenance such that e.g. `get_mut` can
        // write through the pointer after the Rc is recovered through `from_raw`.
        unsafe { &raw mut (*ptr).value }
    }

    /// Constructs an `Rc<T, A>` from a raw pointer in the provided allocator.
    ///
    /// The raw pointer must have been previously returned by a call to [`Rc<U,
    /// A>::into_raw`][into_raw] with the following requirements:
    ///
    /// * If `U` is sized, it must have the same size and alignment as `T`. This
    ///   is trivially true if `U` is `T`.
    /// * If `U` is unsized, its data pointer must have the same size and
    ///   alignment as `T`. This is trivially true if `Rc<U>` was constructed
    ///   through `Rc<T>` and then converted to `Rc<U>` through an [unsized
    ///   coercion].
    ///
    /// Note that if `U` or `U`'s data pointer is not `T` but has the same size
    /// and alignment, this is basically like transmuting references of
    /// different types. See [`mem::transmute`][transmute] for more information
    /// on what restrictions apply in this case.
    ///
    /// The raw pointer must point to a block of memory allocated by `alloc`.
    ///
    /// The user of `from_raw` has to make sure a specific value of `T` is only
    /// dropped once.
    ///
    /// This function is unsafe because improper use may lead to memory unsafety,
    /// even if the returned `Rc<T>` is never accessed.
    ///
    /// [into_raw]: Rc::into_raw
    /// [transmute]: core::mem::transmute
    /// [unsized coercion]: https://doc.rust-lang.org/reference/type-coercions.html#unsized-coercions
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let x = Rc::new_in("hello".to_owned(), System);
    /// let x_ptr = Rc::into_raw(x);
    ///
    /// unsafe {
    ///     // Convert back to an `Rc` to prevent leak.
    ///     let x = Rc::from_raw_in(x_ptr, System);
    ///     assert_eq!(&*x, "hello");
    ///
    ///     // Further calls to `Rc::from_raw(x_ptr)` would be memory-unsafe.
    /// }
    ///
    /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling!
    /// ```
    ///
    /// Convert a slice back into its original array:
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let x: Rc<[u32], _> = Rc::new_in([1, 2, 3], System);
    /// let x_ptr: *const [u32] = Rc::into_raw(x);
    ///
    /// unsafe {
    ///     let x: Rc<[u32; 3], _> = Rc::from_raw_in(x_ptr.cast::<[u32; 3]>(), System);
    ///     assert_eq!(&*x, &[1, 2, 3]);
    /// }
    /// ```
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub unsafe fn from_raw_in(ptr: *const T, alloc: A) -> Self {
        let offset = unsafe { data_offset(ptr) };

        // Reverse the offset to find the original RcInner.
        let rc_ptr = unsafe { ptr.byte_sub(offset) as *mut RcInner<T> };

        unsafe { Self::from_ptr_in(rc_ptr, alloc) }
    }

    /// Creates a new [`Weak`] pointer to this allocation.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    ///
    /// let weak_five = Rc::downgrade(&five);
    /// ```
    #[must_use = "this returns a new `Weak` pointer, \
                  without modifying the original `Rc`"]
    #[stable(feature = "rc_weak", since = "1.4.0")]
    pub fn downgrade(this: &Self) -> Weak<T, A>
    where
        A: Clone,
    {
        this.inner().inc_weak();
        // Make sure we do not create a dangling Weak
        debug_assert!(!is_dangling(this.ptr.as_ptr()));
        Weak { ptr: this.ptr, alloc: this.alloc.clone() }
    }

    /// Gets the number of [`Weak`] pointers to this allocation.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    /// let _weak_five = Rc::downgrade(&five);
    ///
    /// assert_eq!(1, Rc::weak_count(&five));
    /// ```
    #[inline]
    #[stable(feature = "rc_counts", since = "1.15.0")]
    pub fn weak_count(this: &Self) -> usize {
        this.inner().weak() - 1
    }

    /// Gets the number of strong (`Rc`) pointers to this allocation.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    /// let _also_five = Rc::clone(&five);
    ///
    /// assert_eq!(2, Rc::strong_count(&five));
    /// ```
    #[inline]
    #[stable(feature = "rc_counts", since = "1.15.0")]
    pub fn strong_count(this: &Self) -> usize {
        this.inner().strong()
    }

    /// Increments the strong reference count on the `Rc<T>` associated with the
    /// provided pointer by one.
    ///
    /// # Safety
    ///
    /// The pointer must have been obtained through `Rc::into_raw`, the
    /// associated `Rc` instance must be valid (i.e. the strong count must be at
    /// least 1) for the duration of this method, and `ptr` must point to a block of memory
    /// allocated by `alloc`.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let five = Rc::new_in(5, System);
    ///
    /// unsafe {
    ///     let ptr = Rc::into_raw(five);
    ///     Rc::increment_strong_count_in(ptr, System);
    ///
    ///     let five = Rc::from_raw_in(ptr, System);
    ///     assert_eq!(2, Rc::strong_count(&five));
    /// # // Prevent leaks for Miri.
    /// # Rc::decrement_strong_count_in(ptr, System);
    /// }
    /// ```
    #[inline]
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub unsafe fn increment_strong_count_in(ptr: *const T, alloc: A)
    where
        A: Clone,
    {
        // Retain Rc, but don't touch refcount by wrapping in ManuallyDrop
        let rc = unsafe { mem::ManuallyDrop::new(Rc::<T, A>::from_raw_in(ptr, alloc)) };
        // Now increase refcount, but don't drop new refcount either
        let _rc_clone: mem::ManuallyDrop<_> = rc.clone();
    }

    /// Decrements the strong reference count on the `Rc<T>` associated with the
    /// provided pointer by one.
    ///
    /// # Safety
    ///
    /// The pointer must have been obtained through `Rc::into_raw`, the
    /// associated `Rc` instance must be valid (i.e. the strong count must be at
    /// least 1) when invoking this method, and `ptr` must point to a block of memory
    /// allocated by `alloc`. This method can be used to release the final `Rc` and backing storage,
    /// but **should not** be called after the final `Rc` has been released.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let five = Rc::new_in(5, System);
    ///
    /// unsafe {
    ///     let ptr = Rc::into_raw(five);
    ///     Rc::increment_strong_count_in(ptr, System);
    ///
    ///     let five = Rc::from_raw_in(ptr, System);
    ///     assert_eq!(2, Rc::strong_count(&five));
    ///     Rc::decrement_strong_count_in(ptr, System);
    ///     assert_eq!(1, Rc::strong_count(&five));
    /// }
    /// ```
    #[inline]
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub unsafe fn decrement_strong_count_in(ptr: *const T, alloc: A) {
        unsafe { drop(Rc::from_raw_in(ptr, alloc)) };
    }

    /// Returns `true` if there are no other `Rc` or [`Weak`] pointers to
    /// this allocation.
    #[inline]
    fn is_unique(this: &Self) -> bool {
        Rc::weak_count(this) == 0 && Rc::strong_count(this) == 1
    }

    /// Returns a mutable reference into the given `Rc`, if there are
    /// no other `Rc` or [`Weak`] pointers to the same allocation.
    ///
    /// Returns [`None`] otherwise, because it is not safe to
    /// mutate a shared value.
    ///
    /// See also [`make_mut`][make_mut], which will [`clone`][clone]
    /// the inner value when there are other `Rc` pointers.
    ///
    /// [make_mut]: Rc::make_mut
    /// [clone]: Clone::clone
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let mut x = Rc::new(3);
    /// *Rc::get_mut(&mut x).unwrap() = 4;
    /// assert_eq!(*x, 4);
    ///
    /// let _y = Rc::clone(&x);
    /// assert!(Rc::get_mut(&mut x).is_none());
    /// ```
    #[inline]
    #[stable(feature = "rc_unique", since = "1.4.0")]
    pub fn get_mut(this: &mut Self) -> Option<&mut T> {
        if Rc::is_unique(this) { unsafe { Some(Rc::get_mut_unchecked(this)) } } else { None }
    }

    /// Returns a mutable reference into the given `Rc`,
    /// without any check.
    ///
    /// See also [`get_mut`], which is safe and does appropriate checks.
    ///
    /// [`get_mut`]: Rc::get_mut
    ///
    /// # Safety
    ///
    /// If any other `Rc` or [`Weak`] pointers to the same allocation exist, then
    /// they must not be dereferenced or have active borrows for the duration
    /// of the returned borrow, and their inner type must be exactly the same as the
    /// inner type of this Rc (including lifetimes). This is trivially the case if no
    /// such pointers exist, for example immediately after `Rc::new`.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(get_mut_unchecked)]
    ///
    /// use std::rc::Rc;
    ///
    /// let mut x = Rc::new(String::new());
    /// unsafe {
    ///     Rc::get_mut_unchecked(&mut x).push_str("foo")
1762 /// }
1763 /// assert_eq!(*x, "foo");
1764 /// ```
1765 /// Other `Rc` pointers to the same allocation must be to the same type.
1766 /// ```no_run
1767 /// #![feature(get_mut_unchecked)]
1768 ///
1769 /// use std::rc::Rc;
1770 ///
1771 /// let x: Rc<str> = Rc::from("Hello, world!");
1772 /// let mut y: Rc<[u8]> = x.clone().into();
1773 /// unsafe {
1774 /// // this is Undefined Behavior, because x's inner type is str, not [u8]
1775 /// Rc::get_mut_unchecked(&mut y).fill(0xff); // 0xff is invalid in UTF-8
1776 /// }
1777 /// println!("{}", &*x); // Invalid UTF-8 in a str
1778 /// ```
1779 /// Other `Rc` pointers to the same allocation must be to the exact same type, including lifetimes.
1780 /// ```no_run
1781 /// #![feature(get_mut_unchecked)]
1782 ///
1783 /// use std::rc::Rc;
1784 ///
1785 /// let x: Rc<&str> = Rc::new("Hello, world!");
1786 /// {
1787 /// let s = String::from("Oh, no!");
1788 /// let mut y: Rc<&str> = x.clone();
1789 /// unsafe {
1790 /// // this is Undefined Behavior, because x's inner type
1791 /// // is &'long str, not &'short str
1792 /// *Rc::get_mut_unchecked(&mut y) = &s;
1793 /// }
1794 /// }
1795 /// println!("{}", &*x); // Use-after-free
1796 /// ```
1797 #[inline]
1798 #[unstable(feature = "get_mut_unchecked", issue = "63292")]
1799 pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T {
1800 // We are careful to *not* create a reference covering the "count" fields, as
1801 // this would conflict with accesses to the reference counts (e.g. by `Weak`).
1802 unsafe { &mut (*this.ptr.as_ptr()).value }
1803 }
1804
1805 #[inline]
1806 #[stable(feature = "ptr_eq", since = "1.17.0")]
1807 /// Returns `true` if the two `Rc`s point to the same allocation, in the same vein as
1808 /// [`ptr::eq`]. This function ignores the metadata of `dyn Trait` pointers.
1809 ///
1810 /// # Examples
1811 ///
1812 /// ```
1813 /// use std::rc::Rc;
1814 ///
1815 /// let five = Rc::new(5);
1816 /// let same_five = Rc::clone(&five);
1817 /// let other_five = Rc::new(5);
1818 ///
1819 /// assert!(Rc::ptr_eq(&five, &same_five));
1820 /// assert!(!Rc::ptr_eq(&five, &other_five));
1821 /// ```
1822 pub fn ptr_eq(this: &Self, other: &Self) -> bool {
1823 ptr::addr_eq(this.ptr.as_ptr(), other.ptr.as_ptr())
1824 }
1825}
1826
1827#[cfg(not(no_global_oom_handling))]
1828impl<T: ?Sized + CloneToUninit, A: Allocator + Clone> Rc<T, A> {
1829 /// Makes a mutable reference into the given `Rc`.
1830 ///
1831 /// If there are other `Rc` pointers to the same allocation, then `make_mut` will
1832 /// [`clone`] the inner value to a new allocation to ensure unique ownership. This is also
1833 /// referred to as clone-on-write.
1834 ///
1835 /// However, if there are no other `Rc` pointers to this allocation, but some [`Weak`]
1836 /// pointers, then the [`Weak`] pointers will be disassociated and the inner value will not
1837 /// be cloned.
1838 ///
1839 /// See also [`get_mut`], which will fail rather than cloning the inner value
1840 /// or disassociating [`Weak`] pointers.
1841 ///
1842 /// [`clone`]: Clone::clone
1843 /// [`get_mut`]: Rc::get_mut
1844 ///
1845 /// # Examples
1846 ///
1847 /// ```
1848 /// use std::rc::Rc;
1849 ///
1850 /// let mut data = Rc::new(5);
1851 ///
1852 /// *Rc::make_mut(&mut data) += 1; // Won't clone anything
1853 /// let mut other_data = Rc::clone(&data); // Won't clone inner data
1854 /// *Rc::make_mut(&mut data) += 1; // Clones inner data
1855 /// *Rc::make_mut(&mut data) += 1; // Won't clone anything
1856 /// *Rc::make_mut(&mut other_data) *= 2; // Won't clone anything
1857 ///
1858 /// // Now `data` and `other_data` point to different allocations.
1859 /// assert_eq!(*data, 8);
1860 /// assert_eq!(*other_data, 12);
1861 /// ```
1862 ///
1863 /// [`Weak`] pointers will be disassociated:
1864 ///
1865 /// ```
1866 /// use std::rc::Rc;
1867 ///
1868 /// let mut data = Rc::new(75);
1869 /// let weak = Rc::downgrade(&data);
1870 ///
1871 /// assert!(75 == *data);
1872 /// assert!(75 == *weak.upgrade().unwrap());
1873 ///
1874 /// *Rc::make_mut(&mut data) += 1;
1875 ///
1876 /// assert!(76 == *data);
1877 /// assert!(weak.upgrade().is_none());
1878 /// ```
1879 #[inline]
1880 #[stable(feature = "rc_unique", since = "1.4.0")]
1881 pub fn make_mut(this: &mut Self) -> &mut T {
1882 let size_of_val = size_of_val::<T>(&**this);
1883
1884 if Rc::strong_count(this) != 1 {
1885 // Gotta clone the data, there are other Rcs.
1886
1887 let this_data_ref: &T = &**this;
1888 // `in_progress` drops the allocation if we panic before finishing initializing it.
1889 let mut in_progress: UniqueRcUninit<T, A> =
1890 UniqueRcUninit::new(this_data_ref, this.alloc.clone());
1891
1892 // Initialize with clone of this.
1893 let initialized_clone = unsafe {
1894 // Clone. If the clone panics, `in_progress` will be dropped and clean up.
1895 this_data_ref.clone_to_uninit(in_progress.data_ptr().cast());
1896 // Cast type of pointer, now that it is initialized.
1897 in_progress.into_rc()
1898 };
1899
1900 // Replace `this` with newly constructed Rc.
1901 *this = initialized_clone;
1902 } else if Rc::weak_count(this) != 0 {
1903 // Can just steal the data, all that's left is Weaks
1904
1905 // We don't need panic-protection like the above branch does, but we might as well
1906 // use the same mechanism.
1907 let mut in_progress: UniqueRcUninit<T, A> =
1908 UniqueRcUninit::new(&**this, this.alloc.clone());
1909 unsafe {
1910 // Initialize `in_progress` with move of **this.
1911 // We have to express this in terms of bytes because `T: ?Sized`; there is no
1912 // operation that just copies a value based on its `size_of_val()`.
1913 ptr::copy_nonoverlapping(
1914 ptr::from_ref(&**this).cast::<u8>(),
1915 in_progress.data_ptr().cast::<u8>(),
1916 size_of_val,
1917 );
1918
1919 this.inner().dec_strong();
1920 // Remove implicit strong-weak ref (no need to craft a fake
1921 // Weak here -- we know other Weaks can clean up for us)
1922 this.inner().dec_weak();
1923 // Replace `this` with newly constructed Rc that has the moved data.
1924 ptr::write(this, in_progress.into_rc());
1925 }
1926 }
1927 // This unsafety is ok because we're guaranteed that the pointer
1928 // returned is the *only* pointer that will ever be returned to T. Our
1929 // reference count is guaranteed to be 1 at this point, and we required
1930 // the `Rc<T>` itself to be `mut`, so we're returning the only possible
1931 // reference to the allocation.
1932 unsafe { &mut this.ptr.as_mut().value }
1933 }
1934}
1935
1936impl<T: Clone, A: Allocator> Rc<T, A> {
1937 /// If we have the only reference to `T` then unwrap it. Otherwise, clone `T` and return the
1938 /// clone.
1939 ///
1940 /// Assuming `rc_t` is of type `Rc<T>`, this function is functionally equivalent to
1941 /// `(*rc_t).clone()`, but will avoid cloning the inner value where possible.
1942 ///
1943 /// # Examples
1944 ///
1945 /// ```
1946 /// # use std::{ptr, rc::Rc};
1947 /// let inner = String::from("test");
1948 /// let ptr = inner.as_ptr();
1949 ///
1950 /// let rc = Rc::new(inner);
1951 /// let inner = Rc::unwrap_or_clone(rc);
1952 /// // The inner value was not cloned
1953 /// assert!(ptr::eq(ptr, inner.as_ptr()));
1954 ///
1955 /// let rc = Rc::new(inner);
1956 /// let rc2 = rc.clone();
1957 /// let inner = Rc::unwrap_or_clone(rc);
1958 /// // Because there were 2 references, we had to clone the inner value.
1959 /// assert!(!ptr::eq(ptr, inner.as_ptr()));
1960 /// // `rc2` is the last reference, so when we unwrap it we get back
1961 /// // the original `String`.
1962 /// let inner = Rc::unwrap_or_clone(rc2);
1963 /// assert!(ptr::eq(ptr, inner.as_ptr()));
1964 /// ```
1965 #[inline]
1966 #[stable(feature = "arc_unwrap_or_clone", since = "1.76.0")]
1967 pub fn unwrap_or_clone(this: Self) -> T {
1968 Rc::try_unwrap(this).unwrap_or_else(|rc| (*rc).clone())
1969 }
1970}
1971
1972impl<A: Allocator> Rc<dyn Any, A> {
1973 /// Attempts to downcast the `Rc<dyn Any>` to a concrete type.
1974 ///
1975 /// # Examples
1976 ///
1977 /// ```
1978 /// use std::any::Any;
1979 /// use std::rc::Rc;
1980 ///
1981 /// fn print_if_string(value: Rc<dyn Any>) {
1982 /// if let Ok(string) = value.downcast::<String>() {
1983 /// println!("String ({}): {}", string.len(), string);
1984 /// }
1985 /// }
1986 ///
1987 /// let my_string = "Hello World".to_string();
1988 /// print_if_string(Rc::new(my_string));
1989 /// print_if_string(Rc::new(0i8));
1990 /// ```
1991 #[inline]
1992 #[stable(feature = "rc_downcast", since = "1.29.0")]
1993 pub fn downcast<T: Any>(self) -> Result<Rc<T, A>, Self> {
1994 if (*self).is::<T>() {
1995 unsafe {
1996 let (ptr, alloc) = Rc::into_inner_with_allocator(self);
1997 Ok(Rc::from_inner_in(ptr.cast(), alloc))
1998 }
1999 } else {
2000 Err(self)
2001 }
2002 }
2003
2004 /// Downcasts the `Rc<dyn Any>` to a concrete type.
2005 ///
2006 /// For a safe alternative see [`downcast`].
2007 ///
2008 /// # Examples
2009 ///
2010 /// ```
2011 /// #![feature(downcast_unchecked)]
2012 ///
2013 /// use std::any::Any;
2014 /// use std::rc::Rc;
2015 ///
2016 /// let x: Rc<dyn Any> = Rc::new(1_usize);
2017 ///
2018 /// unsafe {
2019 /// assert_eq!(*x.downcast_unchecked::<usize>(), 1);
2020 /// }
2021 /// ```
2022 ///
2023 /// # Safety
2024 ///
2025 /// The contained value must be of type `T`. Calling this method
2026 /// with the incorrect type is *undefined behavior*.
2027 ///
2029 /// [`downcast`]: Self::downcast
2030 #[inline]
2031 #[unstable(feature = "downcast_unchecked", issue = "90850")]
2032 pub unsafe fn downcast_unchecked<T: Any>(self) -> Rc<T, A> {
2033 unsafe {
2034 let (ptr, alloc) = Rc::into_inner_with_allocator(self);
2035 Rc::from_inner_in(ptr.cast(), alloc)
2036 }
2037 }
2038}
2039
2040impl<T: ?Sized> Rc<T> {
2041 /// Allocates an `RcInner<T>` with sufficient space for
2042 /// a possibly-unsized inner value where the value has the layout provided.
2043 ///
2044 /// The function `mem_to_rc_inner` is called with the data pointer
2045 /// and must return a (potentially fat) pointer for the `RcInner<T>`.
2046 #[cfg(not(no_global_oom_handling))]
2047 unsafe fn allocate_for_layout(
2048 value_layout: Layout,
2049 allocate: impl FnOnce(Layout) -> Result<NonNull<[u8]>, AllocError>,
2050 mem_to_rc_inner: impl FnOnce(*mut u8) -> *mut RcInner<T>,
2051 ) -> *mut RcInner<T> {
2052 let layout = rc_inner_layout_for_value_layout(value_layout);
2053 unsafe {
2054 Rc::try_allocate_for_layout(value_layout, allocate, mem_to_rc_inner)
2055 .unwrap_or_else(|_| handle_alloc_error(layout))
2056 }
2057 }
2058
2059 /// Allocates an `RcInner<T>` with sufficient space for
2060 /// a possibly-unsized inner value where the value has the layout provided,
2061 /// returning an error if allocation fails.
2062 ///
2063 /// The function `mem_to_rc_inner` is called with the data pointer
2064 /// and must return a (potentially fat) pointer for the `RcInner<T>`.
2065 #[inline]
2066 unsafe fn try_allocate_for_layout(
2067 value_layout: Layout,
2068 allocate: impl FnOnce(Layout) -> Result<NonNull<[u8]>, AllocError>,
2069 mem_to_rc_inner: impl FnOnce(*mut u8) -> *mut RcInner<T>,
2070 ) -> Result<*mut RcInner<T>, AllocError> {
2071 let layout = rc_inner_layout_for_value_layout(value_layout);
2072
2073 // Allocate for the layout.
2074 let ptr = allocate(layout)?;
2075
2076 // Initialize the RcInner
2077 let inner = mem_to_rc_inner(ptr.as_non_null_ptr().as_ptr());
2078 unsafe {
2079 debug_assert_eq!(Layout::for_value_raw(inner), layout);
2080
2081 (&raw mut (*inner).strong).write(Cell::new(1));
2082 (&raw mut (*inner).weak).write(Cell::new(1));
2083 }
2084
2085 Ok(inner)
2086 }
2087}
2088
2089impl<T: ?Sized, A: Allocator> Rc<T, A> {
2090 /// Allocates an `RcInner<T>` with sufficient space for an unsized inner value
2091 #[cfg(not(no_global_oom_handling))]
2092 unsafe fn allocate_for_ptr_in(ptr: *const T, alloc: &A) -> *mut RcInner<T> {
2093 // Allocate for the `RcInner<T>` using the given value.
2094 unsafe {
2095 Rc::<T>::allocate_for_layout(
2096 Layout::for_value_raw(ptr),
2097 |layout| alloc.allocate(layout),
2098 |mem| mem.with_metadata_of(ptr as *const RcInner<T>),
2099 )
2100 }
2101 }
2102
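    /// Moves the value out of the box into a fresh `RcInner` allocation by
    /// copying its bytes, then frees the box's allocation without dropping the
    /// (now moved-out) value.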
2103 #[cfg(not(no_global_oom_handling))]
2104 fn from_box_in(src: Box<T, A>) -> Rc<T, A> {
2105 unsafe {
2106 let value_size = size_of_val(&*src);
2107 let ptr = Self::allocate_for_ptr_in(&*src, Box::allocator(&src));
2108
2109 // Copy value as bytes
2110 ptr::copy_nonoverlapping(
2111 (&raw const *src) as *const u8,
2112 (&raw mut (*ptr).value) as *mut u8,
2113 value_size,
2114 );
2115
2116 // Free the allocation without dropping its contents
2117 let (bptr, alloc) = Box::into_raw_with_allocator(src);
2118 let src = Box::from_raw_in(bptr as *mut mem::ManuallyDrop<T>, alloc.by_ref());
2119 drop(src);
2120
2121 Self::from_ptr_in(ptr, alloc)
2122 }
2123 }
2124}
2125
2126impl<T> Rc<[T]> {
2127 /// Allocates an `RcInner<[T]>` with the given length.
2128 #[cfg(not(no_global_oom_handling))]
2129 unsafe fn allocate_for_slice(len: usize) -> *mut RcInner<[T]> {
2130 unsafe {
2131 Self::allocate_for_layout(
2132 Layout::array::<T>(len).unwrap(),
2133 |layout| Global.allocate(layout),
2134 |mem| ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len) as *mut RcInner<[T]>,
2135 )
2136 }
2137 }
2138
2139 /// Copy elements from slice into newly allocated `Rc<[T]>`
2140 ///
2141 /// Unsafe because the caller must either take ownership or bind `T: Copy`
2142 #[cfg(not(no_global_oom_handling))]
2143 unsafe fn copy_from_slice(v: &[T]) -> Rc<[T]> {
2144 unsafe {
2145 let ptr = Self::allocate_for_slice(v.len());
2146 ptr::copy_nonoverlapping(v.as_ptr(), (&raw mut (*ptr).value) as *mut T, v.len());
2147 Self::from_ptr(ptr)
2148 }
2149 }
2150
2151 /// Constructs an `Rc<[T]>` from an iterator known to be of a certain size.
2152 ///
2153 /// Behavior is undefined should the size be wrong.
2154 #[cfg(not(no_global_oom_handling))]
2155 unsafe fn from_iter_exact(iter: impl Iterator<Item = T>, len: usize) -> Rc<[T]> {
2156 // Panic guard while cloning T elements.
2157 // In the event of a panic, elements that have been written
2158 // into the new RcInner will be dropped, then the memory freed.
2159 struct Guard<T> {
2160 mem: NonNull<u8>,
2161 elems: *mut T,
2162 layout: Layout,
2163 n_elems: usize,
2164 }
2165
2166 impl<T> Drop for Guard<T> {
2167 fn drop(&mut self) {
2168 unsafe {
2169 let slice = from_raw_parts_mut(self.elems, self.n_elems);
2170 ptr::drop_in_place(slice);
2171
2172 Global.deallocate(self.mem, self.layout);
2173 }
2174 }
2175 }
2176
2177 unsafe {
2178 let ptr = Self::allocate_for_slice(len);
2179
2180 let mem = ptr as *mut _ as *mut u8;
2181 let layout = Layout::for_value_raw(ptr);
2182
2183 // Pointer to first element
2184 let elems = (&raw mut (*ptr).value) as *mut T;
2185
2186 let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems, layout, n_elems: 0 };
2187
2188 for (i, item) in iter.enumerate() {
2189 ptr::write(elems.add(i), item);
2190 guard.n_elems += 1;
2191 }
2192
2193 // All clear. Forget the guard so it doesn't free the new RcInner.
2194 mem::forget(guard);
2195
2196 Self::from_ptr(ptr)
2197 }
2198 }
2199}
2200
2201impl<T, A: Allocator> Rc<[T], A> {
2202 /// Allocates an `RcInner<[T]>` with the given length.
2203 #[inline]
2204 #[cfg(not(no_global_oom_handling))]
2205 unsafe fn allocate_for_slice_in(len: usize, alloc: &A) -> *mut RcInner<[T]> {
2206 unsafe {
2207 Rc::<[T]>::allocate_for_layout(
2208 Layout::array::<T>(len).unwrap(),
2209 |layout| alloc.allocate(layout),
2210 |mem| ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len) as *mut RcInner<[T]>,
2211 )
2212 }
2213 }
2214}
2215
2216#[cfg(not(no_global_oom_handling))]
2217/// Specialization trait used for `From<&[T]>`.
2218trait RcFromSlice<T> {
2219 fn from_slice(slice: &[T]) -> Self;
2220}
2221
2222#[cfg(not(no_global_oom_handling))]
2223impl<T: Clone> RcFromSlice<T> for Rc<[T]> {
2224 #[inline]
2225 default fn from_slice(v: &[T]) -> Self {
2226 unsafe { Self::from_iter_exact(v.iter().cloned(), v.len()) }
2227 }
2228}
2229
2230#[cfg(not(no_global_oom_handling))]
2231impl<T: Copy> RcFromSlice<T> for Rc<[T]> {
2232 #[inline]
2233 fn from_slice(v: &[T]) -> Self {
2234 unsafe { Rc::copy_from_slice(v) }
2235 }
2236}
2237
2238#[stable(feature = "rust1", since = "1.0.0")]
2239impl<T: ?Sized, A: Allocator> Deref for Rc<T, A> {
2240 type Target = T;
2241
2242 #[inline(always)]
2243 fn deref(&self) -> &T {
2244 &self.inner().value
2245 }
2246}
2247
2248#[unstable(feature = "pin_coerce_unsized_trait", issue = "123430")]
2249unsafe impl<T: ?Sized, A: Allocator> PinCoerceUnsized for Rc<T, A> {}
2250
2251//#[unstable(feature = "unique_rc_arc", issue = "112566")]
2252#[unstable(feature = "pin_coerce_unsized_trait", issue = "123430")]
2253unsafe impl<T: ?Sized, A: Allocator> PinCoerceUnsized for UniqueRc<T, A> {}
2254
2255#[unstable(feature = "pin_coerce_unsized_trait", issue = "123430")]
2256unsafe impl<T: ?Sized, A: Allocator> PinCoerceUnsized for Weak<T, A> {}
2257
2258#[unstable(feature = "deref_pure_trait", issue = "87121")]
2259unsafe impl<T: ?Sized, A: Allocator> DerefPure for Rc<T, A> {}
2260
2261//#[unstable(feature = "unique_rc_arc", issue = "112566")]
2262#[unstable(feature = "deref_pure_trait", issue = "87121")]
2263unsafe impl<T: ?Sized, A: Allocator> DerefPure for UniqueRc<T, A> {}
2264
2265#[unstable(feature = "legacy_receiver_trait", issue = "none")]
2266impl<T: ?Sized> LegacyReceiver for Rc<T> {}
2267
2268#[stable(feature = "rust1", since = "1.0.0")]
2269unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Rc<T, A> {
2270 /// Drops the `Rc`.
2271 ///
2272 /// This will decrement the strong reference count. If the strong reference
2273 /// count reaches zero then the only other references (if any) are
2274 /// [`Weak`], so we `drop` the inner value.
2275 ///
2276 /// # Examples
2277 ///
2278 /// ```
2279 /// use std::rc::Rc;
2280 ///
2281 /// struct Foo;
2282 ///
2283 /// impl Drop for Foo {
2284 /// fn drop(&mut self) {
2285 /// println!("dropped!");
2286 /// }
2287 /// }
2288 ///
2289 /// let foo = Rc::new(Foo);
2290 /// let foo2 = Rc::clone(&foo);
2291 ///
2292 /// drop(foo); // Doesn't print anything
2293 /// drop(foo2); // Prints "dropped!"
2294 /// ```
2295 #[inline]
2296 fn drop(&mut self) {
2297 unsafe {
2298 self.inner().dec_strong();
2299 if self.inner().strong() == 0 {
2300 self.drop_slow();
2301 }
2302 }
2303 }
2304}
2305
2306#[stable(feature = "rust1", since = "1.0.0")]
2307impl<T: ?Sized, A: Allocator + Clone> Clone for Rc<T, A> {
2308 /// Makes a clone of the `Rc` pointer.
2309 ///
2310 /// This creates another pointer to the same allocation, increasing the
2311 /// strong reference count.
2312 ///
2313 /// # Examples
2314 ///
2315 /// ```
2316 /// use std::rc::Rc;
2317 ///
2318 /// let five = Rc::new(5);
2319 ///
2320 /// let _ = Rc::clone(&five);
2321 /// ```
2322 #[inline]
2323 fn clone(&self) -> Self {
2324 unsafe {
2325 self.inner().inc_strong();
2326 Self::from_inner_in(self.ptr, self.alloc.clone())
2327 }
2328 }
2329}
2330
2331#[unstable(feature = "ergonomic_clones", issue = "132290")]
2332impl<T: ?Sized, A: Allocator + Clone> UseCloned for Rc<T, A> {}
2333
2334#[cfg(not(no_global_oom_handling))]
2335#[stable(feature = "rust1", since = "1.0.0")]
2336impl<T: Default> Default for Rc<T> {
2337 /// Creates a new `Rc<T>`, with the `Default` value for `T`.
2338 ///
2339 /// # Examples
2340 ///
2341 /// ```
2342 /// use std::rc::Rc;
2343 ///
2344 /// let x: Rc<i32> = Default::default();
2345 /// assert_eq!(*x, 0);
2346 /// ```
2347 #[inline]
2348 fn default() -> Rc<T> {
2349 unsafe {
2350 Self::from_inner(
2351 Box::leak(Box::write(
2352 Box::new_uninit(),
2353 RcInner { strong: Cell::new(1), weak: Cell::new(1), value: T::default() },
2354 ))
2355 .into(),
2356 )
2357 }
2358 }
2359}
2360
2361#[cfg(not(no_global_oom_handling))]
2362#[stable(feature = "more_rc_default_impls", since = "1.80.0")]
2363impl Default for Rc<str> {
2364 /// Creates an empty `str` inside an `Rc`.
2365 ///
2366 /// This may or may not share an allocation with other `Rc`s on the same thread.
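    ///
    /// # Examples
    ///
    /// A minimal usage sketch:
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let empty: Rc<str> = Default::default();
    /// assert_eq!(&*empty, "");
    /// ```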
2367 #[inline]
2368 fn default() -> Self {
2369 let rc = Rc::<[u8]>::default();
2370 // `[u8]` has the same layout as `str`.
2371 unsafe { Rc::from_raw(Rc::into_raw(rc) as *const str) }
2372 }
2373}
2374
2375#[cfg(not(no_global_oom_handling))]
2376#[stable(feature = "more_rc_default_impls", since = "1.80.0")]
2377impl<T> Default for Rc<[T]> {
2378 /// Creates an empty `[T]` inside an `Rc`.
2379 ///
2380 /// This may or may not share an allocation with other `Rc`s on the same thread.
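    ///
    /// # Examples
    ///
    /// A minimal usage sketch:
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let empty: Rc<[i32]> = Default::default();
    /// assert!(empty.is_empty());
    /// ```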
2381 #[inline]
2382 fn default() -> Self {
2383 let arr: [T; 0] = [];
2384 Rc::from(arr)
2385 }
2386}
2387
2388#[stable(feature = "rust1", since = "1.0.0")]
2389trait RcEqIdent<T: ?Sized + PartialEq, A: Allocator> {
2390 fn eq(&self, other: &Rc<T, A>) -> bool;
2391 fn ne(&self, other: &Rc<T, A>) -> bool;
2392}
2393
2394#[stable(feature = "rust1", since = "1.0.0")]
2395impl<T: ?Sized + PartialEq, A: Allocator> RcEqIdent<T, A> for Rc<T, A> {
2396 #[inline]
2397 default fn eq(&self, other: &Rc<T, A>) -> bool {
2398 **self == **other
2399 }
2400
2401 #[inline]
2402 default fn ne(&self, other: &Rc<T, A>) -> bool {
2403 **self != **other
2404 }
2405}
2406
2407// Hack to allow specializing on `Eq` even though `Eq` has a method.
2408#[rustc_unsafe_specialization_marker]
2409pub(crate) trait MarkerEq: PartialEq<Self> {}
2410
2411impl<T: Eq> MarkerEq for T {}
2412
2413/// We're doing this specialization here, and not as a more general optimization on `&T`, because it
2414/// would otherwise add a cost to all equality checks on refs. We assume that `Rc`s are used to
2415/// store large values that are slow to clone, but also heavy to check for equality, so this
2416/// cost pays off more easily. It's also more likely for two `Rc` clones to point to the same
2417/// value than for two `&T`s.
2418///
2419/// We can only do this when `T: Eq` as a `PartialEq` might be deliberately irreflexive.
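///
/// For example (an illustrative sketch, not part of the API contract): with
/// `T = f32`, which is `PartialEq` but not `Eq`, the pointer shortcut must not
/// apply, because `NAN != NAN` even within a single allocation:
///
/// ```
/// use std::rc::Rc;
///
/// let nan = Rc::new(f32::NAN);
/// // Same allocation, yet `nan != nan` must hold, so no pointer shortcut.
/// assert!(nan != Rc::clone(&nan));
/// ```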
2420#[stable(feature = "rust1", since = "1.0.0")]
2421impl<T: ?Sized + MarkerEq, A: Allocator> RcEqIdent<T, A> for Rc<T, A> {
2422 #[inline]
2423 fn eq(&self, other: &Rc<T, A>) -> bool {
2424 Rc::ptr_eq(self, other) || **self == **other
2425 }
2426
2427 #[inline]
2428 fn ne(&self, other: &Rc<T, A>) -> bool {
2429 !Rc::ptr_eq(self, other) && **self != **other
2430 }
2431}
2432
2433#[stable(feature = "rust1", since = "1.0.0")]
2434impl<T: ?Sized + PartialEq, A: Allocator> PartialEq for Rc<T, A> {
2435 /// Equality for two `Rc`s.
2436 ///
2437 /// Two `Rc`s are equal if their inner values are equal, even if they are
2438 /// stored in different allocations.
2439 ///
2440 /// If `T` also implements `Eq` (implying reflexivity of equality),
2441 /// two `Rc`s that point to the same allocation are
2442 /// always equal.
2443 ///
2444 /// # Examples
2445 ///
2446 /// ```
2447 /// use std::rc::Rc;
2448 ///
2449 /// let five = Rc::new(5);
2450 ///
2451 /// assert!(five == Rc::new(5));
2452 /// ```
2453 #[inline]
2454 fn eq(&self, other: &Rc<T, A>) -> bool {
2455 RcEqIdent::eq(self, other)
2456 }
2457
2458 /// Inequality for two `Rc`s.
2459 ///
2460 /// Two `Rc`s are not equal if their inner values are not equal.
2461 ///
2462 /// If `T` also implements `Eq` (implying reflexivity of equality),
2463 /// two `Rc`s that point to the same allocation are
2464 /// never unequal.
2465 ///
2466 /// # Examples
2467 ///
2468 /// ```
2469 /// use std::rc::Rc;
2470 ///
2471 /// let five = Rc::new(5);
2472 ///
2473 /// assert!(five != Rc::new(6));
2474 /// ```
2475 #[inline]
2476 fn ne(&self, other: &Rc<T, A>) -> bool {
2477 RcEqIdent::ne(self, other)
2478 }
2479}
2480
2481#[stable(feature = "rust1", since = "1.0.0")]
2482impl<T: ?Sized + Eq, A: Allocator> Eq for Rc<T, A> {}
2483
2484#[stable(feature = "rust1", since = "1.0.0")]
2485impl<T: ?Sized + PartialOrd, A: Allocator> PartialOrd for Rc<T, A> {
2486 /// Partial comparison for two `Rc`s.
2487 ///
2488 /// The two are compared by calling `partial_cmp()` on their inner values.
2489 ///
2490 /// # Examples
2491 ///
2492 /// ```
2493 /// use std::rc::Rc;
2494 /// use std::cmp::Ordering;
2495 ///
2496 /// let five = Rc::new(5);
2497 ///
2498 /// assert_eq!(Some(Ordering::Less), five.partial_cmp(&Rc::new(6)));
2499 /// ```
2500 #[inline(always)]
2501 fn partial_cmp(&self, other: &Rc<T, A>) -> Option<Ordering> {
2502 (**self).partial_cmp(&**other)
2503 }
2504
2505 /// Less-than comparison for two `Rc`s.
2506 ///
2507 /// The two are compared by calling `<` on their inner values.
2508 ///
2509 /// # Examples
2510 ///
2511 /// ```
2512 /// use std::rc::Rc;
2513 ///
2514 /// let five = Rc::new(5);
2515 ///
2516 /// assert!(five < Rc::new(6));
2517 /// ```
2518 #[inline(always)]
2519 fn lt(&self, other: &Rc<T, A>) -> bool {
2520 **self < **other
2521 }
2522
2523 /// 'Less than or equal to' comparison for two `Rc`s.
2524 ///
2525 /// The two are compared by calling `<=` on their inner values.
2526 ///
2527 /// # Examples
2528 ///
2529 /// ```
2530 /// use std::rc::Rc;
2531 ///
2532 /// let five = Rc::new(5);
2533 ///
2534 /// assert!(five <= Rc::new(5));
2535 /// ```
2536 #[inline(always)]
2537 fn le(&self, other: &Rc<T, A>) -> bool {
2538 **self <= **other
2539 }
2540
2541 /// Greater-than comparison for two `Rc`s.
2542 ///
2543 /// The two are compared by calling `>` on their inner values.
2544 ///
2545 /// # Examples
2546 ///
2547 /// ```
2548 /// use std::rc::Rc;
2549 ///
2550 /// let five = Rc::new(5);
2551 ///
2552 /// assert!(five > Rc::new(4));
2553 /// ```
2554 #[inline(always)]
2555 fn gt(&self, other: &Rc<T, A>) -> bool {
2556 **self > **other
2557 }
2558
2559 /// 'Greater than or equal to' comparison for two `Rc`s.
2560 ///
2561 /// The two are compared by calling `>=` on their inner values.
2562 ///
2563 /// # Examples
2564 ///
2565 /// ```
2566 /// use std::rc::Rc;
2567 ///
2568 /// let five = Rc::new(5);
2569 ///
2570 /// assert!(five >= Rc::new(5));
2571 /// ```
2572 #[inline(always)]
2573 fn ge(&self, other: &Rc<T, A>) -> bool {
2574 **self >= **other
2575 }
2576}
2577
2578#[stable(feature = "rust1", since = "1.0.0")]
2579impl<T: ?Sized + Ord, A: Allocator> Ord for Rc<T, A> {
2580 /// Comparison for two `Rc`s.
2581 ///
2582 /// The two are compared by calling `cmp()` on their inner values.
2583 ///
2584 /// # Examples
2585 ///
2586 /// ```
2587 /// use std::rc::Rc;
2588 /// use std::cmp::Ordering;
2589 ///
2590 /// let five = Rc::new(5);
2591 ///
2592 /// assert_eq!(Ordering::Less, five.cmp(&Rc::new(6)));
2593 /// ```
2594 #[inline]
2595 fn cmp(&self, other: &Rc<T, A>) -> Ordering {
2596 (**self).cmp(&**other)
2597 }
2598}
2599
2600#[stable(feature = "rust1", since = "1.0.0")]
2601impl<T: ?Sized + Hash, A: Allocator> Hash for Rc<T, A> {
2602 fn hash<H: Hasher>(&self, state: &mut H) {
2603 (**self).hash(state);
2604 }
2605}
2606
2607#[stable(feature = "rust1", since = "1.0.0")]
2608impl<T: ?Sized + fmt::Display, A: Allocator> fmt::Display for Rc<T, A> {
2609 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
2610 fmt::Display::fmt(&**self, f)
2611 }
2612}
2613
2614#[stable(feature = "rust1", since = "1.0.0")]
2615impl<T: ?Sized + fmt::Debug, A: Allocator> fmt::Debug for Rc<T, A> {
2616 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
2617 fmt::Debug::fmt(&**self, f)
2618 }
2619}
2620
2621#[stable(feature = "rust1", since = "1.0.0")]
2622impl<T: ?Sized, A: Allocator> fmt::Pointer for Rc<T, A> {
2623 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
2624 fmt::Pointer::fmt(&(&raw const **self), f)
2625 }
2626}
2627
2628#[cfg(not(no_global_oom_handling))]
2629#[stable(feature = "from_for_ptrs", since = "1.6.0")]
2630impl<T> From<T> for Rc<T> {
2631 /// Converts a generic type `T` into an `Rc<T>`.
2632 ///
2633 /// The conversion allocates on the heap and moves `t`
2634 /// from the stack into it.
2635 ///
2636 /// # Example
2637 /// ```rust
2638 /// # use std::rc::Rc;
2639 /// let x = 5;
2640 /// let rc = Rc::new(5);
2641 ///
2642 /// assert_eq!(Rc::from(x), rc);
2643 /// ```
2644 fn from(t: T) -> Self {
2645 Rc::new(t)
2646 }
2647}
2648
2649#[cfg(not(no_global_oom_handling))]
2650#[stable(feature = "shared_from_array", since = "1.74.0")]
2651impl<T, const N: usize> From<[T; N]> for Rc<[T]> {
2652 /// Converts a [`[T; N]`](prim@array) into an `Rc<[T]>`.
2653 ///
2654 /// The conversion moves the array into a newly allocated `Rc`.
2655 ///
2656 /// # Example
2657 ///
2658 /// ```
2659 /// # use std::rc::Rc;
2660 /// let original: [i32; 3] = [1, 2, 3];
2661 /// let shared: Rc<[i32]> = Rc::from(original);
2662 /// assert_eq!(&[1, 2, 3], &shared[..]);
2663 /// ```
2664 #[inline]
2665 fn from(v: [T; N]) -> Rc<[T]> {
2666 Rc::<[T; N]>::from(v)
2667 }
2668}
2669
2670#[cfg(not(no_global_oom_handling))]
2671#[stable(feature = "shared_from_slice", since = "1.21.0")]
2672impl<T: Clone> From<&[T]> for Rc<[T]> {
2673 /// Allocates a reference-counted slice and fills it by cloning `v`'s items.
2674 ///
2675 /// # Example
2676 ///
2677 /// ```
2678 /// # use std::rc::Rc;
2679 /// let original: &[i32] = &[1, 2, 3];
2680 /// let shared: Rc<[i32]> = Rc::from(original);
2681 /// assert_eq!(&[1, 2, 3], &shared[..]);
2682 /// ```
2683 #[inline]
2684 fn from(v: &[T]) -> Rc<[T]> {
2685 <Self as RcFromSlice<T>>::from_slice(v)
2686 }
2687}
2688
2689#[cfg(not(no_global_oom_handling))]
2690#[stable(feature = "shared_from_mut_slice", since = "1.84.0")]
2691impl<T: Clone> From<&mut [T]> for Rc<[T]> {
2692 /// Allocates a reference-counted slice and fills it by cloning `v`'s items.
2693 ///
2694 /// # Example
2695 ///
2696 /// ```
2697 /// # use std::rc::Rc;
2698 /// let mut original = [1, 2, 3];
2699 /// let original: &mut [i32] = &mut original;
2700 /// let shared: Rc<[i32]> = Rc::from(original);
2701 /// assert_eq!(&[1, 2, 3], &shared[..]);
2702 /// ```
2703 #[inline]
2704 fn from(v: &mut [T]) -> Rc<[T]> {
2705 Rc::from(&*v)
2706 }
2707}
2708
2709#[cfg(not(no_global_oom_handling))]
2710#[stable(feature = "shared_from_slice", since = "1.21.0")]
2711impl From<&str> for Rc<str> {
2712 /// Allocates a reference-counted string slice and copies `v` into it.
2713 ///
2714 /// # Example
2715 ///
2716 /// ```
2717 /// # use std::rc::Rc;
2718 /// let shared: Rc<str> = Rc::from("statue");
2719 /// assert_eq!("statue", &shared[..]);
2720 /// ```
2721 #[inline]
2722 fn from(v: &str) -> Rc<str> {
2723 let rc = Rc::<[u8]>::from(v.as_bytes());
2724 unsafe { Rc::from_raw(Rc::into_raw(rc) as *const str) }
2725 }
2726}
2727
2728#[cfg(not(no_global_oom_handling))]
2729#[stable(feature = "shared_from_mut_slice", since = "1.84.0")]
2730impl From<&mut str> for Rc<str> {
2731 /// Allocates a reference-counted string slice and copies `v` into it.
2732 ///
2733 /// # Example
2734 ///
2735 /// ```
2736 /// # use std::rc::Rc;
2737 /// let mut original = String::from("statue");
2738 /// let original: &mut str = &mut original;
2739 /// let shared: Rc<str> = Rc::from(original);
2740 /// assert_eq!("statue", &shared[..]);
2741 /// ```
2742 #[inline]
2743 fn from(v: &mut str) -> Rc<str> {
2744 Rc::from(&*v)
2745 }
2746}
2747
2748#[cfg(not(no_global_oom_handling))]
2749#[stable(feature = "shared_from_slice", since = "1.21.0")]
2750impl From<String> for Rc<str> {
2751 /// Allocates a reference-counted string slice and copies `v` into it.
2752 ///
2753 /// # Example
2754 ///
2755 /// ```
2756 /// # use std::rc::Rc;
2757 /// let original: String = "statue".to_owned();
2758 /// let shared: Rc<str> = Rc::from(original);
2759 /// assert_eq!("statue", &shared[..]);
2760 /// ```
2761 #[inline]
2762 fn from(v: String) -> Rc<str> {
2763 Rc::from(&v[..])
2764 }
2765}
2766
2767#[cfg(not(no_global_oom_handling))]
2768#[stable(feature = "shared_from_slice", since = "1.21.0")]
2769impl<T: ?Sized, A: Allocator> From<Box<T, A>> for Rc<T, A> {
2770 /// Moves a boxed object to a new, reference-counted allocation.
2771 ///
2772 /// # Example
2773 ///
2774 /// ```
2775 /// # use std::rc::Rc;
2776 /// let original: Box<i32> = Box::new(1);
2777 /// let shared: Rc<i32> = Rc::from(original);
2778 /// assert_eq!(1, *shared);
2779 /// ```
2780 #[inline]
2781 fn from(v: Box<T, A>) -> Rc<T, A> {
2782 Rc::from_box_in(v)
2783 }
2784}
2785
2786#[cfg(not(no_global_oom_handling))]
2787#[stable(feature = "shared_from_slice", since = "1.21.0")]
2788impl<T, A: Allocator> From<Vec<T, A>> for Rc<[T], A> {
2789 /// Allocates a reference-counted slice and moves `v`'s items into it.
2790 ///
2791 /// # Example
2792 ///
2793 /// ```
2794 /// # use std::rc::Rc;
2795 /// let unique: Vec<i32> = vec![1, 2, 3];
2796 /// let shared: Rc<[i32]> = Rc::from(unique);
2797 /// assert_eq!(&[1, 2, 3], &shared[..]);
2798 /// ```
2799 #[inline]
2800 fn from(v: Vec<T, A>) -> Rc<[T], A> {
2801 unsafe {
2802 let (vec_ptr, len, cap, alloc) = v.into_raw_parts_with_alloc();
2803
2804 let rc_ptr = Self::allocate_for_slice_in(len, &alloc);
2805 ptr::copy_nonoverlapping(vec_ptr, (&raw mut (*rc_ptr).value) as *mut T, len);
2806
2807 // Create a `Vec<T, &A>` with length 0, to deallocate the buffer
2808 // without dropping its contents or the allocator
2809 let _ = Vec::from_raw_parts_in(vec_ptr, 0, cap, &alloc);
2810
2811 Self::from_ptr_in(rc_ptr, alloc)
2812 }
2813 }
2814}
2815
2816#[stable(feature = "shared_from_cow", since = "1.45.0")]
2817impl<'a, B> From<Cow<'a, B>> for Rc<B>
2818where
2819 B: ToOwned + ?Sized,
2820 Rc<B>: From<&'a B> + From<B::Owned>,
2821{
2822 /// Creates a reference-counted pointer from a clone-on-write pointer by
2823 /// copying its content.
2824 ///
2825 /// # Example
2826 ///
2827 /// ```rust
2828 /// # use std::rc::Rc;
2829 /// # use std::borrow::Cow;
2830 /// let cow: Cow<'_, str> = Cow::Borrowed("eggplant");
2831 /// let shared: Rc<str> = Rc::from(cow);
2832 /// assert_eq!("eggplant", &shared[..]);
2833 /// ```
2834 #[inline]
2835 fn from(cow: Cow<'a, B>) -> Rc<B> {
2836 match cow {
2837 Cow::Borrowed(s) => Rc::from(s),
2838 Cow::Owned(s) => Rc::from(s),
2839 }
2840 }
2841}
2842
2843#[stable(feature = "shared_from_str", since = "1.62.0")]
2844impl From<Rc<str>> for Rc<[u8]> {
2845 /// Converts a reference-counted string slice into a byte slice.
2846 ///
2847 /// # Example
2848 ///
2849 /// ```
2850 /// # use std::rc::Rc;
2851 /// let string: Rc<str> = Rc::from("eggplant");
2852 /// let bytes: Rc<[u8]> = Rc::from(string);
2853 /// assert_eq!("eggplant".as_bytes(), bytes.as_ref());
2854 /// ```
2855 #[inline]
2856 fn from(rc: Rc<str>) -> Self {
2857 // SAFETY: `str` has the same layout as `[u8]`.
2858 unsafe { Rc::from_raw(Rc::into_raw(rc) as *const [u8]) }
2859 }
2860}
2861
2862#[stable(feature = "boxed_slice_try_from", since = "1.43.0")]
2863impl<T, A: Allocator, const N: usize> TryFrom<Rc<[T], A>> for Rc<[T; N], A> {
2864 type Error = Rc<[T], A>;
2865
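    /// Attempts to convert a reference-counted slice into a reference-counted
    /// array of length `N`, returning the input unchanged if the lengths differ.
    ///
    /// # Examples
    ///
    /// A minimal sketch of both outcomes:
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let slice: Rc<[i32]> = Rc::from([1, 2, 3]);
    /// let array: Rc<[i32; 3]> = slice.try_into().unwrap();
    /// assert_eq!(*array, [1, 2, 3]);
    ///
    /// let slice: Rc<[i32]> = Rc::from([1, 2, 3, 4]);
    /// let result: Result<Rc<[i32; 3]>, _> = slice.try_into();
    /// assert!(result.is_err());
    /// ```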
2866 fn try_from(boxed_slice: Rc<[T], A>) -> Result<Self, Self::Error> {
2867 if boxed_slice.len() == N {
2868 let (ptr, alloc) = Rc::into_inner_with_allocator(boxed_slice);
2869 Ok(unsafe { Rc::from_inner_in(ptr.cast(), alloc) })
2870 } else {
2871 Err(boxed_slice)
2872 }
2873 }
2874}
2875
2876#[cfg(not(no_global_oom_handling))]
2877#[stable(feature = "shared_from_iter", since = "1.37.0")]
2878impl<T> FromIterator<T> for Rc<[T]> {
2879 /// Takes each element in the `Iterator` and collects it into an `Rc<[T]>`.
2880 ///
2881 /// # Performance characteristics
2882 ///
2883 /// ## The general case
2884 ///
2885 /// In the general case, collecting into `Rc<[T]>` is done by first
2886 /// collecting into a `Vec<T>`. That is, when writing the following:
2887 ///
2888 /// ```rust
2889 /// # use std::rc::Rc;
2890 /// let evens: Rc<[u8]> = (0..10).filter(|&x| x % 2 == 0).collect();
2891 /// # assert_eq!(&*evens, &[0, 2, 4, 6, 8]);
2892 /// ```
2893 ///
2894 /// this behaves as if we wrote:
2895 ///
2896 /// ```rust
2897 /// # use std::rc::Rc;
2898 /// let evens: Rc<[u8]> = (0..10).filter(|&x| x % 2 == 0)
2899 /// .collect::<Vec<_>>() // The first set of allocations happens here.
2900 /// .into(); // A second allocation for `Rc<[T]>` happens here.
2901 /// # assert_eq!(&*evens, &[0, 2, 4, 6, 8]);
2902 /// ```
2903 ///
2904 /// This will allocate as many times as needed for constructing the `Vec<T>`
2905 /// and then it will allocate once for turning the `Vec<T>` into the `Rc<[T]>`.
2906 ///
2907 /// ## Iterators of known length
2908 ///
2909 /// When your `Iterator` implements `TrustedLen` and is of an exact size,
2910 /// a single allocation will be made for the `Rc<[T]>`. For example:
2911 ///
2912 /// ```rust
2913 /// # use std::rc::Rc;
2914 /// let evens: Rc<[u8]> = (0..10).collect(); // Just a single allocation happens here.
2915 /// # assert_eq!(&*evens, &*(0..10).collect::<Vec<_>>());
2916 /// ```
2917 fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
2918 ToRcSlice::to_rc_slice(iter.into_iter())
2919 }
2920}
2921
2922/// Specialization trait used for collecting into `Rc<[T]>`.
2923#[cfg(not(no_global_oom_handling))]
2924trait ToRcSlice<T>: Iterator<Item = T> + Sized {
2925 fn to_rc_slice(self) -> Rc<[T]>;
2926}
2927
2928#[cfg(not(no_global_oom_handling))]
2929impl<T, I: Iterator<Item = T>> ToRcSlice<T> for I {
2930 default fn to_rc_slice(self) -> Rc<[T]> {
2931 self.collect::<Vec<T>>().into()
2932 }
2933}
2934
2935#[cfg(not(no_global_oom_handling))]
2936impl<T, I: iter::TrustedLen<Item = T>> ToRcSlice<T> for I {
2937 fn to_rc_slice(self) -> Rc<[T]> {
2938 // This is the case for a `TrustedLen` iterator.
2939 let (low, high) = self.size_hint();
2940 if let Some(high) = high {
2941 debug_assert_eq!(
2942 low,
2943 high,
2944 "TrustedLen iterator's size hint is not exact: {:?}",
2945 (low, high)
2946 );
2947
2948 unsafe {
2949                // SAFETY: `TrustedLen` guarantees the size hint is exact, so `low` is the true length.
2950 Rc::from_iter_exact(self, low)
2951 }
2952 } else {
2953 // TrustedLen contract guarantees that `upper_bound == None` implies an iterator
2954 // length exceeding `usize::MAX`.
2955 // The default implementation would collect into a vec which would panic.
2956 // Thus we panic here immediately without invoking `Vec` code.
2957 panic!("capacity overflow");
2958 }
2959 }
2960}
2961
2962/// `Weak` is a version of [`Rc`] that holds a non-owning reference to the
2963/// managed allocation.
2964///
2965/// The allocation is accessed by calling [`upgrade`] on the `Weak`
2966/// pointer, which returns an <code>[Option]<[Rc]\<T>></code>.
2967///
2968/// Since a `Weak` reference does not count towards ownership, it will not
2969/// prevent the value stored in the allocation from being dropped, and `Weak` itself makes no
2970/// guarantees about the value still being present. Thus it may return [`None`]
2971/// when [`upgrade`]d. Note however that a `Weak` reference *does* prevent the allocation
2972/// itself (the backing store) from being deallocated.
2973///
2974/// A `Weak` pointer is useful for keeping a temporary reference to the allocation
2975/// managed by [`Rc`] without preventing its inner value from being dropped. It is also used to
2976/// prevent circular references between [`Rc`] pointers, since mutual owning references
2977/// would never allow either [`Rc`] to be dropped. For example, a tree could
2978/// have strong [`Rc`] pointers from parent nodes to children, and `Weak`
2979/// pointers from children back to their parents.
2980///
2981/// The typical way to obtain a `Weak` pointer is to call [`Rc::downgrade`].
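///
/// A short lifecycle sketch:
///
/// ```
/// use std::rc::Rc;
///
/// let strong = Rc::new("hello".to_owned());
/// let weak = Rc::downgrade(&strong);
/// assert!(weak.upgrade().is_some());
///
/// drop(strong); // The inner value is dropped here.
/// assert!(weak.upgrade().is_none());
/// ```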
2982///
2983/// [`upgrade`]: Weak::upgrade
2984#[stable(feature = "rc_weak", since = "1.4.0")]
2985#[rustc_diagnostic_item = "RcWeak"]
2986pub struct Weak<
2987 T: ?Sized,
2988 #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
2989> {
2990 // This is a `NonNull` to allow optimizing the size of this type in enums,
2991 // but it is not necessarily a valid pointer.
2992 // `Weak::new` sets this to `usize::MAX` so that it doesn’t need
2993 // to allocate space on the heap. That's not a value a real pointer
2994 // will ever have because RcInner has alignment at least 2.
2995 // This is only possible when `T: Sized`; unsized `T` never dangle.
2996 ptr: NonNull<RcInner<T>>,
2997 alloc: A,
2998}
2999
3000#[stable(feature = "rc_weak", since = "1.4.0")]
3001impl<T: ?Sized, A: Allocator> !Send for Weak<T, A> {}
3002#[stable(feature = "rc_weak", since = "1.4.0")]
3003impl<T: ?Sized, A: Allocator> !Sync for Weak<T, A> {}
3004
3005#[unstable(feature = "coerce_unsized", issue = "18598")]
3006impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<Weak<U, A>> for Weak<T, A> {}
3007
3008#[unstable(feature = "dispatch_from_dyn", issue = "none")]
3009impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Weak<U>> for Weak<T> {}
3010
3011impl<T> Weak<T> {
3012 /// Constructs a new `Weak<T>`, without allocating any memory.
3013 /// Calling [`upgrade`] on the return value always gives [`None`].
3014 ///
3015 /// [`upgrade`]: Weak::upgrade
3016 ///
3017 /// # Examples
3018 ///
3019 /// ```
3020 /// use std::rc::Weak;
3021 ///
3022 /// let empty: Weak<i64> = Weak::new();
3023 /// assert!(empty.upgrade().is_none());
3024 /// ```
3025 #[inline]
3026 #[stable(feature = "downgraded_weak", since = "1.10.0")]
3027 #[rustc_const_stable(feature = "const_weak_new", since = "1.73.0")]
3028 #[must_use]
3029 pub const fn new() -> Weak<T> {
3030 Weak { ptr: NonNull::without_provenance(NonZeroUsize::MAX), alloc: Global }
3031 }
3032}
3033
3034impl<T, A: Allocator> Weak<T, A> {
3035 /// Constructs a new `Weak<T>`, without allocating any memory, technically in the provided
3036 /// allocator.
3037 /// Calling [`upgrade`] on the return value always gives [`None`].
3038 ///
3039 /// [`upgrade`]: Weak::upgrade
3040 ///
3041 /// # Examples
3042 ///
3043 /// ```
3044 /// #![feature(allocator_api)]
3045 ///
3046 /// use std::alloc::System;
3047 /// use std::rc::Weak;
3048 ///
3049 /// let empty: Weak<i64, _> = Weak::new_in(System);
3050 /// assert!(empty.upgrade().is_none());
3051 /// ```
3049 #[inline]
3050 #[unstable(feature = "allocator_api", issue = "32838")]
3051 pub fn new_in(alloc: A) -> Weak<T, A> {
3052 Weak { ptr: NonNull::without_provenance(NonZeroUsize::MAX), alloc }
3053 }
3054}
3055
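/// Returns `true` if `ptr` is the `usize::MAX` sentinel address used by
/// `Weak::new` (see the comment on `Weak::ptr`), i.e. it does not point at a
/// real `RcInner` allocation.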
3056pub(crate) fn is_dangling<T: ?Sized>(ptr: *const T) -> bool {
3057 (ptr.cast::<()>()).addr() == usize::MAX
3058}
3059
3060/// Helper type to allow accessing the reference counts without
3061/// making any assertions about the data field.
3062struct WeakInner<'a> {
3063 weak: &'a Cell<usize>,
3064 strong: &'a Cell<usize>,
3065}
3066
3067impl<T: ?Sized> Weak<T> {
3068 /// Converts a raw pointer previously created by [`into_raw`] back into `Weak<T>`.
3069 ///
3070 /// This can be used to safely get a strong reference (by calling [`upgrade`]
3071 /// later) or to deallocate the weak count by dropping the `Weak<T>`.
3072 ///
3073 /// It takes ownership of one weak reference (with the exception of pointers created by [`new`],
3074 /// as these don't own anything; the method still works on them).
3075 ///
3076 /// # Safety
3077 ///
3078 /// The pointer must have originated from the [`into_raw`] and must still own its potential
3079 /// weak reference, and `ptr` must point to a block of memory allocated by the global allocator.
3080 ///
3081 /// It is allowed for the strong count to be 0 at the time of calling this. Nevertheless, this
3082 /// takes ownership of one weak reference currently represented as a raw pointer (the weak
3083 /// count is not modified by this operation) and therefore it must be paired with a previous
3084 /// call to [`into_raw`].
3085 ///
3086 /// # Examples
3087 ///
3088 /// ```
3089 /// use std::rc::{Rc, Weak};
3090 ///
3091 /// let strong = Rc::new("hello".to_owned());
3092 ///
3093 /// let raw_1 = Rc::downgrade(&strong).into_raw();
3094 /// let raw_2 = Rc::downgrade(&strong).into_raw();
3095 ///
3096 /// assert_eq!(2, Rc::weak_count(&strong));
3097 ///
3098 /// assert_eq!("hello", &*unsafe { Weak::from_raw(raw_1) }.upgrade().unwrap());
3099 /// assert_eq!(1, Rc::weak_count(&strong));
3100 ///
3101 /// drop(strong);
3102 ///
3103 /// // Decrement the last weak count.
3104 /// assert!(unsafe { Weak::from_raw(raw_2) }.upgrade().is_none());
3105 /// ```
3106 ///
3107 /// [`into_raw`]: Weak::into_raw
3108 /// [`upgrade`]: Weak::upgrade
3109 /// [`new`]: Weak::new
3110 #[inline]
3111 #[stable(feature = "weak_into_raw", since = "1.45.0")]
3112 pub unsafe fn from_raw(ptr: *const T) -> Self {
3113 unsafe { Self::from_raw_in(ptr, Global) }
3114 }
3115}
3116
3117impl<T: ?Sized, A: Allocator> Weak<T, A> {
3118 /// Returns a reference to the underlying allocator.
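    ///
    /// # Examples
    ///
    /// A minimal sketch using the unstable `allocator_api` feature:
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::alloc::System;
    /// use std::rc::{Rc, Weak};
    ///
    /// let strong = Rc::new_in(5, System);
    /// let weak: Weak<i32, System> = Rc::downgrade(&strong);
    /// let _alloc: &System = weak.allocator();
    /// ```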
3119 #[inline]
3120 #[unstable(feature = "allocator_api", issue = "32838")]
3121 pub fn allocator(&self) -> &A {
3122 &self.alloc
3123 }
3124
3125 /// Returns a raw pointer to the object `T` pointed to by this `Weak<T>`.
3126 ///
3127 /// The pointer is valid only if there are some strong references. The pointer may be dangling,
3128 /// unaligned or even [`null`] otherwise.
3129 ///
3130 /// # Examples
3131 ///
3132 /// ```
3133 /// use std::rc::Rc;
3134 /// use std::ptr;
3135 ///
3136 /// let strong = Rc::new("hello".to_owned());
3137 /// let weak = Rc::downgrade(&strong);
3138 /// // Both point to the same object
3139 /// assert!(ptr::eq(&*strong, weak.as_ptr()));
3140 /// // The strong here keeps it alive, so we can still access the object.
3141 /// assert_eq!("hello", unsafe { &*weak.as_ptr() });
3142 ///
3143 /// drop(strong);
3144 /// // But not any more. We can do weak.as_ptr(), but accessing the pointer would lead to
3145 /// // undefined behavior.
3146 /// // assert_eq!("hello", unsafe { &*weak.as_ptr() });
3147 /// ```
3148 ///
3149 /// [`null`]: ptr::null
3150 #[must_use]
3151 #[stable(feature = "rc_as_ptr", since = "1.45.0")]
3152 pub fn as_ptr(&self) -> *const T {
3153 let ptr: *mut RcInner<T> = NonNull::as_ptr(self.ptr);
3154
3155 if is_dangling(ptr) {
3156 // If the pointer is dangling, we return the sentinel directly. This cannot be
3157 // a valid payload address, as the payload is at least as aligned as RcInner (usize).
3158 ptr as *const T
3159 } else {
3160 // SAFETY: if is_dangling returns false, then the pointer is dereferenceable.
3161 // The payload may be dropped at this point, and we have to maintain provenance,
3162 // so use raw pointer manipulation.
3163 unsafe { &raw mut (*ptr).value }
3164 }
3165 }
3166
3167 /// Consumes the `Weak<T>` and turns it into a raw pointer.
3168 ///
3169 /// This converts the weak pointer into a raw pointer, while still preserving the ownership of
3170 /// one weak reference (the weak count is not modified by this operation). It can be turned
3171 /// back into the `Weak<T>` with [`from_raw`].
3172 ///
3173 /// The same restrictions of accessing the target of the pointer as with
3174 /// [`as_ptr`] apply.
3175 ///
3176 /// # Examples
3177 ///
3178 /// ```
3179 /// use std::rc::{Rc, Weak};
3180 ///
3181 /// let strong = Rc::new("hello".to_owned());
3182 /// let weak = Rc::downgrade(&strong);
3183 /// let raw = weak.into_raw();
3184 ///
3185 /// assert_eq!(1, Rc::weak_count(&strong));
3186 /// assert_eq!("hello", unsafe { &*raw });
3187 ///
3188 /// drop(unsafe { Weak::from_raw(raw) });
3189 /// assert_eq!(0, Rc::weak_count(&strong));
3190 /// ```
3191 ///
3192 /// [`from_raw`]: Weak::from_raw
3193 /// [`as_ptr`]: Weak::as_ptr
3194 #[must_use = "losing the pointer will leak memory"]
3195 #[stable(feature = "weak_into_raw", since = "1.45.0")]
3196 pub fn into_raw(self) -> *const T {
3197 mem::ManuallyDrop::new(self).as_ptr()
3198 }
3199
3200 /// Consumes the `Weak<T>`, returning the wrapped pointer and allocator.
3201 ///
3202 /// This converts the weak pointer into a raw pointer, while still preserving the ownership of
3203 /// one weak reference (the weak count is not modified by this operation). It can be turned
3204 /// back into the `Weak<T>` with [`from_raw_in`].
3205 ///
3206 /// The same restrictions of accessing the target of the pointer as with
3207 /// [`as_ptr`] apply.
3208 ///
3209 /// # Examples
3210 ///
3211 /// ```
3212 /// #![feature(allocator_api)]
3213 /// use std::rc::{Rc, Weak};
3214 /// use std::alloc::System;
3215 ///
3216 /// let strong = Rc::new_in("hello".to_owned(), System);
3217 /// let weak = Rc::downgrade(&strong);
3218 /// let (raw, alloc) = weak.into_raw_with_allocator();
3219 ///
3220 /// assert_eq!(1, Rc::weak_count(&strong));
3221 /// assert_eq!("hello", unsafe { &*raw });
3222 ///
3223 /// drop(unsafe { Weak::from_raw_in(raw, alloc) });
3224 /// assert_eq!(0, Rc::weak_count(&strong));
3225 /// ```
3226 ///
3227 /// [`from_raw_in`]: Weak::from_raw_in
3228 /// [`as_ptr`]: Weak::as_ptr
3229 #[must_use = "losing the pointer will leak memory"]
3230 #[inline]
3231 #[unstable(feature = "allocator_api", issue = "32838")]
3232 pub fn into_raw_with_allocator(self) -> (*const T, A) {
3233 let this = mem::ManuallyDrop::new(self);
3234 let result = this.as_ptr();
3235 // Safety: `this` is ManuallyDrop so the allocator will not be double-dropped
3236 let alloc = unsafe { ptr::read(&this.alloc) };
3237 (result, alloc)
3238 }
3239
3240 /// Converts a raw pointer previously created by [`into_raw`] back into `Weak<T>`.
3241 ///
3242 /// This can be used to safely get a strong reference (by calling [`upgrade`]
3243 /// later) or to deallocate the weak count by dropping the `Weak<T>`.
3244 ///
3245 /// It takes ownership of one weak reference (with the exception of pointers created by [`new`],
3246 /// as these don't own anything; the method still works on them).
3247 ///
3248 /// # Safety
3249 ///
3250 /// The pointer must have originated from the [`into_raw`] and must still own its potential
3251 /// weak reference, and `ptr` must point to a block of memory allocated by `alloc`.
3252 ///
3253 /// It is allowed for the strong count to be 0 at the time of calling this. Nevertheless, this
3254 /// takes ownership of one weak reference currently represented as a raw pointer (the weak
3255 /// count is not modified by this operation) and therefore it must be paired with a previous
3256 /// call to [`into_raw`].
3257 ///
3258 /// # Examples
3259 ///
3260 /// ```
3261 /// use std::rc::{Rc, Weak};
3262 ///
3263 /// let strong = Rc::new("hello".to_owned());
3264 ///
3265 /// let raw_1 = Rc::downgrade(&strong).into_raw();
3266 /// let raw_2 = Rc::downgrade(&strong).into_raw();
3267 ///
3268 /// assert_eq!(2, Rc::weak_count(&strong));
3269 ///
3270 /// assert_eq!("hello", &*unsafe { Weak::from_raw(raw_1) }.upgrade().unwrap());
3271 /// assert_eq!(1, Rc::weak_count(&strong));
3272 ///
3273 /// drop(strong);
3274 ///
3275 /// // Decrement the last weak count.
3276 /// assert!(unsafe { Weak::from_raw(raw_2) }.upgrade().is_none());
3277 /// ```
3278 ///
3279 /// [`into_raw`]: Weak::into_raw
3280 /// [`upgrade`]: Weak::upgrade
3281 /// [`new`]: Weak::new
3282 #[inline]
3283 #[unstable(feature = "allocator_api", issue = "32838")]
3284 pub unsafe fn from_raw_in(ptr: *const T, alloc: A) -> Self {
3285 // See Weak::as_ptr for context on how the input pointer is derived.
3286
3287 let ptr = if is_dangling(ptr) {
3288 // This is a dangling Weak.
3289 ptr as *mut RcInner<T>
3290 } else {
3291 // Otherwise, we're guaranteed the pointer came from a nondangling Weak.
3292 // SAFETY: data_offset is safe to call, as ptr references a real (potentially dropped) T.
3293 let offset = unsafe { data_offset(ptr) };
3294 // Thus, we reverse the offset to get the whole RcInner.
3295 // SAFETY: the pointer originated from a Weak, so this offset is safe.
3296 unsafe { ptr.byte_sub(offset) as *mut RcInner<T> }
3297 };
3298
3299 // SAFETY: we now have recovered the original Weak pointer, so can create the Weak.
3300 Weak { ptr: unsafe { NonNull::new_unchecked(ptr) }, alloc }
3301 }
3302
3303 /// Attempts to upgrade the `Weak` pointer to an [`Rc`], delaying
3304 /// dropping of the inner value if successful.
3305 ///
3306 /// Returns [`None`] if the inner value has since been dropped.
3307 ///
3308 /// # Examples
3309 ///
3310 /// ```
3311 /// use std::rc::Rc;
3312 ///
3313 /// let five = Rc::new(5);
3314 ///
3315 /// let weak_five = Rc::downgrade(&five);
3316 ///
3317 /// let strong_five: Option<Rc<_>> = weak_five.upgrade();
3318 /// assert!(strong_five.is_some());
3319 ///
3320 /// // Destroy all strong pointers.
3321 /// drop(strong_five);
3322 /// drop(five);
3323 ///
3324 /// assert!(weak_five.upgrade().is_none());
3325 /// ```
3326 #[must_use = "this returns a new `Rc`, \
3327 without modifying the original weak pointer"]
3328 #[stable(feature = "rc_weak", since = "1.4.0")]
3329 pub fn upgrade(&self) -> Option<Rc<T, A>>
3330 where
3331 A: Clone,
3332 {
3333 let inner = self.inner()?;
3334
3335 if inner.strong() == 0 {
3336 None
3337 } else {
3338 unsafe {
3339 inner.inc_strong();
3340 Some(Rc::from_inner_in(self.ptr, self.alloc.clone()))
3341 }
3342 }
3343 }
3344
3345 /// Gets the number of strong (`Rc`) pointers pointing to this allocation.
3346 ///
3347 /// If `self` was created using [`Weak::new`], this will return 0.
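    ///
    /// # Examples
    ///
    /// A minimal sketch:
    ///
    /// ```
    /// use std::rc::{Rc, Weak};
    ///
    /// let empty: Weak<i32> = Weak::new();
    /// assert_eq!(0, empty.strong_count());
    ///
    /// let five = Rc::new(5);
    /// let weak_five = Rc::downgrade(&five);
    /// assert_eq!(1, weak_five.strong_count());
    ///
    /// drop(five);
    /// assert_eq!(0, weak_five.strong_count());
    /// ```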
3348 #[must_use]
3349 #[stable(feature = "weak_counts", since = "1.41.0")]
3350 pub fn strong_count(&self) -> usize {
3351 if let Some(inner) = self.inner() { inner.strong() } else { 0 }
3352 }
3353
3354 /// Gets the number of `Weak` pointers pointing to this allocation.
3355 ///
3356 /// If no strong pointers remain, this will return zero.
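    ///
    /// # Examples
    ///
    /// A minimal sketch:
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    /// let weak_five = Rc::downgrade(&five);
    /// assert_eq!(1, weak_five.weak_count());
    ///
    /// // Once no strong pointers remain, `weak_count` reports zero.
    /// drop(five);
    /// assert_eq!(0, weak_five.weak_count());
    /// ```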
3357 #[must_use]
3358 #[stable(feature = "weak_counts", since = "1.41.0")]
3359 pub fn weak_count(&self) -> usize {
3360 if let Some(inner) = self.inner() {
3361 if inner.strong() > 0 {
3362 inner.weak() - 1 // subtract the implicit weak ptr
3363 } else {
3364 0
3365 }
3366 } else {
3367 0
3368 }
3369 }
3370
/// Returns `None` when the pointer is dangling and there is no allocated `RcInner`
/// (i.e., when this `Weak` was created by `Weak::new`).
3373 #[inline]
3374 fn inner(&self) -> Option<WeakInner<'_>> {
3375 if is_dangling(self.ptr.as_ptr()) {
3376 None
3377 } else {
3378 // We are careful to *not* create a reference covering the "data" field, as
3379 // the field may be mutated concurrently (for example, if the last `Rc`
3380 // is dropped, the data field will be dropped in-place).
3381 Some(unsafe {
3382 let ptr = self.ptr.as_ptr();
3383 WeakInner { strong: &(*ptr).strong, weak: &(*ptr).weak }
3384 })
3385 }
3386 }
3387
/// Returns `true` if the two `Weak`s point to the same allocation (similar to
/// [`ptr::eq`]), or if both don't point to any allocation (because they were
/// created with `Weak::new()`). However, this function ignores the metadata of
/// `dyn Trait` pointers.
///
/// # Notes
///
/// Since this compares pointers, two `Weak`s created by `Weak::new()` will
/// compare equal to each other, even though they don't point to any allocation.
3396 ///
3397 /// # Examples
3398 ///
3399 /// ```
3400 /// use std::rc::Rc;
3401 ///
3402 /// let first_rc = Rc::new(5);
3403 /// let first = Rc::downgrade(&first_rc);
3404 /// let second = Rc::downgrade(&first_rc);
3405 ///
3406 /// assert!(first.ptr_eq(&second));
3407 ///
3408 /// let third_rc = Rc::new(5);
3409 /// let third = Rc::downgrade(&third_rc);
3410 ///
3411 /// assert!(!first.ptr_eq(&third));
3412 /// ```
3413 ///
3414 /// Comparing `Weak::new`.
3415 ///
3416 /// ```
3417 /// use std::rc::{Rc, Weak};
3418 ///
3419 /// let first = Weak::new();
3420 /// let second = Weak::new();
3421 /// assert!(first.ptr_eq(&second));
3422 ///
3423 /// let third_rc = Rc::new(());
3424 /// let third = Rc::downgrade(&third_rc);
3425 /// assert!(!first.ptr_eq(&third));
3426 /// ```
3427 #[inline]
3428 #[must_use]
3429 #[stable(feature = "weak_ptr_eq", since = "1.39.0")]
3430 pub fn ptr_eq(&self, other: &Self) -> bool {
3431 ptr::addr_eq(self.ptr.as_ptr(), other.ptr.as_ptr())
3432 }
3433}
3434
3435#[stable(feature = "rc_weak", since = "1.4.0")]
3436unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Weak<T, A> {
3437 /// Drops the `Weak` pointer.
3438 ///
3439 /// # Examples
3440 ///
3441 /// ```
3442 /// use std::rc::{Rc, Weak};
3443 ///
3444 /// struct Foo;
3445 ///
3446 /// impl Drop for Foo {
3447 /// fn drop(&mut self) {
3448 /// println!("dropped!");
3449 /// }
3450 /// }
3451 ///
3452 /// let foo = Rc::new(Foo);
3453 /// let weak_foo = Rc::downgrade(&foo);
3454 /// let other_weak_foo = Weak::clone(&weak_foo);
3455 ///
3456 /// drop(weak_foo); // Doesn't print anything
3457 /// drop(foo); // Prints "dropped!"
3458 ///
3459 /// assert!(other_weak_foo.upgrade().is_none());
3460 /// ```
3461 fn drop(&mut self) {
3462 let inner = if let Some(inner) = self.inner() { inner } else { return };
3463
3464 inner.dec_weak();
3465 // the weak count starts at 1, and will only go to zero if all
3466 // the strong pointers have disappeared.
3467 if inner.weak() == 0 {
3468 unsafe {
3469 self.alloc.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr()));
3470 }
3471 }
3472 }
3473}
3474
3475#[stable(feature = "rc_weak", since = "1.4.0")]
3476impl<T: ?Sized, A: Allocator + Clone> Clone for Weak<T, A> {
3477 /// Makes a clone of the `Weak` pointer that points to the same allocation.
3478 ///
3479 /// # Examples
3480 ///
3481 /// ```
3482 /// use std::rc::{Rc, Weak};
3483 ///
3484 /// let weak_five = Rc::downgrade(&Rc::new(5));
3485 ///
3486 /// let _ = Weak::clone(&weak_five);
3487 /// ```
3488 #[inline]
3489 fn clone(&self) -> Weak<T, A> {
3490 if let Some(inner) = self.inner() {
3491 inner.inc_weak()
3492 }
3493 Weak { ptr: self.ptr, alloc: self.alloc.clone() }
3494 }
3495}
3496
3497#[unstable(feature = "ergonomic_clones", issue = "132290")]
3498impl<T: ?Sized, A: Allocator + Clone> UseCloned for Weak<T, A> {}
3499
3500#[stable(feature = "rc_weak", since = "1.4.0")]
3501impl<T: ?Sized, A: Allocator> fmt::Debug for Weak<T, A> {
3502 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3503 write!(f, "(Weak)")
3504 }
3505}
3506
3507#[stable(feature = "downgraded_weak", since = "1.10.0")]
3508impl<T> Default for Weak<T> {
3509 /// Constructs a new `Weak<T>`, without allocating any memory.
3510 /// Calling [`upgrade`] on the return value always gives [`None`].
3511 ///
3512 /// [`upgrade`]: Weak::upgrade
3513 ///
3514 /// # Examples
3515 ///
3516 /// ```
3517 /// use std::rc::Weak;
3518 ///
3519 /// let empty: Weak<i64> = Default::default();
3520 /// assert!(empty.upgrade().is_none());
3521 /// ```
3522 fn default() -> Weak<T> {
3523 Weak::new()
3524 }
3525}
3526
// NOTE: We check the ref-counts for overflow here to deal with mem::forget safely. In particular
3528// if you mem::forget Rcs (or Weaks), the ref-count can overflow, and then
3529// you can free the allocation while outstanding Rcs (or Weaks) exist.
3530// We abort because this is such a degenerate scenario that we don't care about
3531// what happens -- no real program should ever experience this.
3532//
3533// This should have negligible overhead since you don't actually need to
3534// clone these much in Rust thanks to ownership and move-semantics.
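//
// For instance, repeatedly running `mem::forget(rc.clone())` would eventually
// wrap the strong count back around to zero; a later `drop` of a surviving
// handle could then free the allocation while the forgotten clones still point
// at it. Aborting as soon as an increment wraps to zero closes that hole.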
3535
3536#[doc(hidden)]
3537trait RcInnerPtr {
3538 fn weak_ref(&self) -> &Cell<usize>;
3539 fn strong_ref(&self) -> &Cell<usize>;
3540
3541 #[inline]
3542 fn strong(&self) -> usize {
3543 self.strong_ref().get()
3544 }
3545
3546 #[inline]
3547 fn inc_strong(&self) {
3548 let strong = self.strong();
3549
3550 // We insert an `assume` here to hint LLVM at an otherwise
3551 // missed optimization.
3552 // SAFETY: The reference count will never be zero when this is
3553 // called.
3554 unsafe {
3555 hint::assert_unchecked(strong != 0);
3556 }
3557
3558 let strong = strong.wrapping_add(1);
3559 self.strong_ref().set(strong);
3560
3561 // We want to abort on overflow instead of dropping the value.
3562 // Checking for overflow after the store instead of before
3563 // allows for slightly better code generation.
3564 if core::intrinsics::unlikely(strong == 0) {
3565 abort();
3566 }
3567 }
3568
3569 #[inline]
3570 fn dec_strong(&self) {
3571 self.strong_ref().set(self.strong() - 1);
3572 }
3573
3574 #[inline]
3575 fn weak(&self) -> usize {
3576 self.weak_ref().get()
3577 }
3578
3579 #[inline]
3580 fn inc_weak(&self) {
3581 let weak = self.weak();
3582
3583 // We insert an `assume` here to hint LLVM at an otherwise
3584 // missed optimization.
3585 // SAFETY: The reference count will never be zero when this is
3586 // called.
3587 unsafe {
3588 hint::assert_unchecked(weak != 0);
3589 }
3590
3591 let weak = weak.wrapping_add(1);
3592 self.weak_ref().set(weak);
3593
3594 // We want to abort on overflow instead of dropping the value.
3595 // Checking for overflow after the store instead of before
3596 // allows for slightly better code generation.
3597 if core::intrinsics::unlikely(weak == 0) {
3598 abort();
3599 }
3600 }
3601
3602 #[inline]
3603 fn dec_weak(&self) {
3604 self.weak_ref().set(self.weak() - 1);
3605 }
3606}
3607
3608impl<T: ?Sized> RcInnerPtr for RcInner<T> {
3609 #[inline(always)]
3610 fn weak_ref(&self) -> &Cell<usize> {
3611 &self.weak
3612 }
3613
3614 #[inline(always)]
3615 fn strong_ref(&self) -> &Cell<usize> {
3616 &self.strong
3617 }
3618}
3619
3620impl<'a> RcInnerPtr for WeakInner<'a> {
3621 #[inline(always)]
3622 fn weak_ref(&self) -> &Cell<usize> {
3623 self.weak
3624 }
3625
3626 #[inline(always)]
3627 fn strong_ref(&self) -> &Cell<usize> {
3628 self.strong
3629 }
3630}
3631
3632#[stable(feature = "rust1", since = "1.0.0")]
3633impl<T: ?Sized, A: Allocator> borrow::Borrow<T> for Rc<T, A> {
3634 fn borrow(&self) -> &T {
3635 &**self
3636 }
3637}
3638
3639#[stable(since = "1.5.0", feature = "smart_ptr_as_ref")]
3640impl<T: ?Sized, A: Allocator> AsRef<T> for Rc<T, A> {
3641 fn as_ref(&self) -> &T {
3642 &**self
3643 }
3644}
3645
3646#[stable(feature = "pin", since = "1.33.0")]
3647impl<T: ?Sized, A: Allocator> Unpin for Rc<T, A> {}
3648
3649/// Gets the offset within an `RcInner` for the payload behind a pointer.
3650///
3651/// # Safety
3652///
3653/// The pointer must point to (and have valid metadata for) a previously
3654/// valid instance of T, but the T is allowed to be dropped.
3655unsafe fn data_offset<T: ?Sized>(ptr: *const T) -> usize {
3656 // Align the unsized value to the end of the RcInner.
3657 // Because RcInner is repr(C), it will always be the last field in memory.
3658 // SAFETY: since the only unsized types possible are slices, trait objects,
3659 // and extern types, the input safety requirement is currently enough to
3660 // satisfy the requirements of align_of_val_raw; this is an implementation
3661 // detail of the language that must not be relied upon outside of std.
3662 unsafe { data_offset_align(align_of_val_raw(ptr)) }
3663}
3664
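// A worked example of the computation below (assuming a typical 64-bit target):
// `RcInner<()>` is two `usize` counters, so `Layout::new::<RcInner<()>>()` has
// size 16 and align 8. A payload with alignment <= 8 then starts at offset 16,
// while a 32-byte-aligned payload would start at offset 32.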
3665#[inline]
3666fn data_offset_align(align: usize) -> usize {
3667 let layout = Layout::new::<RcInner<()>>();
3668 layout.size() + layout.padding_needed_for(align)
3669}
3670
3671/// A uniquely owned [`Rc`].
3672///
3673/// This represents an `Rc` that is known to be uniquely owned -- that is, have exactly one strong
3674/// reference. Multiple weak pointers can be created, but attempts to upgrade those to strong
3675/// references will fail unless the `UniqueRc` they point to has been converted into a regular `Rc`.
3676///
3677/// Because they are uniquely owned, the contents of a `UniqueRc` can be freely mutated. A common
/// use case is to have an object be mutable during its initialization phase, and then
/// have it become immutable by converting it into a normal `Rc`.
3680///
3681/// This can be used as a flexible way to create cyclic data structures, as in the example below.
3682///
3683/// ```
3684/// #![feature(unique_rc_arc)]
3685/// use std::rc::{Rc, Weak, UniqueRc};
3686///
3687/// struct Gadget {
3688/// #[allow(dead_code)]
3689/// me: Weak<Gadget>,
3690/// }
3691///
3692/// fn create_gadget() -> Option<Rc<Gadget>> {
3693/// let mut rc = UniqueRc::new(Gadget {
3694/// me: Weak::new(),
3695/// });
3696/// rc.me = UniqueRc::downgrade(&rc);
3697/// Some(UniqueRc::into_rc(rc))
3698/// }
3699///
3700/// create_gadget().unwrap();
3701/// ```
3702///
3703/// An advantage of using `UniqueRc` over [`Rc::new_cyclic`] to build cyclic data structures is that
3704/// [`Rc::new_cyclic`]'s `data_fn` parameter cannot be async or return a [`Result`]. As shown in the
3705/// previous example, `UniqueRc` allows for more flexibility in the construction of cyclic data,
3706/// including fallible or async constructors.
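///
/// For instance, a fallible constructor can simply bail out before the cycle is
/// tied (this sketch and its `new_gadget` helper are illustrative, not part of
/// the API):
///
/// ```
/// #![feature(unique_rc_arc)]
/// use std::rc::{Rc, Weak, UniqueRc};
///
/// struct Gadget {
///     #[allow(dead_code)]
///     me: Weak<Gadget>,
/// }
///
/// fn new_gadget(succeed: bool) -> Result<Rc<Gadget>, String> {
///     let mut rc = UniqueRc::new(Gadget { me: Weak::new() });
///     if !succeed {
///         // No strong cycle exists yet, so returning early just drops `rc`.
///         return Err(String::from("initialization failed"));
///     }
///     rc.me = UniqueRc::downgrade(&rc);
///     Ok(UniqueRc::into_rc(rc))
/// }
///
/// assert!(new_gadget(true).is_ok());
/// assert!(new_gadget(false).is_err());
/// ```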
3707#[unstable(feature = "unique_rc_arc", issue = "112566")]
3708pub struct UniqueRc<
3709 T: ?Sized,
3710 #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
3711> {
3712 ptr: NonNull<RcInner<T>>,
3713 // Define the ownership of `RcInner<T>` for drop-check
3714 _marker: PhantomData<RcInner<T>>,
3715 // Invariance is necessary for soundness: once other `Weak`
3716 // references exist, we already have a form of shared mutability!
3717 _marker2: PhantomData<*mut T>,
3718 alloc: A,
3719}
3720
3721// Not necessary for correctness since `UniqueRc` contains `NonNull`,
3722// but having an explicit negative impl is nice for documentation purposes
3723// and results in nicer error messages.
3724#[unstable(feature = "unique_rc_arc", issue = "112566")]
3725impl<T: ?Sized, A: Allocator> !Send for UniqueRc<T, A> {}
3726
3727// Not necessary for correctness since `UniqueRc` contains `NonNull`,
3728// but having an explicit negative impl is nice for documentation purposes
3729// and results in nicer error messages.
3730#[unstable(feature = "unique_rc_arc", issue = "112566")]
3731impl<T: ?Sized, A: Allocator> !Sync for UniqueRc<T, A> {}
3732
3733#[unstable(feature = "unique_rc_arc", issue = "112566")]
3734impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<UniqueRc<U, A>>
3735 for UniqueRc<T, A>
3736{
3737}
3738
3739//#[unstable(feature = "unique_rc_arc", issue = "112566")]
3740#[unstable(feature = "dispatch_from_dyn", issue = "none")]
3741impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<UniqueRc<U>> for UniqueRc<T> {}
3742
3743#[unstable(feature = "unique_rc_arc", issue = "112566")]
3744impl<T: ?Sized + fmt::Display, A: Allocator> fmt::Display for UniqueRc<T, A> {
3745 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3746 fmt::Display::fmt(&**self, f)
3747 }
3748}
3749
3750#[unstable(feature = "unique_rc_arc", issue = "112566")]
3751impl<T: ?Sized + fmt::Debug, A: Allocator> fmt::Debug for UniqueRc<T, A> {
3752 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3753 fmt::Debug::fmt(&**self, f)
3754 }
3755}
3756
3757#[unstable(feature = "unique_rc_arc", issue = "112566")]
3758impl<T: ?Sized, A: Allocator> fmt::Pointer for UniqueRc<T, A> {
3759 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3760 fmt::Pointer::fmt(&(&raw const **self), f)
3761 }
3762}
3763
3764#[unstable(feature = "unique_rc_arc", issue = "112566")]
3765impl<T: ?Sized, A: Allocator> borrow::Borrow<T> for UniqueRc<T, A> {
3766 fn borrow(&self) -> &T {
3767 &**self
3768 }
3769}
3770
3771#[unstable(feature = "unique_rc_arc", issue = "112566")]
3772impl<T: ?Sized, A: Allocator> borrow::BorrowMut<T> for UniqueRc<T, A> {
3773 fn borrow_mut(&mut self) -> &mut T {
3774 &mut **self
3775 }
3776}
3777
3778#[unstable(feature = "unique_rc_arc", issue = "112566")]
3779impl<T: ?Sized, A: Allocator> AsRef<T> for UniqueRc<T, A> {
3780 fn as_ref(&self) -> &T {
3781 &**self
3782 }
3783}
3784
3785#[unstable(feature = "unique_rc_arc", issue = "112566")]
3786impl<T: ?Sized, A: Allocator> AsMut<T> for UniqueRc<T, A> {
3787 fn as_mut(&mut self) -> &mut T {
3788 &mut **self
3789 }
3790}
3791
3792#[unstable(feature = "unique_rc_arc", issue = "112566")]
3793impl<T: ?Sized, A: Allocator> Unpin for UniqueRc<T, A> {}
3794
3795#[unstable(feature = "unique_rc_arc", issue = "112566")]
3796impl<T: ?Sized + PartialEq, A: Allocator> PartialEq for UniqueRc<T, A> {
3797 /// Equality for two `UniqueRc`s.
3798 ///
3799 /// Two `UniqueRc`s are equal if their inner values are equal.
3800 ///
3801 /// # Examples
3802 ///
3803 /// ```
3804 /// #![feature(unique_rc_arc)]
3805 /// use std::rc::UniqueRc;
3806 ///
3807 /// let five = UniqueRc::new(5);
3808 ///
3809 /// assert!(five == UniqueRc::new(5));
3810 /// ```
3811 #[inline]
3812 fn eq(&self, other: &Self) -> bool {
3813 PartialEq::eq(&**self, &**other)
3814 }
3815
3816 /// Inequality for two `UniqueRc`s.
3817 ///
3818 /// Two `UniqueRc`s are not equal if their inner values are not equal.
3819 ///
3820 /// # Examples
3821 ///
3822 /// ```
3823 /// #![feature(unique_rc_arc)]
3824 /// use std::rc::UniqueRc;
3825 ///
3826 /// let five = UniqueRc::new(5);
3827 ///
3828 /// assert!(five != UniqueRc::new(6));
3829 /// ```
3830 #[inline]
3831 fn ne(&self, other: &Self) -> bool {
3832 PartialEq::ne(&**self, &**other)
3833 }
3834}
3835
3836#[unstable(feature = "unique_rc_arc", issue = "112566")]
3837impl<T: ?Sized + PartialOrd, A: Allocator> PartialOrd for UniqueRc<T, A> {
3838 /// Partial comparison for two `UniqueRc`s.
3839 ///
3840 /// The two are compared by calling `partial_cmp()` on their inner values.
3841 ///
3842 /// # Examples
3843 ///
3844 /// ```
3845 /// #![feature(unique_rc_arc)]
3846 /// use std::rc::UniqueRc;
3847 /// use std::cmp::Ordering;
3848 ///
3849 /// let five = UniqueRc::new(5);
3850 ///
3851 /// assert_eq!(Some(Ordering::Less), five.partial_cmp(&UniqueRc::new(6)));
3852 /// ```
3853 #[inline(always)]
3854 fn partial_cmp(&self, other: &UniqueRc<T, A>) -> Option<Ordering> {
3855 (**self).partial_cmp(&**other)
3856 }
3857
3858 /// Less-than comparison for two `UniqueRc`s.
3859 ///
3860 /// The two are compared by calling `<` on their inner values.
3861 ///
3862 /// # Examples
3863 ///
3864 /// ```
3865 /// #![feature(unique_rc_arc)]
3866 /// use std::rc::UniqueRc;
3867 ///
3868 /// let five = UniqueRc::new(5);
3869 ///
3870 /// assert!(five < UniqueRc::new(6));
3871 /// ```
3872 #[inline(always)]
3873 fn lt(&self, other: &UniqueRc<T, A>) -> bool {
3874 **self < **other
3875 }
3876
3877 /// 'Less than or equal to' comparison for two `UniqueRc`s.
3878 ///
3879 /// The two are compared by calling `<=` on their inner values.
3880 ///
3881 /// # Examples
3882 ///
3883 /// ```
3884 /// #![feature(unique_rc_arc)]
3885 /// use std::rc::UniqueRc;
3886 ///
3887 /// let five = UniqueRc::new(5);
3888 ///
3889 /// assert!(five <= UniqueRc::new(5));
3890 /// ```
3891 #[inline(always)]
3892 fn le(&self, other: &UniqueRc<T, A>) -> bool {
3893 **self <= **other
3894 }
3895
3896 /// Greater-than comparison for two `UniqueRc`s.
3897 ///
3898 /// The two are compared by calling `>` on their inner values.
3899 ///
3900 /// # Examples
3901 ///
3902 /// ```
3903 /// #![feature(unique_rc_arc)]
3904 /// use std::rc::UniqueRc;
3905 ///
3906 /// let five = UniqueRc::new(5);
3907 ///
3908 /// assert!(five > UniqueRc::new(4));
3909 /// ```
3910 #[inline(always)]
3911 fn gt(&self, other: &UniqueRc<T, A>) -> bool {
3912 **self > **other
3913 }
3914
3915 /// 'Greater than or equal to' comparison for two `UniqueRc`s.
3916 ///
3917 /// The two are compared by calling `>=` on their inner values.
3918 ///
3919 /// # Examples
3920 ///
3921 /// ```
3922 /// #![feature(unique_rc_arc)]
3923 /// use std::rc::UniqueRc;
3924 ///
3925 /// let five = UniqueRc::new(5);
3926 ///
3927 /// assert!(five >= UniqueRc::new(5));
3928 /// ```
3929 #[inline(always)]
3930 fn ge(&self, other: &UniqueRc<T, A>) -> bool {
3931 **self >= **other
3932 }
3933}
3934
3935#[unstable(feature = "unique_rc_arc", issue = "112566")]
3936impl<T: ?Sized + Ord, A: Allocator> Ord for UniqueRc<T, A> {
3937 /// Comparison for two `UniqueRc`s.
3938 ///
3939 /// The two are compared by calling `cmp()` on their inner values.
3940 ///
3941 /// # Examples
3942 ///
3943 /// ```
3944 /// #![feature(unique_rc_arc)]
3945 /// use std::rc::UniqueRc;
3946 /// use std::cmp::Ordering;
3947 ///
3948 /// let five = UniqueRc::new(5);
3949 ///
3950 /// assert_eq!(Ordering::Less, five.cmp(&UniqueRc::new(6)));
3951 /// ```
3952 #[inline]
3953 fn cmp(&self, other: &UniqueRc<T, A>) -> Ordering {
3954 (**self).cmp(&**other)
3955 }
3956}
3957
3958#[unstable(feature = "unique_rc_arc", issue = "112566")]
3959impl<T: ?Sized + Eq, A: Allocator> Eq for UniqueRc<T, A> {}
3960
3961#[unstable(feature = "unique_rc_arc", issue = "112566")]
3962impl<T: ?Sized + Hash, A: Allocator> Hash for UniqueRc<T, A> {
3963 fn hash<H: Hasher>(&self, state: &mut H) {
3964 (**self).hash(state);
3965 }
3966}
3967
3968// Depends on A = Global
3969impl<T> UniqueRc<T> {
3970 /// Creates a new `UniqueRc`.
3971 ///
3972 /// Weak references to this `UniqueRc` can be created with [`UniqueRc::downgrade`]. Upgrading
3973 /// these weak references will fail before the `UniqueRc` has been converted into an [`Rc`].
3974 /// After converting the `UniqueRc` into an [`Rc`], any weak references created beforehand will
3975 /// point to the new [`Rc`].
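///
/// # Examples
///
/// A minimal sketch of the initialize-then-share pattern:
///
/// ```
/// #![feature(unique_rc_arc)]
/// use std::rc::{Rc, UniqueRc};
///
/// let mut unique = UniqueRc::new(5);
/// *unique += 1; // mutate freely while uniquely owned
/// let rc: Rc<i32> = UniqueRc::into_rc(unique);
/// assert_eq!(*rc, 6);
/// ```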
3976 #[cfg(not(no_global_oom_handling))]
3977 #[unstable(feature = "unique_rc_arc", issue = "112566")]
3978 pub fn new(value: T) -> Self {
3979 Self::new_in(value, Global)
3980 }
3981}
3982
3983impl<T, A: Allocator> UniqueRc<T, A> {
3984 /// Creates a new `UniqueRc` in the provided allocator.
3985 ///
3986 /// Weak references to this `UniqueRc` can be created with [`UniqueRc::downgrade`]. Upgrading
3987 /// these weak references will fail before the `UniqueRc` has been converted into an [`Rc`].
3988 /// After converting the `UniqueRc` into an [`Rc`], any weak references created beforehand will
3989 /// point to the new [`Rc`].
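///
/// # Examples
///
/// A minimal sketch using the `Global` allocator explicitly:
///
/// ```
/// #![feature(unique_rc_arc, allocator_api)]
/// use std::alloc::Global;
/// use std::rc::UniqueRc;
///
/// let unique = UniqueRc::new_in(5, Global);
/// assert_eq!(*unique, 5);
/// ```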
3990 #[cfg(not(no_global_oom_handling))]
3991 #[unstable(feature = "unique_rc_arc", issue = "112566")]
3992 pub fn new_in(value: T, alloc: A) -> Self {
3993 let (ptr, alloc) = Box::into_unique(Box::new_in(
3994 RcInner {
3995 strong: Cell::new(0),
// Keep one implicit weak reference so that the allocation stays valid even if
// all explicitly created weak pointers are dropped.
3998 weak: Cell::new(1),
3999 value,
4000 },
4001 alloc,
4002 ));
4003 Self { ptr: ptr.into(), _marker: PhantomData, _marker2: PhantomData, alloc }
4004 }
4005}
4006
4007impl<T: ?Sized, A: Allocator> UniqueRc<T, A> {
4008 /// Converts the `UniqueRc` into a regular [`Rc`].
4009 ///
/// This consumes the `UniqueRc` and returns a regular [`Rc`] containing the value
/// held by the `UniqueRc`.
4012 ///
4013 /// Any weak references created before this method is called can now be upgraded to strong
4014 /// references.
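///
/// # Examples
///
/// A minimal sketch of the conversion:
///
/// ```
/// #![feature(unique_rc_arc)]
/// use std::rc::{Rc, UniqueRc};
///
/// let unique = UniqueRc::new(String::from("hello"));
/// let rc: Rc<String> = UniqueRc::into_rc(unique);
/// assert_eq!(&*rc, "hello");
/// ```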
4015 #[unstable(feature = "unique_rc_arc", issue = "112566")]
4016 pub fn into_rc(this: Self) -> Rc<T, A> {
4017 let mut this = ManuallyDrop::new(this);
4018
4019 // Move the allocator out.
4020 // SAFETY: `this.alloc` will not be accessed again, nor dropped because it is in
4021 // a `ManuallyDrop`.
4022 let alloc: A = unsafe { ptr::read(&this.alloc) };
4023
4024 // SAFETY: This pointer was allocated at creation time so we know it is valid.
4025 unsafe {
4026 // Convert our weak reference into a strong reference
4027 this.ptr.as_mut().strong.set(1);
4028 Rc::from_inner_in(this.ptr, alloc)
4029 }
4030 }
4031}
4032
4033impl<T: ?Sized, A: Allocator + Clone> UniqueRc<T, A> {
4034 /// Creates a new weak reference to the `UniqueRc`.
4035 ///
4036 /// Attempting to upgrade this weak reference will fail before the `UniqueRc` has been converted
/// into an [`Rc`] using [`UniqueRc::into_rc`].
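///
/// # Examples
///
/// A small sketch of when upgrading succeeds:
///
/// ```
/// #![feature(unique_rc_arc)]
/// use std::rc::UniqueRc;
///
/// let unique = UniqueRc::new(5);
/// let weak = UniqueRc::downgrade(&unique);
/// // Upgrading fails while the value is still uniquely owned...
/// assert!(weak.upgrade().is_none());
/// // ...and succeeds once the `UniqueRc` becomes a regular `Rc`.
/// let rc = UniqueRc::into_rc(unique);
/// assert_eq!(*weak.upgrade().unwrap(), 5);
/// ```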
4038 #[unstable(feature = "unique_rc_arc", issue = "112566")]
4039 pub fn downgrade(this: &Self) -> Weak<T, A> {
4040 // SAFETY: This pointer was allocated at creation time and we guarantee that we only have
4041 // one strong reference before converting to a regular Rc.
4042 unsafe {
4043 this.ptr.as_ref().inc_weak();
4044 }
4045 Weak { ptr: this.ptr, alloc: this.alloc.clone() }
4046 }
4047}
4048
4049#[unstable(feature = "unique_rc_arc", issue = "112566")]
4050impl<T: ?Sized, A: Allocator> Deref for UniqueRc<T, A> {
4051 type Target = T;
4052
4053 fn deref(&self) -> &T {
4054 // SAFETY: This pointer was allocated at creation time so we know it is valid.
4055 unsafe { &self.ptr.as_ref().value }
4056 }
4057}
4058
4059#[unstable(feature = "unique_rc_arc", issue = "112566")]
4060impl<T: ?Sized, A: Allocator> DerefMut for UniqueRc<T, A> {
4061 fn deref_mut(&mut self) -> &mut T {
4062 // SAFETY: This pointer was allocated at creation time so we know it is valid. We know we
4063 // have unique ownership and therefore it's safe to make a mutable reference because
4064 // `UniqueRc` owns the only strong reference to itself.
4065 unsafe { &mut (*self.ptr.as_ptr()).value }
4066 }
4067}
4068
4069#[unstable(feature = "unique_rc_arc", issue = "112566")]
4070unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for UniqueRc<T, A> {
4071 fn drop(&mut self) {
4072 unsafe {
4073 // destroy the contained object
4074 drop_in_place(DerefMut::deref_mut(self));
4075
4076 // remove the implicit "strong weak" pointer now that we've destroyed the contents.
4077 self.ptr.as_ref().dec_weak();
4078
4079 if self.ptr.as_ref().weak() == 0 {
4080 self.alloc.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr()));
4081 }
4082 }
4083 }
4084}
4085
/// A unique owning pointer to an [`RcInner`] **that does not imply the contents are initialized,**
4087/// but will deallocate it (without dropping the value) when dropped.
4088///
4089/// This is a helper for [`Rc::make_mut()`] to ensure correct cleanup on panic.
4090/// It is nearly a duplicate of `UniqueRc<MaybeUninit<T>, A>` except that it allows `T: !Sized`,
4091/// which `MaybeUninit` does not.
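///
/// Intended flow: allocate with [`UniqueRcUninit::new`], write the value through
/// [`Self::data_ptr()`], then call [`Self::into_rc()`]; if initialization panics
/// before `into_rc`, the `Drop` impl deallocates the block without attempting to
/// drop the never-initialized value.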
4092#[cfg(not(no_global_oom_handling))]
4093struct UniqueRcUninit<T: ?Sized, A: Allocator> {
4094 ptr: NonNull<RcInner<T>>,
4095 layout_for_value: Layout,
4096 alloc: Option<A>,
4097}
4098
4099#[cfg(not(no_global_oom_handling))]
4100impl<T: ?Sized, A: Allocator> UniqueRcUninit<T, A> {
/// Allocates an `RcInner` with a layout suitable to contain `for_value` or a clone of it.
4102 fn new(for_value: &T, alloc: A) -> UniqueRcUninit<T, A> {
4103 let layout = Layout::for_value(for_value);
4104 let ptr = unsafe {
4105 Rc::allocate_for_layout(
4106 layout,
4107 |layout_for_rc_inner| alloc.allocate(layout_for_rc_inner),
4108 |mem| mem.with_metadata_of(ptr::from_ref(for_value) as *const RcInner<T>),
4109 )
4110 };
4111 Self { ptr: NonNull::new(ptr).unwrap(), layout_for_value: layout, alloc: Some(alloc) }
4112 }
4113
4114 /// Returns the pointer to be written into to initialize the [`Rc`].
4115 fn data_ptr(&mut self) -> *mut T {
4116 let offset = data_offset_align(self.layout_for_value.align());
4117 unsafe { self.ptr.as_ptr().byte_add(offset) as *mut T }
4118 }
4119
4120 /// Upgrade this into a normal [`Rc`].
4121 ///
4122 /// # Safety
4123 ///
4124 /// The data must have been initialized (by writing to [`Self::data_ptr()`]).
4125 unsafe fn into_rc(self) -> Rc<T, A> {
4126 let mut this = ManuallyDrop::new(self);
4127 let ptr = this.ptr;
4128 let alloc = this.alloc.take().unwrap();
4129
4130 // SAFETY: The pointer is valid as per `UniqueRcUninit::new`, and the caller is responsible
4131 // for having initialized the data.
4132 unsafe { Rc::from_ptr_in(ptr.as_ptr(), alloc) }
4133 }
4134}
4135
4136#[cfg(not(no_global_oom_handling))]
4137impl<T: ?Sized, A: Allocator> Drop for UniqueRcUninit<T, A> {
4138 fn drop(&mut self) {
4139 // SAFETY:
4140 // * new() produced a pointer safe to deallocate.
4141 // * We own the pointer unless into_rc() was called, which forgets us.
4142 unsafe {
4143 self.alloc.take().unwrap().deallocate(
4144 self.ptr.cast(),
4145 rc_inner_layout_for_value_layout(self.layout_for_value),
4146 );
4147 }
4148 }
4149}