//! Single-threaded reference-counting pointers. 'Rc' stands for 'Reference
//! Counted'.
//!
//! The type [`Rc<T>`][`Rc`] provides shared ownership of a value of type `T`,
//! allocated in the heap. Invoking [`clone`][clone] on [`Rc`] produces a new
//! pointer to the same allocation in the heap. When the last [`Rc`] pointer to a
//! given allocation is destroyed, the value stored in that allocation (often
//! referred to as "inner value") is also dropped.
//!
//! Shared references in Rust disallow mutation by default, and [`Rc`]
//! is no exception: you cannot generally obtain a mutable reference to
//! something inside an [`Rc`]. If you need mutability, put a [`Cell`]
//! or [`RefCell`] inside the [`Rc`]; see [an example of mutability
//! inside an `Rc`][mutability].
//!
//! [`Rc`] uses non-atomic reference counting. This means that overhead is very
//! low, but an [`Rc`] cannot be sent between threads, and consequently [`Rc`]
//! does not implement [`Send`][send]. As a result, the Rust compiler
//! will check *at compile time* that you are not sending [`Rc`]s between
//! threads. If you need multi-threaded, atomic reference counting, use
//! [`sync::Arc`][arc].
//!
//! The [`downgrade`][downgrade] method can be used to create a non-owning
//! [`Weak`] pointer. A [`Weak`] pointer can be [`upgrade`][upgrade]d
//! to an [`Rc`], but this will return [`None`] if the value stored in the allocation has
//! already been dropped. In other words, `Weak` pointers do not keep the value
//! inside the allocation alive; however, they *do* keep the allocation
//! (the backing store for the inner value) alive.
//!
//! A cycle between [`Rc`] pointers will never be deallocated. For this reason,
//! [`Weak`] is used to break cycles. For example, a tree could have strong
//! [`Rc`] pointers from parent nodes to children, and [`Weak`] pointers from
//! children back to their parents.
//!
//! `Rc<T>` automatically dereferences to `T` (via the [`Deref`] trait),
//! so you can call `T`'s methods on a value of type [`Rc<T>`][`Rc`]. To avoid name
//! clashes with `T`'s methods, the methods of [`Rc<T>`][`Rc`] itself are associated
//! functions, called using [fully qualified syntax]:
//!
//! ```
//! use cactusref::Rc;
//!
//! let my_rc = Rc::new(());
//! Rc::downgrade(&my_rc);
//! ```
//!
//! `Rc<T>`'s implementations of traits like `Clone` may also be called using
//! fully qualified syntax. Some people prefer to use fully qualified syntax,
//! while others prefer using method-call syntax.
//!
//! ```
//! use cactusref::Rc;
//!
//! let rc = Rc::new(());
//! // Method-call syntax
//! let rc2 = rc.clone();
//! // Fully qualified syntax
//! let rc3 = Rc::clone(&rc);
//! ```
//!
//! [`Weak<T>`][`Weak`] does not auto-dereference to `T`, because the inner value may have
//! already been dropped.
//!
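//! A short sketch (using only APIs from this module) of reading through a
//! `Weak` by upgrading it first:
//!
//! ```
//! use cactusref::Rc;
//!
//! let five = Rc::new(5);
//! let weak_five = Rc::downgrade(&five);
//! // `Weak` has no `Deref`; upgrade to an `Rc` before reading the value.
//! assert_eq!(*weak_five.upgrade().unwrap(), 5);
//! ```
//!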
//! # Cloning references
//!
//! Creating a new reference to the same allocation as an existing reference counted pointer
//! is done using the `Clone` trait implemented for [`Rc<T>`][`Rc`] and [`Weak<T>`][`Weak`].
//!
//! ```
//! use cactusref::Rc;
//!
//! let foo = Rc::new(vec![1.0, 2.0, 3.0]);
//! // The two syntaxes below are equivalent.
//! let a = foo.clone();
//! let b = Rc::clone(&foo);
//! // a and b both point to the same memory location as foo.
//! ```
//!
//! The `Rc::clone(&from)` syntax is the most idiomatic because it conveys more explicitly
//! the meaning of the code. In the example above, this syntax makes it easier to see that
//! this code is creating a new reference rather than copying the whole content of `foo`.
//!
//! # Examples
//!
//! Consider a scenario where a set of `Gadget`s are owned by a given `Owner`.
//! We want to have our `Gadget`s point to their `Owner`. We can't do this with
//! unique ownership, because more than one gadget may belong to the same
//! `Owner`. [`Rc`] allows us to share an `Owner` between multiple `Gadget`s,
//! and have the `Owner` remain allocated as long as any `Gadget` points at it.
//!
//! ```
//! use cactusref::Rc;
//!
//! struct Owner {
//!     name: String,
//!     // ...other fields
//! }
//!
//! struct Gadget {
//!     id: i32,
//!     owner: Rc<Owner>,
//!     // ...other fields
//! }
//!
//! // Create a reference-counted `Owner`.
//! let gadget_owner: Rc<Owner> = Rc::new(
//!     Owner {
//!         name: "Gadget Man".to_string(),
//!     }
//! );
//!
//! // Create `Gadget`s belonging to `gadget_owner`. Cloning the `Rc<Owner>`
//! // gives us a new pointer to the same `Owner` allocation, incrementing
//! // the reference count in the process.
//! let gadget1 = Gadget {
//!     id: 1,
//!     owner: Rc::clone(&gadget_owner),
//! };
//! let gadget2 = Gadget {
//!     id: 2,
//!     owner: Rc::clone(&gadget_owner),
//! };
//!
//! // Dispose of our local variable `gadget_owner`.
//! drop(gadget_owner);
//!
//! // Despite dropping `gadget_owner`, we're still able to print out the name
//! // of the `Owner` of the `Gadget`s. This is because we've only dropped a
//! // single `Rc<Owner>`, not the `Owner` it points to. As long as there are
//! // other `Rc<Owner>` pointing at the same `Owner` allocation, it will remain
//! // live. The field projection `gadget1.owner.name` works because
//! // `Rc<Owner>` automatically dereferences to `Owner`.
//! println!("Gadget {} owned by {}", gadget1.id, gadget1.owner.name);
//! println!("Gadget {} owned by {}", gadget2.id, gadget2.owner.name);
//!
//! // At the end of the function, `gadget1` and `gadget2` are destroyed, and
//! // with them the last counted references to our `Owner`. Gadget Man now
//! // gets destroyed as well.
//! ```
//!
//! If our requirements change, and we also need to be able to traverse from
//! `Owner` to `Gadget`, we will run into problems. An [`Rc`] pointer from `Owner`
//! to `Gadget` introduces a cycle. This means that their
//! reference counts can never reach 0, and the allocation will never be destroyed:
//! a memory leak. In order to get around this, we can use [`Weak`]
//! pointers.
//!
//! Rust actually makes it somewhat difficult to produce this loop in the first
//! place. In order to end up with two values that point at each other, one of
//! them needs to be mutable. This is difficult because [`Rc`] enforces
//! memory safety by only giving out shared references to the value it wraps,
//! and these don't allow direct mutation. We need to wrap the part of the
//! value we wish to mutate in a [`RefCell`], which provides *interior
//! mutability*: a method to achieve mutability through a shared reference.
//! [`RefCell`] enforces Rust's borrowing rules at runtime.
//!
//! ```
//! use cactusref::Rc;
//! use cactusref::Weak;
//! use std::cell::RefCell;
//!
//! struct Owner {
//!     name: String,
//!     gadgets: RefCell<Vec<Weak<Gadget>>>,
//!     // ...other fields
//! }
//!
//! struct Gadget {
//!     id: i32,
//!     owner: Rc<Owner>,
//!     // ...other fields
//! }
//!
//! // Create a reference-counted `Owner`. Note that we've put the `Owner`'s
//! // vector of `Gadget`s inside a `RefCell` so that we can mutate it through
//! // a shared reference.
//! let gadget_owner: Rc<Owner> = Rc::new(
//!     Owner {
//!         name: "Gadget Man".to_string(),
//!         gadgets: RefCell::new(vec![]),
//!     }
//! );
//!
//! // Create `Gadget`s belonging to `gadget_owner`, as before.
//! let gadget1 = Rc::new(
//!     Gadget {
//!         id: 1,
//!         owner: Rc::clone(&gadget_owner),
//!     }
//! );
//! let gadget2 = Rc::new(
//!     Gadget {
//!         id: 2,
//!         owner: Rc::clone(&gadget_owner),
//!     }
//! );
//!
//! // Add the `Gadget`s to their `Owner`.
//! {
//!     let mut gadgets = gadget_owner.gadgets.borrow_mut();
//!     gadgets.push(Rc::downgrade(&gadget1));
//!     gadgets.push(Rc::downgrade(&gadget2));
//!
//!     // `RefCell` dynamic borrow ends here.
//! }
//!
//! // Iterate over our `Gadget`s, printing their details out.
//! for gadget_weak in gadget_owner.gadgets.borrow().iter() {
//!
//!     // `gadget_weak` is a `Weak<Gadget>`. Since `Weak` pointers can't
//!     // guarantee the allocation still exists, we need to call
//!     // `upgrade`, which returns an `Option<Rc<Gadget>>`.
//!     //
//!     // In this case we know the allocation still exists, so we simply
//!     // `unwrap` the `Option`. In a more complicated program, you might
//!     // need graceful error handling for a `None` result.
//!
//!     let gadget = gadget_weak.upgrade().unwrap();
//!     println!("Gadget {} owned by {}", gadget.id, gadget.owner.name);
//! }
//!
//! // At the end of the function, `gadget_owner`, `gadget1`, and `gadget2`
//! // are destroyed. There are now no strong (`Rc`) pointers to the
//! // gadgets, so they are destroyed. This zeroes the reference count on
//! // Gadget Man, so he gets destroyed as well.
//! ```
//!
//! [clone]: Clone::clone
//! [`Cell`]: core::cell::Cell
//! [`RefCell`]: core::cell::RefCell
//! [send]: core::marker::Send
#![cfg_attr(feature = "std", doc = "[arc]: std::sync::Arc")]
#![cfg_attr(
    not(feature = "std"),
    doc = "[arc]: https://doc.rust-lang.org/stable/std/sync/struct.Arc.html"
)]
//! [`Deref`]: core::ops::Deref
//! [downgrade]: Rc::downgrade
//! [upgrade]: Weak::upgrade
//! [mutability]: core::cell#introducing-mutability-inside-of-something-immutable
//! [fully qualified syntax]: https://doc.rust-lang.org/book/ch19-03-advanced-traits.html#fully-qualified-syntax-for-disambiguation-calling-methods-with-the-same-name

use core::borrow;
use core::cell::{Cell, RefCell};
use core::cmp::Ordering;
use core::fmt;
use core::hash::{Hash, Hasher};
use core::intrinsics::abort;
use core::marker::PhantomData;
use core::mem::{self, ManuallyDrop, MaybeUninit};
use core::ops::Deref;
use core::pin::Pin;
use core::ptr::{self, NonNull};

use alloc::alloc::handle_alloc_error;
use alloc::alloc::{AllocError, Allocator, Global, Layout};
use alloc::boxed::Box;

use crate::link::Links;

#[cfg(test)]
#[allow(clippy::redundant_clone)]
#[allow(clippy::uninlined_format_args)]
mod tests;

// This is repr(C) to future-proof against possible field-reordering, which
// would interfere with otherwise safe [into|from]_raw() of transmutable
// inner types.
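//
// `strong` and `weak` are the usual `Rc` reference counts. `links` and `value`
// are wrapped in `MaybeUninit` because CactusRef drops the inner value (and
// its links) while `Weak` pointers may keep the allocation itself alive.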
#[repr(C)]
pub(crate) struct RcBox<T> {
    strong: Cell<usize>,
    weak: Cell<usize>,
    pub links: MaybeUninit<RefCell<Links<T>>>,
    pub value: MaybeUninit<T>,
}

impl<T> RcBox<T> {
    /// # Safety
    ///
    /// Callers must ensure this `RcBox` is not dead.
    #[inline]
    pub(crate) unsafe fn links(&self) -> &RefCell<Links<T>> {
        let links = &self.links;
        // SAFETY: because callers have ensured the `RcBox` is not dead, `links`
        // has not yet been deallocated and the `MaybeUninit` is inhabited.
        let pointer_to_links = links as *const MaybeUninit<RefCell<Links<T>>>;
        &*(pointer_to_links.cast::<RefCell<Links<T>>>())
    }
}

/// A single-threaded reference-counting pointer. 'Rc' stands for 'Reference
/// Counted'.
///
/// See the [module-level documentation](./index.html) for more details.
///
/// The inherent methods of `Rc` are all associated functions, which means
/// that you have to call them as e.g., [`Rc::get_mut(&mut value)`][get_mut] instead of
/// `value.get_mut()`. This avoids conflicts with methods of the inner type `T`.
///
/// [get_mut]: Rc::get_mut
pub struct Rc<T> {
    pub(crate) ptr: NonNull<RcBox<T>>,
    phantom: PhantomData<RcBox<T>>,
}

/// `Rc` is not `Send`.
///
/// ```compile_fail
/// use cactusref::Rc;
/// fn requires_send<T: Send>(val: T) {}
/// let rc = Rc::<usize>::new(1);
/// requires_send(rc);
/// ```
mod rc_is_not_send {}

/// `Rc` is not `Sync`.
///
/// ```compile_fail
/// use cactusref::Rc;
/// fn requires_sync<T: Sync>(val: T) {}
/// let rc = Rc::<usize>::new(1);
/// requires_sync(rc);
/// ```
mod rc_is_not_sync {}

impl<T> Rc<T> {
    #[inline(always)]
    pub(crate) fn inner(&self) -> &RcBox<T> {
        // This unsafety is ok because while this Rc is alive we're guaranteed
        // that the inner pointer is valid.
        unsafe { self.ptr.as_ref() }
    }

    fn from_inner(ptr: NonNull<RcBox<T>>) -> Self {
        Self {
            ptr,
            phantom: PhantomData,
        }
    }

    unsafe fn from_ptr(ptr: *mut RcBox<T>) -> Self {
        Self::from_inner(NonNull::new_unchecked(ptr))
    }
}

impl<T> Rc<T> {
    /// Constructs a new `Rc<T>`.
    ///
    /// # Examples
    ///
    /// ```
    /// use cactusref::Rc;
    ///
    /// let five = Rc::new(5);
    /// ```
    pub fn new(value: T) -> Rc<T> {
        // There is an implicit weak pointer owned by all the strong
        // pointers, which ensures that the weak destructor never frees
        // the allocation while the strong destructor is running, even
        // if the weak pointer is stored inside the strong one.
        Self::from_inner(
            Box::leak(Box::new(RcBox {
                strong: Cell::new(1),
                weak: Cell::new(1),
                links: MaybeUninit::new(RefCell::new(Links::new())),
                value: MaybeUninit::new(value),
            }))
            .into(),
        )
    }

    /// Constructs a new `Rc` with uninitialized contents.
    ///
    /// # Examples
    ///
    /// ```
    /// use cactusref::Rc;
    ///
    /// let mut five = Rc::<u32>::new_uninit();
    ///
    /// let five = unsafe {
    ///     // Deferred initialization:
    ///     Rc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);
    ///
    ///     five.assume_init()
    /// };
    ///
    /// assert_eq!(*five, 5)
    /// ```
    #[must_use]
    pub fn new_uninit() -> Rc<MaybeUninit<T>> {
        unsafe {
            Rc::from_ptr(Rc::allocate_for_layout(
                Layout::new::<T>(),
                |layout| Global.allocate(layout),
                <*mut u8>::cast,
            ))
        }
    }

    /// Constructs a new `Pin<Rc<T>>`. If `T` does not implement `Unpin`, then
    /// `value` will be pinned in memory and unable to be moved.
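    ///
    /// # Examples
    ///
    /// A minimal sketch; pinning does not change how the value is read.
    ///
    /// ```
    /// use cactusref::Rc;
    ///
    /// let five = Rc::pin(5);
    /// assert_eq!(*five, 5);
    /// ```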
    pub fn pin(value: T) -> Pin<Rc<T>> {
        unsafe { Pin::new_unchecked(Rc::new(value)) }
    }

    /// Returns the inner value, if the `Rc` has exactly one strong reference.
    ///
    /// Otherwise, an [`Err`] is returned with the same `Rc` that was
    /// passed in.
    ///
    /// This will succeed even if there are outstanding weak references.
    ///
    /// # Examples
    ///
    /// ```
    /// use cactusref::Rc;
    ///
    /// let x = Rc::new(3);
    /// assert_eq!(Rc::try_unwrap(x), Ok(3));
    ///
    /// let x = Rc::new(4);
    /// let _y = Rc::clone(&x);
    /// assert_eq!(*Rc::try_unwrap(x).unwrap_err(), 4);
    /// ```
    ///
    /// # Errors
    ///
    /// If the given `Rc` does not have exactly one strong reference, it is
    /// returned in the `Err` variant of the returned `Result`.
    #[inline]
    pub fn try_unwrap(this: Self) -> Result<T, Self> {
        if Rc::strong_count(&this) == 1 {
            unsafe {
                let val = ptr::read(&*this); // copy the contained object

                // Indicate to Weaks that they can't be promoted by decrementing
                // the strong count, and then remove the implicit "strong weak"
                // pointer while also handling drop logic by just crafting a
                // fake Weak.
                this.inner().dec_strong();
                let _weak = Weak {
                    ptr: this.ptr,
                    phantom: PhantomData,
                };
                mem::forget(this);
                Ok(val)
            }
        } else {
            Err(this)
        }
    }
}

impl<T> Rc<MaybeUninit<T>> {
    /// Converts to `Rc<T>`.
    ///
    /// # Safety
    ///
    /// As with [`MaybeUninit::assume_init`],
    /// it is up to the caller to guarantee that the inner value
    /// really is in an initialized state.
    /// Calling this when the content is not yet fully initialized
    /// causes immediate undefined behavior.
    ///
    /// [`MaybeUninit::assume_init`]: mem::MaybeUninit::assume_init
    ///
    /// # Examples
    ///
    /// ```
    /// use cactusref::Rc;
    ///
    /// let mut five = Rc::<u32>::new_uninit();
    ///
    /// let five = unsafe {
    ///     // Deferred initialization:
    ///     Rc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);
    ///
    ///     five.assume_init()
    /// };
    ///
    /// assert_eq!(*five, 5)
    /// ```
    #[inline]
    #[must_use]
    pub unsafe fn assume_init(self) -> Rc<T> {
        Rc::from_inner(ManuallyDrop::new(self).ptr.cast())
    }
}

impl<T> Rc<T> {
    /// Consumes the `Rc`, returning the wrapped pointer.
    ///
    /// To avoid a memory leak the pointer must be converted back to an `Rc` using
    /// [`Rc::from_raw`][from_raw].
    ///
    /// [from_raw]: Rc::from_raw
    ///
    /// # Examples
    ///
    /// ```
    /// use cactusref::Rc;
    ///
    /// let x = Rc::new("hello".to_owned());
    /// let x_ptr = Rc::into_raw(x);
    /// assert_eq!(unsafe { &*x_ptr }, "hello");
    /// // Reconstruct the `Rc` to avoid a leak.
    /// let _ = unsafe { Rc::from_raw(x_ptr) };
    /// ```
    #[must_use]
    pub fn into_raw(this: Self) -> *const T {
        let ptr = Self::as_ptr(&this);
        mem::forget(this);
        ptr
    }

    /// Provides a raw pointer to the data.
    ///
    /// The counts are not affected in any way and the `Rc` is not consumed. The pointer is valid
    /// for as long as there are strong counts in the `Rc`.
    ///
    /// # Examples
    ///
    /// ```
    /// use cactusref::Rc;
    ///
    /// let x = Rc::new("hello".to_owned());
    /// let y = Rc::clone(&x);
    /// let x_ptr = Rc::as_ptr(&x);
    /// assert_eq!(x_ptr, Rc::as_ptr(&y));
    /// assert_eq!(unsafe { &*x_ptr }, "hello");
    /// ```
    #[must_use]
    pub fn as_ptr(this: &Self) -> *const T {
        let ptr: *mut RcBox<T> = NonNull::as_ptr(this.ptr);

        // SAFETY: This cannot go through Deref::deref or Rc::inner because
        // this is required to retain raw/mut provenance such that e.g. `get_mut` can
        // write through the pointer after the Rc is recovered through `from_raw`.
        unsafe {
            // SAFETY: we can cast the `MaybeUninit<T>` to a `T` because we are
            // calling an associated function with a live `Rc`. If an `Rc` is
            // not dead, the inner `MaybeUninit` is inhabited.
            ptr::addr_of_mut!((*ptr).value).cast::<T>()
        }
    }

    /// Constructs an `Rc<T>` from a raw pointer.
    ///
    /// The raw pointer must have been previously returned by a call to
    /// [`Rc<U>::into_raw`][into_raw] where `U` must have the same size
    /// and alignment as `T`. This is trivially true if `U` is `T`.
    /// Note that if `U` is not `T` but has the same size and alignment, this is
    /// basically like transmuting references of different types. See
    /// [`mem::transmute`][transmute] for more information on what
    /// restrictions apply in this case.
    ///
    /// The user of `from_raw` has to make sure a specific value of `T` is only
    /// dropped once.
    ///
    /// This function is unsafe because improper use may lead to memory unsafety,
    /// even if the returned `Rc<T>` is never accessed.
    ///
    /// [into_raw]: Rc::into_raw
    /// [transmute]: core::mem::transmute
    ///
    /// # Examples
    ///
    /// ```
    /// use cactusref::Rc;
    ///
    /// let x = Rc::new("hello".to_owned());
    /// let x_ptr = Rc::into_raw(x);
    ///
    /// unsafe {
    ///     // Convert back to an `Rc` to prevent leak.
    ///     let x = Rc::from_raw(x_ptr);
    ///     assert_eq!(&*x, "hello");
    ///
    ///     // Further calls to `Rc::from_raw(x_ptr)` would be memory-unsafe.
    /// }
    ///
    /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling!
    /// ```
    ///
    /// # Safety
    ///
    /// Callers must ensure that `ptr` points to a live `Rc` and was created
    /// with a call to [`Rc::into_raw`].
    pub unsafe fn from_raw(ptr: *const T) -> Self {
        let offset = data_offset(ptr);

        // Reverse the offset to find the original RcBox.
        let rc_ptr = (ptr as *mut u8)
            .offset(-offset)
            .with_metadata_of(ptr as *mut RcBox<T>);

        Self::from_ptr(rc_ptr)
    }

    /// Creates a new [`Weak`] pointer to this allocation.
    ///
    /// # Examples
    ///
    /// ```
    /// use cactusref::Rc;
    ///
    /// let five = Rc::new(5);
    ///
    /// let weak_five = Rc::downgrade(&five);
    /// ```
    #[must_use]
    pub fn downgrade(this: &Self) -> Weak<T> {
        this.inner().inc_weak();
        // Make sure we do not create a dangling Weak
        debug_assert!(!is_dangling(this.ptr.as_ptr()));
        Weak {
            ptr: this.ptr,
            phantom: PhantomData,
        }
    }

    /// Gets the number of [`Weak`] pointers to this allocation.
    ///
    /// # Examples
    ///
    /// ```
    /// use cactusref::Rc;
    ///
    /// let five = Rc::new(5);
    /// let _weak_five = Rc::downgrade(&five);
    ///
    /// assert_eq!(1, Rc::weak_count(&five));
    /// ```
    #[inline]
    #[must_use]
    pub fn weak_count(this: &Self) -> usize {
        this.inner().weak() - 1
    }

    /// Gets the number of strong (`Rc`) pointers to this allocation.
    ///
    /// # Examples
    ///
    /// ```
    /// use cactusref::Rc;
    ///
    /// let five = Rc::new(5);
    /// let _also_five = Rc::clone(&five);
    ///
    /// assert_eq!(2, Rc::strong_count(&five));
    /// ```
    #[inline]
    #[must_use]
    pub fn strong_count(this: &Self) -> usize {
        this.inner().strong()
    }

    /// Increments the strong reference count on the `Rc<T>` associated with the
    /// provided pointer by one.
    ///
    /// # Safety
    ///
    /// The pointer must have been obtained through `Rc::into_raw`, and the
    /// associated `Rc` instance must be valid (i.e. the strong count must be at
    /// least 1) for the duration of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// use cactusref::Rc;
    ///
    /// let five = Rc::new(5);
    ///
    /// unsafe {
    ///     let ptr = Rc::into_raw(five);
    ///     Rc::increment_strong_count(ptr);
    ///
    ///     let five = Rc::from_raw(ptr);
    ///     assert_eq!(2, Rc::strong_count(&five));
    ///
    ///     // Decrement the strong count to avoid a leak.
    ///     Rc::decrement_strong_count(ptr);
    /// }
    /// ```
    #[inline]
    pub unsafe fn increment_strong_count(ptr: *const T) {
        // Retain Rc, but don't touch refcount by wrapping in ManuallyDrop
        let rc = ManuallyDrop::new(Rc::<T>::from_raw(ptr));
        // Now increase refcount, but don't drop new refcount either
        let _rc_clone: ManuallyDrop<_> = rc.clone();
    }

    /// Decrements the strong reference count on the `Rc<T>` associated with the
    /// provided pointer by one.
    ///
    /// # Safety
    ///
    /// The pointer must have been obtained through `Rc::into_raw`, and the
    /// associated `Rc` instance must be valid (i.e. the strong count must be at
    /// least 1) when invoking this method. This method can be used to release
    /// the final `Rc` and backing storage, but **should not** be called after
    /// the final `Rc` has been released.
    ///
    /// # Examples
    ///
    /// ```
    /// use cactusref::Rc;
    ///
    /// let five = Rc::new(5);
    ///
    /// unsafe {
    ///     let ptr = Rc::into_raw(five);
    ///     Rc::increment_strong_count(ptr);
    ///
    ///     let five = Rc::from_raw(ptr);
    ///     assert_eq!(2, Rc::strong_count(&five));
    ///     Rc::decrement_strong_count(ptr);
    ///     assert_eq!(1, Rc::strong_count(&five));
    /// }
    /// ```
    #[inline]
    pub unsafe fn decrement_strong_count(ptr: *const T) {
        drop(Rc::from_raw(ptr));
    }

    /// Returns `true` if there are no other `Rc` or [`Weak`] pointers to
    /// this allocation.
    #[inline]
    fn is_unique(this: &Self) -> bool {
        Rc::weak_count(this) == 0 && Rc::strong_count(this) == 1
    }

    /// Returns a mutable reference into the given `Rc`, if there are
    /// no other `Rc` or [`Weak`] pointers to the same allocation.
    ///
    /// Returns [`None`] otherwise, because it is not safe to
    /// mutate a shared value.
    ///
    /// See also [`make_mut`][make_mut], which will [`clone`][clone]
    /// the inner value when there are other pointers.
    ///
    /// [make_mut]: Rc::make_mut
    /// [clone]: Clone::clone
    ///
    /// # Examples
    ///
    /// ```
    /// use cactusref::Rc;
    ///
    /// let mut x = Rc::new(3);
    /// *Rc::get_mut(&mut x).unwrap() = 4;
    /// assert_eq!(*x, 4);
    ///
    /// let _y = Rc::clone(&x);
    /// assert!(Rc::get_mut(&mut x).is_none());
    /// ```
    #[inline]
    pub fn get_mut(this: &mut Self) -> Option<&mut T> {
        if Rc::is_unique(this) {
            unsafe { Some(Rc::get_mut_unchecked(this)) }
        } else {
            None
        }
    }

    /// Returns a mutable reference into the given `Rc`,
    /// without any check.
    ///
    /// See also [`get_mut`], which is safe and does appropriate checks.
    ///
    /// [`get_mut`]: Rc::get_mut
    ///
    /// # Safety
    ///
    /// Any other `Rc` or [`Weak`] pointers to the same allocation must not be dereferenced
    /// for the duration of the returned borrow.
    /// This is trivially the case if no such pointers exist,
    /// for example immediately after `Rc::new`.
    ///
    /// # Examples
    ///
    /// ```
    /// use cactusref::Rc;
    ///
    /// let mut x = Rc::new(String::new());
    /// unsafe {
    ///     Rc::get_mut_unchecked(&mut x).push_str("foo")
    /// }
    /// assert_eq!(*x, "foo");
    /// ```
    #[inline]
    pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T {
        debug_assert!(!this.inner().is_dead());
        // We are careful to *not* create a reference covering the "count" fields, as
        // this would conflict with accesses to the reference counts (e.g. by `Weak`).
        //
        // SAFETY: If we have an `Rc`, then the allocation is not dead, so the
        // `MaybeUninit` is inhabited.
        let value = &mut (*this.ptr.as_ptr()).value;
        // SAFETY: we can cast the `MaybeUninit<T>` to a `T` because we are
        // calling an associated function with a live `Rc`. If an `Rc` is not
        // dead, the inner `MaybeUninit` is inhabited.
        let pointer_to_value = (value as *mut MaybeUninit<T>).cast::<T>();
        &mut *(pointer_to_value)
    }

    /// Returns `true` if the two `Rc`s point to the same allocation
    /// (in a vein similar to [`ptr::eq`]).
    ///
    /// # Examples
    ///
    /// ```
    /// use cactusref::Rc;
    ///
    /// let five = Rc::new(5);
    /// let same_five = Rc::clone(&five);
    /// let other_five = Rc::new(5);
    ///
    /// assert!(Rc::ptr_eq(&five, &same_five));
    /// assert!(!Rc::ptr_eq(&five, &other_five));
    /// ```
    ///
    /// [`ptr::eq`]: core::ptr::eq
    #[inline]
    #[must_use]
    pub fn ptr_eq(this: &Self, other: &Self) -> bool {
        this.ptr.as_ptr() == other.ptr.as_ptr()
    }
}

impl<T: Clone> Rc<T> {
    /// Makes a mutable reference into the given `Rc`.
    ///
    /// If there are other `Rc` pointers to the same allocation, then `make_mut` will
    /// [`clone`] the inner value to a new allocation to ensure unique ownership. This is also
    /// referred to as clone-on-write.
    ///
    /// If there are no other `Rc` pointers to this allocation, then [`Weak`]
    /// pointers to this allocation will be disassociated.
    ///
    /// See also [`get_mut`], which will fail rather than cloning.
    ///
    /// [`clone`]: Clone::clone
    /// [`get_mut`]: Rc::get_mut
    ///
    /// # Examples
    ///
    /// ```
    /// use cactusref::Rc;
    ///
    /// let mut data = Rc::new(5);
    ///
    /// *Rc::make_mut(&mut data) += 1;        // Won't clone anything
    /// let mut other_data = Rc::clone(&data);    // Won't clone inner data
    /// *Rc::make_mut(&mut data) += 1;        // Clones inner data
    /// *Rc::make_mut(&mut data) += 1;        // Won't clone anything
    /// *Rc::make_mut(&mut other_data) *= 2;  // Won't clone anything
    ///
    /// // Now `data` and `other_data` point to different allocations.
    /// assert_eq!(*data, 8);
    /// assert_eq!(*other_data, 12);
    /// ```
    ///
    /// [`Weak`] pointers will be disassociated:
    ///
    /// ```
    /// use cactusref::Rc;
    ///
    /// let mut data = Rc::new(75);
    /// let weak = Rc::downgrade(&data);
    ///
    /// assert!(75 == *data);
    /// assert!(75 == *weak.upgrade().unwrap());
    ///
    /// *Rc::make_mut(&mut data) += 1;
    ///
    /// assert!(76 == *data);
    /// assert!(weak.upgrade().is_none());
    /// ```
    #[inline]
    pub fn make_mut(this: &mut Self) -> &mut T {
        if Rc::strong_count(this) != 1 {
            // Gotta clone the data, there are other Rcs.
            // Pre-allocate memory to allow writing the cloned value directly.
            let mut rc = Self::new_uninit();
            unsafe {
                let data = Rc::get_mut_unchecked(&mut rc);
                data.as_mut_ptr().write((**this).clone());
                *this = rc.assume_init();
            }
        } else if Rc::weak_count(this) != 0 {
            // Can just steal the data, all that's left is Weaks
            let mut rc = Self::new_uninit();
            unsafe {
                let data: &mut MaybeUninit<T> = mem::transmute(Rc::get_mut_unchecked(&mut rc));
                data.as_mut_ptr().copy_from_nonoverlapping(&**this, 1);

                this.inner().dec_strong();
                // Remove implicit strong-weak ref (no need to craft a fake
                // Weak here -- we know other Weaks can clean up for us)
                this.inner().dec_weak();
                ptr::write(this, rc.assume_init());
            }
        }
        // This unsafety is ok because we're guaranteed that the pointer
        // returned is the *only* pointer that will ever be returned to T. Our
        // reference count is guaranteed to be 1 at this point, and we required
        // the `Rc<T>` itself to be `mut`, so we're returning the only possible
        // reference to the allocation.
        unsafe {
            let value = &mut this.ptr.as_mut().value;
            // SAFETY: we can cast the `MaybeUninit<T>` to a `T` because we are
            // calling an associated function with a live `Rc`. If an `Rc` is
            // not dead, the inner `MaybeUninit` is inhabited.
            let pointer_to_value = (value as *mut MaybeUninit<T>).cast::<T>();
            &mut *(pointer_to_value)
        }
    }
}

impl<T> Rc<T> {
    /// Allocates an `RcBox<T>` with sufficient space for
    /// a possibly-unsized inner value where the value has the layout provided.
    ///
    /// The function `mem_to_rcbox` is called with the data pointer
    /// and must return a (potentially fat) pointer for the `RcBox<T>`.
    unsafe fn allocate_for_layout(
        value_layout: Layout,
        allocate: impl FnOnce(Layout) -> Result<NonNull<[u8]>, AllocError>,
        mem_to_rcbox: impl FnOnce(*mut u8) -> *mut RcBox<T>,
    ) -> *mut RcBox<T> {
        // Calculate layout using the given value layout.
        // Previously, layout was calculated on the expression
        // `&*(ptr as *const RcBox<T>)`, but this created a misaligned
        // reference (see #54908).
        let layout = Layout::new::<RcBox<()>>()
            .extend(value_layout)
            .unwrap()
            .0
            .pad_to_align();
        Rc::try_allocate_for_layout(value_layout, allocate, mem_to_rcbox)
            .unwrap_or_else(|_| handle_alloc_error(layout))
    }

    /// Allocates an `RcBox<T>` with sufficient space for
    /// a possibly-unsized inner value where the value has the layout provided,
    /// returning an error if allocation fails.
    ///
    /// The function `mem_to_rcbox` is called with the data pointer
    /// and must return a (potentially fat) pointer for the `RcBox<T>`.
    #[inline]
    unsafe fn try_allocate_for_layout(
        value_layout: Layout,
        allocate: impl FnOnce(Layout) -> Result<NonNull<[u8]>, AllocError>,
        mem_to_rcbox: impl FnOnce(*mut u8) -> *mut RcBox<T>,
    ) -> Result<*mut RcBox<T>, AllocError> {
        // Calculate layout using the given value layout.
        // Previously, layout was calculated on the expression
        // `&*(ptr as *const RcBox<T>)`, but this created a misaligned
        // reference (see #54908).
        let layout = Layout::new::<RcBox<()>>()
            .extend(value_layout)
            .unwrap()
            .0
            .pad_to_align();

        // Allocate for the layout.
        let ptr = allocate(layout)?;

        // Initialize the RcBox
        let inner = mem_to_rcbox(ptr.as_non_null_ptr().as_ptr());
        debug_assert_eq!(Layout::for_value(&*inner), layout);

        ptr::write(&mut (*inner).strong, Cell::new(1));
        ptr::write(&mut (*inner).weak, Cell::new(1));
        ptr::write(
            &mut (*inner).links,
            MaybeUninit::new(RefCell::new(Links::new())),
        );
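        // Note: `value` is intentionally left uninitialized here; callers such
        // as `new_uninit` and `from_box` initialize it after allocation.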

        Ok(inner)
    }

    /// Allocates an `RcBox<T>` with sufficient space for an unsized inner value.
    unsafe fn allocate_for_ptr(ptr: *const T) -> *mut RcBox<T> {
        // Allocate for the `RcBox<T>` using the given value.
        Self::allocate_for_layout(
            Layout::for_value(&*ptr),
            |layout| Global.allocate(layout),
            |mem| mem.with_metadata_of(ptr as *mut RcBox<T>),
        )
    }

    fn from_box(v: Box<T>) -> Rc<T> {
        unsafe {
            let (box_unique, alloc) = Box::into_raw_with_allocator(v);
            // SAFETY: Pointers obtained from `Box::into_raw` are always
            // non-null.
            let box_unique = NonNull::new_unchecked(box_unique);
            let box_ptr = box_unique.as_ptr();

            let value_size = mem::size_of_val(&*box_ptr);
            let ptr = Self::allocate_for_ptr(box_ptr);

            // Copy value as bytes
            ptr::copy_nonoverlapping(
                box_ptr.cast_const().cast::<u8>(),
                ptr::addr_of_mut!((*ptr).value).cast::<u8>(),
                value_size,
            );

            // Free the allocation without dropping its contents
            box_free(box_unique, alloc);

            Self::from_ptr(ptr)
        }
    }
}

impl<T> Deref for Rc<T> {
    type Target = T;

    #[inline(always)]
    fn deref(&self) -> &T {
        unsafe {
            let value = &self.inner().value;
            // SAFETY: we can cast the `MaybeUninit<T>` to a `T` because `self`
            // is a live `Rc`. If an `Rc` is not dead, the inner `MaybeUninit`
            // is inhabited.
            let pointer_to_value = (value as *const MaybeUninit<T>).cast::<T>();
            &*(pointer_to_value)
        }
    }
}

impl<T> Clone for Rc<T> {
    /// Makes a clone of the `Rc` pointer.
    ///
    /// This creates another pointer to the same allocation, increasing the
    /// strong reference count.
    ///
    /// # Examples
    ///
    /// ```
    /// use cactusref::Rc;
    ///
    /// let five = Rc::new(5);
    ///
    /// let _ = Rc::clone(&five);
    /// ```
    #[inline]
    fn clone(&self) -> Rc<T> {
        self.inner().inc_strong();
        Self::from_inner(self.ptr)
    }
}

impl<T: Default> Default for Rc<T> {
    /// Creates a new `Rc<T>`, with the `Default` value for `T`.
    ///
    /// # Examples
    ///
    /// ```
    /// use cactusref::Rc;
    ///
    /// let x: Rc<i32> = Default::default();
    /// assert_eq!(*x, 0);
    /// ```
    #[inline]
    fn default() -> Rc<T> {
        Rc::new(Default::default())
    }
}

impl<T: PartialEq> PartialEq for Rc<T> {
    /// Equality for two `Rc`s.
    ///
    /// Two `Rc`s are equal if their inner values are equal, even if they are
    /// stored in different allocations.
    ///
    /// If `T` also implements `Eq` (implying reflexivity of equality),
    /// two `Rc`s that point to the same allocation are
    /// always equal.
    ///
    /// # Examples
    ///
    /// ```
    /// use cactusref::Rc;
    ///
    /// let five = Rc::new(5);
    ///
    /// assert!(five == Rc::new(5));
    /// ```
    #[inline]
    fn eq(&self, other: &Rc<T>) -> bool {
        **self == **other
    }

    /// Inequality for two `Rc`s.
    ///
    /// Two `Rc`s are unequal if their inner values are unequal.
    ///
    /// If `T` also implements `Eq` (implying reflexivity of equality),
    /// two `Rc`s that point to the same allocation are
    /// never unequal.
    ///
    /// # Examples
    ///
    /// ```
    /// use cactusref::Rc;
    ///
    /// let five = Rc::new(5);
    ///
    /// assert!(five != Rc::new(6));
    /// ```
    #[inline]
    #[allow(clippy::partialeq_ne_impl)]
    fn ne(&self, other: &Rc<T>) -> bool {
        **self != **other
    }
}

impl<T: Eq> Eq for Rc<T> {}

impl<T: PartialOrd> PartialOrd for Rc<T> {
    /// Partial comparison for two `Rc`s.
    ///
    /// The two are compared by calling `partial_cmp()` on their inner values.
    ///
    /// # Examples
    ///
    /// ```
    /// use cactusref::Rc;
    /// use std::cmp::Ordering;
    ///
    /// let five = Rc::new(5);
    ///
    /// assert_eq!(Some(Ordering::Less), five.partial_cmp(&Rc::new(6)));
    /// ```
    #[inline(always)]
    fn partial_cmp(&self, other: &Rc<T>) -> Option<Ordering> {
        (**self).partial_cmp(&**other)
    }

    /// Less-than comparison for two `Rc`s.
    ///
    /// The two are compared by calling `<` on their inner values.
    ///
    /// # Examples
    ///
    /// ```
    /// use cactusref::Rc;
    ///
    /// let five = Rc::new(5);
    ///
    /// assert!(five < Rc::new(6));
    /// ```
    #[inline(always)]
    fn lt(&self, other: &Rc<T>) -> bool {
        **self < **other
    }

    /// 'Less than or equal to' comparison for two `Rc`s.
    ///
    /// The two are compared by calling `<=` on their inner values.
    ///
    /// # Examples
    ///
    /// ```
    /// use cactusref::Rc;
    ///
    /// let five = Rc::new(5);
    ///
    /// assert!(five <= Rc::new(5));
    /// ```
    #[inline(always)]
    fn le(&self, other: &Rc<T>) -> bool {
        **self <= **other
    }

    /// Greater-than comparison for two `Rc`s.
    ///
    /// The two are compared by calling `>` on their inner values.
    ///
    /// # Examples
    ///
    /// ```
    /// use cactusref::Rc;
    ///
    /// let five = Rc::new(5);
    ///
    /// assert!(five > Rc::new(4));
    /// ```
    #[inline(always)]
    fn gt(&self, other: &Rc<T>) -> bool {
        **self > **other
    }

    /// 'Greater than or equal to' comparison for two `Rc`s.
    ///
    /// The two are compared by calling `>=` on their inner values.
    ///
    /// # Examples
    ///
    /// ```
    /// use cactusref::Rc;
    ///
    /// let five = Rc::new(5);
    ///
    /// assert!(five >= Rc::new(5));
    /// ```
    #[inline(always)]
    fn ge(&self, other: &Rc<T>) -> bool {
        **self >= **other
    }
}

impl<T: Ord> Ord for Rc<T> {
    /// Comparison for two `Rc`s.
    ///
    /// The two are compared by calling `cmp()` on their inner values.
    ///
    /// # Examples
    ///
    /// ```
    /// use cactusref::Rc;
    /// use std::cmp::Ordering;
    ///
    /// let five = Rc::new(5);
    ///
    /// assert_eq!(Ordering::Less, five.cmp(&Rc::new(6)));
    /// ```
    #[inline]
    fn cmp(&self, other: &Rc<T>) -> Ordering {
        (**self).cmp(&**other)
    }
}

impl<T: Hash> Hash for Rc<T> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        (**self).hash(state);
    }
}

impl<T: fmt::Display> fmt::Display for Rc<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(&**self, f)
    }
}

impl<T: fmt::Debug> fmt::Debug for Rc<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(&**self, f)
    }
}

impl<T> fmt::Pointer for Rc<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Pointer::fmt(&ptr::addr_of!(**self), f)
    }
}

impl<T> From<T> for Rc<T> {
    /// Converts a generic type `T` into an `Rc<T>`.
    ///
    /// The conversion allocates on the heap and moves `t`
    /// from the stack into it.
    ///
    /// # Example
    /// ```rust
    /// # use cactusref::Rc;
    /// let x = 5;
    /// let rc = Rc::new(5);
    ///
    /// assert_eq!(Rc::from(x), rc);
    /// ```
    fn from(t: T) -> Self {
        Rc::new(t)
    }
}

impl<T> From<Box<T>> for Rc<T> {
    /// Moves a boxed object to a new, reference-counted allocation.
    ///
    /// # Example
    ///
    /// ```
    /// # use cactusref::Rc;
    /// let original: Box<i32> = Box::new(1);
    /// let shared: Rc<i32> = Rc::from(original);
    /// assert_eq!(1, *shared);
    /// ```
    #[inline]
    fn from(v: Box<T>) -> Rc<T> {
        Rc::from_box(v)
    }
}

/// `Weak` is a version of [`Rc`] that holds a non-owning reference to the
/// managed allocation. The allocation is accessed by calling [`upgrade`] on the `Weak`
/// pointer, which returns an <code>[Option]<[Rc]\<T>></code>.
///
/// Since a `Weak` reference does not count towards ownership, it will not
/// prevent the value stored in the allocation from being dropped, and `Weak` itself makes no
/// guarantees about the value still being present. Thus it may return [`None`]
/// when [`upgrade`]d. Note however that a `Weak` reference *does* prevent the allocation
/// itself (the backing store) from being deallocated.
///
/// A `Weak` pointer is useful for keeping a temporary reference to the allocation
/// managed by [`Rc`] without preventing its inner value from being dropped. It is also used to
/// prevent circular references between [`Rc`] pointers, since mutual owning references
/// would never allow either [`Rc`] to be dropped. For example, a tree could
/// have strong [`Rc`] pointers from parent nodes to children, and `Weak`
/// pointers from children back to their parents.
///
/// The typical way to obtain a `Weak` pointer is to call [`Rc::downgrade`].
///
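/// # Examples
///
/// A small sketch of the downgrade/upgrade round trip:
///
/// ```
/// use cactusref::Rc;
///
/// let strong = Rc::new("hello".to_owned());
/// // `downgrade` creates a non-owning pointer that does not keep the value alive.
/// let weak = Rc::downgrade(&strong);
/// assert_eq!("hello", *weak.upgrade().unwrap());
///
/// drop(strong);
/// // The last strong pointer is gone, so the inner value has been dropped.
/// assert!(weak.upgrade().is_none());
/// ```
///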
/// [`upgrade`]: Weak::upgrade
pub struct Weak<T> {
    // This is a `NonNull` to allow optimizing the size of this type in enums,
    // but it is not necessarily a valid pointer.
    // `Weak::new` sets this to `usize::MAX` so that it doesn't need
    // to allocate space on the heap. That's not a value a real pointer
    // will ever have because RcBox has alignment at least 2.
    // This is only possible when `T: Sized`; unsized `T` never dangle.
    ptr: NonNull<RcBox<T>>,
    phantom: PhantomData<RcBox<T>>,
}

/// `Weak` is not `Send`.
///
/// ```compile_fail
/// use cactusref::Weak;
/// fn requires_send<T: Send>(val: T) {}
/// let weak = Weak::<usize>::new();
/// requires_send(weak);
/// ```
mod weak_is_not_send {}

/// `Weak` is not `Sync`.
///
/// ```compile_fail
/// use cactusref::Weak;
/// fn requires_sync<T: Sync>(val: T) {}
/// let weak = Weak::<usize>::new();
/// requires_sync(weak);
/// ```
mod weak_is_not_sync {}

impl<T> Weak<T> {
    /// Constructs a new `Weak<T>`, without allocating any memory.
    /// Calling [`upgrade`] on the return value always gives [`None`].
    ///
    /// [`upgrade`]: Weak::upgrade
    ///
    /// # Examples
    ///
    /// ```
    /// use cactusref::Weak;
    ///
    /// let empty: Weak<i64> = Weak::new();
    /// assert!(empty.upgrade().is_none());
    /// ```
    #[must_use]
    pub fn new() -> Weak<T> {
        Weak {
            ptr: NonNull::new(usize::MAX as *mut RcBox<T>).expect("MAX is not 0"),
            phantom: PhantomData,
        }
    }
}
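
// `Weak::new` stores `usize::MAX` as a sentinel address instead of allocating;
// a "dangling" `Weak` like this has no backing `RcBox` at all.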
pub(crate) fn is_dangling<T: ?Sized>(ptr: *mut T) -> bool {
    let address = ptr.cast::<()>() as usize;
    address == usize::MAX
}

/// Helper type to allow accessing the reference counts without
/// making any assertions about the data field.
struct WeakInner<'a> {
    weak: &'a Cell<usize>,
    strong: &'a Cell<usize>,
}

impl<T> Weak<T> {
    /// Returns a raw pointer to the object `T` pointed to by this `Weak<T>`.
    ///
    /// The pointer is valid only if there are some strong references. The pointer may be dangling,
    /// unaligned or even [`null`] otherwise.
    ///
    /// # Examples
    ///
    /// ```
    /// use cactusref::Rc;
    /// use std::ptr;
    ///
    /// let strong = Rc::new("hello".to_owned());
    /// let weak = Rc::downgrade(&strong);
    /// // Both point to the same object
    /// assert!(ptr::eq(&*strong, weak.as_ptr()));
    /// // The strong here keeps it alive, so we can still access the object.
    /// assert_eq!("hello", unsafe { &*weak.as_ptr() });
    ///
    /// drop(strong);
    /// // But not any more. We can do weak.as_ptr(), but accessing the pointer would lead to
    /// // undefined behaviour.
    /// // assert_eq!("hello", unsafe { &*weak.as_ptr() });
    /// ```
    ///
    /// [`null`]: core::ptr::null
    #[must_use]
    pub fn as_ptr(&self) -> *const T {
        let ptr: *mut RcBox<T> = NonNull::as_ptr(self.ptr);

        if is_dangling(ptr) {
            // If the pointer is dangling, we return the sentinel directly. This cannot be
            // a valid payload address, as the payload is at least as aligned as RcBox (usize).
            ptr as *const T
        } else {
            // SAFETY: if is_dangling returns false, then the pointer is dereferenceable.
            // The payload may already be dropped, and we have to maintain provenance,
            // so use raw pointer manipulation: we only compute the address of the
            // `value` field here and never create a reference to it.
            unsafe { ptr::addr_of_mut!((*ptr).value) as *const T }
        }
    }

    /// Consumes the `Weak<T>` and turns it into a raw pointer.
    ///
    /// This converts the weak pointer into a raw pointer, while still preserving the ownership of
    /// one weak reference (the weak count is not modified by this operation). It can be turned
    /// back into the `Weak<T>` with [`from_raw`].
    ///
    /// The same restrictions of accessing the target of the pointer as with
    /// [`as_ptr`] apply.
    ///
    /// # Examples
    ///
    /// ```
    /// use cactusref::{Rc, Weak};
    ///
    /// let strong = Rc::new("hello".to_owned());
    /// let weak = Rc::downgrade(&strong);
    /// let raw = weak.into_raw();
    ///
    /// assert_eq!(1, Rc::weak_count(&strong));
    /// assert_eq!("hello", unsafe { &*raw });
    ///
    /// drop(unsafe { Weak::from_raw(raw) });
    /// assert_eq!(0, Rc::weak_count(&strong));
    /// ```
    ///
    /// [`from_raw`]: Weak::from_raw
    /// [`as_ptr`]: Weak::as_ptr
    #[must_use]
    pub fn into_raw(self) -> *const T {
        let result = self.as_ptr();
        mem::forget(self);
        result
    }

    /// Converts a raw pointer previously created by [`into_raw`] back into `Weak<T>`.
    ///
    /// This can be used to safely get a strong reference (by calling [`upgrade`]
    /// later) or to deallocate the weak count by dropping the `Weak<T>`.
    ///
    /// It takes ownership of one weak reference (with the exception of pointers created by [`new`],
    /// as these don't own anything; the method still works on them).
    ///
    /// # Safety
    ///
    /// The pointer must have originated from [`into_raw`] and must still own its potential
    /// weak reference.
    ///
    /// It is allowed for the strong count to be 0 at the time of calling this. Nevertheless, this
    /// takes ownership of one weak reference currently represented as a raw pointer (the weak
    /// count is not modified by this operation) and therefore it must be paired with a previous
    /// call to [`into_raw`].
    ///
    /// # Examples
    ///
    /// ```
    /// use cactusref::{Rc, Weak};
    ///
    /// let strong = Rc::new("hello".to_owned());
    ///
    /// let raw_1 = Rc::downgrade(&strong).into_raw();
    /// let raw_2 = Rc::downgrade(&strong).into_raw();
    ///
    /// assert_eq!(2, Rc::weak_count(&strong));
    ///
    /// assert_eq!("hello", &*unsafe { Weak::from_raw(raw_1) }.upgrade().unwrap());
    /// assert_eq!(1, Rc::weak_count(&strong));
    ///
    /// drop(strong);
    ///
    /// // Decrement the last weak count.
    /// assert!(unsafe { Weak::from_raw(raw_2) }.upgrade().is_none());
    /// ```
    ///
    /// [`into_raw`]: Weak::into_raw
    /// [`upgrade`]: Weak::upgrade
    /// [`new`]: Weak::new
    pub unsafe fn from_raw(ptr: *const T) -> Self {
        // See Weak::as_ptr for context on how the input pointer is derived.

        let ptr = if is_dangling(ptr.cast_mut()) {
            // This is a dangling Weak.
            ptr as *mut RcBox<T>
        } else {
            // Otherwise, we're guaranteed the pointer came from a nondangling Weak.
            // SAFETY: data_offset is safe to call, as ptr references a real (potentially dropped) T.
            let offset = data_offset(ptr);
            // Thus, we reverse the offset to get the whole RcBox.
            // SAFETY: the pointer originated from a Weak, so this offset is safe.
            (ptr as *mut u8)
                .offset(-offset)
                .with_metadata_of(ptr as *mut RcBox<T>)
        };

        // SAFETY: we now have recovered the original Weak pointer, so can create the Weak.
        Weak {
            ptr: NonNull::new_unchecked(ptr),
            phantom: PhantomData,
        }
    }

    /// Attempts to upgrade the `Weak` pointer to an [`Rc`], delaying
    /// dropping of the inner value if successful.
    ///
    /// Returns [`None`] if the inner value has since been dropped.
    ///
    /// # Examples
    ///
    /// ```
    /// use cactusref::Rc;
    ///
    /// let five = Rc::new(5);
    ///
    /// let weak_five = Rc::downgrade(&five);
    ///
    /// let strong_five: Option<Rc<_>> = weak_five.upgrade();
    /// assert!(strong_five.is_some());
    ///
    /// // Destroy all strong pointers.
    /// drop(strong_five);
    /// drop(five);
    ///
    /// assert!(weak_five.upgrade().is_none());
    /// ```
    #[must_use]
    pub fn upgrade(&self) -> Option<Rc<T>> {
        let inner = self.inner()?;
        if inner.is_dead() {
            None
        } else {
            inner.inc_strong();
            Some(Rc::from_inner(self.ptr))
        }
    }

    /// Gets the number of strong (`Rc`) pointers pointing to this allocation.
    ///
    /// If `self` was created using [`Weak::new`], this will return 0.
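    ///
    /// # Examples
    ///
    /// A small sketch of the counts:
    ///
    /// ```
    /// use cactusref::{Rc, Weak};
    ///
    /// let five = Rc::new(5);
    /// let weak_five = Rc::downgrade(&five);
    /// assert_eq!(1, weak_five.strong_count());
    ///
    /// let empty: Weak<i32> = Weak::new();
    /// assert_eq!(0, empty.strong_count());
    /// ```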
    #[must_use]
    pub fn strong_count(&self) -> usize {
        if let Some(inner) = self.inner() {
            if inner.is_uninit() {
                0
            } else {
                inner.strong()
            }
        } else {
            0
        }
    }

    /// Gets the number of `Weak` pointers pointing to this allocation.
    ///
    /// If no strong pointers remain, this will return zero.
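    ///
    /// # Examples
    ///
    /// A small sketch; the implicit weak reference held collectively by the
    /// strong pointers is not included in the count.
    ///
    /// ```
    /// use cactusref::Rc;
    ///
    /// let five = Rc::new(5);
    /// let weak_five = Rc::downgrade(&five);
    /// assert_eq!(1, weak_five.weak_count());
    ///
    /// drop(five);
    /// // No strong pointers remain.
    /// assert_eq!(0, weak_five.weak_count());
    /// ```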
    #[must_use]
    pub fn weak_count(&self) -> usize {
        self.inner().map_or(0, |inner| {
            if inner.is_uninit() {
                0
            } else if inner.strong() > 0 {
                inner.weak() - 1 // subtract the implicit weak ptr
            } else {
                0
            }
        })
    }
1596
1597    /// Returns `None` when the pointer is dangling and there is no allocated `RcBox`,
1598    /// (i.e., when this `Weak` was created by `Weak::new`).
    #[inline]
    #[must_use]
    fn inner(&self) -> Option<WeakInner<'_>> {
        if is_dangling(self.ptr.as_ptr()) {
            None
        } else {
            // We are careful to *not* create a reference covering the "data" field, as
            // the field may be mutated concurrently (for example, if the last `Rc`
            // is dropped, the data field will be dropped in-place).
            Some(unsafe {
                let ptr = self.ptr.as_ptr();
                WeakInner {
                    strong: &(*ptr).strong,
                    weak: &(*ptr).weak,
                }
            })
        }
    }

    /// Returns `true` if the two `Weak`s point to the same allocation (similar to
    /// [`ptr::eq`]), or if both don't point to any allocation
    /// (because they were created with `Weak::new()`).
    ///
    /// # Notes
    ///
    /// Since this compares pointers, two `Weak`s created by `Weak::new()` will
    /// compare equal to each other, even though they don't point to any allocation.
    ///
    /// # Examples
    ///
    /// ```
    /// use cactusref::Rc;
    ///
    /// let first_rc = Rc::new(5);
    /// let first = Rc::downgrade(&first_rc);
    /// let second = Rc::downgrade(&first_rc);
    ///
    /// assert!(first.ptr_eq(&second));
    ///
    /// let third_rc = Rc::new(5);
    /// let third = Rc::downgrade(&third_rc);
    ///
    /// assert!(!first.ptr_eq(&third));
    /// ```
    ///
    /// Comparing `Weak::new`.
    ///
    /// ```
    /// use cactusref::{Rc, Weak};
    ///
    /// let first = Weak::new();
    /// let second = Weak::new();
    /// assert!(first.ptr_eq(&second));
    ///
    /// let third_rc = Rc::new(());
    /// let third = Rc::downgrade(&third_rc);
    /// assert!(!first.ptr_eq(&third));
    /// ```
    ///
    /// [`ptr::eq`]: core::ptr::eq
    #[inline]
    #[must_use]
    pub fn ptr_eq(&self, other: &Self) -> bool {
        self.ptr.as_ptr() == other.ptr.as_ptr()
    }
}

unsafe impl<#[may_dangle] T> Drop for Weak<T> {
    /// Drops the `Weak` pointer.
    ///
    /// # Examples
    ///
    /// ```
    /// use cactusref::{Rc, Weak};
    ///
    /// struct Foo;
    ///
    /// impl Drop for Foo {
    ///     fn drop(&mut self) {
    ///         println!("dropped!");
    ///     }
    /// }
    ///
    /// let foo = Rc::new(Foo);
    /// let weak_foo = Rc::downgrade(&foo);
    /// let other_weak_foo = Weak::clone(&weak_foo);
    ///
    /// drop(weak_foo);   // Doesn't print anything
    /// drop(foo);        // Prints "dropped!"
    ///
    /// assert!(other_weak_foo.upgrade().is_none());
    /// ```
    fn drop(&mut self) {
        let inner = if let Some(inner) = self.inner() {
            inner
        } else {
            return;
        };

        inner.dec_weak();
        // the weak count starts at 1, and will only go to zero if all
        // the strong pointers have disappeared.
        if inner.weak() == 0 {
            unsafe {
                // SAFETY: `T` is `Sized`, which means `Layout::for_value_raw`
                // is always safe to call.
                let layout = Layout::for_value_raw(self.ptr.as_ptr());
                Global.deallocate(self.ptr.cast(), layout);
            }
        }
    }
}

impl<T> Clone for Weak<T> {
    /// Makes a clone of the `Weak` pointer that points to the same allocation.
    ///
    /// # Examples
    ///
    /// ```
    /// use cactusref::{Rc, Weak};
    ///
    /// let weak_five = Rc::downgrade(&Rc::new(5));
    ///
    /// let _ = Weak::clone(&weak_five);
    /// ```
    #[inline]
    fn clone(&self) -> Weak<T> {
        if let Some(inner) = self.inner() {
            inner.inc_weak();
        }
        Weak {
            ptr: self.ptr,
            phantom: PhantomData,
        }
    }
}

impl<T: fmt::Debug> fmt::Debug for Weak<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "(Weak)")
    }
}
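
// Note: the `Debug` output above is intentionally a fixed placeholder, since
// the pointee may already have been dropped. A quick sketch of what callers
// see:
//
//     let weak: cactusref::Weak<i32> = cactusref::Weak::new();
//     assert_eq!(format!("{:?}", weak), "(Weak)");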

impl<T> Default for Weak<T> {
    /// Constructs a new `Weak<T>`, without allocating any memory.
    /// Calling [`upgrade`] on the return value always gives [`None`].
    ///
    /// [`None`]: Option
    /// [`upgrade`]: Weak::upgrade
    ///
    /// # Examples
    ///
    /// ```
    /// use cactusref::Weak;
    ///
    /// let empty: Weak<i64> = Default::default();
    /// assert!(empty.upgrade().is_none());
    /// ```
    fn default() -> Weak<T> {
        Weak::new()
    }
}

// NOTE: We check the ref-count for overflow (and abort) to deal with
// mem::forget safely. In particular, if you mem::forget Rcs (or Weaks), the
// ref-count can overflow, and then you can free the allocation while
// outstanding Rcs (or Weaks) exist. We abort because this is such a
// degenerate scenario that we don't care what happens -- no real program
// should ever experience it.
//
// This should have negligible overhead, since you don't actually need to
// clone these much in Rust thanks to ownership and move semantics.
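//
// A standalone sketch of the overflow guard described above, using a plain
// `Cell<usize>` (illustrative only; the real checks live in `inc_strong` and
// `inc_weak` below):
//
//     use core::cell::Cell;
//
//     fn guarded_inc(count: &Cell<usize>) {
//         let n = count.get();
//         // Abort instead of wrapping: a wrapped count could let the
//         // allocation be freed while pointers to it still exist.
//         if n == usize::MAX {
//             std::process::abort();
//         }
//         count.set(n + 1);
//     }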

#[doc(hidden)]
pub(crate) trait RcInnerPtr {
    fn weak_ref(&self) -> &Cell<usize>;
    fn strong_ref(&self) -> &Cell<usize>;

    #[inline]
    fn strong(&self) -> usize {
        self.strong_ref().get()
    }

    #[inline]
    fn inc_strong(&self) {
        let strong = self.strong();

        // We want to abort on overflow instead of dropping the value.
        // The reference count will never be zero when this is called;
        // nevertheless, we insert an abort here to hint LLVM at
        // an otherwise missed optimization.
        if strong == 0 || strong == usize::MAX {
            abort();
        }
        // `usize::MAX` is used to mark the `Rc` as uninitialized, so disallow
        // incrementing the strong count up to `usize::MAX`; doing so would
        // cause a memory leak and type confusion in `Drop::drop`.
        if strong + 1 == usize::MAX {
            abort();
        }
        self.strong_ref().set(strong + 1);
    }

    #[inline]
    fn dec_strong(&self) {
        self.strong_ref().set(self.strong() - 1);
    }

    #[inline]
    fn weak(&self) -> usize {
        self.weak_ref().get()
    }

    #[inline]
    fn inc_weak(&self) {
        let weak = self.weak();

        // We want to abort on overflow instead of dropping the value.
        // The reference count will never be zero when this is called;
        // nevertheless, we insert an abort here to hint LLVM at
        // an otherwise missed optimization.
        if weak == 0 || weak == usize::MAX {
            abort();
        }
        self.weak_ref().set(weak + 1);
    }

    #[inline]
    fn dec_weak(&self) {
        self.weak_ref().set(self.weak() - 1);
    }

    #[inline]
    fn is_dead(&self) -> bool {
        self.strong() == 0 || self.is_uninit()
    }

    #[inline]
    fn is_uninit(&self) -> bool {
        self.strong() == usize::MAX
    }

    #[inline]
    fn make_uninit(&self) {
        self.strong_ref().set(usize::MAX);
    }
}
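
// A compact sketch of the counter state machine defined by `RcInnerPtr`,
// using a hypothetical implementor (illustration only):
//
//     struct Counts { strong: Cell<usize>, weak: Cell<usize> }
//     // impl RcInnerPtr for Counts by returning &self.strong / &self.weak
//
//     let c = Counts { strong: Cell::new(1), weak: Cell::new(1) };
//     assert!(!c.is_dead());      // one live strong reference
//     c.make_uninit();            // strong is now the usize::MAX sentinel
//     assert!(c.is_uninit());     // ... so the value reads as uninitialized
//     assert!(c.is_dead());       // ... and the allocation counts as dead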

impl<T> RcInnerPtr for RcBox<T> {
    #[inline(always)]
    fn weak_ref(&self) -> &Cell<usize> {
        &self.weak
    }

    #[inline(always)]
    fn strong_ref(&self) -> &Cell<usize> {
        &self.strong
    }
}

impl RcInnerPtr for WeakInner<'_> {
    #[inline(always)]
    fn weak_ref(&self) -> &Cell<usize> {
        self.weak
    }

    #[inline(always)]
    fn strong_ref(&self) -> &Cell<usize> {
        self.strong
    }
}

impl<T> borrow::Borrow<T> for Rc<T> {
    fn borrow(&self) -> &T {
        self
    }
}

impl<T> AsRef<T> for Rc<T> {
    fn as_ref(&self) -> &T {
        self
    }
}

impl<T> Unpin for Rc<T> {}

/// Get the offset within an `RcBox` for the payload behind a pointer.
///
/// # Safety
///
/// The pointer must point to (and have valid metadata for) a previously
/// valid instance of `T`, but the `T` is allowed to be dropped.
unsafe fn data_offset<T>(ptr: *const T) -> isize {
    // `T` is `Sized`, so the offset of the `value` field is a property of the
    // `RcBox<T>` type alone; the pointer itself is not needed.
    let _ = ptr;

    // Stamp out an uninitialized `RcBox<T>` purely to measure its field
    // layout. No `T` is ever constructed or read.
    let rcbox = MaybeUninit::<RcBox<T>>::uninit();

    let base_ptr = rcbox.as_ptr();
    let base_ptr = base_ptr as usize;

    // `addr_of!` computes the field address without materializing a reference
    // to uninitialized memory.
    let field_ptr = ptr::addr_of!((*(base_ptr as *const RcBox<T>)).value);
    let field_ptr = field_ptr as usize;

    (field_ptr - base_ptr) as isize
}

// Deallocate a `Box` without destroying the inner `T`.
//
// # Safety
//
// Callers must ensure that `ptr` was allocated by a `Box` using the given
// allocator `alloc`.
//
// Callers must ensure that `T` is not dropped.
#[inline]
unsafe fn box_free<T, A: Allocator>(ptr: NonNull<T>, alloc: A) {
    // SAFETY: `T` is `Sized`, which means `Layout::for_value_raw` is always
    // safe to call.
    let layout = Layout::for_value_raw(ptr.as_ptr());

    alloc.deallocate(ptr.cast(), layout);
}
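
// A sketch of the intended call pattern for `box_free` (hypothetical caller,
// assuming a `Copy` payload so that skipping the destructor is sound):
//
//     let boxed = Box::new(5_u8);
//     let ptr = NonNull::from(Box::leak(boxed));
//     // The payload is `Copy`, so there is nothing to drop; only the
//     // allocation itself needs to be returned to the allocator.
//     unsafe { box_free(ptr, Global) };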