Skip to content

Commit 2705be1

Browse files
committed
Add RawRc methods for sized values
1 parent f39cc7b commit 2705be1

File tree

1 file changed

+209
-4
lines changed

1 file changed

+209
-4
lines changed

library/alloc/src/raw_rc/raw_rc.rs

Lines changed: 209 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,15 @@
1-
use core::alloc::Allocator;
1+
use core::alloc::{AllocError, Allocator};
22
use core::cell::UnsafeCell;
33
#[cfg(not(no_global_oom_handling))]
44
use core::clone::CloneToUninit;
5+
#[cfg(not(no_global_oom_handling))]
6+
use core::convert;
57
use core::marker::PhantomData;
8+
use core::mem::DropGuard;
69
#[cfg(not(no_global_oom_handling))]
7-
use core::mem::{self, DropGuard};
10+
use core::mem::{self, MaybeUninit, SizedTypeProperties};
811
#[cfg(not(no_global_oom_handling))]
9-
use core::ops::DerefMut;
12+
use core::ops::{DerefMut, Residual, Try};
1013
#[cfg(not(no_global_oom_handling))]
1114
use core::ptr;
1215
use core::ptr::NonNull;
@@ -20,7 +23,7 @@ use crate::raw_rc::raw_weak::RawWeak;
2023
#[cfg(not(no_global_oom_handling))]
2124
use crate::raw_rc::rc_alloc;
2225
#[cfg(not(no_global_oom_handling))]
23-
use crate::raw_rc::rc_layout::RcLayout;
26+
use crate::raw_rc::rc_layout::{RcLayout, RcLayoutExt};
2427
use crate::raw_rc::rc_value_pointer::RcValuePointer;
2528

2629
/// Base implementation of a strong pointer. `RawRc` does not implement `Drop`, user should call
@@ -337,6 +340,196 @@ where
337340
}
338341
}
339342

343+
// Methods that are only available when the value type `T` is sized.
impl<T, A> RawRc<T, A> {
    /// Writes `value` into the (uninitialized) value slot of `weak`, then converts the weak
    /// pointer into a strong `RawRc`.
    ///
    /// # Safety
    ///
    /// `weak` must be non-dangling.
    unsafe fn from_weak_with_value(weak: RawWeak<T, A>, value: T) -> Self {
        unsafe {
            weak.as_ptr().write(value);

            Self::from_weak(weak)
        }
    }

    /// Creates a `RawRc` containing `value` using a default-constructed allocator, returning
    /// `AllocError` if the allocation fails.
    #[inline]
    pub(crate) fn try_new(value: T) -> Result<Self, AllocError>
    where
        A: Allocator + Default,
    {
        // `<1>` is the initial strong count — NOTE(review): inferred from the
        // `try_new_uninit::<STRONG_COUNT>` call shape; confirm against `RawWeak`.
        RawWeak::try_new_uninit::<1>()
            .map(|weak| unsafe { Self::from_weak_with_value(weak, value) })
    }

    /// Creates a `RawRc` containing `value` in the given allocator, returning `AllocError` if
    /// the allocation fails.
    #[inline]
    pub(crate) fn try_new_in(value: T, alloc: A) -> Result<Self, AllocError>
    where
        A: Allocator,
    {
        RawWeak::try_new_uninit_in::<1>(alloc)
            .map(|weak| unsafe { Self::from_weak_with_value(weak, value) })
    }

    /// Creates a `RawRc` containing `value` using a default-constructed allocator, aborting on
    /// allocation failure (infallible-allocation counterpart of `try_new`).
    #[cfg(not(no_global_oom_handling))]
    #[inline]
    pub(crate) fn new(value: T) -> Self
    where
        A: Allocator + Default,
    {
        // SAFETY: `new_uninit` returns a freshly allocated (hence non-dangling) weak pointer.
        unsafe { Self::from_weak_with_value(RawWeak::new_uninit::<1>(), value) }
    }

    /// Creates a `RawRc` containing `value` in the given allocator, aborting on allocation
    /// failure (infallible-allocation counterpart of `try_new_in`).
    #[cfg(not(no_global_oom_handling))]
    #[inline]
    pub(crate) fn new_in(value: T, alloc: A) -> Self
    where
        A: Allocator,
    {
        // SAFETY: `new_uninit_in` returns a freshly allocated (hence non-dangling) weak pointer.
        unsafe { Self::from_weak_with_value(RawWeak::new_uninit_in::<1>(alloc), value) }
    }

    /// Creates a `RawRc` whose value is produced by `f`, writing the result directly into the
    /// new allocation (the value is constructed in place rather than moved in).
    #[cfg(not(no_global_oom_handling))]
    fn new_with<F>(f: F) -> Self
    where
        A: Allocator + Default,
        F: FnOnce() -> T,
    {
        // Allocate with the layout for a ref-counted `T`, initializing the value slot from `f`.
        let (ptr, alloc) = rc_alloc::allocate_with::<A, _, 1>(T::RC_LAYOUT, |ptr| unsafe {
            ptr.as_ptr().cast().write(f())
        });

        unsafe { Self::from_raw_parts(ptr.as_ptr().cast(), alloc) }
    }

    /// Maps the value in a `RawRc`, reusing the allocation if possible.
    ///
    /// # Safety
    ///
    /// All accesses to `self` must use the same `RefCounter` implementation for `R`.
    #[cfg(not(no_global_oom_handling))]
    pub(crate) unsafe fn map<R, U>(self, f: impl FnOnce(&T) -> U) -> RawRc<U, A>
    where
        A: Allocator,
        R: RefCounter,
    {
        // Adapt the infallible `f` into the `Try`-returning shape `try_map` expects; the `!`
        // error type lets `into_ok` below unwrap without a panic path.
        fn wrap_fn<T, U>(f: impl FnOnce(&T) -> U) -> impl FnOnce(&T) -> Result<U, !> {
            |x| Ok(f(x))
        }

        let f = wrap_fn(f);

        unsafe { self.try_map::<R, _, _>(f, convert::identity) }.into_ok()
    }

    /// Attempts to map the value in a `RawRc`, reusing the allocation if possible.
    ///
    /// # Safety
    ///
    /// All accesses to `self` must use the same `RefCounter` implementation for `R`.
    #[cfg(not(no_global_oom_handling))]
    pub(crate) unsafe fn try_map<R, U, V>(
        mut self,
        f: impl FnOnce(&T) -> U,
        output_mapper: impl FnOnce(RawRc<U::Output, A>) -> V, // How to remove this argument?
    ) -> <U::Residual as Residual<V>>::TryType
    where
        A: Allocator,
        R: RefCounter,
        U: Try,
        U::Residual: Residual<V>,
    {
        // Fast path: the layouts match and we hold the only reference, so the existing
        // allocation can be reused for the mapped value.
        let result = if T::LAYOUT == U::Output::LAYOUT && unsafe { self.is_unique::<R>() } {
            // Move the value out; the allocation is now logically uninitialized.
            let value = unsafe { self.as_ptr().read() };
            let mut allocation = unsafe { self.cast::<MaybeUninit<U::Output>>() };

            // Destruct `self` as `RawRc<MaybeUninit<U::Output>, A>` if `f` panics or returns a
            // failure value.
            let guard = unsafe { new_rc_guard::<MaybeUninit<U::Output>, A, R>(&mut allocation) };

            let mapped_value = f(&value)?;

            drop(value);
            // Success: disarm the guard so the allocation is not freed; it is about to be
            // re-initialized with the mapped value.
            mem::forget(guard);

            unsafe {
                allocation.get_mut_unchecked().write(mapped_value);

                allocation.cast()
            }
        } else {
            // Destruct `self` if `f` panics or returns a failure value.
            let guard = unsafe { new_rc_guard::<T, A, R>(&mut self) };

            let mapped_value = f(unsafe { guard.as_ptr().as_ref() })?;

            // Deliberately run the guard: release our reference to the original allocation
            // before building the replacement.
            drop(guard);

            // Reuse the allocator (but not the allocation) for the mapped value.
            let alloc = self.into_raw_parts().1;

            RawRc::new_in(mapped_value, alloc)
        };

        try { output_mapper(result) }
    }

    /// Decrements the strong count and, if this was the last strong reference, moves the value
    /// out; otherwise returns `None`.
    ///
    /// # Safety
    ///
    /// All accesses to `self` must use the same `RefCounter` implementation for `R`.
    pub(crate) unsafe fn into_inner<R>(self) -> Option<T>
    where
        A: Allocator,
        R: RefCounter,
    {
        let is_last_strong_ref = unsafe { decrement_strong_ref_count::<R>(self.value_ptr()) };

        is_last_strong_ref.then(|| unsafe { self.weak.assume_init_into_inner::<R>() })
    }

    /// Attempts to move the value out of `self`; returns `Err(self)` unchanged if other strong
    /// references exist (i.e. the strong count could not be locked).
    ///
    /// # Safety
    ///
    /// All accesses to `self` must use the same `RefCounter` implementation for `R`.
    pub(crate) unsafe fn try_unwrap<R>(self) -> Result<T, RawRc<T, A>>
    where
        A: Allocator,
        R: RefCounter,
    {
        // Non-generic helper so the counter manipulation is monomorphized only over `R`,
        // not over `T` and `A` as well.
        unsafe fn inner<R>(value_ptr: RcValuePointer) -> bool
        where
            R: RefCounter,
        {
            unsafe {
                R::from_raw_counter(value_ptr.strong_count_ptr().as_ref()).try_lock_strong_count()
            }
        }

        let is_last_strong_ref = unsafe { inner::<R>(self.value_ptr()) };

        if is_last_strong_ref {
            Ok(unsafe { self.weak.assume_init_into_inner::<R>() })
        } else {
            Err(self)
        }
    }

    /// Moves the value out if this is the last strong reference; otherwise clones it (and drops
    /// this reference).
    ///
    /// # Safety
    ///
    /// All accesses to `self` must use the same `RefCounter` implementation for `R`.
    pub(crate) unsafe fn unwrap_or_clone<R>(self) -> T
    where
        T: Clone,
        A: Allocator,
        R: RefCounter,
    {
        // SAFETY: Caller guarantees `rc` will only be accessed with the same `RefCounter`
        // implementation.
        unsafe { self.try_unwrap::<R>() }.unwrap_or_else(|mut rc| {
            // The guard drops `rc` after cloning — including when `T::clone` panics.
            let guard = unsafe { new_rc_guard::<T, A, R>(&mut rc) };

            T::clone(unsafe { guard.as_ptr().as_ref() })
        })
    }
}
532+
340533
/// Decrements strong reference count in a reference-counted allocation with a value object that is
341534
/// pointed to by `value_ptr`.
342535
#[inline]
@@ -368,3 +561,15 @@ where
368561
R::is_unique(R::from_raw_counter(&ref_counts.strong), R::from_raw_counter(&ref_counts.weak))
369562
}
370563
}
564+
565+
/// Returns a drop guard that calls `RawRc::drop::<R>()` on drop.
///
/// Used to ensure a `RawRc` is destructed even if a user closure panics or returns early;
/// call `mem::forget` on the guard to disarm it.
///
/// # Safety
///
/// All accesses to `*rc` must use the same `RefCounter` implementation for `R` (the same
/// requirement as `RawRc`'s other `R`-parameterized methods).
unsafe fn new_rc_guard<'a, T, A, R>(
    rc: &'a mut RawRc<T, A>,
) -> DropGuard<&'a mut RawRc<T, A>, impl FnOnce(&'a mut RawRc<T, A>)>
where
    T: ?Sized,
    A: Allocator,
    R: RefCounter,
{
    DropGuard::new(rc, |rc| unsafe { rc.drop::<R>() })
}

0 commit comments

Comments
 (0)