@@ -2,6 +2,8 @@ use core::alloc::{AllocError, Allocator};
22use core:: cell:: UnsafeCell ;
33#[ cfg( not( no_global_oom_handling) ) ]
44use core:: clone:: CloneToUninit ;
5+ #[ cfg( not( no_global_oom_handling) ) ]
6+ use core:: iter:: TrustedLen ;
57use core:: marker:: PhantomData ;
68#[ cfg( not( no_global_oom_handling) ) ]
79use core:: mem:: { self , SizedTypeProperties } ;
@@ -593,6 +595,139 @@ impl<T, A> RawRc<MaybeUninit<T>, A> {
593595 }
594596}
595597
impl<T, A> RawRc<[T], A> {
    /// Creates a reference-counted slice by moving every element of `iter` into a single
    /// allocation sized from the iterator's `TrustedLen` size hint.
    ///
    /// Panics with "capacity overflow" when the iterator reports an upper bound of `None`,
    /// i.e. a length exceeding `usize::MAX`.
    #[cfg(not(no_global_oom_handling))]
    fn from_trusted_len_iter<I>(iter: I) -> Self
    where
        A: Allocator + Default,
        I: TrustedLen<Item = T>,
    {
        /// Returns a drop guard that calls the destructors of a slice of elements on drop.
        ///
        /// # Safety
        ///
        /// - `head..tail` must describe a valid consecutive slice of `T` values when the destructor
        ///   of the returned guard is called.
        /// - After the guard's destructor has run, the corresponding values must not be accessed
        ///   anymore.
        unsafe fn drop_range_on_drop<T>(
            head: NonNull<T>,
            tail: NonNull<T>,
        ) -> impl DerefMut<Target = (NonNull<T>, NonNull<T>)> {
            // SAFETY: The caller guarantees that, at drop time, `head..tail` is a valid
            // consecutive slice of initialized `T` values, so measuring its length and
            // dropping it in place is sound.
            DropGuard::new((head, tail), |(head, tail)| unsafe {
                let length = tail.offset_from_unsigned(head);

                NonNull::<[T]>::slice_from_raw_parts(head, length).drop_in_place();
            })
        }

        let (length, Some(high)) = iter.size_hint() else {
            // TrustedLen contract guarantees that `upper_bound == None` implies an iterator
            // length exceeding `usize::MAX`.
            // The default implementation would collect into a vec which would panic.
            // Thus we panic here immediately without invoking `Vec` code.
            panic!("capacity overflow");
        };

        debug_assert_eq!(
            length,
            high,
            "TrustedLen iterator's size hint is not exact: {:?}",
            (length, high)
        );

        let rc_layout = RcLayout::new_array::<T>(length);

        let (ptr, alloc) = rc_alloc::allocate_with::<A, _, 1>(rc_layout, |ptr| {
            let ptr = ptr.as_ptr().cast::<T>();
            // The guard starts with an empty range (`head == tail`) and grows as elements
            // are written, so that if the iterator panics mid-way only the
            // already-initialized prefix is dropped.
            let mut guard = unsafe { drop_range_on_drop::<T>(ptr, ptr) };

            // SAFETY: `iter` is `TrustedLen`, we can assume we will write correct number of
            // elements to the buffer.
            iter.for_each(|value| unsafe {
                guard.1.write(value);
                guard.1 = guard.1.add(1);
            });

            // Every element was moved into the buffer successfully; disarm the guard so the
            // elements are not dropped here.
            mem::forget(guard);
        });

        // SAFETY: We have written `length` of `T` values to the buffer, the buffer is now
        // initialized.
        unsafe {
            Self::from_raw_parts(
                NonNull::slice_from_raw_parts(ptr.as_ptr().cast::<T>(), length),
                alloc,
            )
        }
    }

    /// Converts `self` into a reference-counted array if the slice length is exactly `N`,
    /// otherwise returns `self` unchanged in the `Err` variant.
    fn try_into_array<const N: usize>(self) -> Result<RawRc<[T; N], A>, Self> {
        // NOTE(review): presumably `self.as_ptr()` always points to a live slice value for a
        // strong `RawRc` — confirm against `RawRc`'s invariants.
        if unsafe { self.as_ptr().as_ref() }.len() == N {
            // SAFETY: The length equals `N`, so viewing the `[T]` payload as `[T; N]` is valid.
            Ok(unsafe { self.cast() })
        } else {
            Err(self)
        }
    }

    /// Fallible slice-to-array conversion that consumes `self`: on a length mismatch the
    /// strong reference is released through the reference counter `R` and `None` is returned.
    ///
    /// # Safety
    ///
    /// Must satisfy the same contract as [`RawRc::drop`] with respect to `R` — TODO confirm
    /// the exact requirements at the definition of `RefCounter`.
    pub(crate) unsafe fn into_array<const N: usize, R>(self) -> Option<RawRc<[T; N], A>>
    where
        A: Allocator,
        R: RefCounter,
    {
        match self.try_into_array::<N>() {
            Ok(result) => Some(result),
            Err(mut raw_rc) => {
                // SAFETY: `raw_rc` owns a strong reference that is released exactly once
                // here and is not used afterwards.
                unsafe { raw_rc.drop::<R>() };

                None
            }
        }
    }
}
689+
impl<T, A> RawRc<[MaybeUninit<T>], A> {
    /// Allocates a reference-counted slice of `length` uninitialized elements using the
    /// default allocator.
    #[cfg(not(no_global_oom_handling))]
    pub(crate) fn new_uninit_slice(length: usize) -> Self
    where
        A: Allocator + Default,
    {
        // SAFETY: presumably a freshly created weak (the `<1>` argument looks like the
        // initial strong count) satisfies `from_weak`'s contract — TODO confirm at the
        // definitions of `from_weak` and `RawWeak::new_uninit_slice`.
        unsafe { Self::from_weak(RawWeak::new_uninit_slice::<1>(length)) }
    }

    /// Allocates a reference-counted slice of `length` uninitialized elements in `alloc`.
    #[cfg(not(no_global_oom_handling))]
    pub(crate) fn new_uninit_slice_in(length: usize, alloc: A) -> Self
    where
        A: Allocator,
    {
        // SAFETY: same contract as `new_uninit_slice`, with an explicit allocator.
        unsafe { Self::from_weak(RawWeak::new_uninit_slice_in::<1>(length, alloc)) }
    }

    /// Allocates a reference-counted slice of `length` zero-filled elements using the
    /// default allocator.
    #[cfg(not(no_global_oom_handling))]
    pub(crate) fn new_zeroed_slice(length: usize) -> Self
    where
        A: Allocator + Default,
    {
        // SAFETY: same contract as `new_uninit_slice`; the element bytes are zeroed.
        unsafe { Self::from_weak(RawWeak::new_zeroed_slice::<1>(length)) }
    }

    /// Allocates a reference-counted slice of `length` zero-filled elements in `alloc`.
    #[cfg(not(no_global_oom_handling))]
    pub(crate) fn new_zeroed_slice_in(length: usize, alloc: A) -> Self
    where
        A: Allocator,
    {
        // SAFETY: same contract as `new_uninit_slice`, with an explicit allocator.
        unsafe { Self::from_weak(RawWeak::new_zeroed_slice_in::<1>(length, alloc)) }
    }

    /// # Safety
    ///
    /// All `MaybeUninit<T>` values contained by `self` must be initialized.
    pub(crate) unsafe fn assume_init(self) -> RawRc<[T], A> {
        // SAFETY: `[MaybeUninit<T>]` and `[T]` have the same layout and the caller
        // guarantees every element is initialized; the pointer comes from a `NonNull`, so
        // `new_unchecked` is fed a non-null pointer.
        unsafe { self.cast_with(|ptr| NonNull::new_unchecked(ptr.as_ptr() as _)) }
    }
}
730+
596731/// Decrements strong reference count in a reference-counted allocation with a value object that is
597732/// pointed to by `value_ptr`.
598733#[ inline]
0 commit comments