@@ -0,0 +1,224 @@
#![ feature( unsize) ]
use std:: alloc:: Layout ;
use std:: marker:: { PhantomData , Unsize } ;
use std:: mem;
/// Assumes pointer-sized metadata
#[ repr( C ) ]
struct DstPointerRepr {
data : * mut ( ) ,
meta : * mut ( ) ,
}
/// A growable container of unsized values (`dyn Trait` objects, slices, …)
/// stored inline in a single type-erased byte buffer, instead of boxing
/// each element individually.
///
/// Each element is stored as its metadata word followed by its (padded)
/// value bytes; see `push` / `iter_raw` for the encoding.
pub struct VecOfDst<DST: ?Sized> {
    storage: heterogeneous_vec::HeterogeneousVec,
    // Marks logical ownership of `DST` values (drop check / variance).
    _phantom: PhantomData<DST>,
}
impl<DST: ?Sized> VecOfDst<DST> {
    /// Creates an empty vec; nothing is allocated until the first `push`.
    pub fn new() -> Self {
        Self {
            storage: heterogeneous_vec::HeterogeneousVec::new(),
            _phantom: PhantomData,
        }
    }
    /// Pushes a sized `value` that unsizes to `DST`.
    ///
    /// Storage encoding per element: the pointer-sized metadata word of
    /// `&DST` first, then the value's own bytes (aligned/padded by the
    /// byte buffer).
    ///
    /// # Panics
    /// If `&DST` is not the size of `DstPointerRepr` (two words), i.e. the
    /// metadata is not pointer-sized — the transmutes below would then be
    /// unsound, so this is checked up front.
    pub fn push<T: Unsize<DST>>(&mut self, value: T) {
        assert_eq!(mem::size_of::<&DST>(),
                   mem::size_of::<DstPointerRepr>());
        let repr = unsafe {
            // Coerce to a fat reference, then reinterpret its bytes as a
            // (data, meta) pair. SAFETY: sizes were asserted equal above;
            // relies on the unstable fat-pointer layout matching
            // `DstPointerRepr`.
            let dst_ref: &DST = &value;
            mem::transmute_copy::<&DST, DstPointerRepr>(&dst_ref)
        };
        // Metadata word first, then the value itself. Ownership of `value`
        // moves into the buffer — `HeterogeneousVec::push` writes it
        // without running its destructor here.
        self.storage.push(repr.meta);
        self.storage.push(value);
    }
    /// Shared references to the stored elements, in insertion order.
    pub fn iter(&self) -> impl Iterator<Item = &DST> {
        // SAFETY: pointers from `iter_raw` point into our own buffer and
        // are valid for `&self`'s borrow.
        self.iter_raw().map(|ptr| unsafe { &*ptr })
    }
    /// Mutable references to the stored elements, in insertion order.
    pub fn iter_mut(&mut self) -> impl Iterator<Item = &mut DST> {
        // SAFETY: `&mut self` guarantees exclusive access to the buffer.
        self.iter_raw().map(|ptr| unsafe { &mut *ptr })
    }
    /// Iterates the elements as raw fat pointers; shared by `iter`,
    /// `iter_mut` and `Drop`.
    fn iter_raw<'a>(&'a self) -> impl Iterator<Item = *mut DST> + 'a {
        struct Iter<'a, DST: ?Sized> {
            iter: heterogeneous_vec::Iter<'a>,
            _phantom: PhantomData<DST>,
        }
        impl<'a, DST: ?Sized> Iterator for Iter<'a, DST> {
            type Item = *mut DST;
            fn next(&mut self) -> Option<Self::Item> {
                const POINTER_BYTES: usize = mem::size_of::<*mut ()>();
                const POINTER_BITS: usize = POINTER_BYTES * 8;
                // Non-null address with only the top bit set: a multiple of
                // every smaller power of two, so it is "aligned enough" for
                // any type while we only need the element's layout.
                const HYPER_ALIGNED: *mut () = (1_usize << (POINTER_BITS - 1)) as _;
                unsafe {
                    // Elements were stored as: metadata word, then value bytes.
                    self.iter.read_next::<*mut ()>().map(|meta| {
                        // Forge a fat reference (placeholder data + real meta)
                        // purely to ask `Layout::for_value` for the element's
                        // size/alignment before we know where it lives.
                        // NOTE(review): materializing a dangling `&DST` is
                        // UB-adjacent even when only its layout is used —
                        // worth confirming under Miri.
                        let fake_repr = DstPointerRepr { data: HYPER_ALIGNED, meta };
                        let fake_ref = mem::transmute_copy::<DstPointerRepr, &DST>(&fake_repr);
                        let data = self.iter.next(Layout::for_value(fake_ref)).unwrap();
                        // Reassemble the real fat pointer from the actual
                        // data address plus the stored metadata.
                        let repr = DstPointerRepr { data, meta };
                        mem::transmute_copy::<DstPointerRepr, *mut DST>(&repr)
                    })
                }
            }
        }
        Iter {
            iter: self.storage.iter(),
            _phantom: PhantomData,
        }
    }
}
impl<DST: ?Sized> Drop for VecOfDst<DST> {
    /// Runs the destructor of every stored element in place, walking the
    /// same raw-pointer iterator that `iter`/`iter_mut` use.
    fn drop(&mut self) {
        self.iter_raw()
            .for_each(|ptr| unsafe { std::ptr::drop_in_place(ptr) });
    }
}
#[test]
fn trait_objets() {
    // Over-aligned payload to exercise padding inside the byte buffer.
    #[repr(align(1024))]
    #[derive(Debug)]
    struct SuperAligned(u32);
    let mut v = VecOfDst::<std::fmt::Debug>::new();
    v.push(4);
    v.push(SuperAligned(7));
    v.push("foo");
    // Fixed expectation: `{:?}` of "foo" renders as "\"foo\"" — the previous
    // literal contained spurious spaces ("\" foo\" ") and could never match.
    assert_eq!(debugs(v.iter()), ["4", "SuperAligned(7)", "\"foo\""]);
}
#[test]
fn slices() {
    // Slices of different lengths stored in the same container.
    let mut v = VecOfDst::<[u32]>::new();
    v.push([4]);
    v.push([9000, 1, 3]);
    let rendered = debugs(v.iter());
    assert_eq!(rendered, ["[4]", "[9000, 1, 3]"]);
}
/// Renders every item of `iter` with its `Debug` formatting, collecting
/// the results into a `Vec<String>` (test helper).
#[cfg(test)]
fn debugs<I>(iter: I) -> Vec<String>
where
    I: Iterator,
    I::Item: std::fmt::Debug,
{
    let mut rendered = Vec::new();
    for item in iter {
        rendered.push(format!("{:?}", item));
    }
    rendered
}
mod heterogeneous_vec {
use std:: alloc:: { self , Layout } ;
use std:: mem;
use std:: num:: NonZeroUsize ;
use std:: ptr;
pub struct HeterogeneousVec {
ptr : * mut u8 ,
// In bytes:
len : usize ,
capacity : usize ,
align : NonZeroUsize ,
}
unsafe fn realloc_with_align ( ptr : * mut u8 , old_layout : Layout , new_layout : Layout )
-> * mut u8 {
if new_layout. align ( ) == old_layout. align ( ) {
alloc:: realloc ( ptr, old_layout, new_layout. size ( ) )
} else {
let new_ptr = alloc:: alloc ( new_layout) ;
if !new_ptr. is_null ( ) {
let size = old_layout. size ( ) . min ( new_layout. size ( ) ) ;
ptr:: copy_nonoverlapping ( ptr, new_ptr, size) ;
alloc:: dealloc ( ptr, old_layout) ;
}
new_ptr
}
}
fn align_to ( position : usize , align : usize ) -> usize {
let remainder = position % align;
if remainder > 0 {
position. checked_add ( align - remainder) . unwrap ( )
} else {
position
}
}
impl HeterogeneousVec {
pub fn new ( ) -> Self {
Self {
ptr : ptr:: null_mut ( ) ,
len : 0 ,
capacity : 0 ,
align : NonZeroUsize :: new ( 1 ) . unwrap ( ) ,
}
}
pub fn push < T > ( & mut self , value : T ) {
let value_size = mem:: size_of :: < T > ( ) ;
let value_align = mem:: align_of :: < T > ( ) ;
let value_position = align_to ( self . len , value_align) ;
let available = self . capacity . saturating_sub ( value_position) ;
let align = self . align . get ( ) ;
if available < value_size || align < value_align {
let required_capacity = self . len . checked_add ( value_size) . unwrap ( ) ;
let new_capacity = self . capacity . max ( required_capacity) . next_power_of_two ( ) ;
let new_align = align. max ( value_align) ;
let new_layout = Layout :: from_size_align ( new_capacity, new_align) . unwrap ( ) ;
let ptr = unsafe {
if self . ptr . is_null ( ) {
alloc:: alloc ( new_layout)
} else {
let old_layout = Layout :: from_size_align ( self . capacity , align) . unwrap ( ) ;
realloc_with_align ( self . ptr , old_layout, new_layout)
}
} ;
if ptr. is_null ( ) {
alloc:: handle_alloc_error ( new_layout)
}
self . ptr = ptr;
self . capacity = new_capacity;
}
unsafe {
let next = self . ptr . add ( value_position) as * mut T ;
debug_assert ! ( next as usize % value_align == 0 ) ;
next. write ( value) ;
}
self . len = value_position + value_size;
}
pub fn iter ( & self ) -> Iter {
Iter { vec : self , position : 0 }
}
}
pub struct Iter < ' a > {
vec : & ' a HeterogeneousVec ,
position : usize ,
}
impl < ' a > Iter < ' a > {
pub unsafe fn read_next < T > ( & mut self ) -> Option < T > {
self . next ( Layout :: new :: < T > ( ) ) . map ( |ptr| ( ptr as * mut T ) . read ( ) )
}
pub unsafe fn next ( & mut self , layout : Layout ) -> Option < * mut ( ) > {
if self . position < self . vec . len {
let value_position = align_to ( self . position , layout. align ( ) ) ;
let ptr = self . vec . ptr . add ( value_position) ;
self . position = value_position + layout. size ( ) ;
debug_assert ! ( self . position <= self . vec. len) ;
Some ( ptr as * mut ( ) )
} else {
None
}
}
}
}