@@ -25,6 +25,25 @@ pub enum VtblEntry<'tcx> {
     TraitVPtr(TraitRef<'tcx>),
 }
 
+impl<'tcx> VtblEntry<'tcx> {
+    /// Return the [`rustc_abi::Size`] it takes to represent the given entry kind in memory.
+    pub fn memory_size(&self, ctxt: &impl rustc_abi::HasDataLayout) -> rustc_abi::Size {
+        let dl = ctxt.data_layout();
+        match self {
+            VtblEntry::MetadataSize | VtblEntry::MetadataAlign => dl.pointer_offset(),
+            VtblEntry::MetadataDropInPlace
+            | VtblEntry::Method(_)
+            | VtblEntry::TraitVPtr(_)
+            | VtblEntry::Vacant => dl.pointer_size(),
+        }
+    }
+
+    /// Return the [`rustc_abi::Size`] of the data the given entry kind can contain.
+    pub fn data_size(&self, ctxt: &impl rustc_abi::HasDataLayout) -> rustc_abi::Size {
+        ctxt.data_layout().pointer_offset()
+    }
+}
+
 impl<'tcx> fmt::Debug for VtblEntry<'tcx> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         // We want to call `Display` on `Instance` and `PolyTraitRef`,
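Note on the two sizes introduced above: `memory_size` is how many bytes an entry occupies in the vtable allocation, while `data_size` is how many bytes of payload are written into it; the two only diverge on targets where a pointer is wider than `usize`. A minimal standalone sketch of the `memory_size` distinction (plain Rust, not rustc internals; the 16-byte-pointer / 8-byte-usize figures are illustrative assumptions):

#[derive(Clone, Copy)]
enum EntryKind {
    DropInPlace, // pointer-valued slot (drop glue fn ptr)
    Size,        // usize-valued metadata slot
    Align,       // usize-valued metadata slot
    Method,      // pointer-valued slot (method fn ptr)
}

struct Layout {
    pointer_size: u64,   // bytes a pointer occupies in memory
    pointer_offset: u64, // bytes of its address part (== sizeof(usize))
}

fn memory_size(kind: EntryKind, dl: &Layout) -> u64 {
    match kind {
        EntryKind::Size | EntryKind::Align => dl.pointer_offset,
        EntryKind::DropInPlace | EntryKind::Method => dl.pointer_size,
    }
}

fn main() {
    // Conventional 64-bit target: both sizes coincide, every entry is 8 bytes.
    let host = Layout { pointer_size: 8, pointer_offset: 8 };
    // Hypothetical capability-style target: 16-byte pointers, 8-byte usize.
    let wide = Layout { pointer_size: 16, pointer_offset: 8 };

    let entries = [EntryKind::DropInPlace, EntryKind::Size, EntryKind::Align, EntryKind::Method];
    let total = |dl: &Layout| entries.iter().map(|&e| memory_size(e, dl)).sum::<u64>();
    assert_eq!(total(&host), 32); // 4 entries x 8 bytes
    assert_eq!(total(&wide), 48); // 16 + 8 + 8 + 16
}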
@@ -85,6 +104,11 @@ pub(super) fn vtable_allocation_provider<'tcx>(
     tcx: TyCtxt<'tcx>,
     key: (Ty<'tcx>, Option<ty::ExistentialTraitRef<'tcx>>),
 ) -> AllocId {
+
+    // FIXME(xdoardo): Figure out how to make the overhead of computing the entry
+    // sizes apply only to targets that actually need it (i.e. those where
+    // sizeof(usize) != sizeof(ptr)).
+
     let (ty, poly_trait_ref) = key;
 
     let vtable_entries = if let Some(poly_trait_ref) = poly_trait_ref {
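One possible shape for what the FIXME above asks for, as a standalone sketch (not part of this change; the gating condition and the sizes are assumptions): skip the per-entry pass when a pointer slot and a usize are the same size, since the old constant-time `ptr_size * len` formula is already exact in that case.

/// `usize_slots[i]` is true for entries holding a usize (size/align metadata),
/// false for pointer-valued entries (drop-in-place, methods, super-trait vptrs).
fn vtable_size(usize_slots: &[bool], pointer_size: u64, pointer_offset: u64) -> u64 {
    if pointer_size == pointer_offset {
        // Uniform layout: the constant-time formula is exact, no per-entry pass needed.
        pointer_size * usize_slots.len() as u64
    } else {
        // Mixed layout: only here do we pay for the per-entry computation.
        usize_slots
            .iter()
            .map(|&is_usize| if is_usize { pointer_offset } else { pointer_size })
            .sum()
    }
}

fn main() {
    let slots = [false, true, true, false]; // drop-in-place, size, align, one method
    assert_eq!(vtable_size(&slots, 8, 8), 32);  // conventional 64-bit target
    assert_eq!(vtable_size(&slots, 16, 8), 48); // hypothetical 16-byte-pointer target
}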
@@ -106,19 +130,25 @@ pub(super) fn vtable_allocation_provider<'tcx>(
     let size = layout.size.bytes();
     let align = layout.align.bytes();
 
-    let ptr_size = tcx.data_layout.pointer_size();
-    let ptr_capacity = tcx.data_layout.pointer_offset();
+    let usize_size = tcx.data_layout.pointer_offset();
     let ptr_align = tcx.data_layout.pointer_align().abi;
+    let size_zero = rustc_abi::Size::from_bits(0);
 
-    let vtable_size = ptr_size * u64::try_from(vtable_entries.len()).unwrap();
+    let entries_memory_size =
+        vtable_entries.iter().map(|e| e.memory_size(&tcx)).collect::<Vec<_>>();
+    let entries_data_size = vtable_entries.iter().map(|e| e.data_size(&tcx)).collect::<Vec<_>>();
+    let vtable_size = entries_memory_size.iter().fold(size_zero, |a, s| a + *s);
     let mut vtable = Allocation::new(vtable_size, ptr_align, AllocInit::Uninit, ());
+    let mut field_offset = size_zero;
 
-    // No need to do any alignment checks on the memory accesses below, because we know the
-    // allocation is correctly aligned as we created it above. Also we're only offsetting by
-    // multiples of `ptr_align`, which means that it will stay aligned to `ptr_align`.
+    // We must check that offsetting in steps of `usize` does not break the alignment
+    // requirements of the other entries.
+    assert!(
+        usize_size.bits() % ptr_align.bits() == 0 || ptr_align.bits() % usize_size.bits() == 0,
+        "usize_size: {usize_size:?}, ptr_alignment: {ptr_align:?}"
+    );
 
     for (idx, entry) in vtable_entries.iter().enumerate() {
-        let idx: u64 = u64::try_from(idx).unwrap();
         let scalar = match *entry {
             VtblEntry::MetadataDropInPlace => {
                 if ty.needs_drop(tcx, ty::TypingEnv::fully_monomorphized()) {
@@ -130,9 +160,12 @@ pub(super) fn vtable_allocation_provider<'tcx>(
                     Scalar::from_maybe_pointer(Pointer::null(), &tcx)
                 }
             }
-            VtblEntry::MetadataSize => Scalar::from_uint(size, ptr_capacity),
-            VtblEntry::MetadataAlign => Scalar::from_uint(align, ptr_capacity),
-            VtblEntry::Vacant => continue,
+            VtblEntry::MetadataSize => Scalar::from_uint(size, usize_size),
+            VtblEntry::MetadataAlign => Scalar::from_uint(align, usize_size),
+            VtblEntry::Vacant => {
+                field_offset += entries_memory_size[idx];
+                continue;
+            }
             VtblEntry::Method(instance) => {
                 // Prepare the fn ptr we write into the vtable.
                 let fn_alloc_id = tcx.reserve_and_set_fn_alloc(instance, CTFE_ALLOC_SALT);
@@ -146,8 +179,13 @@ pub(super) fn vtable_allocation_provider<'tcx>(
                     Scalar::from_pointer(vptr, &tcx)
                 }
             };
+
+            let current_offset = field_offset;
+            let field_data_size = entries_data_size[idx];
+            field_offset += entries_memory_size[idx];
+
             vtable
-                .write_scalar(&tcx, alloc_range(ptr_size * idx, ptr_capacity), scalar)
+                .write_scalar(&tcx, alloc_range(current_offset, field_data_size), scalar)
                 .expect("failed to build vtable representation");
         }
 
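To make the bookkeeping in the loop above concrete, a standalone sketch of the write planning it performs: each entry is written at the running `field_offset` over its `data_size` bytes, and the offset then advances by its `memory_size`. The 16-byte-pointer / 8-byte-usize figures are illustrative assumptions.

#[derive(Debug)]
struct Write {
    offset: u64, // where the scalar goes (the `alloc_range` start)
    size: u64,   // how many bytes of data are written (the `alloc_range` length)
}

fn plan_writes(entry_memory_sizes: &[u64], entry_data_sizes: &[u64]) -> Vec<Write> {
    let mut field_offset = 0;
    let mut writes = Vec::new();
    for (&mem, &data) in entry_memory_sizes.iter().zip(entry_data_sizes) {
        writes.push(Write { offset: field_offset, size: data });
        field_offset += mem; // a vacant entry would skip the write but still advance
    }
    writes
}

fn main() {
    // [drop_in_place, size, align, method] on the hypothetical wide-pointer target:
    // memory sizes 16, 8, 8, 16; data sizes all 8 (= pointer_offset).
    let writes = plan_writes(&[16, 8, 8, 16], &[8, 8, 8, 8]);
    let offsets: Vec<u64> = writes.iter().map(|w| w.offset).collect();
    assert_eq!(offsets, vec![0, 16, 24, 32]); // total allocation size: 48 bytes
    assert!(writes.iter().all(|w| w.size == 8));
}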