Skip to content

Commit 379047e

Browse files
committed
rustc: pack size and alignment in vtables
1 parent 02c939e commit 379047e

File tree

5 files changed

+213
-36
lines changed

5 files changed

+213
-36
lines changed

cheri/tests/vtable.rs

Lines changed: 55 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,55 @@
1+
#![no_std]
2+
3+
extern crate alloc;
4+
extern crate cheriot;
5+
6+
use alloc::string::String;
7+
8+
trait Chair {
9+
fn count_legs(&self) -> u32;
10+
}
11+
12+
struct Monobloc {
13+
dropme: String,
14+
}
15+
16+
impl Chair for Monobloc {
17+
fn count_legs(&self) -> u32 {
18+
4
19+
}
20+
}
21+
22+
struct CuttyStool {}
23+
24+
impl Chair for CuttyStool {
25+
fn count_legs(&self) -> u32 {
26+
3
27+
}
28+
}
29+
30+
struct RockingChair {}
31+
32+
impl Chair for RockingChair {
33+
fn count_legs(&self) -> u32 {
34+
0
35+
}
36+
}
37+
38+
#[no_mangle]
39+
extern "C" fn test_vtable() -> i32 {
40+
fn observe_chair(chair: &dyn Chair) -> u32 {
41+
chair.count_legs()
42+
}
43+
core::hint::black_box({
44+
let chair = Monobloc { dropme: String::new() };
45+
assert_eq!(observe_chair(&chair), 4);
46+
47+
let chair = CuttyStool {};
48+
assert_eq!(observe_chair(&chair), 3);
49+
50+
let chair = RockingChair {};
51+
assert_eq!(observe_chair(&chair), 0);
52+
});
53+
54+
0
55+
}

compiler/rustc_codegen_llvm/src/debuginfo/metadata.rs

Lines changed: 34 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -1440,32 +1440,35 @@ fn build_vtable_type_di_node<'ll, 'tcx>(
14401440
) -> &'ll DIType {
14411441
let tcx = cx.tcx;
14421442

1443-
let vtable_entries = if let Some(poly_trait_ref) = poly_trait_ref {
1444-
let trait_ref = poly_trait_ref.with_self_ty(tcx, ty);
1445-
let trait_ref = tcx.erase_and_anonymize_regions(trait_ref);
1446-
1447-
tcx.vtable_entries(trait_ref)
1448-
} else {
1449-
TyCtxt::COMMON_VTABLE_ENTRIES
1450-
};
1443+
// FIXME(xdoardo): Figure out how to make the overhead of computing the entry
1444+
// sizes apply only to targets that need it (i.e. those where sizeof(usize) !=
1445+
// sizeof(ptr)).
14511446

14521447
// All function pointers are described as opaque pointers. This could be improved in the future
14531448
// by describing them as actual function pointers.
14541449
let void_pointer_ty = Ty::new_imm_ptr(tcx, tcx.types.unit);
14551450
let void_pointer_type_di_node = type_di_node(cx, void_pointer_ty);
14561451
let usize_di_node = type_di_node(cx, tcx.types.usize);
1452+
let usize_layout = cx.layout_of(tcx.types.usize);
14571453
let pointer_layout = cx.layout_of(void_pointer_ty);
1458-
let pointer_size = pointer_layout.size;
14591454
let pointer_align = pointer_layout.align.abi;
1460-
// If `usize` is not pointer-sized and -aligned then the size and alignment computations
1461-
// for the vtable as a whole would be wrong. Let's make sure this holds even on weird
1462-
// platforms.
1463-
assert_eq!(cx.size_and_align_of(tcx.types.usize), (pointer_size, pointer_align));
1455+
let size_zero = Size::from_bits(0);
1456+
1457+
let vtable_entries = if let Some(poly_trait_ref) = poly_trait_ref {
1458+
let trait_ref = poly_trait_ref.with_self_ty(tcx, ty);
1459+
let trait_ref = tcx.erase_and_anonymize_regions(trait_ref);
1460+
1461+
tcx.vtable_entries(trait_ref)
1462+
} else {
1463+
TyCtxt::COMMON_VTABLE_ENTRIES
1464+
};
14641465

14651466
let vtable_type_name =
14661467
compute_debuginfo_vtable_name(cx.tcx, ty, poly_trait_ref, VTableNameKind::Type);
14671468
let unique_type_id = UniqueTypeId::for_vtable_ty(tcx, ty, poly_trait_ref);
1468-
let size = pointer_size * vtable_entries.len() as u64;
1469+
let entries_size = vtable_entries.iter().map(|e| e.memory_size(&tcx)).collect::<Vec<_>>();
1470+
let size = entries_size.iter().fold(size_zero, |a, s| a + *s);
1471+
let mut field_offset = size_zero;
14691472

14701473
// This gets mapped to a DW_AT_containing_type attribute which allows GDB to correlate
14711474
// the vtable to the type it is for.
@@ -1488,32 +1491,39 @@ fn build_vtable_type_di_node<'ll, 'tcx>(
14881491
.iter()
14891492
.enumerate()
14901493
.filter_map(|(index, vtable_entry)| {
1491-
let (field_name, field_type_di_node) = match vtable_entry {
1494+
let (field_name, layout, field_type_di_node) = match vtable_entry {
14921495
ty::VtblEntry::MetadataDropInPlace => {
1493-
("drop_in_place".to_string(), void_pointer_type_di_node)
1496+
("drop_in_place".to_string(), pointer_layout, void_pointer_type_di_node)
14941497
}
14951498
ty::VtblEntry::Method(_) => {
14961499
// Note: This code does not try to give a proper name to each method
14971500
// because there might be multiple methods with the same name
14981501
// (coming from different traits).
1499-
(format!("__method{index}"), void_pointer_type_di_node)
1502+
(format!("__method{index}"), pointer_layout, void_pointer_type_di_node)
1503+
}
1504+
ty::VtblEntry::TraitVPtr(_) => (
1505+
format!("__super_trait_ptr{index}"),
1506+
pointer_layout,
1507+
void_pointer_type_di_node,
1508+
),
1509+
ty::VtblEntry::MetadataAlign => {
1510+
("align".to_string(), usize_layout, usize_di_node)
15001511
}
1501-
ty::VtblEntry::TraitVPtr(_) => {
1502-
(format!("__super_trait_ptr{index}"), void_pointer_type_di_node)
1512+
ty::VtblEntry::MetadataSize => {
1513+
("size".to_string(), usize_layout, usize_di_node)
15031514
}
1504-
ty::VtblEntry::MetadataAlign => ("align".to_string(), usize_di_node),
1505-
ty::VtblEntry::MetadataSize => ("size".to_string(), usize_di_node),
15061515
ty::VtblEntry::Vacant => return None,
15071516
};
15081517

1509-
let field_offset = pointer_size * index as u64;
1518+
let current_offset = field_offset;
1519+
field_offset += entries_size[index];
15101520

15111521
Some(build_field_di_node(
15121522
cx,
15131523
vtable_type_di_node,
15141524
&field_name,
1515-
pointer_layout,
1516-
field_offset,
1525+
layout,
1526+
current_offset,
15171527
DIFlags::FlagZero,
15181528
field_type_di_node,
15191529
None,

compiler/rustc_codegen_ssa/src/meth.rs

Lines changed: 13 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,19 @@ impl<'a, 'tcx> VirtualIndex {
2828

2929
let llty = bx.fn_ptr_backend_type(fn_abi);
3030
let ptr_size = bx.data_layout().pointer_size();
31-
let vtable_byte_offset = self.0 * ptr_size.bytes();
31+
let usize_size = bx.data_layout().pointer_offset();
32+
33+
// FIXME(xdoardo): Find a way to remove the overhead of this comparison too.
34+
let vtable_byte_offset = if ptr_size == usize_size {
35+
ptr_size.bytes() * self.0
36+
} else {
37+
match self.0 {
38+
0 => 0,
39+
1 => usize_size.bytes(),
40+
2 => usize_size.bytes() * 2,
41+
n => usize_size.bytes() * 2 + ptr_size.bytes() * (n - 2),
42+
}
43+
};
3244

3345
load_vtable(bx, llvtable, llty, vtable_byte_offset, ty, nonnull)
3446
}

compiler/rustc_middle/src/ty/vtable.rs

Lines changed: 49 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,25 @@ pub enum VtblEntry<'tcx> {
2525
TraitVPtr(TraitRef<'tcx>),
2626
}
2727

28+
impl<'tcx> VtblEntry<'tcx> {
29+
/// Returns the [`rustc_abi::Size`] that the given entry kind occupies in memory.
30+
pub fn memory_size(&self, ctxt: &impl rustc_abi::HasDataLayout) -> rustc_abi::Size {
31+
let dl = ctxt.data_layout();
32+
match self {
33+
VtblEntry::MetadataSize | VtblEntry::MetadataAlign => dl.pointer_offset(),
34+
VtblEntry::MetadataDropInPlace
35+
| VtblEntry::Method(_)
36+
| VtblEntry::TraitVPtr(_)
37+
| VtblEntry::Vacant => dl.pointer_size(),
38+
}
39+
}
40+
41+
/// Return the [`rustc_abi::Size`] of the data the given entry kind can contain.
42+
pub fn data_size(&self, ctxt: &impl rustc_abi::HasDataLayout) -> rustc_abi::Size {
43+
ctxt.data_layout().pointer_offset()
44+
}
45+
}
46+
2847
impl<'tcx> fmt::Debug for VtblEntry<'tcx> {
2948
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3049
// We want to call `Display` on `Instance` and `PolyTraitRef`,
@@ -85,6 +104,11 @@ pub(super) fn vtable_allocation_provider<'tcx>(
85104
tcx: TyCtxt<'tcx>,
86105
key: (Ty<'tcx>, Option<ty::ExistentialTraitRef<'tcx>>),
87106
) -> AllocId {
107+
108+
// FIXME(xdoardo): Figure out how to make the overhead of computing the entry
109+
// sizes apply only to targets that need it (i.e. those where sizeof(usize) !=
110+
// sizeof(ptr)).
111+
88112
let (ty, poly_trait_ref) = key;
89113

90114
let vtable_entries = if let Some(poly_trait_ref) = poly_trait_ref {
@@ -106,19 +130,25 @@ pub(super) fn vtable_allocation_provider<'tcx>(
106130
let size = layout.size.bytes();
107131
let align = layout.align.bytes();
108132

109-
let ptr_size = tcx.data_layout.pointer_size();
110-
let ptr_capacity = tcx.data_layout.pointer_offset();
133+
let usize_size = tcx.data_layout.pointer_offset();
111134
let ptr_align = tcx.data_layout.pointer_align().abi;
135+
let size_zero = rustc_abi::Size::from_bits(0);
112136

113-
let vtable_size = ptr_size * u64::try_from(vtable_entries.len()).unwrap();
137+
let entries_memory_size =
138+
vtable_entries.iter().map(|e| e.memory_size(&tcx)).collect::<Vec<_>>();
139+
let entries_data_size = vtable_entries.iter().map(|e| e.data_size(&tcx)).collect::<Vec<_>>();
140+
let vtable_size = entries_memory_size.iter().fold(size_zero, |a, s| a + *s);
114141
let mut vtable = Allocation::new(vtable_size, ptr_align, AllocInit::Uninit, ());
142+
let mut field_offset = size_zero;
115143

116-
// No need to do any alignment checks on the memory accesses below, because we know the
117-
// allocation is correctly aligned as we created it above. Also we're only offsetting by
118-
// multiples of `ptr_align`, which means that it will stay aligned to `ptr_align`.
144+
// We must check that the offsetting in steps of `usizes` does not break the alignment
145+
// requirements of the other entries.
146+
assert!(
147+
usize_size.bits() % ptr_align.bits() == 0 || ptr_align.bits() % usize_size.bits() == 0,
148+
"usize_size: {usize_size:?}, ptr_alignment: {ptr_align:?}"
149+
);
119150

120151
for (idx, entry) in vtable_entries.iter().enumerate() {
121-
let idx: u64 = u64::try_from(idx).unwrap();
122152
let scalar = match *entry {
123153
VtblEntry::MetadataDropInPlace => {
124154
if ty.needs_drop(tcx, ty::TypingEnv::fully_monomorphized()) {
@@ -130,9 +160,12 @@ pub(super) fn vtable_allocation_provider<'tcx>(
130160
Scalar::from_maybe_pointer(Pointer::null(), &tcx)
131161
}
132162
}
133-
VtblEntry::MetadataSize => Scalar::from_uint(size, ptr_capacity),
134-
VtblEntry::MetadataAlign => Scalar::from_uint(align, ptr_capacity),
135-
VtblEntry::Vacant => continue,
163+
VtblEntry::MetadataSize => Scalar::from_uint(size, usize_size),
164+
VtblEntry::MetadataAlign => Scalar::from_uint(align, usize_size),
165+
VtblEntry::Vacant => {
166+
field_offset += entries_memory_size[idx];
167+
continue;
168+
}
136169
VtblEntry::Method(instance) => {
137170
// Prepare the fn ptr we write into the vtable.
138171
let fn_alloc_id = tcx.reserve_and_set_fn_alloc(instance, CTFE_ALLOC_SALT);
@@ -146,8 +179,13 @@ pub(super) fn vtable_allocation_provider<'tcx>(
146179
Scalar::from_pointer(vptr, &tcx)
147180
}
148181
};
182+
183+
let current_offset = field_offset;
184+
let field_data_size = entries_data_size[idx];
185+
field_offset += entries_memory_size[idx];
186+
149187
vtable
150-
.write_scalar(&tcx, alloc_range(ptr_size * idx, ptr_capacity), scalar)
188+
.write_scalar(&tcx, alloc_range(current_offset, field_data_size), scalar)
151189
.expect("failed to build vtable representation");
152190
}
153191

Lines changed: 62 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,62 @@
1+
//@ revisions: cheriot
2+
//@ [cheriot] compile-flags: --target riscv32cheriot-unknown-cheriotrtos -Cdebuginfo=2 -Copt-level=0 -Csymbol-mangling-version=v0
3+
//@ [cheriot] needs-llvm-components: riscv
4+
5+
#![crate_type = "lib"]
6+
#![no_std]
7+
8+
extern crate alloc;
9+
use alloc::boxed::Box;
10+
11+
// cheriot: @ptable
12+
// Force early emission of debuginfo for `usize` and `*const ()`.
13+
pub static mut XYZ: Option<(usize, *const ())> = None;
14+
15+
pub struct Foo {
16+
dropme: alloc::string::String,
17+
}
18+
19+
pub trait SomeTrait {
20+
fn method1(&self) -> u32;
21+
fn method2(&self) -> u32;
22+
}
23+
24+
impl SomeTrait for Foo {
25+
fn method1(&self) -> u32 {
26+
1
27+
}
28+
fn method2(&self) -> u32 {
29+
2
30+
}
31+
}
32+
33+
pub trait SomeTraitWithGenerics<T, U> {
34+
fn method1(&self) -> (T, U);
35+
}
36+
37+
impl SomeTraitWithGenerics<u64, i8> for Foo {
38+
fn method1(&self) -> (u64, i8) {
39+
(1, 2)
40+
}
41+
}
42+
43+
pub fn foo(x: &Foo) -> (u32, (u64, i8), &dyn Send) {
44+
let y: &dyn SomeTrait = x;
45+
let z: &dyn SomeTraitWithGenerics<u64, i8> = x;
46+
(y.method1(), z.method1(), x as &dyn Send)
47+
}
48+
49+
// Constructing the debuginfo name for the FnOnce vtable below initially caused an ICE on MSVC
50+
// because the trait type contains a late bound region that needed to be erased before the type
51+
// layout for the niche enum `Option<&dyn Fn()>` could be computed.
52+
pub fn bar() -> Box<dyn FnOnce(Option<&dyn Fn()>)> {
53+
Box::new(|_x: Option<&dyn Fn()>| {})
54+
}
55+
56+
fn generic_closure<T: 'static>(x: T) -> Box<dyn FnOnce() -> T> {
57+
Box::new(move || x)
58+
}
59+
60+
pub fn instantiate_generic_closures() -> (Box<dyn FnOnce() -> u32>, Box<dyn FnOnce() -> bool>) {
61+
(generic_closure(1u32), generic_closure(false))
62+
}

0 commit comments

Comments
 (0)