use std::fmt;

use rustc_ast::Mutability;
use rustc_macros::HashStable;
use rustc_type_ir::elaborate;

use crate::mir::interpret::{
    AllocId, AllocInit, Allocation, CTFE_ALLOC_SALT, Pointer, Scalar, alloc_range,
};
use crate::ty::{self, Instance, TraitRef, Ty, TyCtxt};

#[derive(Clone, Copy, PartialEq, HashStable)]
pub enum VtblEntry<'tcx> {
    /// destructor of this type (used in vtable header)
    MetadataDropInPlace,
    /// layout size of this type (used in vtable header)
    MetadataSize,
    /// layout align of this type (used in vtable header)
    MetadataAlign,
    /// non-dispatchable associated function that is excluded from trait object
    Vacant,
    /// dispatchable associated function
    Method(Instance<'tcx>),
    /// pointer to a separate supertrait vtable, can be used by trait upcasting coercion
    TraitVPtr(TraitRef<'tcx>),
}
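
// For orientation, a sketch of the entry list this enum describes (illustrative, not
// normative): for `trait Foo { fn a(&self); fn b(&self); }` implemented by a concrete
// type, the vtable entries are roughly
//   [MetadataDropInPlace, MetadataSize, MetadataAlign, Method(a), Method(b)]
// with `TraitVPtr` slots appearing when a supertrait needs its own vtable pointer for
// upcasting, and `Vacant` slots where a method cannot be dispatched.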

impl<'tcx> fmt::Debug for VtblEntry<'tcx> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // We want to call `Display` on `Instance` and `TraitRef`,
        // so we implement this manually.
        match self {
            VtblEntry::MetadataDropInPlace => write!(f, "MetadataDropInPlace"),
            VtblEntry::MetadataSize => write!(f, "MetadataSize"),
            VtblEntry::MetadataAlign => write!(f, "MetadataAlign"),
            VtblEntry::Vacant => write!(f, "Vacant"),
            VtblEntry::Method(instance) => write!(f, "Method({instance})"),
            VtblEntry::TraitVPtr(trait_ref) => write!(f, "TraitVPtr({trait_ref})"),
        }
    }
}

// Needs to be associated with the `'tcx` lifetime
impl<'tcx> TyCtxt<'tcx> {
    pub const COMMON_VTABLE_ENTRIES: &'tcx [VtblEntry<'tcx>] =
        &[VtblEntry::MetadataDropInPlace, VtblEntry::MetadataSize, VtblEntry::MetadataAlign];
}

pub const COMMON_VTABLE_ENTRIES_DROPINPLACE: usize = 0;
pub const COMMON_VTABLE_ENTRIES_SIZE: usize = 1;
pub const COMMON_VTABLE_ENTRIES_ALIGN: usize = 2;
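
// These indices mirror the order of `TyCtxt::COMMON_VTABLE_ENTRIES` above, so (a sketch of
// the intended use) the size metadata of a vtable lives at byte offset
// `COMMON_VTABLE_ENTRIES_SIZE * ptr_size`, as written by the loop in
// `vtable_allocation_provider` below.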

// Note that we don't have access to a self type here; this has to be based purely on the trait
// (and supertrait) definitions. That means we can't call into the same `vtable_entries` code,
// since that returns a specific instantiation (e.g., with `Vacant` slots when bounds aren't
// satisfied). The goal here is a best-effort approximation without duplicating a lot of code.
//
// This function is used in layout computation for e.g. `&dyn Trait`, so it's critical that it
// is an accurate approximation (in particular, a lower bound on the real entry count). We
// verify this when actually computing the vtable below.
pub(crate) fn vtable_min_entries<'tcx>(
    tcx: TyCtxt<'tcx>,
    trait_ref: Option<ty::ExistentialTraitRef<'tcx>>,
) -> usize {
    let mut count = TyCtxt::COMMON_VTABLE_ENTRIES.len();
    let Some(trait_ref) = trait_ref else {
        return count;
    };

    // This includes self in supertraits.
    for def_id in elaborate::supertrait_def_ids(tcx, trait_ref.def_id) {
        count += tcx.own_existential_vtable_entries(def_id).len();
    }

    count
}
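
// For example (a sketch, not something the compiler computes here): for `trait A { fn a(&self); }`
// and `trait B: A { fn b(&self); }`, the minimum for `dyn B` is the 3 common header entries plus
// the own entries of both `B` and `A` (via `supertrait_def_ids`), so at least 5 slots.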

/// Retrieves an allocation that represents the contents of a vtable.
/// Since this is a query, allocations are cached and not duplicated.
///
/// This is an "internal" `AllocId` that should never be used as a value in the interpreted
/// program. The interpreter should use `AllocId`s that refer to a `GlobalAlloc::VTable`
/// instead. (This is similar to statics, which also have an "internal" `AllocId` storing
/// their initial contents.)
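///
/// A usage sketch (illustrative only; callers go through the `vtable_allocation` query rather
/// than calling this provider directly):
///
/// ```rust,ignore (illustrative)
/// // Given a `tcx: TyCtxt<'tcx>`, a concrete `ty`, and an optional principal
/// // `ty::ExistentialTraitRef`, this yields the cached vtable allocation:
/// let alloc_id = tcx.vtable_allocation((ty, Some(existential_trait_ref)));
/// ```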
pub(super) fn vtable_allocation_provider<'tcx>(
    tcx: TyCtxt<'tcx>,
    key: (Ty<'tcx>, Option<ty::ExistentialTraitRef<'tcx>>),
) -> AllocId {
    let (ty, poly_trait_ref) = key;

    let vtable_entries = if let Some(poly_trait_ref) = poly_trait_ref {
        let trait_ref = poly_trait_ref.with_self_ty(tcx, ty);
        let trait_ref = tcx.erase_and_anonymize_regions(trait_ref);

        tcx.vtable_entries(trait_ref)
    } else {
        TyCtxt::COMMON_VTABLE_ENTRIES
    };
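
    // (For reference: `poly_trait_ref` is `None` for trait objects without a principal
    // trait, e.g. `dyn Send + Sync`; those only carry the common metadata header.)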

    // This confirms that the layout computation for `&dyn Trait` has an accurate sizing.
    assert!(vtable_entries.len() >= vtable_min_entries(tcx, poly_trait_ref));

    let layout = tcx
        .layout_of(ty::TypingEnv::fully_monomorphized().as_query_input(ty))
        .expect("failed to build vtable representation");
    assert!(layout.is_sized(), "can't create a vtable for an unsized type");
    let size = layout.size.bytes();
    let align = layout.align.bytes();

    let ptr_size = tcx.data_layout.pointer_size();
    let ptr_align = tcx.data_layout.pointer_align().abi;

    let vtable_size = ptr_size * u64::try_from(vtable_entries.len()).unwrap();
    let mut vtable = Allocation::new(vtable_size, ptr_align, AllocInit::Uninit, ());

    // No need to do any alignment checks on the memory accesses below, because we know the
    // allocation is correctly aligned as we created it above. Also we're only offsetting by
    // multiples of `ptr_align`, which means that it will stay aligned to `ptr_align`.

    for (idx, entry) in vtable_entries.iter().enumerate() {
        let idx: u64 = u64::try_from(idx).unwrap();
        let scalar = match *entry {
            VtblEntry::MetadataDropInPlace => {
                if ty.needs_drop(tcx, ty::TypingEnv::fully_monomorphized()) {
                    let instance = ty::Instance::resolve_drop_in_place(tcx, ty);
                    let fn_alloc_id = tcx.reserve_and_set_fn_alloc(instance, CTFE_ALLOC_SALT);
                    let fn_ptr = Pointer::from(fn_alloc_id);
                    Scalar::from_pointer(fn_ptr, &tcx)
                } else {
                    Scalar::from_maybe_pointer(Pointer::null(), &tcx)
                }
            }
            VtblEntry::MetadataSize => Scalar::from_uint(size, ptr_size),
            VtblEntry::MetadataAlign => Scalar::from_uint(align, ptr_size),
            VtblEntry::Vacant => continue,
            VtblEntry::Method(instance) => {
                // Prepare the fn ptr we write into the vtable.
                let fn_alloc_id = tcx.reserve_and_set_fn_alloc(instance, CTFE_ALLOC_SALT);
                let fn_ptr = Pointer::from(fn_alloc_id);
                Scalar::from_pointer(fn_ptr, &tcx)
            }
            VtblEntry::TraitVPtr(trait_ref) => {
                let super_trait_ref = ty::ExistentialTraitRef::erase_self_ty(tcx, trait_ref);
                let supertrait_alloc_id = tcx.vtable_allocation((ty, Some(super_trait_ref)));
                let vptr = Pointer::from(supertrait_alloc_id);
                Scalar::from_pointer(vptr, &tcx)
            }
        };
        vtable
            .write_scalar(&tcx, alloc_range(ptr_size * idx, ptr_size), scalar)
            .expect("failed to build vtable representation");
    }
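
    // Resulting allocation layout (a sketch), one pointer-sized slot per entry:
    //   [ drop_in_place | size | align | method ptrs... | supertrait vptrs... ]
    // `Vacant` slots were skipped above, so their bytes stay uninitialized.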

    vtable.mutability = Mutability::Not;
    tcx.reserve_and_set_memory_alloc(tcx.mk_const_alloc(vtable))
}