//! An interpreter for MIR used in CTFE and by Miri.

#[macro_use]
4mod error;
5
6mod allocation;
7mod pointer;
8mod queries;
9mod value;
10
11use std::io::{Read, Write};
12use std::num::NonZero;
13use std::{fmt, io};
14
15use rustc_abi::{AddressSpace, Align, Endian, HasDataLayout, Size};
16use rustc_ast::{LitKind, Mutability};
17use rustc_data_structures::fx::FxHashMap;
18use rustc_data_structures::sharded::ShardedHashMap;
19use rustc_data_structures::sync::{AtomicU64, Lock};
20use rustc_hir::def::DefKind;
21use rustc_hir::def_id::{DefId, LocalDefId};
22use rustc_macros::{HashStable, TyDecodable, TyEncodable, TypeFoldable, TypeVisitable};
23use rustc_serialize::{Decodable, Encodable};
24use tracing::{debug, trace};
25pub use {
27 err_exhaust, err_inval, err_machine_stop, err_ub, err_ub_custom, err_ub_format, err_unsup,
28 err_unsup_format, throw_exhaust, throw_inval, throw_machine_stop, throw_ub, throw_ub_custom,
29 throw_ub_format, throw_unsup, throw_unsup_format,
30};
31
32pub use self::allocation::{
33 AllocBytes, AllocError, AllocInit, AllocRange, AllocResult, Allocation, ConstAllocation,
34 InitChunk, InitChunkIter, alloc_range,
35};
36pub use self::error::{
37 BadBytesAccess, CheckAlignMsg, CheckInAllocMsg, ErrorHandled, EvalStaticInitializerRawResult,
38 EvalToAllocationRawResult, EvalToConstValueResult, EvalToValTreeResult, ExpectedKind,
39 InterpErrorInfo, InterpErrorKind, InterpResult, InvalidMetaKind, InvalidProgramInfo,
40 MachineStopType, Misalignment, PointerKind, ReportedErrorInfo, ResourceExhaustionInfo,
41 ScalarSizeMismatch, UndefinedBehaviorInfo, UnsupportedOpInfo, ValidationErrorInfo,
42 ValidationErrorKind, interp_ok,
43};
44pub use self::pointer::{CtfeProvenance, Pointer, PointerArithmetic, Provenance};
45pub use self::value::Scalar;
46use crate::mir;
47use crate::ty::codec::{TyDecoder, TyEncoder};
48use crate::ty::print::with_no_trimmed_paths;
49use crate::ty::{self, Instance, Ty, TyCtxt};
50
/// Uniquely identifies one evaluation of a global: a constant, a static, or a
/// promoted within the body of such an item.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, TyEncodable, TyDecodable)]
#[derive(HashStable, TypeFoldable, TypeVisitable)]
pub struct GlobalId<'tcx> {
    /// The instance of the item this global belongs to.
    // NOTE(review): for promoteds this is presumably the *parent* item's
    // instance (see `display`, which appends the promoted index) — confirm.
    pub instance: ty::Instance<'tcx>,

    /// `Some` if this identifies a promoted: the index of the promoted MIR
    /// body within its parent; `None` for the item itself.
    pub promoted: Option<mir::Promoted>,
}
64
65impl<'tcx> GlobalId<'tcx> {
66 pub fn display(self, tcx: TyCtxt<'tcx>) -> String {
67 let instance_name = with_no_trimmed_paths!(tcx.def_path_str(self.instance.def.def_id()));
68 if let Some(promoted) = self.promoted {
69 format!("{instance_name}::{promoted:?}")
70 } else {
71 instance_name
72 }
73 }
74}
75
/// Packages a literal together with the type it should be evaluated at and a
/// negation flag.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, HashStable)]
pub struct LitToConstInput<'tcx> {
    /// The absolute (un-negated) literal.
    pub lit: &'tcx LitKind,
    /// The type the resulting constant should have.
    pub ty: Ty<'tcx>,
    /// Whether the literal appears under a unary minus (e.g. `-42`).
    pub neg: bool,
}
86
/// An opaque identifier for an interned global allocation.
/// Wraps `NonZero<u64>` so that `Option<AllocId>` incurs no size overhead
/// (the zero value serves as the `None` niche).
#[derive(Copy, Clone, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct AllocId(pub NonZero<u64>);
89
90impl fmt::Debug for AllocId {
93 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
94 if f.alternate() { write!(f, "a{}", self.0) } else { write!(f, "alloc{}", self.0) }
95 }
96}
97
/// Serialized tag identifying which `GlobalAlloc` variant follows in the
/// encoded stream (see `specialized_encode_alloc_id` / `decode_alloc_id`).
#[derive(TyDecodable, TyEncodable)]
enum AllocDiscriminant {
    /// A `GlobalAlloc::Memory` payload follows.
    Alloc,
    /// A `GlobalAlloc::Function` instance follows.
    Fn,
    /// A `GlobalAlloc::VTable` (type + predicate list) follows.
    VTable,
    /// A `GlobalAlloc::Static` `DefId` follows.
    Static,
}
107
/// Encodes the `GlobalAlloc` that `alloc_id` refers to: an `AllocDiscriminant`
/// tag followed by the variant's payload. `AllocDecodingSession::decode_alloc_id`
/// is the inverse.
pub fn specialized_encode_alloc_id<'tcx, E: TyEncoder<I = TyCtxt<'tcx>>>(
    encoder: &mut E,
    tcx: TyCtxt<'tcx>,
    alloc_id: AllocId,
) {
    match tcx.global_alloc(alloc_id) {
        GlobalAlloc::Memory(alloc) => {
            trace!("encoding {:?} with {:#?}", alloc_id, alloc);
            AllocDiscriminant::Alloc.encode(encoder);
            alloc.encode(encoder);
        }
        GlobalAlloc::Function { instance } => {
            trace!("encoding {:?} with {:#?}", alloc_id, instance);
            AllocDiscriminant::Fn.encode(encoder);
            instance.encode(encoder);
        }
        GlobalAlloc::VTable(ty, poly_trait_ref) => {
            trace!("encoding {:?} with {ty:#?}, {poly_trait_ref:#?}", alloc_id);
            AllocDiscriminant::VTable.encode(encoder);
            ty.encode(encoder);
            poly_trait_ref.encode(encoder);
        }
        GlobalAlloc::Static(did) => {
            // Thread-local statics cannot be referred to by a constant's memory.
            assert!(!tcx.is_thread_local_static(did));
            // Statics are encoded by `DefId` only; their memory is evaluated lazily.
            AllocDiscriminant::Static.encode(encoder);
            // NOTE(review): the `DefId` is encoded via the explicit
            // `Encodable::<E>` path rather than `did.encode(encoder)` —
            // presumably to select a specific impl; confirm before changing.
            Encodable::<E>::encode(&did, encoder);
        }
    }
}
141
/// Decoding progress for a single serialized allocation slot.
#[derive(Clone)]
enum State {
    /// Not decoded yet.
    Empty,
    /// Fully decoded; holds the session-local `AllocId` it was interned as.
    Done(AllocId),
}
147
/// Shared state for decoding the `AllocId`s of one serialized blob; shared
/// between all decoding sessions so each allocation is decoded at most once.
pub struct AllocDecodingState {
    /// Per-allocation decoding state; each slot has its own lock so distinct
    /// allocations can be decoded concurrently.
    decoding_state: Vec<Lock<State>>,
    /// Byte positions of each encoded allocation, indexed in parallel with
    /// `decoding_state` (used via `Decoder::with_position`).
    data_offsets: Vec<u64>,
}
154
155impl AllocDecodingState {
156 #[inline]
157 pub fn new_decoding_session(&self) -> AllocDecodingSession<'_> {
158 AllocDecodingSession { state: self }
159 }
160
161 pub fn new(data_offsets: Vec<u64>) -> Self {
162 let decoding_state =
163 std::iter::repeat_with(|| Lock::new(State::Empty)).take(data_offsets.len()).collect();
164
165 Self { decoding_state, data_offsets }
166 }
167}
168
/// A cheap, copyable handle for decoding `AllocId`s against a shared
/// `AllocDecodingState`.
#[derive(Copy, Clone)]
pub struct AllocDecodingSession<'s> {
    state: &'s AllocDecodingState,
}
173
impl<'s> AllocDecodingSession<'s> {
    /// Decodes an `AllocId` in a thread-safe way: the first caller for a given
    /// slot decodes and interns the allocation; later callers get the memoized id.
    pub fn decode_alloc_id<'tcx, D>(&self, decoder: &mut D) -> AllocId
    where
        D: TyDecoder<I = TyCtxt<'tcx>>,
    {
        // An `AllocId` is encoded as an index into the per-blob offset table.
        let idx = usize::try_from(decoder.read_u32()).unwrap();
        let pos = usize::try_from(self.state.data_offsets[idx]).unwrap();

        // Peek at the discriminant at that position (without disturbing the
        // decoder's current position) so we know which variant to decode below.
        let (alloc_kind, pos) = decoder.with_position(pos, |decoder| {
            let alloc_kind = AllocDiscriminant::decode(decoder);
            (alloc_kind, decoder.position())
        });

        // Fast path: some session already decoded this slot.
        let mut entry = self.state.decoding_state[idx].lock();
        if let State::Done(alloc_id) = *entry {
            return alloc_id;
        }

        // Slow path: decode the payload and intern it while holding the slot lock.
        // NOTE(review): the lock is held across the decode below, so recursively
        // decoding the *same* slot from within would deadlock — confirm the
        // encoding cannot produce such a cycle.
        let alloc_id = decoder.with_position(pos, |decoder| match alloc_kind {
            AllocDiscriminant::Alloc => {
                trace!("creating memory alloc ID");
                let alloc = <ConstAllocation<'tcx> as Decodable<_>>::decode(decoder);
                trace!("decoded alloc {:?}", alloc);
                decoder.interner().reserve_and_set_memory_alloc(alloc)
            }
            AllocDiscriminant::Fn => {
                trace!("creating fn alloc ID");
                let instance = ty::Instance::decode(decoder);
                trace!("decoded fn alloc instance: {:?}", instance);
                decoder.interner().reserve_and_set_fn_alloc(instance, CTFE_ALLOC_SALT)
            }
            AllocDiscriminant::VTable => {
                trace!("creating vtable alloc ID");
                let ty = Decodable::decode(decoder);
                let poly_trait_ref = Decodable::decode(decoder);
                trace!("decoded vtable alloc instance: {ty:?}, {poly_trait_ref:?}");
                decoder.interner().reserve_and_set_vtable_alloc(ty, poly_trait_ref, CTFE_ALLOC_SALT)
            }
            AllocDiscriminant::Static => {
                trace!("creating extern static alloc ID");
                let did = <DefId as Decodable<D>>::decode(decoder);
                trace!("decoded static def-ID: {:?}", did);
                decoder.interner().reserve_and_set_static_alloc(did)
            }
        });

        // Memoize for subsequent lookups of this slot.
        *entry = State::Done(alloc_id);

        alloc_id
    }
}
244
/// What a given `AllocId` refers to.
#[derive(Debug, Clone, Eq, PartialEq, Hash, TyDecodable, TyEncodable, HashStable)]
pub enum GlobalAlloc<'tcx> {
    /// The alloc ID is used as a function pointer for this instance.
    Function { instance: Instance<'tcx> },
    /// The alloc ID points to a symbolic (not-yet-reified) vtable for the
    /// given concrete type and `dyn` predicate list.
    VTable(Ty<'tcx>, &'tcx ty::List<ty::PolyExistentialPredicate<'tcx>>),
    /// The alloc ID refers to a static item, identified by its `DefId` only;
    /// the memory is obtained by evaluating the static's initializer.
    Static(DefId),
    /// The alloc ID points to concrete interned memory.
    Memory(ConstAllocation<'tcx>),
}
262
263impl<'tcx> GlobalAlloc<'tcx> {
264 #[track_caller]
266 #[inline]
267 pub fn unwrap_memory(&self) -> ConstAllocation<'tcx> {
268 match *self {
269 GlobalAlloc::Memory(mem) => mem,
270 _ => bug!("expected memory, got {:?}", self),
271 }
272 }
273
274 #[track_caller]
276 #[inline]
277 pub fn unwrap_fn(&self) -> Instance<'tcx> {
278 match *self {
279 GlobalAlloc::Function { instance, .. } => instance,
280 _ => bug!("expected function, got {:?}", self),
281 }
282 }
283
284 #[track_caller]
286 #[inline]
287 pub fn unwrap_vtable(&self) -> (Ty<'tcx>, Option<ty::PolyExistentialTraitRef<'tcx>>) {
288 match *self {
289 GlobalAlloc::VTable(ty, dyn_ty) => (ty, dyn_ty.principal()),
290 _ => bug!("expected vtable, got {:?}", self),
291 }
292 }
293
294 #[inline]
296 pub fn address_space(&self, cx: &impl HasDataLayout) -> AddressSpace {
297 match self {
298 GlobalAlloc::Function { .. } => cx.data_layout().instruction_address_space,
299 GlobalAlloc::Static(..) | GlobalAlloc::Memory(..) | GlobalAlloc::VTable(..) => {
300 AddressSpace::DATA
301 }
302 }
303 }
304
305 pub fn mutability(&self, tcx: TyCtxt<'tcx>, typing_env: ty::TypingEnv<'tcx>) -> Mutability {
306 match self {
308 GlobalAlloc::Static(did) => {
309 let DefKind::Static { safety: _, mutability, nested } = tcx.def_kind(did) else {
310 bug!()
311 };
312 if nested {
313 if cfg!(debug_assertions) {
316 let alloc = tcx.eval_static_initializer(did).unwrap();
317 assert_eq!(alloc.0.mutability, mutability);
318 }
319 mutability
320 } else {
321 let mutability = match mutability {
322 Mutability::Not
323 if !tcx
324 .type_of(did)
325 .no_bound_vars()
326 .expect("statics should not have generic parameters")
327 .is_freeze(tcx, typing_env) =>
328 {
329 Mutability::Mut
330 }
331 _ => mutability,
332 };
333 mutability
334 }
335 }
336 GlobalAlloc::Memory(alloc) => alloc.inner().mutability,
337 GlobalAlloc::Function { .. } | GlobalAlloc::VTable(..) => {
338 Mutability::Not
340 }
341 }
342 }
343
344 pub fn size_and_align(
345 &self,
346 tcx: TyCtxt<'tcx>,
347 typing_env: ty::TypingEnv<'tcx>,
348 ) -> (Size, Align) {
349 match self {
350 GlobalAlloc::Static(def_id) => {
351 let DefKind::Static { nested, .. } = tcx.def_kind(def_id) else {
352 bug!("GlobalAlloc::Static is not a static")
353 };
354
355 if nested {
356 let alloc = tcx.eval_static_initializer(def_id).unwrap();
361 (alloc.0.size(), alloc.0.align)
362 } else {
363 let ty = tcx
368 .type_of(def_id)
369 .no_bound_vars()
370 .expect("statics should not have generic parameters");
371 let layout = tcx.layout_of(typing_env.as_query_input(ty)).unwrap();
372 assert!(layout.is_sized());
373 (layout.size, layout.align.abi)
374 }
375 }
376 GlobalAlloc::Memory(alloc) => {
377 let alloc = alloc.inner();
378 (alloc.size(), alloc.align)
379 }
380 GlobalAlloc::Function { .. } => (Size::ZERO, Align::ONE),
381 GlobalAlloc::VTable(..) => {
382 return (Size::ZERO, tcx.data_layout.pointer_align.abi);
384 }
385 }
386 }
387}
388
389pub const CTFE_ALLOC_SALT: usize = 0;
390
/// Global registry mapping `AllocId`s to what they refer to.
pub(crate) struct AllocMap<'tcx> {
    /// Maps each issued `AllocId` to its `GlobalAlloc` (sharded for concurrency).
    to_alloc: ShardedHashMap<AllocId, GlobalAlloc<'tcx>>,

    /// Reverse map used by `reserve_and_set_dedup`: equal `(alloc, salt)` pairs
    /// reuse a single `AllocId`.
    dedup: Lock<FxHashMap<(GlobalAlloc<'tcx>, usize), AllocId>>,

    /// The next `AllocId` to hand out; starts at 1 because `AllocId` wraps a
    /// `NonZero<u64>`.
    next_id: AtomicU64,
}
411
412impl<'tcx> AllocMap<'tcx> {
413 pub(crate) fn new() -> Self {
414 AllocMap {
415 to_alloc: Default::default(),
416 dedup: Default::default(),
417 next_id: AtomicU64::new(1),
418 }
419 }
420 fn reserve(&self) -> AllocId {
421 let next_id = self.next_id.fetch_add(1, std::sync::atomic::Ordering::Relaxed);
425 AllocId(NonZero::new(next_id).unwrap())
426 }
427}
428
impl<'tcx> TyCtxt<'tcx> {
    /// Obtains a fresh `AllocId` that can be referenced but does not yet have
    /// an allocation backing it (see `set_alloc_id_memory` / `set_nested_alloc_id_static`).
    pub fn reserve_alloc_id(self) -> AllocId {
        self.alloc_map.reserve()
    }

    /// Reserves a new ID *unless* an equal `(alloc, salt)` pair was interned
    /// before, in which case the existing ID is returned. ICEs when asked to
    /// dedup mutable memory, since sharing an ID would alias mutations.
    fn reserve_and_set_dedup(self, alloc: GlobalAlloc<'tcx>, salt: usize) -> AllocId {
        if let GlobalAlloc::Memory(mem) = alloc {
            if mem.inner().mutability.is_mut() {
                bug!("trying to dedup-reserve mutable memory");
            }
        }
        let alloc_salt = (alloc, salt);
        // Hold the `dedup` lock across check-and-insert so two concurrent
        // callers cannot both miss and create distinct IDs for the same pair.
        let mut dedup = self.alloc_map.dedup.lock();
        if let Some(&alloc_id) = dedup.get(&alloc_salt) {
            return alloc_id;
        }
        let id = self.alloc_map.reserve();
        debug!("creating alloc {:?} with id {id:?}", alloc_salt.0);
        let had_previous = self
            .alloc_map
            .to_alloc
            .lock_shard_by_value(&id)
            .insert(id, alloc_salt.0.clone())
            .is_some();
        // `id` was freshly reserved, so it cannot already be registered.
        assert!(!had_previous);
        dedup.insert(alloc_salt, id);
        id
    }

    /// Interns immutable memory, deduplicating: identical memory with the same
    /// salt maps to the same `AllocId`.
    pub fn reserve_and_set_memory_dedup(self, mem: ConstAllocation<'tcx>, salt: usize) -> AllocId {
        self.reserve_and_set_dedup(GlobalAlloc::Memory(mem), salt)
    }

    /// Generates an `AllocId` for the given static, or returns the cached one
    /// if this static was interned before.
    pub fn reserve_and_set_static_alloc(self, static_id: DefId) -> AllocId {
        // Salt 0: presumably a static's `DefId` is already unique, so no
        // further disambiguation is needed (NOTE(review): confirm).
        let salt = 0;
        self.reserve_and_set_dedup(GlobalAlloc::Static(static_id), salt)
    }

    /// Generates an `AllocId` usable as a function pointer; deduplicated per
    /// `(instance, salt)`.
    pub fn reserve_and_set_fn_alloc(self, instance: Instance<'tcx>, salt: usize) -> AllocId {
        self.reserve_and_set_dedup(GlobalAlloc::Function { instance }, salt)
    }

    /// Generates an `AllocId` for a symbolic (not-reified) vtable; deduplicated
    /// per `(ty, dyn_ty, salt)`.
    pub fn reserve_and_set_vtable_alloc(
        self,
        ty: Ty<'tcx>,
        dyn_ty: &'tcx ty::List<ty::PolyExistentialPredicate<'tcx>>,
        salt: usize,
    ) -> AllocId {
        self.reserve_and_set_dedup(GlobalAlloc::VTable(ty, dyn_ty), salt)
    }

    /// Interns the allocation under a *fresh* `AllocId`, even if identical
    /// memory was interned before (contrast `reserve_and_set_memory_dedup`).
    pub fn reserve_and_set_memory_alloc(self, mem: ConstAllocation<'tcx>) -> AllocId {
        let id = self.reserve_alloc_id();
        self.set_alloc_id_memory(id, mem);
        id
    }

    /// Looks up what `id` refers to; `None` if the ID is dangling (reserved
    /// but never set, or never issued).
    #[inline]
    pub fn try_get_global_alloc(self, id: AllocId) -> Option<GlobalAlloc<'tcx>> {
        self.alloc_map.to_alloc.lock_shard_by_value(&id).get(&id).cloned()
    }

    /// Like `try_get_global_alloc`, but ICEs if the ID is dangling.
    #[inline]
    #[track_caller]
    pub fn global_alloc(self, id: AllocId) -> GlobalAlloc<'tcx> {
        match self.try_get_global_alloc(id) {
            Some(alloc) => alloc,
            None => bug!("could not find allocation for {id:?}"),
        }
    }

    /// Binds a reserved `id` to the given memory. ICEs if `id` was already
    /// bound to anything, even the same allocation.
    pub fn set_alloc_id_memory(self, id: AllocId, mem: ConstAllocation<'tcx>) {
        if let Some(old) =
            self.alloc_map.to_alloc.lock_shard_by_value(&id).insert(id, GlobalAlloc::Memory(mem))
        {
            bug!("tried to set allocation ID {id:?}, but it was already existing as {old:#?}");
        }
    }

    /// Binds a reserved `id` to a (nested) static item. ICEs if `id` was
    /// already bound to anything.
    pub fn set_nested_alloc_id_static(self, id: AllocId, def_id: LocalDefId) {
        if let Some(old) = self
            .alloc_map
            .to_alloc
            .lock_shard_by_value(&id)
            .insert(id, GlobalAlloc::Static(def_id.to_def_id()))
        {
            bug!("tried to set allocation ID {id:?}, but it was already existing as {old:#?}");
        }
    }
}
552
553#[inline]
558pub fn write_target_uint(
559 endianness: Endian,
560 mut target: &mut [u8],
561 data: u128,
562) -> Result<(), io::Error> {
563 match endianness {
566 Endian::Little => target.write(&data.to_le_bytes())?,
567 Endian::Big => target.write(&data.to_be_bytes()[16 - target.len()..])?,
568 };
569 debug_assert!(target.len() == 0); Ok(())
571}
572
573#[inline]
574pub fn read_target_uint(endianness: Endian, mut source: &[u8]) -> Result<u128, io::Error> {
575 let mut buf = [0u8; std::mem::size_of::<u128>()];
577 let uint = match endianness {
579 Endian::Little => {
580 source.read_exact(&mut buf[..source.len()])?;
581 Ok(u128::from_le_bytes(buf))
582 }
583 Endian::Big => {
584 source.read_exact(&mut buf[16 - source.len()..])?;
585 Ok(u128::from_be_bytes(buf))
586 }
587 };
588 debug_assert!(source.len() == 0); uint
590}