//! Global machine state as well as implementation of the interpreter engine
//! `Machine` trait.

use std::any::Any;
use std::borrow::Cow;
use std::cell::{Cell, RefCell};
use std::collections::hash_map::Entry;
use std::path::Path;
use std::{fmt, process};

use rand::rngs::StdRng;
use rand::{Rng, SeedableRng};
use rustc_abi::{Align, ExternAbi, Size};
use rustc_apfloat::{Float, FloatConvert};
use rustc_attr_parsing::InlineAttr;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
#[allow(unused)]
use rustc_data_structures::static_assert_size;
use rustc_middle::mir;
use rustc_middle::query::TyCtxtAt;
use rustc_middle::ty::layout::{
    HasTyCtxt, HasTypingEnv, LayoutCx, LayoutError, LayoutOf, TyAndLayout,
};
use rustc_middle::ty::{self, Instance, Ty, TyCtxt};
use rustc_session::config::InliningThreshold;
use rustc_span::def_id::{CrateNum, DefId};
use rustc_span::{Span, SpanData, Symbol};
use rustc_target::callconv::FnAbi;

use crate::concurrency::cpu_affinity::{self, CpuAffinityMask};
use crate::concurrency::data_race::{self, NaReadType, NaWriteType};
use crate::concurrency::weak_memory;
use crate::*;

/// Smallest real-time signal number we support (cf. `SIGRTMIN` on Linux).
pub const SIGRTMIN: i32 = 34;

/// Largest real-time signal number we support.
pub const SIGRTMAX: i32 = 42;

/// Each anonymous global gets put at one of this many possible base addresses; since such
/// allocations are never deallocated, allowing arbitrarily many base addresses would leak memory.
const ADDRS_PER_ANON_GLOBAL: usize = 32;

/// Extra data stored with each stack frame.
pub struct FrameExtra<'tcx> {
    /// Extra data for the Borrow Tracker.
    pub borrow_tracker: Option<borrow_tracker::FrameState>,

    /// If set, this is a `catch_unwind` frame: stores the data needed to identify it and
    /// continue execution once a panic has been caught.
    pub catch_unwind: Option<CatchUnwindData<'tcx>>,

    /// If `measureme` profiling is enabled, holds timing information for this frame's execution.
    pub timing: Option<measureme::DetachedTiming>,

    /// Whether this frame is considered user-relevant for diagnostics,
    /// i.e. whether it belongs to a local crate.
    pub is_user_relevant: bool,

    /// Random salt for this frame; used as part of the key into the machine's `const_cache`.
    salt: usize,

    /// Data race detector state for this frame.
    pub data_race: Option<data_race::FrameState>,
}
80
81impl<'tcx> std::fmt::Debug for FrameExtra<'tcx> {
82 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
83 let FrameExtra {
85 borrow_tracker,
86 catch_unwind,
87 timing: _,
88 is_user_relevant,
89 salt,
90 data_race,
91 } = self;
92 f.debug_struct("FrameData")
93 .field("borrow_tracker", borrow_tracker)
94 .field("catch_unwind", catch_unwind)
95 .field("is_user_relevant", is_user_relevant)
96 .field("salt", salt)
97 .field("data_race", data_race)
98 .finish()
99 }
100}
101
102impl VisitProvenance for FrameExtra<'_> {
103 fn visit_provenance(&self, visit: &mut VisitWith<'_>) {
104 let FrameExtra {
105 catch_unwind,
106 borrow_tracker,
107 timing: _,
108 is_user_relevant: _,
109 salt: _,
110 data_race: _,
111 } = self;
112
113 catch_unwind.visit_provenance(visit);
114 borrow_tracker.visit_provenance(visit);
115 }
116}
117
118#[derive(Debug, Copy, Clone, PartialEq, Eq)]
120pub enum MiriMemoryKind {
121 Rust,
123 Miri,
125 C,
127 WinHeap,
129 WinLocal,
131 Machine,
134 Runtime,
137 Global,
140 ExternStatic,
143 Tls,
146 Mmap,
148}
149
150impl From<MiriMemoryKind> for MemoryKind {
151 #[inline(always)]
152 fn from(kind: MiriMemoryKind) -> MemoryKind {
153 MemoryKind::Machine(kind)
154 }
155}
156
157impl MayLeak for MiriMemoryKind {
158 #[inline(always)]
159 fn may_leak(self) -> bool {
160 use self::MiriMemoryKind::*;
161 match self {
162 Rust | Miri | C | WinHeap | WinLocal | Runtime => false,
163 Machine | Global | ExternStatic | Tls | Mmap => true,
164 }
165 }
166}
167
168impl MiriMemoryKind {
169 fn should_save_allocation_span(self) -> bool {
171 use self::MiriMemoryKind::*;
172 match self {
173 Rust | Miri | C | WinHeap | WinLocal | Mmap => true,
175 Machine | Global | ExternStatic | Tls | Runtime => false,
177 }
178 }
179}
180
181impl fmt::Display for MiriMemoryKind {
182 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
183 use self::MiriMemoryKind::*;
184 match self {
185 Rust => write!(f, "Rust heap"),
186 Miri => write!(f, "Miri bare-metal heap"),
187 C => write!(f, "C heap"),
188 WinHeap => write!(f, "Windows heap"),
189 WinLocal => write!(f, "Windows local memory"),
190 Machine => write!(f, "machine-managed memory"),
191 Runtime => write!(f, "language runtime memory"),
192 Global => write!(f, "global (static or const)"),
193 ExternStatic => write!(f, "extern static"),
194 Tls => write!(f, "thread-local static"),
195 Mmap => write!(f, "mmap"),
196 }
197 }
198}
199
pub type MemoryKind = interpret::MemoryKind<MiriMemoryKind>;

/// Pointer provenance.
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub enum Provenance {
    /// Concrete provenance: we know the allocation this pointer points into and its borrow tag.
    Concrete {
        alloc_id: AllocId,
        tag: BorTag,
    },
    /// Wildcard provenance is created by integer-to-pointer casts; such a pointer may be used
    /// to access any allocation whose address has been exposed.
    Wildcard,
}

/// The "extra" information a pointer carries into a memory access:
/// its borrow tag, if it has a concrete one.
#[derive(Copy, Clone, PartialEq)]
pub enum ProvenanceExtra {
    Concrete(BorTag),
    Wildcard,
}

#[cfg(target_pointer_width = "64")]
static_assert_size!(StrictPointer, 24);
#[cfg(target_pointer_width = "64")]
static_assert_size!(Scalar, 32);

impl fmt::Debug for Provenance {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Provenance::Concrete { alloc_id, tag } => {
                if f.alternate() {
                    write!(f, "[{alloc_id:#?}]")?;
                } else {
                    write!(f, "[{alloc_id:?}]")?;
                }
                write!(f, "{tag:?}")?;
            }
            Provenance::Wildcard => {
                write!(f, "[wildcard]")?;
            }
        }
        Ok(())
    }
}

impl interpret::Provenance for Provenance {
    const OFFSET_IS_ADDR: bool = true;

    const WILDCARD: Option<Self> = Some(Provenance::Wildcard);

    fn get_alloc_id(self) -> Option<AllocId> {
        match self {
            Provenance::Concrete { alloc_id, .. } => Some(alloc_id),
            Provenance::Wildcard => None,
        }
    }

    fn fmt(ptr: &interpret::Pointer<Self>, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let (prov, addr) = ptr.into_parts();
        write!(f, "{:#x}", addr.bytes())?;
        if f.alternate() {
            write!(f, "{prov:#?}")?;
        } else {
            write!(f, "{prov:?}")?;
        }
        Ok(())
    }

    fn join(left: Option<Self>, right: Option<Self>) -> Option<Self> {
        match (left, right) {
            (
                Some(Provenance::Concrete { alloc_id: left_alloc, tag: left_tag }),
                Some(Provenance::Concrete { alloc_id: right_alloc, tag: right_tag }),
            ) if left_alloc == right_alloc && left_tag == right_tag => left,
            (Some(Provenance::Wildcard), o) | (o, Some(Provenance::Wildcard)) => o,
            _ => None,
        }
    }
}

impl fmt::Debug for ProvenanceExtra {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            ProvenanceExtra::Concrete(pid) => write!(f, "{pid:?}"),
            ProvenanceExtra::Wildcard => write!(f, "<wildcard>"),
        }
    }
}

impl ProvenanceExtra {
    pub fn and_then<T>(self, f: impl FnOnce(BorTag) -> Option<T>) -> Option<T> {
        match self {
            ProvenanceExtra::Concrete(pid) => f(pid),
            ProvenanceExtra::Wildcard => None,
        }
    }
}

/// Extra per-allocation data that Miri keeps in addition to the raw bytes.
#[derive(Debug)]
pub struct AllocExtra<'tcx> {
    /// Borrow Tracker state for this allocation.
    pub borrow_tracker: Option<borrow_tracker::AllocState>,
    /// Data race detection state for this allocation.
    pub data_race: Option<data_race::AllocState>,
    /// Weak memory emulation (store buffer) state for this allocation.
    pub weak_memory: Option<weak_memory::AllocState>,
    /// A backtrace to where this allocation was created, if we are tracking them.
    pub backtrace: Option<Vec<FrameInfo<'tcx>>>,
    /// Synchronization primitives (e.g. mutexes) that live inside this allocation, keyed by
    /// their offset.
    pub sync: FxHashMap<Size, Box<dyn Any>>,
}

impl<'tcx> Clone for AllocExtra<'tcx> {
    fn clone(&self) -> Self {
        panic!("our allocations should never be cloned");
    }
}

impl VisitProvenance for AllocExtra<'_> {
    fn visit_provenance(&self, visit: &mut VisitWith<'_>) {
        let AllocExtra { borrow_tracker, data_race, weak_memory, backtrace: _, sync: _ } = self;

        borrow_tracker.visit_provenance(visit);
        data_race.visit_provenance(visit);
        weak_memory.visit_provenance(visit);
    }
}

/// Precomputed layouts of primitive types.
pub struct PrimitiveLayouts<'tcx> {
    pub unit: TyAndLayout<'tcx>,
    pub i8: TyAndLayout<'tcx>,
    pub i16: TyAndLayout<'tcx>,
    pub i32: TyAndLayout<'tcx>,
    pub i64: TyAndLayout<'tcx>,
    pub i128: TyAndLayout<'tcx>,
    pub isize: TyAndLayout<'tcx>,
    pub u8: TyAndLayout<'tcx>,
    pub u16: TyAndLayout<'tcx>,
    pub u32: TyAndLayout<'tcx>,
    pub u64: TyAndLayout<'tcx>,
    pub u128: TyAndLayout<'tcx>,
    pub usize: TyAndLayout<'tcx>,
    pub bool: TyAndLayout<'tcx>,
    pub mut_raw_ptr: TyAndLayout<'tcx>,   // `*mut ()`
    pub const_raw_ptr: TyAndLayout<'tcx>, // `*const ()`
}

impl<'tcx> PrimitiveLayouts<'tcx> {
    fn new(layout_cx: LayoutCx<'tcx>) -> Result<Self, &'tcx LayoutError<'tcx>> {
        let tcx = layout_cx.tcx();
        let mut_raw_ptr = Ty::new_mut_ptr(tcx, tcx.types.unit);
        let const_raw_ptr = Ty::new_imm_ptr(tcx, tcx.types.unit);
        Ok(Self {
            unit: layout_cx.layout_of(tcx.types.unit)?,
            i8: layout_cx.layout_of(tcx.types.i8)?,
            i16: layout_cx.layout_of(tcx.types.i16)?,
            i32: layout_cx.layout_of(tcx.types.i32)?,
            i64: layout_cx.layout_of(tcx.types.i64)?,
            i128: layout_cx.layout_of(tcx.types.i128)?,
            isize: layout_cx.layout_of(tcx.types.isize)?,
            u8: layout_cx.layout_of(tcx.types.u8)?,
            u16: layout_cx.layout_of(tcx.types.u16)?,
            u32: layout_cx.layout_of(tcx.types.u32)?,
            u64: layout_cx.layout_of(tcx.types.u64)?,
            u128: layout_cx.layout_of(tcx.types.u128)?,
            usize: layout_cx.layout_of(tcx.types.usize)?,
            bool: layout_cx.layout_of(tcx.types.bool)?,
            mut_raw_ptr: layout_cx.layout_of(mut_raw_ptr)?,
            const_raw_ptr: layout_cx.layout_of(const_raw_ptr)?,
        })
    }

    pub fn uint(&self, size: Size) -> Option<TyAndLayout<'tcx>> {
        match size.bits() {
            8 => Some(self.u8),
            16 => Some(self.u16),
            32 => Some(self.u32),
            64 => Some(self.u64),
            128 => Some(self.u128),
            _ => None,
        }
    }

    pub fn int(&self, size: Size) -> Option<TyAndLayout<'tcx>> {
        match size.bits() {
            8 => Some(self.i8),
            16 => Some(self.i16),
            32 => Some(self.i32),
            64 => Some(self.i64),
            128 => Some(self.i128),
            _ => None,
        }
    }
}

pub struct MiriMachine<'tcx> {
    pub tcx: TyCtxt<'tcx>,

    pub borrow_tracker: Option<borrow_tracker::GlobalState>,

    pub data_race: Option<data_race::GlobalState>,

    pub alloc_addresses: alloc_addresses::GlobalState,

    pub(crate) env_vars: EnvVars<'tcx>,

    pub(crate) main_fn_ret_place: Option<MPlaceTy<'tcx>>,

    pub(crate) argc: Option<Pointer>,
    pub(crate) argv: Option<Pointer>,
    pub(crate) cmd_line: Option<Pointer>,

    pub(crate) tls: TlsData<'tcx>,

    pub(crate) isolated_op: IsolatedOp,

    pub(crate) validation: ValidationMode,

    pub(crate) fds: shims::FdTable,
    pub(crate) dirs: shims::DirTable,

    pub(crate) epoll_interests: shims::EpollInterestTable,

    pub(crate) clock: Clock,

    pub(crate) threads: ThreadManager<'tcx>,

    pub(crate) thread_cpu_affinity: FxHashMap<ThreadId, CpuAffinityMask>,

    pub(crate) sync: SynchronizationObjects,

    pub(crate) layouts: PrimitiveLayouts<'tcx>,

    pub(crate) static_roots: Vec<AllocId>,

    profiler: Option<measureme::Profiler>,
    string_cache: FxHashMap<String, measureme::StringId>,

    pub(crate) exported_symbols_cache: FxHashMap<Symbol, Option<Instance<'tcx>>>,

    pub(crate) backtrace_style: BacktraceStyle,

    pub(crate) local_crates: Vec<CrateNum>,

    extern_statics: FxHashMap<Symbol, StrictPointer>,

    pub(crate) rng: RefCell<StdRng>,

    tracked_alloc_ids: FxHashSet<AllocId>,
    track_alloc_accesses: bool,

    pub(crate) check_alignment: AlignmentCheck,

    pub(crate) cmpxchg_weak_failure_rate: f64,

    pub(crate) mute_stdout_stderr: bool,

    pub(crate) weak_memory: bool,

    pub(crate) preemption_rate: f64,

    pub(crate) report_progress: Option<u32>,
    pub(crate) basic_block_count: u64,

    #[cfg(unix)]
    pub native_lib: Option<(libloading::Library, std::path::PathBuf)>,
    #[cfg(not(unix))]
    pub native_lib: Option<!>,

    pub(crate) gc_interval: u32,
    pub(crate) since_gc: u32,

    pub(crate) num_cpus: u32,

    pub(crate) page_size: u64,
    pub(crate) stack_addr: u64,
    pub(crate) stack_size: u64,

    pub(crate) collect_leak_backtraces: bool,

    pub(crate) allocation_spans: RefCell<FxHashMap<AllocId, (Span, Option<Span>)>>,

    const_cache: RefCell<FxHashMap<(mir::Const<'tcx>, usize), OpTy<'tcx>>>,

    pub(crate) symbolic_alignment: RefCell<FxHashMap<AllocId, (Size, Align)>>,

    union_data_ranges: FxHashMap<Ty<'tcx>, RangeSet>,

    pub(crate) pthread_mutex_sanity: Cell<bool>,
    pub(crate) pthread_rwlock_sanity: Cell<bool>,
    pub(crate) pthread_condvar_sanity: Cell<bool>,

    pub(crate) sb_extern_type_warned: Cell<bool>,
    #[cfg(unix)]
    pub(crate) native_call_mem_warned: Cell<bool>,
    pub(crate) reject_in_isolation_warned: RefCell<FxHashSet<String>>,
    pub(crate) int2ptr_warned: RefCell<FxHashSet<Span>>,

    pub(crate) mangle_internal_symbol_cache: FxHashMap<&'static str, String>,
}

impl<'tcx> MiriMachine<'tcx> {
    pub(crate) fn new(config: &MiriConfig, layout_cx: LayoutCx<'tcx>) -> Self {
        let tcx = layout_cx.tcx();
        let local_crates = helpers::get_local_crates(tcx);
        let layouts =
            PrimitiveLayouts::new(layout_cx).expect("Couldn't get layouts of primitive types");
        let profiler = config.measureme_out.as_ref().map(|out| {
            let crate_name =
                tcx.sess.opts.crate_name.clone().unwrap_or_else(|| "unknown-crate".to_string());
            let pid = process::id();
            let filename = format!("{crate_name}-{pid:07}");
            let path = Path::new(out).join(filename);
            measureme::Profiler::new(path).expect("Couldn't create `measureme` profiler")
        });
        let rng = StdRng::seed_from_u64(config.seed.unwrap_or(0));
        let borrow_tracker = config.borrow_tracker.map(|bt| bt.instantiate_global_state(config));
        let data_race = config.data_race_detector.then(|| data_race::GlobalState::new(config));
        let page_size = if let Some(page_size) = config.page_size {
            page_size
        } else {
            let target = &tcx.sess.target;
            match target.arch.as_ref() {
                "wasm32" | "wasm64" => 64 * 1024,
                "aarch64" => {
                    if target.options.vendor.as_ref() == "apple" {
                        16 * 1024
                    } else {
                        4 * 1024
                    }
                }
                _ => 4 * 1024,
            }
        };
        let stack_addr = if tcx.pointer_size().bits() < 32 { page_size } else { page_size * 32 };
        let stack_size =
            if tcx.pointer_size().bits() < 32 { page_size * 4 } else { page_size * 16 };
        assert!(
            usize::try_from(config.num_cpus).unwrap() <= cpu_affinity::MAX_CPUS,
            "miri only supports up to {} CPUs, but {} were configured",
            cpu_affinity::MAX_CPUS,
            config.num_cpus
        );
        let threads = ThreadManager::default();
        let mut thread_cpu_affinity = FxHashMap::default();
        if matches!(&*tcx.sess.target.os, "linux" | "freebsd" | "android") {
            thread_cpu_affinity
                .insert(threads.active_thread(), CpuAffinityMask::new(&layout_cx, config.num_cpus));
        }
        MiriMachine {
            tcx,
            borrow_tracker,
            data_race,
            alloc_addresses: RefCell::new(alloc_addresses::GlobalStateInner::new(
                config, stack_addr,
            )),
            env_vars: EnvVars::default(),
            main_fn_ret_place: None,
            argc: None,
            argv: None,
            cmd_line: None,
            tls: TlsData::default(),
            isolated_op: config.isolated_op,
            validation: config.validation,
            fds: shims::FdTable::init(config.mute_stdout_stderr),
            epoll_interests: shims::EpollInterestTable::new(),
            dirs: Default::default(),
            layouts,
            threads,
            thread_cpu_affinity,
            sync: SynchronizationObjects::default(),
            static_roots: Vec::new(),
            profiler,
            string_cache: Default::default(),
            exported_symbols_cache: FxHashMap::default(),
            backtrace_style: config.backtrace_style,
            local_crates,
            extern_statics: FxHashMap::default(),
            rng: RefCell::new(rng),
            tracked_alloc_ids: config.tracked_alloc_ids.clone(),
            track_alloc_accesses: config.track_alloc_accesses,
            check_alignment: config.check_alignment,
            cmpxchg_weak_failure_rate: config.cmpxchg_weak_failure_rate,
            mute_stdout_stderr: config.mute_stdout_stderr,
            weak_memory: config.weak_memory_emulation,
            preemption_rate: config.preemption_rate,
            report_progress: config.report_progress,
            basic_block_count: 0,
            clock: Clock::new(config.isolated_op == IsolatedOp::Allow),
            #[cfg(unix)]
            native_lib: config.native_lib.as_ref().map(|lib_file_path| {
                let host_triple = rustc_session::config::host_tuple();
                let target_triple = tcx.sess.opts.target_triple.tuple();
                if host_triple != target_triple {
                    panic!(
                        "calling external C functions in linked .so file requires host and target to be the same: host={}, target={}",
                        host_triple,
                        target_triple,
                    );
                }
                (
                    unsafe {
                        libloading::Library::new(lib_file_path)
                            .expect("failed to read specified extern shared object file")
                    },
                    lib_file_path.clone(),
                )
            }),
            #[cfg(not(unix))]
            native_lib: config.native_lib.as_ref().map(|_| {
                panic!("calling functions from native libraries via FFI is only supported on Unix")
            }),
            gc_interval: config.gc_interval,
            since_gc: 0,
            num_cpus: config.num_cpus,
            page_size,
            stack_addr,
            stack_size,
            collect_leak_backtraces: config.collect_leak_backtraces,
            allocation_spans: RefCell::new(FxHashMap::default()),
            const_cache: RefCell::new(FxHashMap::default()),
            symbolic_alignment: RefCell::new(FxHashMap::default()),
            union_data_ranges: FxHashMap::default(),
            pthread_mutex_sanity: Cell::new(false),
            pthread_rwlock_sanity: Cell::new(false),
            pthread_condvar_sanity: Cell::new(false),
            sb_extern_type_warned: Cell::new(false),
            #[cfg(unix)]
            native_call_mem_warned: Cell::new(false),
            reject_in_isolation_warned: Default::default(),
            int2ptr_warned: Default::default(),
            mangle_internal_symbol_cache: Default::default(),
        }
    }

    pub(crate) fn late_init(
        ecx: &mut MiriInterpCx<'tcx>,
        config: &MiriConfig,
        on_main_stack_empty: StackEmptyCallback<'tcx>,
    ) -> InterpResult<'tcx> {
        EnvVars::init(ecx, config)?;
        MiriMachine::init_extern_statics(ecx)?;
        ThreadManager::init(ecx, on_main_stack_empty);
        interp_ok(())
    }

    pub(crate) fn add_extern_static(ecx: &mut MiriInterpCx<'tcx>, name: &str, ptr: Pointer) {
        // This was just allocated by us, so it is definitely a proper pointer.
        let ptr = ptr.into_pointer_or_addr().unwrap();
        ecx.machine.extern_statics.try_insert(Symbol::intern(name), ptr).unwrap();
    }

    pub(crate) fn communicate(&self) -> bool {
        self.isolated_op == IsolatedOp::Allow
    }

    /// Check whether the given frame belongs to a crate we consider "local" for diagnostics.
    pub(crate) fn is_local(&self, frame: &FrameInfo<'_>) -> bool {
        let def_id = frame.instance.def_id();
        def_id.is_local() || self.local_crates.contains(&def_id.krate)
    }

    /// Called when the interpretation terminates abnormally.
    pub(crate) fn handle_abnormal_termination(&mut self) {
        // Drop the profiler so that its buffered data gets written out.
        drop(self.profiler.take());
    }

    pub(crate) fn page_align(&self) -> Align {
        Align::from_bytes(self.page_size).unwrap()
    }

    pub(crate) fn allocated_span(&self, alloc_id: AllocId) -> Option<SpanData> {
        self.allocation_spans
            .borrow()
            .get(&alloc_id)
            .map(|(allocated, _deallocated)| allocated.data())
    }

    pub(crate) fn deallocated_span(&self, alloc_id: AllocId) -> Option<SpanData> {
        self.allocation_spans
            .borrow()
            .get(&alloc_id)
            .and_then(|(_allocated, deallocated)| *deallocated)
            .map(Span::data)
    }

    fn init_allocation(
        ecx: &MiriInterpCx<'tcx>,
        id: AllocId,
        kind: MemoryKind,
        size: Size,
        align: Align,
    ) -> InterpResult<'tcx, AllocExtra<'tcx>> {
        if ecx.machine.tracked_alloc_ids.contains(&id) {
            ecx.emit_diagnostic(NonHaltingDiagnostic::CreatedAlloc(id, size, align, kind));
        }

        let borrow_tracker = ecx
            .machine
            .borrow_tracker
            .as_ref()
            .map(|bt| bt.borrow_mut().new_allocation(id, size, kind, &ecx.machine));

        let data_race = ecx.machine.data_race.as_ref().map(|data_race| {
            data_race::AllocState::new_allocation(
                data_race,
                &ecx.machine.threads,
                size,
                kind,
                ecx.machine.current_span(),
            )
        });
        let weak_memory = ecx.machine.weak_memory.then(weak_memory::AllocState::new_allocation);

        // Only record a backtrace for allocations that could be reported as leaks.
        let backtrace = if kind.may_leak() || !ecx.machine.collect_leak_backtraces {
            None
        } else {
            Some(ecx.generate_stacktrace())
        };

        if matches!(kind, MemoryKind::Machine(kind) if kind.should_save_allocation_span()) {
            ecx.machine
                .allocation_spans
                .borrow_mut()
                .insert(id, (ecx.machine.current_span(), None));
        }

        interp_ok(AllocExtra {
            borrow_tracker,
            data_race,
            weak_memory,
            backtrace,
            sync: FxHashMap::default(),
        })
    }
}

impl VisitProvenance for MiriMachine<'_> {
    fn visit_provenance(&self, visit: &mut VisitWith<'_>) {
        #[rustfmt::skip]
        let MiriMachine {
            threads,
            thread_cpu_affinity: _,
            sync: _,
            tls,
            env_vars,
            main_fn_ret_place,
            argc,
            argv,
            cmd_line,
            extern_statics,
            dirs,
            borrow_tracker,
            data_race,
            alloc_addresses,
            fds,
            epoll_interests: _,
            tcx: _,
            isolated_op: _,
            validation: _,
            clock: _,
            layouts: _,
            static_roots: _,
            profiler: _,
            string_cache: _,
            exported_symbols_cache: _,
            backtrace_style: _,
            local_crates: _,
            rng: _,
            tracked_alloc_ids: _,
            track_alloc_accesses: _,
            check_alignment: _,
            cmpxchg_weak_failure_rate: _,
            mute_stdout_stderr: _,
            weak_memory: _,
            preemption_rate: _,
            report_progress: _,
            basic_block_count: _,
            native_lib: _,
            gc_interval: _,
            since_gc: _,
            num_cpus: _,
            page_size: _,
            stack_addr: _,
            stack_size: _,
            collect_leak_backtraces: _,
            allocation_spans: _,
            const_cache: _,
            symbolic_alignment: _,
            union_data_ranges: _,
            pthread_mutex_sanity: _,
            pthread_rwlock_sanity: _,
            pthread_condvar_sanity: _,
            sb_extern_type_warned: _,
            #[cfg(unix)]
            native_call_mem_warned: _,
            reject_in_isolation_warned: _,
            int2ptr_warned: _,
            mangle_internal_symbol_cache: _,
        } = self;

        threads.visit_provenance(visit);
        tls.visit_provenance(visit);
        env_vars.visit_provenance(visit);
        dirs.visit_provenance(visit);
        fds.visit_provenance(visit);
        data_race.visit_provenance(visit);
        borrow_tracker.visit_provenance(visit);
        alloc_addresses.visit_provenance(visit);
        main_fn_ret_place.visit_provenance(visit);
        argc.visit_provenance(visit);
        argv.visit_provenance(visit);
        cmd_line.visit_provenance(visit);
        for ptr in extern_statics.values() {
            ptr.visit_provenance(visit);
        }
    }
}

pub type MiriInterpCx<'tcx> = InterpCx<'tcx, MiriMachine<'tcx>>;

pub trait MiriInterpCxExt<'tcx> {
    fn eval_context_ref<'a>(&'a self) -> &'a MiriInterpCx<'tcx>;
    fn eval_context_mut<'a>(&'a mut self) -> &'a mut MiriInterpCx<'tcx>;
}
impl<'tcx> MiriInterpCxExt<'tcx> for MiriInterpCx<'tcx> {
    #[inline(always)]
    fn eval_context_ref(&self) -> &MiriInterpCx<'tcx> {
        self
    }
    #[inline(always)]
    fn eval_context_mut(&mut self) -> &mut MiriInterpCx<'tcx> {
        self
    }
}

impl<'tcx> Machine<'tcx> for MiriMachine<'tcx> {
    type MemoryKind = MiriMemoryKind;
    type ExtraFnVal = DynSym;

    type FrameExtra = FrameExtra<'tcx>;
    type AllocExtra = AllocExtra<'tcx>;

    type Provenance = Provenance;
    type ProvenanceExtra = ProvenanceExtra;
    type Bytes = MiriAllocBytes;

    type MemoryMap =
        MonoHashMap<AllocId, (MemoryKind, Allocation<Provenance, Self::AllocExtra, Self::Bytes>)>;

    const GLOBAL_KIND: Option<MiriMemoryKind> = Some(MiriMemoryKind::Global);

    const PANIC_ON_ALLOC_FAIL: bool = false;

    #[inline(always)]
    fn enforce_alignment(ecx: &MiriInterpCx<'tcx>) -> bool {
        ecx.machine.check_alignment != AlignmentCheck::None
    }

    #[inline(always)]
    fn alignment_check(
        ecx: &MiriInterpCx<'tcx>,
        alloc_id: AllocId,
        alloc_align: Align,
        alloc_kind: AllocKind,
        offset: Size,
        align: Align,
    ) -> Option<Misalignment> {
        if ecx.machine.check_alignment != AlignmentCheck::Symbolic {
            return None;
        }
        if alloc_kind != AllocKind::LiveData {
            return None;
        }
        let (promised_offset, promised_align) = ecx
            .machine
            .symbolic_alignment
            .borrow()
            .get(&alloc_id)
            .copied()
            .unwrap_or((Size::ZERO, alloc_align));
        if promised_align < align {
            Some(Misalignment { has: promised_align, required: align })
        } else {
            let distance = offset.bytes().wrapping_sub(promised_offset.bytes());
            if distance % align.bytes() == 0 {
                None
            } else {
                let distance_pow2 = 1 << distance.trailing_zeros();
                Some(Misalignment {
                    has: Align::from_bytes(distance_pow2).unwrap(),
                    required: align,
                })
            }
        }
    }

    #[inline(always)]
    fn enforce_validity(ecx: &MiriInterpCx<'tcx>, _layout: TyAndLayout<'tcx>) -> bool {
        ecx.machine.validation != ValidationMode::No
    }
    #[inline(always)]
    fn enforce_validity_recursively(
        ecx: &InterpCx<'tcx, Self>,
        _layout: TyAndLayout<'tcx>,
    ) -> bool {
        ecx.machine.validation == ValidationMode::Deep
    }

    #[inline(always)]
    fn ignore_optional_overflow_checks(ecx: &MiriInterpCx<'tcx>) -> bool {
        !ecx.tcx.sess.overflow_checks()
    }

    fn check_fn_target_features(
        ecx: &MiriInterpCx<'tcx>,
        instance: ty::Instance<'tcx>,
    ) -> InterpResult<'tcx> {
        let attrs = ecx.tcx.codegen_fn_attrs(instance.def_id());
        if attrs
            .target_features
            .iter()
            .any(|feature| !ecx.tcx.sess.target_features.contains(&feature.name))
        {
            let unavailable = attrs
                .target_features
                .iter()
                .filter(|&feature| {
                    !feature.implied && !ecx.tcx.sess.target_features.contains(&feature.name)
                })
                .fold(String::new(), |mut s, feature| {
                    if !s.is_empty() {
                        s.push_str(", ");
                    }
                    s.push_str(feature.name.as_str());
                    s
                });
            let msg = format!(
                "calling a function that requires unavailable target features: {unavailable}"
            );
            if ecx.tcx.sess.target.is_like_wasm {
                throw_machine_stop!(TerminationInfo::Abort(msg));
            } else {
                throw_ub_format!("{msg}");
            }
        }
        interp_ok(())
    }

    #[inline(always)]
    fn find_mir_or_eval_fn(
        ecx: &mut MiriInterpCx<'tcx>,
        instance: ty::Instance<'tcx>,
        abi: &FnAbi<'tcx, Ty<'tcx>>,
        args: &[FnArg<'tcx, Provenance>],
        dest: &MPlaceTy<'tcx>,
        ret: Option<mir::BasicBlock>,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx, Option<(&'tcx mir::Body<'tcx>, ty::Instance<'tcx>)>> {
        if ecx.tcx.is_foreign_item(instance.def_id()) {
            let args = ecx.copy_fn_args(args);
            let link_name = Symbol::intern(ecx.tcx.symbol_name(instance).name);
            return ecx.emulate_foreign_item(link_name, abi, &args, dest, ret, unwind);
        }

        interp_ok(Some((ecx.load_mir(instance.def, None)?, instance)))
    }

    #[inline(always)]
    fn call_extra_fn(
        ecx: &mut MiriInterpCx<'tcx>,
        fn_val: DynSym,
        abi: &FnAbi<'tcx, Ty<'tcx>>,
        args: &[FnArg<'tcx, Provenance>],
        dest: &MPlaceTy<'tcx>,
        ret: Option<mir::BasicBlock>,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx> {
        let args = ecx.copy_fn_args(args);
        ecx.emulate_dyn_sym(fn_val, abi, &args, dest, ret, unwind)
    }

    #[inline(always)]
    fn call_intrinsic(
        ecx: &mut MiriInterpCx<'tcx>,
        instance: ty::Instance<'tcx>,
        args: &[OpTy<'tcx>],
        dest: &MPlaceTy<'tcx>,
        ret: Option<mir::BasicBlock>,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx, Option<ty::Instance<'tcx>>> {
        ecx.call_intrinsic(instance, args, dest, ret, unwind)
    }

    #[inline(always)]
    fn assert_panic(
        ecx: &mut MiriInterpCx<'tcx>,
        msg: &mir::AssertMessage<'tcx>,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx> {
        ecx.assert_panic(msg, unwind)
    }

    fn panic_nounwind(ecx: &mut InterpCx<'tcx, Self>, msg: &str) -> InterpResult<'tcx> {
        ecx.start_panic_nounwind(msg)
    }

    fn unwind_terminate(
        ecx: &mut InterpCx<'tcx, Self>,
        reason: mir::UnwindTerminateReason,
    ) -> InterpResult<'tcx> {
        let panic = ecx.tcx.lang_items().get(reason.lang_item()).unwrap();
        let panic = ty::Instance::mono(ecx.tcx.tcx, panic);
        ecx.call_function(
            panic,
            ExternAbi::Rust,
            &[],
            None,
            StackPopCleanup::Goto { ret: None, unwind: mir::UnwindAction::Unreachable },
        )?;
        interp_ok(())
    }

    #[inline(always)]
    fn binary_ptr_op(
        ecx: &MiriInterpCx<'tcx>,
        bin_op: mir::BinOp,
        left: &ImmTy<'tcx>,
        right: &ImmTy<'tcx>,
    ) -> InterpResult<'tcx, ImmTy<'tcx>> {
        ecx.binary_ptr_op(bin_op, left, right)
    }

    #[inline(always)]
    fn generate_nan<F1: Float + FloatConvert<F2>, F2: Float>(
        ecx: &InterpCx<'tcx, Self>,
        inputs: &[F1],
    ) -> F2 {
        ecx.generate_nan(inputs)
    }

    #[inline(always)]
    fn equal_float_min_max<F: Float>(ecx: &MiriInterpCx<'tcx>, a: F, b: F) -> F {
        ecx.equal_float_min_max(a, b)
    }

    #[inline(always)]
    fn ub_checks(ecx: &InterpCx<'tcx, Self>) -> InterpResult<'tcx, bool> {
        interp_ok(ecx.tcx.sess.ub_checks())
    }

    #[inline(always)]
    fn contract_checks(ecx: &InterpCx<'tcx, Self>) -> InterpResult<'tcx, bool> {
        interp_ok(ecx.tcx.sess.contract_checks())
    }

    #[inline(always)]
    fn thread_local_static_pointer(
        ecx: &mut MiriInterpCx<'tcx>,
        def_id: DefId,
    ) -> InterpResult<'tcx, StrictPointer> {
        ecx.get_or_create_thread_local_alloc(def_id)
    }

    fn extern_static_pointer(
        ecx: &MiriInterpCx<'tcx>,
        def_id: DefId,
    ) -> InterpResult<'tcx, StrictPointer> {
        let link_name = Symbol::intern(ecx.tcx.symbol_name(Instance::mono(*ecx.tcx, def_id)).name);
        if let Some(&ptr) = ecx.machine.extern_statics.get(&link_name) {
            let Provenance::Concrete { alloc_id, .. } = ptr.provenance else {
                panic!("extern_statics cannot contain wildcards")
            };
            let info = ecx.get_alloc_info(alloc_id);
            let def_ty = ecx.tcx.type_of(def_id).instantiate_identity();
            let extern_decl_layout =
                ecx.tcx.layout_of(ecx.typing_env().as_query_input(def_ty)).unwrap();
            if extern_decl_layout.size != info.size || extern_decl_layout.align.abi != info.align {
                throw_unsup_format!(
                    "extern static `{link_name}` has been declared as `{krate}::{name}` \
                    with a size of {decl_size} bytes and alignment of {decl_align} bytes, \
                    but Miri emulates it via an extern static shim \
                    with a size of {shim_size} bytes and alignment of {shim_align} bytes",
                    name = ecx.tcx.def_path_str(def_id),
                    krate = ecx.tcx.crate_name(def_id.krate),
                    decl_size = extern_decl_layout.size.bytes(),
                    decl_align = extern_decl_layout.align.abi.bytes(),
                    shim_size = info.size.bytes(),
                    shim_align = info.align.bytes(),
                )
            }
            interp_ok(ptr)
        } else {
            throw_unsup_format!("extern static `{link_name}` is not supported by Miri",)
        }
    }

    fn init_local_allocation(
        ecx: &MiriInterpCx<'tcx>,
        id: AllocId,
        kind: MemoryKind,
        size: Size,
        align: Align,
    ) -> InterpResult<'tcx, Self::AllocExtra> {
        assert!(kind != MiriMemoryKind::Global.into());
        MiriMachine::init_allocation(ecx, id, kind, size, align)
    }

    fn adjust_alloc_root_pointer(
        ecx: &MiriInterpCx<'tcx>,
        ptr: interpret::Pointer<CtfeProvenance>,
        kind: Option<MemoryKind>,
    ) -> InterpResult<'tcx, interpret::Pointer<Provenance>> {
        let kind = kind.expect("we set our GLOBAL_KIND so this cannot be None");
        let alloc_id = ptr.provenance.alloc_id();
        if cfg!(debug_assertions) {
            match ecx.tcx.try_get_global_alloc(alloc_id) {
                Some(GlobalAlloc::Static(def_id)) if ecx.tcx.is_thread_local_static(def_id) => {
                    panic!("adjust_alloc_root_pointer called on thread-local static")
                }
                Some(GlobalAlloc::Static(def_id)) if ecx.tcx.is_foreign_item(def_id) => {
                    panic!("adjust_alloc_root_pointer called on extern static")
                }
                _ => {}
            }
        }
        let tag = if let Some(borrow_tracker) = &ecx.machine.borrow_tracker {
            borrow_tracker.borrow_mut().root_ptr_tag(alloc_id, &ecx.machine)
        } else {
            BorTag::default()
        };
        ecx.adjust_alloc_root_pointer(ptr, tag, kind)
    }

    #[inline(always)]
    fn ptr_from_addr_cast(ecx: &MiriInterpCx<'tcx>, addr: u64) -> InterpResult<'tcx, Pointer> {
        ecx.ptr_from_addr_cast(addr)
    }

    #[inline(always)]
    fn expose_provenance(
        ecx: &InterpCx<'tcx, Self>,
        provenance: Self::Provenance,
    ) -> InterpResult<'tcx> {
        ecx.expose_provenance(provenance)
    }

    fn ptr_get_alloc(
        ecx: &MiriInterpCx<'tcx>,
        ptr: StrictPointer,
        size: i64,
    ) -> Option<(AllocId, Size, Self::ProvenanceExtra)> {
        let rel = ecx.ptr_get_alloc(ptr, size);

        rel.map(|(alloc_id, size)| {
            let tag = match ptr.provenance {
                Provenance::Concrete { tag, .. } => ProvenanceExtra::Concrete(tag),
                Provenance::Wildcard => ProvenanceExtra::Wildcard,
            };
            (alloc_id, size, tag)
        })
    }

    fn adjust_global_allocation<'b>(
        ecx: &InterpCx<'tcx, Self>,
        id: AllocId,
        alloc: &'b Allocation,
    ) -> InterpResult<'tcx, Cow<'b, Allocation<Self::Provenance, Self::AllocExtra, Self::Bytes>>>
    {
        let alloc = alloc.adjust_from_tcx(
            &ecx.tcx,
            |bytes, align| ecx.get_global_alloc_bytes(id, bytes, align),
            |ptr| ecx.global_root_pointer(ptr),
        )?;
        let kind = MiriMemoryKind::Global.into();
        let extra = MiriMachine::init_allocation(ecx, id, kind, alloc.size(), alloc.align)?;
        interp_ok(Cow::Owned(alloc.with_extra(extra)))
    }

    #[inline(always)]
    fn before_memory_read(
        _tcx: TyCtxtAt<'tcx>,
        machine: &Self,
        alloc_extra: &AllocExtra<'tcx>,
        _ptr: Pointer,
        (alloc_id, prov_extra): (AllocId, Self::ProvenanceExtra),
        range: AllocRange,
    ) -> InterpResult<'tcx> {
        if machine.track_alloc_accesses && machine.tracked_alloc_ids.contains(&alloc_id) {
            machine
                .emit_diagnostic(NonHaltingDiagnostic::AccessedAlloc(alloc_id, AccessKind::Read));
        }
        if let Some(data_race) = &alloc_extra.data_race {
            data_race.read(alloc_id, range, NaReadType::Read, None, machine)?;
        }
        if let Some(borrow_tracker) = &alloc_extra.borrow_tracker {
            borrow_tracker.before_memory_read(alloc_id, prov_extra, range, machine)?;
        }
        if let Some(weak_memory) = &alloc_extra.weak_memory {
            weak_memory.memory_accessed(range, machine.data_race.as_ref().unwrap());
        }
        interp_ok(())
    }

    #[inline(always)]
    fn before_memory_write(
        _tcx: TyCtxtAt<'tcx>,
        machine: &mut Self,
        alloc_extra: &mut AllocExtra<'tcx>,
        _ptr: Pointer,
        (alloc_id, prov_extra): (AllocId, Self::ProvenanceExtra),
        range: AllocRange,
    ) -> InterpResult<'tcx> {
        if machine.track_alloc_accesses && machine.tracked_alloc_ids.contains(&alloc_id) {
            machine
                .emit_diagnostic(NonHaltingDiagnostic::AccessedAlloc(alloc_id, AccessKind::Write));
        }
        if let Some(data_race) = &mut alloc_extra.data_race {
            data_race.write(alloc_id, range, NaWriteType::Write, None, machine)?;
        }
        if let Some(borrow_tracker) = &mut alloc_extra.borrow_tracker {
            borrow_tracker.before_memory_write(alloc_id, prov_extra, range, machine)?;
        }
        if let Some(weak_memory) = &alloc_extra.weak_memory {
            weak_memory.memory_accessed(range, machine.data_race.as_ref().unwrap());
        }
        interp_ok(())
    }

    #[inline(always)]
    fn before_memory_deallocation(
        _tcx: TyCtxtAt<'tcx>,
        machine: &mut Self,
        alloc_extra: &mut AllocExtra<'tcx>,
        _ptr: Pointer,
        (alloc_id, prove_extra): (AllocId, Self::ProvenanceExtra),
        size: Size,
        align: Align,
        kind: MemoryKind,
    ) -> InterpResult<'tcx> {
        if machine.tracked_alloc_ids.contains(&alloc_id) {
            machine.emit_diagnostic(NonHaltingDiagnostic::FreedAlloc(alloc_id));
        }
        if let Some(data_race) = &mut alloc_extra.data_race {
            data_race.write(
                alloc_id,
                alloc_range(Size::ZERO, size),
                NaWriteType::Deallocate,
                None,
                machine,
            )?;
        }
        if let Some(borrow_tracker) = &mut alloc_extra.borrow_tracker {
            borrow_tracker.before_memory_deallocation(alloc_id, prove_extra, size, machine)?;
        }
        if let Some((_, deallocated_at)) = machine.allocation_spans.borrow_mut().get_mut(&alloc_id)
        {
            *deallocated_at = Some(machine.current_span());
        }
        machine.free_alloc_id(alloc_id, size, align, kind);
        interp_ok(())
    }

    #[inline(always)]
    fn retag_ptr_value(
        ecx: &mut InterpCx<'tcx, Self>,
        kind: mir::RetagKind,
        val: &ImmTy<'tcx>,
    ) -> InterpResult<'tcx, ImmTy<'tcx>> {
        if ecx.machine.borrow_tracker.is_some() {
            ecx.retag_ptr_value(kind, val)
        } else {
            interp_ok(val.clone())
        }
    }

    #[inline(always)]
    fn retag_place_contents(
        ecx: &mut InterpCx<'tcx, Self>,
        kind: mir::RetagKind,
        place: &PlaceTy<'tcx>,
    ) -> InterpResult<'tcx> {
        if ecx.machine.borrow_tracker.is_some() {
            ecx.retag_place_contents(kind, place)?;
        }
        interp_ok(())
    }

    fn protect_in_place_function_argument(
        ecx: &mut InterpCx<'tcx, Self>,
        place: &MPlaceTy<'tcx>,
    ) -> InterpResult<'tcx> {
        // If we have a borrow tracker, it sets up a protector so that other accesses to this
        // place during the call are flagged as UB.
        let protected_place = if ecx.machine.borrow_tracker.is_some() {
            ecx.protect_place(place)?
        } else {
            // Without a borrow tracker, there is nothing to protect; use the place as-is.
            place.clone()
        };
        // Overwrite the place with uninit so that the caller's old contents cannot be observed
        // through this argument after the call.
        ecx.write_uninit(&protected_place)?;
        interp_ok(())
    }
1498
1499 #[inline(always)]
1500 fn init_frame(
1501 ecx: &mut InterpCx<'tcx, Self>,
1502 frame: Frame<'tcx, Provenance>,
1503 ) -> InterpResult<'tcx, Frame<'tcx, Provenance, FrameExtra<'tcx>>> {
1504 let timing = if let Some(profiler) = ecx.machine.profiler.as_ref() {
1506 let fn_name = frame.instance().to_string();
1507 let entry = ecx.machine.string_cache.entry(fn_name.clone());
1508 let name = entry.or_insert_with(|| profiler.alloc_string(&*fn_name));
1509
1510 Some(profiler.start_recording_interval_event_detached(
1511 *name,
1512 measureme::EventId::from_label(*name),
1513 ecx.active_thread().to_u32(),
1514 ))
1515 } else {
1516 None
1517 };
1518
1519 let borrow_tracker = ecx.machine.borrow_tracker.as_ref();
1520
1521 let extra = FrameExtra {
1522 borrow_tracker: borrow_tracker.map(|bt| bt.borrow_mut().new_frame()),
1523 catch_unwind: None,
1524 timing,
1525 is_user_relevant: ecx.machine.is_user_relevant(&frame),
1526 salt: ecx.machine.rng.borrow_mut().random_range(0..ADDRS_PER_ANON_GLOBAL),
1527 data_race: ecx.machine.data_race.as_ref().map(|_| data_race::FrameState::default()),
1528 };
1529
1530 interp_ok(frame.with_extra(extra))
1531 }
1532
1533 fn stack<'a>(
1534 ecx: &'a InterpCx<'tcx, Self>,
1535 ) -> &'a [Frame<'tcx, Self::Provenance, Self::FrameExtra>] {
1536 ecx.active_thread_stack()
1537 }
1538
1539 fn stack_mut<'a>(
1540 ecx: &'a mut InterpCx<'tcx, Self>,
1541 ) -> &'a mut Vec<Frame<'tcx, Self::Provenance, Self::FrameExtra>> {
1542 ecx.active_thread_stack_mut()
1543 }
1544
1545 fn before_terminator(ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
1546 ecx.machine.basic_block_count += 1u64; ecx.machine.since_gc += 1;
1548 if let Some(report_progress) = ecx.machine.report_progress {
1550 if ecx.machine.basic_block_count % u64::from(report_progress) == 0 {
1551 ecx.emit_diagnostic(NonHaltingDiagnostic::ProgressReport {
1552 block_count: ecx.machine.basic_block_count,
1553 });
1554 }
1555 }
1556
1557 if ecx.machine.gc_interval > 0 && ecx.machine.since_gc >= ecx.machine.gc_interval {
1562 ecx.machine.since_gc = 0;
1563 ecx.run_provenance_gc();
1564 }
1565
1566 ecx.maybe_preempt_active_thread();
1569
1570 ecx.machine.clock.tick();
1572
1573 interp_ok(())
1574 }
1575
    #[inline(always)]
    fn after_stack_push(ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
        if ecx.frame().extra.is_user_relevant {
            let stack_len = ecx.active_thread_stack().len();
            ecx.active_thread_mut().set_top_user_relevant_frame(stack_len - 1);
        }
        interp_ok(())
    }

    fn before_stack_pop(
        ecx: &InterpCx<'tcx, Self>,
        frame: &Frame<'tcx, Self::Provenance, Self::FrameExtra>,
    ) -> InterpResult<'tcx> {
        if ecx.machine.borrow_tracker.is_some() {
            ecx.on_stack_pop(frame)?;
        }
        info!("Leaving {}", ecx.frame().instance());
        interp_ok(())
    }

    #[inline(always)]
    fn after_stack_pop(
        ecx: &mut InterpCx<'tcx, Self>,
        frame: Frame<'tcx, Provenance, FrameExtra<'tcx>>,
        unwinding: bool,
    ) -> InterpResult<'tcx, ReturnAction> {
        if frame.extra.is_user_relevant {
            ecx.active_thread_mut().recompute_top_user_relevant_frame();
        }
        let res = {
            let mut frame = frame;
            let timing = frame.extra.timing.take();
            let res = ecx.handle_stack_pop_unwind(frame.extra, unwinding);
            if let Some(profiler) = ecx.machine.profiler.as_ref() {
                profiler.finish_recording_interval_event(timing.unwrap());
            }
            res
        };
        if !ecx.active_thread_stack().is_empty() {
            info!("Continuing in {}", ecx.frame().instance());
        }
        res
    }

    fn after_local_read(
        ecx: &InterpCx<'tcx, Self>,
        frame: &Frame<'tcx, Provenance, FrameExtra<'tcx>>,
        local: mir::Local,
    ) -> InterpResult<'tcx> {
        if let Some(data_race) = &frame.extra.data_race {
            data_race.local_read(local, &ecx.machine);
        }
        interp_ok(())
    }

    fn after_local_write(
        ecx: &mut InterpCx<'tcx, Self>,
        local: mir::Local,
        storage_live: bool,
    ) -> InterpResult<'tcx> {
        if let Some(data_race) = &ecx.frame().extra.data_race {
            data_race.local_write(local, storage_live, &ecx.machine);
        }
        interp_ok(())
    }

    fn after_local_moved_to_memory(
        ecx: &mut InterpCx<'tcx, Self>,
        local: mir::Local,
        mplace: &MPlaceTy<'tcx>,
    ) -> InterpResult<'tcx> {
        let Some(Provenance::Concrete { alloc_id, .. }) = mplace.ptr().provenance else {
            panic!("after_local_allocated should only be called on fresh allocations");
        };
        let local_decl = &ecx.frame().body().local_decls[local];
        let span = local_decl.source_info.span;
        ecx.machine.allocation_spans.borrow_mut().insert(alloc_id, (span, None));
        let (alloc_info, machine) = ecx.get_alloc_extra_mut(alloc_id)?;
        if let Some(data_race) =
            &machine.threads.active_thread_stack().last().unwrap().extra.data_race
        {
            data_race.local_moved_to_memory(local, alloc_info.data_race.as_mut().unwrap(), machine);
        }
        interp_ok(())
    }

    fn eval_mir_constant<F>(
        ecx: &InterpCx<'tcx, Self>,
        val: mir::Const<'tcx>,
        span: Span,
        layout: Option<TyAndLayout<'tcx>>,
        eval: F,
    ) -> InterpResult<'tcx, OpTy<'tcx>>
    where
        F: Fn(
            &InterpCx<'tcx, Self>,
            mir::Const<'tcx>,
            Span,
            Option<TyAndLayout<'tcx>>,
        ) -> InterpResult<'tcx, OpTy<'tcx>>,
    {
        let frame = ecx.active_thread_stack().last().unwrap();
        let mut cache = ecx.machine.const_cache.borrow_mut();
        match cache.entry((val, frame.extra.salt)) {
            Entry::Vacant(ve) => {
                let op = eval(ecx, val, span, layout)?;
                ve.insert(op.clone());
                interp_ok(op)
            }
            Entry::Occupied(oe) => interp_ok(oe.get().clone()),
        }
    }

    fn get_global_alloc_salt(
        ecx: &InterpCx<'tcx, Self>,
        instance: Option<ty::Instance<'tcx>>,
    ) -> usize {
        let unique = if let Some(instance) = instance {
            let is_generic = instance
                .args
                .into_iter()
                .any(|kind| !matches!(kind.unpack(), ty::GenericArgKind::Lifetime(_)));
            let can_be_inlined = matches!(
                ecx.tcx.sess.opts.unstable_opts.cross_crate_inline_threshold,
                InliningThreshold::Always
            ) || !matches!(
                ecx.tcx.codegen_fn_attrs(instance.def_id()).inline,
                InlineAttr::Never
            );
            !is_generic && !can_be_inlined
        } else {
            false
        };
        if unique {
            CTFE_ALLOC_SALT
        } else {
            ecx.machine.rng.borrow_mut().random_range(0..ADDRS_PER_ANON_GLOBAL)
        }
    }

    fn cached_union_data_range<'e>(
        ecx: &'e mut InterpCx<'tcx, Self>,
        ty: Ty<'tcx>,
        compute_range: impl FnOnce() -> RangeSet,
    ) -> Cow<'e, RangeSet> {
        Cow::Borrowed(ecx.machine.union_data_ranges.entry(ty).or_insert_with(compute_range))
    }
}

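/// A callback the machine invokes at a later point of the interpretation, consuming the boxed
/// state it captured together with an argument of type `T`. Implementors must also implement
/// `VisitProvenance` so that any pointers captured by the callback are visible to the
/// provenance GC.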
pub trait MachineCallback<'tcx, T>: VisitProvenance {
    fn call(
        self: Box<Self>,
        ecx: &mut InterpCx<'tcx, MiriMachine<'tcx>>,
        arg: T,
    ) -> InterpResult<'tcx>;
}

pub type DynMachineCallback<'tcx, T> = Box<dyn MachineCallback<'tcx, T> + 'tcx>;

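/// Creates a `DynMachineCallback`. Illustrative sketch of the expected invocation shape, derived
/// from the macro rules below; `Ty1`, `Ty2`, and `ArgTy` are placeholder types, and every
/// captured value must implement `VisitProvenance`:
///
/// ```rust,ignore
/// let cb = callback!(
///     @capture<'tcx> {
///         var1: Ty1,
///         var2: Ty2<'tcx>,
///     }
///     |this, arg: ArgTy| {
///         // `this` is the interpreter context, `arg` the value passed to `call`.
///         interp_ok(())
///     }
/// );
/// ```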
#[macro_export]
macro_rules! callback {
    (@capture<$tcx:lifetime $(,)? $($lft:lifetime),*>
        { $($name:ident: $type:ty),* $(,)? }
        |$this:ident, $arg:ident: $arg_ty:ty| $body:expr $(,)?) => {{
        struct Callback<$tcx, $($lft),*> {
            $($name: $type,)*
            _phantom: std::marker::PhantomData<&$tcx ()>,
        }

        impl<$tcx, $($lft),*> VisitProvenance for Callback<$tcx, $($lft),*> {
            fn visit_provenance(&self, _visit: &mut VisitWith<'_>) {
                $(
                    self.$name.visit_provenance(_visit);
                )*
            }
        }

        impl<$tcx, $($lft),*> MachineCallback<$tcx, $arg_ty> for Callback<$tcx, $($lft),*> {
            fn call(
                self: Box<Self>,
                $this: &mut MiriInterpCx<$tcx>,
                $arg: $arg_ty
            ) -> InterpResult<$tcx> {
                #[allow(unused_variables)]
                let Callback { $($name,)* _phantom } = *self;
                $body
            }
        }

        Box::new(Callback {
            $($name,)*
            _phantom: std::marker::PhantomData
        })
    }};
}