1use std::any::Any;
5use std::borrow::Cow;
6use std::cell::{Cell, RefCell};
7use std::path::Path;
8use std::rc::Rc;
9use std::{fmt, process};
10
11use rand::rngs::StdRng;
12use rand::{Rng, SeedableRng};
13use rustc_abi::{Align, ExternAbi, Size};
14use rustc_apfloat::{Float, FloatConvert};
15use rustc_data_structures::fx::{FxHashMap, FxHashSet};
16#[allow(unused)]
17use rustc_data_structures::static_assert_size;
18use rustc_hir::attrs::InlineAttr;
19use rustc_middle::middle::codegen_fn_attrs::TargetFeatureKind;
20use rustc_middle::mir;
21use rustc_middle::query::TyCtxtAt;
22use rustc_middle::ty::layout::{
23 HasTyCtxt, HasTypingEnv, LayoutCx, LayoutError, LayoutOf, TyAndLayout,
24};
25use rustc_middle::ty::{self, Instance, Ty, TyCtxt};
26use rustc_session::config::InliningThreshold;
27use rustc_span::def_id::{CrateNum, DefId};
28use rustc_span::{Span, SpanData, Symbol};
29use rustc_target::callconv::FnAbi;
30
31use crate::alloc_addresses::EvalContextExt;
32use crate::concurrency::cpu_affinity::{self, CpuAffinityMask};
33use crate::concurrency::data_race::{self, NaReadType, NaWriteType};
34use crate::concurrency::{AllocDataRaceHandler, GenmcCtx, GlobalDataRaceHandler, weak_memory};
35use crate::*;
36
/// Miri's emulated value of POSIX `SIGRTMIN` (lowest real-time signal number).
pub const SIGRTMIN: i32 = 34;

/// Miri's emulated value of POSIX `SIGRTMAX` (highest supported real-time signal number).
pub const SIGRTMAX: i32 = 42;

/// Number of addresses reserved for each anonymous global allocation.
/// NOTE(review): presumably consumed by the address assignment logic in
/// `alloc_addresses` — confirm there.
const ADDRS_PER_ANON_GLOBAL: usize = 32;
/// How pointer alignment should be checked.
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum AlignmentCheck {
    /// Do not check alignment at all.
    None,
    /// Check alignment "symbolically", based on the allocation's requested alignment
    /// (and any alignment promises recorded in `symbolic_alignment`); see
    /// `Machine::alignment_check`.
    Symbolic,
    /// Check alignment on the actual integer address.
    /// NOTE(review): inferred from the name; the int-based check lives outside this
    /// chunk — confirm.
    Int,
}
61
/// How an operation that got rejected in isolation mode should be reported
/// (see `IsolatedOp::Reject`).
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum RejectOpWith {
    /// Abort execution when the op is rejected.
    Abort,

    /// Reject the op silently, without any warning.
    NoWarning,

    /// Reject the op and print a warning.
    Warning,

    /// Reject the op and print a warning, but without a backtrace.
    WarningWithoutBacktrace,
}
78
/// Whether operations that communicate with the host (outside the isolation
/// sandbox) are allowed.
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum IsolatedOp {
    /// Reject the op; the payload determines how the rejection is reported.
    Reject(RejectOpWith),

    /// Allow the op (isolation disabled; cf. `MiriMachine::communicate`).
    Allow,
}
90
/// How backtraces in diagnostics should be rendered.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum BacktraceStyle {
    /// Shortened backtrace.
    Short,
    /// Full backtrace.
    Full,
    /// No backtrace at all.
    Off,
}
100
/// Whether and how validity invariants are enforced
/// (read by `Machine::enforce_validity{,_recursively}`).
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum ValidationMode {
    /// No validation.
    No,
    /// Validate, but not recursively through references.
    Shallow,
    /// Validate recursively.
    Deep,
}
110
/// How nondeterministic rounding error is applied to floating-point operations.
/// NOTE(review): the application site lives in `crate::math`, outside this
/// chunk — confirm the exact semantics of each variant there.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum FloatRoundingErrorMode {
    /// Apply a random error.
    Random,
    /// Apply no error.
    None,
    /// Presumably: always apply the maximal error — confirm in `crate::math`.
    Max,
}
120
/// Miri's per-stack-frame data (the interpreter's `Frame::extra`).
pub struct FrameExtra<'tcx> {
    /// Borrow-tracker state for this frame, if borrow tracking is enabled.
    pub borrow_tracker: Option<borrow_tracker::FrameState>,

    /// Data for unwinding into this frame; presumably set while this frame is
    /// a `catch_unwind` frame — confirm in the panic shims.
    pub catch_unwind: Option<CatchUnwindData<'tcx>>,

    /// `measureme` timing for this frame, if profiling is enabled.
    pub timing: Option<measureme::DetachedTiming>,

    /// Whether this frame is considered user-relevant.
    /// NOTE(review): the consumers of this flag are outside this chunk — confirm
    /// how it is used in diagnostics.
    pub is_user_relevant: bool,

    /// Data-race detector state for this frame, if enabled.
    pub data_race: Option<data_race::FrameState>,
}
145
impl<'tcx> std::fmt::Debug for FrameExtra<'tcx> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Exhaustive destructuring so this impl must be revisited when fields are
        // added; `timing` is deliberately excluded from the output.
        let FrameExtra { borrow_tracker, catch_unwind, timing: _, is_user_relevant, data_race } =
            self;
        // NOTE(review): prints as "FrameData" rather than "FrameExtra" — presumably
        // a historical name kept for output stability; confirm before renaming.
        f.debug_struct("FrameData")
            .field("borrow_tracker", borrow_tracker)
            .field("catch_unwind", catch_unwind)
            .field("is_user_relevant", is_user_relevant)
            .field("data_race", data_race)
            .finish()
    }
}
159
impl VisitProvenance for FrameExtra<'_> {
    fn visit_provenance(&self, visit: &mut VisitWith<'_>) {
        // Exhaustive destructuring so new fields cannot be silently forgotten here.
        // Only `catch_unwind` and `borrow_tracker` can carry provenance.
        let FrameExtra {
            catch_unwind,
            borrow_tracker,
            timing: _,
            is_user_relevant: _,
            data_race: _,
        } = self;

        catch_unwind.visit_provenance(visit);
        borrow_tracker.visit_provenance(visit);
    }
}
174
/// Extra memory kinds Miri distinguishes, beyond the interpreter's built-in ones.
/// (Human-readable descriptions are in the `Display` impl below.)
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum MiriMemoryKind {
    /// Rust heap allocations.
    Rust,
    /// "Miri bare-metal heap" (see `Display` below).
    Miri,
    /// C heap allocations.
    C,
    /// Windows `HeapAlloc` memory — confirm in the Windows shims.
    WinHeap,
    /// Windows "local" memory (`LocalAlloc`) — confirm in the Windows shims.
    WinLocal,
    /// Memory managed by the machine itself (may leak; see `MayLeak` below).
    Machine,
    /// Language runtime memory (may NOT leak).
    Runtime,
    /// Globals (statics or consts).
    Global,
    /// Memory backing an extern static shim.
    ExternStatic,
    /// Memory backing a thread-local static.
    Tls,
    /// Memory obtained via `mmap`.
    Mmap,
}
206
207impl From<MiriMemoryKind> for MemoryKind {
208 #[inline(always)]
209 fn from(kind: MiriMemoryKind) -> MemoryKind {
210 MemoryKind::Machine(kind)
211 }
212}
213
214impl MayLeak for MiriMemoryKind {
215 #[inline(always)]
216 fn may_leak(self) -> bool {
217 use self::MiriMemoryKind::*;
218 match self {
219 Rust | Miri | C | WinHeap | WinLocal | Runtime => false,
220 Machine | Global | ExternStatic | Tls | Mmap => true,
221 }
222 }
223}
224
225impl MiriMemoryKind {
226 fn should_save_allocation_span(self) -> bool {
228 use self::MiriMemoryKind::*;
229 match self {
230 Rust | Miri | C | WinHeap | WinLocal | Mmap => true,
232 Machine | Global | ExternStatic | Tls | Runtime => false,
234 }
235 }
236}
237
238impl fmt::Display for MiriMemoryKind {
239 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
240 use self::MiriMemoryKind::*;
241 match self {
242 Rust => write!(f, "Rust heap"),
243 Miri => write!(f, "Miri bare-metal heap"),
244 C => write!(f, "C heap"),
245 WinHeap => write!(f, "Windows heap"),
246 WinLocal => write!(f, "Windows local memory"),
247 Machine => write!(f, "machine-managed memory"),
248 Runtime => write!(f, "language runtime memory"),
249 Global => write!(f, "global (static or const)"),
250 ExternStatic => write!(f, "extern static"),
251 Tls => write!(f, "thread-local static"),
252 Mmap => write!(f, "mmap"),
253 }
254 }
255}
256
/// The interpreter's memory-kind type, instantiated with Miri's extra kinds.
pub type MemoryKind = interpret::MemoryKind<MiriMemoryKind>;
258
/// Pointer provenance.
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub enum Provenance {
    /// Regular provenance: we know which allocation the pointer is derived from,
    /// and the borrow-tracker tag used for aliasing checks.
    Concrete {
        alloc_id: AllocId,
        /// Borrow-tracker tag.
        tag: BorTag,
    },
    /// Wildcard provenance: the pointer may access any (suitably exposed)
    /// allocation. NOTE(review): presumably produced by int-to-ptr casts; the
    /// cast logic is outside this chunk — confirm.
    Wildcard,
}
291
/// The "extra" part of a provenance value (everything except the `AllocId`):
/// either a concrete borrow tag or wildcard.
#[derive(Copy, Clone, PartialEq)]
pub enum ProvenanceExtra {
    Concrete(BorTag),
    Wildcard,
}
298
// Pin the sizes of these widely-used types on 64-bit hosts so that unintended
// size growth is caught at compile time.
#[cfg(target_pointer_width = "64")]
static_assert_size!(StrictPointer, 24);
#[cfg(target_pointer_width = "64")]
static_assert_size!(Scalar, 32);
306
307impl fmt::Debug for Provenance {
308 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
309 match self {
310 Provenance::Concrete { alloc_id, tag } => {
311 if f.alternate() {
313 write!(f, "[{alloc_id:#?}]")?;
314 } else {
315 write!(f, "[{alloc_id:?}]")?;
316 }
317 write!(f, "{tag:?}")?;
319 }
320 Provenance::Wildcard => {
321 write!(f, "[wildcard]")?;
322 }
323 }
324 Ok(())
325 }
326}
327
328impl interpret::Provenance for Provenance {
329 const OFFSET_IS_ADDR: bool = true;
331
332 const WILDCARD: Option<Self> = Some(Provenance::Wildcard);
334
335 fn get_alloc_id(self) -> Option<AllocId> {
336 match self {
337 Provenance::Concrete { alloc_id, .. } => Some(alloc_id),
338 Provenance::Wildcard => None,
339 }
340 }
341
342 fn fmt(ptr: &interpret::Pointer<Self>, f: &mut fmt::Formatter<'_>) -> fmt::Result {
343 let (prov, addr) = ptr.into_raw_parts(); write!(f, "{:#x}", addr.bytes())?;
345 if f.alternate() {
346 write!(f, "{prov:#?}")?;
347 } else {
348 write!(f, "{prov:?}")?;
349 }
350 Ok(())
351 }
352
353 fn join(left: Self, right: Self) -> Option<Self> {
354 match (left, right) {
355 (
357 Provenance::Concrete { alloc_id: left_alloc, tag: left_tag },
358 Provenance::Concrete { alloc_id: right_alloc, tag: right_tag },
359 ) if left_alloc == right_alloc && left_tag == right_tag => Some(left),
360 (Provenance::Wildcard, o) | (o, Provenance::Wildcard) => Some(o),
363 _ => None,
365 }
366 }
367}
368
369impl fmt::Debug for ProvenanceExtra {
370 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
371 match self {
372 ProvenanceExtra::Concrete(pid) => write!(f, "{pid:?}"),
373 ProvenanceExtra::Wildcard => write!(f, "<wildcard>"),
374 }
375 }
376}
377
378impl ProvenanceExtra {
379 pub fn and_then<T>(self, f: impl FnOnce(BorTag) -> Option<T>) -> Option<T> {
380 match self {
381 ProvenanceExtra::Concrete(pid) => f(pid),
382 ProvenanceExtra::Wildcard => None,
383 }
384 }
385}
386
/// Extra per-allocation data that Miri attaches to every allocation.
#[derive(Debug)]
pub struct AllocExtra<'tcx> {
    /// Borrow-tracker state for this allocation, if enabled.
    pub borrow_tracker: Option<borrow_tracker::AllocState>,
    /// Data-race detection state (vector clocks, GenMC, or none).
    pub data_race: AllocDataRaceHandler,
    /// Backtrace of the allocation site; only collected for kinds that may not
    /// leak, and only when leak backtraces are enabled (see `init_allocation`).
    pub backtrace: Option<Vec<FrameInfo<'tcx>>>,
    /// Synchronization-primitive state, keyed by offset within the allocation.
    /// NOTE(review): values are type-erased (`dyn Any`); the concrete types are
    /// defined by the sync shims — confirm there.
    pub sync: FxHashMap<Size, Box<dyn Any>>,
}
407
impl<'tcx> Clone for AllocExtra<'tcx> {
    // NOTE(review): presumably a `Clone` bound elsewhere forces this impl to
    // exist; Miri never duplicates its allocations, so actually cloning is a
    // bug — hence the unconditional panic.
    fn clone(&self) -> Self {
        panic!("our allocations should never be cloned");
    }
}
415
impl VisitProvenance for AllocExtra<'_> {
    fn visit_provenance(&self, visit: &mut VisitWith<'_>) {
        // Exhaustive destructuring so new fields cannot be silently forgotten.
        // Only the borrow tracker and data-race state can carry provenance.
        let AllocExtra { borrow_tracker, data_race, backtrace: _, sync: _ } = self;

        borrow_tracker.visit_provenance(visit);
        data_race.visit_provenance(visit);
    }
}
424
425pub struct PrimitiveLayouts<'tcx> {
427 pub unit: TyAndLayout<'tcx>,
428 pub i8: TyAndLayout<'tcx>,
429 pub i16: TyAndLayout<'tcx>,
430 pub i32: TyAndLayout<'tcx>,
431 pub i64: TyAndLayout<'tcx>,
432 pub i128: TyAndLayout<'tcx>,
433 pub isize: TyAndLayout<'tcx>,
434 pub u8: TyAndLayout<'tcx>,
435 pub u16: TyAndLayout<'tcx>,
436 pub u32: TyAndLayout<'tcx>,
437 pub u64: TyAndLayout<'tcx>,
438 pub u128: TyAndLayout<'tcx>,
439 pub usize: TyAndLayout<'tcx>,
440 pub bool: TyAndLayout<'tcx>,
441 pub mut_raw_ptr: TyAndLayout<'tcx>, pub const_raw_ptr: TyAndLayout<'tcx>, }
444
impl<'tcx> PrimitiveLayouts<'tcx> {
    /// Compute all the primitive layouts up front. Fails only if a layout query
    /// itself fails.
    fn new(layout_cx: LayoutCx<'tcx>) -> Result<Self, &'tcx LayoutError<'tcx>> {
        let tcx = layout_cx.tcx();
        // Raw pointers to `()` stand in for "any thin raw pointer" layout-wise.
        let mut_raw_ptr = Ty::new_mut_ptr(tcx, tcx.types.unit);
        let const_raw_ptr = Ty::new_imm_ptr(tcx, tcx.types.unit);
        Ok(Self {
            unit: layout_cx.layout_of(tcx.types.unit)?,
            i8: layout_cx.layout_of(tcx.types.i8)?,
            i16: layout_cx.layout_of(tcx.types.i16)?,
            i32: layout_cx.layout_of(tcx.types.i32)?,
            i64: layout_cx.layout_of(tcx.types.i64)?,
            i128: layout_cx.layout_of(tcx.types.i128)?,
            isize: layout_cx.layout_of(tcx.types.isize)?,
            u8: layout_cx.layout_of(tcx.types.u8)?,
            u16: layout_cx.layout_of(tcx.types.u16)?,
            u32: layout_cx.layout_of(tcx.types.u32)?,
            u64: layout_cx.layout_of(tcx.types.u64)?,
            u128: layout_cx.layout_of(tcx.types.u128)?,
            usize: layout_cx.layout_of(tcx.types.usize)?,
            bool: layout_cx.layout_of(tcx.types.bool)?,
            mut_raw_ptr: layout_cx.layout_of(mut_raw_ptr)?,
            const_raw_ptr: layout_cx.layout_of(const_raw_ptr)?,
        })
    }

    /// The unsigned-integer layout for the given size, if `size` exactly matches
    /// one of `u8`..`u128`.
    pub fn uint(&self, size: Size) -> Option<TyAndLayout<'tcx>> {
        match size.bits() {
            8 => Some(self.u8),
            16 => Some(self.u16),
            32 => Some(self.u32),
            64 => Some(self.u64),
            128 => Some(self.u128),
            _ => None,
        }
    }

    /// The signed-integer layout for the given size, if `size` exactly matches
    /// one of `i8`..`i128`.
    pub fn int(&self, size: Size) -> Option<TyAndLayout<'tcx>> {
        match size.bits() {
            8 => Some(self.i8),
            16 => Some(self.i16),
            32 => Some(self.i32),
            64 => Some(self.i64),
            128 => Some(self.i128),
            _ => None,
        }
    }
}
492
/// The state of the Miri machine: everything Miri tracks on top of the plain
/// rustc interpreter state.
pub struct MiriMachine<'tcx> {
    /// The global compiler context.
    pub tcx: TyCtxt<'tcx>,

    /// Global borrow-tracker (Stacked/Tree Borrows) state, if enabled.
    pub borrow_tracker: Option<borrow_tracker::GlobalState>,

    /// Global data-race detector state: none, vector clocks, or GenMC.
    pub data_race: GlobalDataRaceHandler,

    /// Pointer <-> address mapping state (see the `alloc_addresses` module).
    pub alloc_addresses: alloc_addresses::GlobalState,

    /// The emulated environment variables.
    pub(crate) env_vars: EnvVars<'tcx>,

    /// Return place of the main function; set up outside this file.
    pub(crate) main_fn_ret_place: Option<MPlaceTy<'tcx>>,

    /// Program argument data. NOTE(review): `cmd_line` is presumably the
    /// Windows-style command-line buffer — confirm in the argument setup code.
    pub(crate) argc: Option<Pointer>,
    pub(crate) argv: Option<Pointer>,
    pub(crate) cmd_line: Option<Pointer>,

    /// Thread-local storage state.
    pub(crate) tls: TlsData<'tcx>,

    /// Whether to allow or reject operations that communicate with the host.
    pub(crate) isolated_op: IsolatedOp,

    /// Whether (and how deeply) to enforce validity invariants.
    pub(crate) validation: ValidationMode,

    /// File-descriptor and directory-handle shim state.
    pub(crate) fds: shims::FdTable,
    pub(crate) dirs: shims::DirTable,

    /// Bookkeeping for the epoll shims.
    pub(crate) epoll_interests: shims::EpollInterestTable,

    /// The emulated monotonic clock (virtual when isolation is on; see `new`).
    pub(crate) monotonic_clock: MonotonicClock,

    /// All threads and their state.
    pub(crate) threads: ThreadManager<'tcx>,

    /// Per-thread CPU affinity masks; only populated on OSes where the
    /// affinity shims are supported (see `new`).
    pub(crate) thread_cpu_affinity: FxHashMap<ThreadId, CpuAffinityMask>,

    /// Precomputed layouts of primitive types.
    pub(crate) layouts: PrimitiveLayouts<'tcx>,

    /// Allocations considered roots of liveness.
    /// NOTE(review): consumers (leak check / GC) are outside this chunk — confirm.
    pub(crate) static_roots: Vec<AllocId>,

    /// The `measureme` profiler (if `measureme_out` is configured) and its
    /// string-interning cache.
    profiler: Option<measureme::Profiler>,
    string_cache: FxHashMap<String, measureme::StringId>,

    /// Cache of `Instance` lookups for exported symbols.
    pub(crate) exported_symbols_cache: FxHashMap<Symbol, Option<Instance<'tcx>>>,

    /// How backtraces in diagnostics are rendered.
    pub(crate) backtrace_style: BacktraceStyle,

    /// Crates considered "local" for diagnostics (see `is_local`).
    pub(crate) local_crates: Vec<CrateNum>,

    /// Base pointers of the extern-static shims, keyed by link name
    /// (populated via `add_extern_static`).
    extern_statics: FxHashMap<Symbol, StrictPointer>,

    /// The RNG behind all of Miri's nondeterminism; seeded from `config.seed`
    /// (default 0) for reproducibility.
    pub(crate) rng: RefCell<StdRng>,

    /// Separate allocator, only created when native libraries are in use
    /// (see `new`).
    pub(crate) allocator: Option<Rc<RefCell<crate::alloc::isolated_alloc::IsolatedAlloc>>>,

    /// Allocation IDs whose creation (and, if `track_alloc_accesses` is set,
    /// presumably accesses too — confirm) get reported as diagnostics.
    tracked_alloc_ids: FxHashSet<AllocId>,
    track_alloc_accesses: bool,

    /// How pointer alignment is checked.
    pub(crate) check_alignment: AlignmentCheck,

    /// Probability that a weak `compare_exchange` spuriously fails.
    /// NOTE(review): consumer is in the atomics shims — confirm range/units.
    pub(crate) cmpxchg_weak_failure_rate: f64,

    /// Probability of preempting the active thread.
    /// NOTE(review): the scheduler that reads this is outside this chunk.
    pub(crate) preemption_rate: f64,

    /// If `Some`, report progress periodically; `basic_block_count` counts
    /// executed basic blocks. NOTE(review): reporting site outside this chunk.
    pub(crate) report_progress: Option<u32>,
    pub(crate) basic_block_count: u64,

    /// Loaded native libraries (handle + path). On builds without native-lib
    /// support this is a vector of the never type, i.e. necessarily empty.
    #[cfg(all(unix, feature = "native-lib"))]
    pub native_lib: Vec<(libloading::Library, std::path::PathBuf)>,
    #[cfg(not(all(unix, feature = "native-lib")))]
    pub native_lib: Vec<!>,

    /// Provenance-GC interval and the counter since the last run.
    /// NOTE(review): GC driver is outside this chunk — confirm units.
    pub(crate) gc_interval: u32,
    pub(crate) since_gc: u32,

    /// Number of CPUs the emulated machine claims to have.
    pub(crate) num_cpus: u32,

    /// Emulated page size and the base address / size of the emulated stack;
    /// all derived from the target in `new` unless overridden by the config.
    pub(crate) page_size: u64,
    pub(crate) stack_addr: u64,
    pub(crate) stack_size: u64,

    /// Whether allocation-site backtraces are collected for leak reports.
    pub(crate) collect_leak_backtraces: bool,

    /// Allocation and (optional) deallocation spans per allocation
    /// (see `allocated_span`/`deallocated_span`).
    pub(crate) allocation_spans: RefCell<FxHashMap<AllocId, (Span, Option<Span>)>>,

    /// Symbolic alignment promises per allocation: `(offset, promised align)`,
    /// consulted by the symbolic alignment check.
    pub(crate) symbolic_alignment: RefCell<FxHashMap<AllocId, (Size, Align)>>,

    /// Cached data ranges of union types.
    /// NOTE(review): producer/consumer are outside this chunk — confirm.
    union_data_ranges: FxHashMap<Ty<'tcx>, RangeSet>,

    /// One-shot sanity flags used by the pthread synchronization shims.
    pub(crate) pthread_mutex_sanity: Cell<bool>,
    pub(crate) pthread_rwlock_sanity: Cell<bool>,
    pub(crate) pthread_condvar_sanity: Cell<bool>,

    /// "Warn only once" latches for various diagnostics.
    pub(crate) sb_extern_type_warned: Cell<bool>,
    #[allow(unused)]
    pub(crate) native_call_mem_warned: Cell<bool>,
    pub(crate) reject_in_isolation_warned: RefCell<FxHashSet<String>>,
    pub(crate) int2ptr_warned: RefCell<FxHashSet<Span>>,

    /// Cache of internal-symbol manglings.
    pub(crate) mangle_internal_symbol_cache: FxHashMap<&'static str, String>,

    /// Whether intrinsics should always use their fallback bodies.
    pub force_intrinsic_fallback: bool,

    /// Floating-point nondeterminism configuration (see `FloatRoundingErrorMode`).
    pub float_nondet: bool,
    pub float_rounding_error: FloatRoundingErrorMode,

    /// Whether FD operations may process fewer bytes than requested.
    /// NOTE(review): inferred from the name; usage is outside this chunk.
    pub(crate) short_fd_operations: bool,
}
676
impl<'tcx> MiriMachine<'tcx> {
    /// Create a fresh machine from the given configuration. `genmc_ctx` must be
    /// `Some` exactly when GenMC mode is configured.
    pub(crate) fn new(
        config: &MiriConfig,
        layout_cx: LayoutCx<'tcx>,
        genmc_ctx: Option<Rc<GenmcCtx>>,
    ) -> Self {
        let tcx = layout_cx.tcx();
        let local_crates = helpers::get_local_crates(tcx);
        let layouts =
            PrimitiveLayouts::new(layout_cx).expect("Couldn't get layouts of primitive types");
        let profiler = config.measureme_out.as_ref().map(|out| {
            // Output file name is `<crate>-<pid>` inside the configured directory.
            let crate_name =
                tcx.sess.opts.crate_name.clone().unwrap_or_else(|| "unknown-crate".to_string());
            let pid = process::id();
            let filename = format!("{crate_name}-{pid:07}");
            let path = Path::new(out).join(filename);
            measureme::Profiler::new(path).expect("Couldn't create `measureme` profiler")
        });
        // Deterministic RNG: a fixed default seed keeps runs reproducible.
        let rng = StdRng::seed_from_u64(config.seed.unwrap_or(0));
        let borrow_tracker = config.borrow_tracker.map(|bt| bt.instantiate_global_state(config));
        let data_race = if config.genmc_config.is_some() {
            // `genmc_ctx` must be `Some` when GenMC mode is enabled.
            GlobalDataRaceHandler::Genmc(genmc_ctx.unwrap())
        } else if config.data_race_detector {
            GlobalDataRaceHandler::Vclocks(Box::new(data_race::GlobalState::new(config)))
        } else {
            GlobalDataRaceHandler::None
        };
        // Default page size per target: wasm mandates 64K pages, Apple AArch64
        // uses 16K pages, everything else defaults to 4K.
        let page_size = if let Some(page_size) = config.page_size {
            page_size
        } else {
            let target = &tcx.sess.target;
            match target.arch.as_ref() {
                "wasm32" | "wasm64" => 64 * 1024,
                "aarch64" => {
                    if target.options.vendor.as_ref() == "apple" {
                        16 * 1024
                    } else {
                        4 * 1024
                    }
                }
                _ => 4 * 1024,
            }
        };
        // Emulated stack placement/size, scaled down on 16-bit-pointer targets.
        let stack_addr = if tcx.pointer_size().bits() < 32 { page_size } else { page_size * 32 };
        let stack_size =
            if tcx.pointer_size().bits() < 32 { page_size * 4 } else { page_size * 16 };
        assert!(
            usize::try_from(config.num_cpus).unwrap() <= cpu_affinity::MAX_CPUS,
            "miri only supports up to {} CPUs, but {} were configured",
            cpu_affinity::MAX_CPUS,
            config.num_cpus
        );
        let threads = ThreadManager::new(config);
        let mut thread_cpu_affinity = FxHashMap::default();
        // Only OSes with affinity shims get a default mask for the main thread.
        if matches!(&*tcx.sess.target.os, "linux" | "freebsd" | "android") {
            thread_cpu_affinity
                .insert(threads.active_thread(), CpuAffinityMask::new(&layout_cx, config.num_cpus));
        }
        let alloc_addresses =
            RefCell::new(alloc_addresses::GlobalStateInner::new(config, stack_addr, tcx));
        MiriMachine {
            tcx,
            borrow_tracker,
            data_race,
            alloc_addresses,
            env_vars: EnvVars::default(),
            main_fn_ret_place: None,
            argc: None,
            argv: None,
            cmd_line: None,
            tls: TlsData::default(),
            isolated_op: config.isolated_op,
            validation: config.validation,
            fds: shims::FdTable::init(config.mute_stdout_stderr),
            epoll_interests: shims::EpollInterestTable::new(),
            dirs: Default::default(),
            layouts,
            threads,
            thread_cpu_affinity,
            static_roots: Vec::new(),
            profiler,
            string_cache: Default::default(),
            exported_symbols_cache: FxHashMap::default(),
            backtrace_style: config.backtrace_style,
            local_crates,
            extern_statics: FxHashMap::default(),
            rng: RefCell::new(rng),
            // The isolated allocator is only needed when native libs are used.
            allocator: if !config.native_lib.is_empty() {
                Some(Rc::new(RefCell::new(crate::alloc::isolated_alloc::IsolatedAlloc::new())))
            } else { None },
            tracked_alloc_ids: config.tracked_alloc_ids.clone(),
            track_alloc_accesses: config.track_alloc_accesses,
            check_alignment: config.check_alignment,
            cmpxchg_weak_failure_rate: config.cmpxchg_weak_failure_rate,
            preemption_rate: config.preemption_rate,
            report_progress: config.report_progress,
            basic_block_count: 0,
            monotonic_clock: MonotonicClock::new(config.isolated_op == IsolatedOp::Allow),
            #[cfg(all(unix, feature = "native-lib"))]
            native_lib: config.native_lib.iter().map(|lib_file_path| {
                // Cross-interpretation cannot call into a host .so built for a
                // different triple, so host and target must match.
                let host_triple = rustc_session::config::host_tuple();
                let target_triple = tcx.sess.opts.target_triple.tuple();
                if host_triple != target_triple {
                    panic!(
                        "calling native C functions in linked .so file requires host and target to be the same: \
                        host={host_triple}, target={target_triple}",
                    );
                }
                (
                    unsafe {
                        libloading::Library::new(lib_file_path)
                            .expect("failed to read specified extern shared object file")
                    },
                    lib_file_path.clone(),
                )
            }).collect(),
            #[cfg(not(all(unix, feature = "native-lib")))]
            native_lib: config.native_lib.iter().map(|_| {
                panic!("calling functions from native libraries via FFI is not supported in this build of Miri")
            }).collect(),
            gc_interval: config.gc_interval,
            since_gc: 0,
            num_cpus: config.num_cpus,
            page_size,
            stack_addr,
            stack_size,
            collect_leak_backtraces: config.collect_leak_backtraces,
            allocation_spans: RefCell::new(FxHashMap::default()),
            symbolic_alignment: RefCell::new(FxHashMap::default()),
            union_data_ranges: FxHashMap::default(),
            pthread_mutex_sanity: Cell::new(false),
            pthread_rwlock_sanity: Cell::new(false),
            pthread_condvar_sanity: Cell::new(false),
            sb_extern_type_warned: Cell::new(false),
            native_call_mem_warned: Cell::new(false),
            reject_in_isolation_warned: Default::default(),
            int2ptr_warned: Default::default(),
            mangle_internal_symbol_cache: Default::default(),
            force_intrinsic_fallback: config.force_intrinsic_fallback,
            float_nondet: config.float_nondet,
            float_rounding_error: config.float_rounding_error,
            short_fd_operations: config.short_fd_operations,
        }
    }

    /// Initialization steps that need a fully built `InterpCx` (environment,
    /// extern statics, the main thread).
    pub(crate) fn late_init(
        ecx: &mut MiriInterpCx<'tcx>,
        config: &MiriConfig,
        on_main_stack_empty: StackEmptyCallback<'tcx>,
    ) -> InterpResult<'tcx> {
        EnvVars::init(ecx, config)?;
        MiriMachine::init_extern_statics(ecx)?;
        ThreadManager::init(ecx, on_main_stack_empty);
        interp_ok(())
    }

    /// Register the base pointer of an extern-static shim under its link name.
    /// Panics if `ptr` has no provenance or the name was already registered.
    pub(crate) fn add_extern_static(ecx: &mut MiriInterpCx<'tcx>, name: &str, ptr: Pointer) {
        let ptr = ptr.into_pointer_or_addr().unwrap();
        ecx.machine.extern_statics.try_insert(Symbol::intern(name), ptr).unwrap();
    }

    /// Whether the machine may communicate with the host (isolation disabled).
    pub(crate) fn communicate(&self) -> bool {
        self.isolated_op == IsolatedOp::Allow
    }

    /// Whether the given frame comes from a crate considered "local".
    pub(crate) fn is_local(&self, frame: &FrameInfo<'_>) -> bool {
        let def_id = frame.instance.def_id();
        def_id.is_local() || self.local_crates.contains(&def_id.krate)
    }

    /// Called on abnormal termination; drops the profiler so its output is
    /// finalized even then. NOTE(review): presumably `measureme` flushes its
    /// data on drop — confirm against the `measureme` docs.
    pub(crate) fn handle_abnormal_termination(&mut self) {
        drop(self.profiler.take());
    }

    /// The emulated page size as an `Align`. Panics if `page_size` is not a
    /// power of two (enforced elsewhere).
    pub(crate) fn page_align(&self) -> Align {
        Align::from_bytes(self.page_size).unwrap()
    }

    /// The span where the given allocation was created, if it was recorded.
    pub(crate) fn allocated_span(&self, alloc_id: AllocId) -> Option<SpanData> {
        self.allocation_spans
            .borrow()
            .get(&alloc_id)
            .map(|(allocated, _deallocated)| allocated.data())
    }

    /// The span where the given allocation was freed, if that was recorded.
    pub(crate) fn deallocated_span(&self, alloc_id: AllocId) -> Option<SpanData> {
        self.allocation_spans
            .borrow()
            .get(&alloc_id)
            .and_then(|(_allocated, deallocated)| *deallocated)
            .map(Span::data)
    }

    /// Build the `AllocExtra` for a newly created allocation: emits the
    /// "tracked allocation" diagnostic, sets up borrow-tracker and data-race
    /// state, optionally captures a leak backtrace, and records the allocation
    /// span for user-relevant memory kinds.
    fn init_allocation(
        ecx: &MiriInterpCx<'tcx>,
        id: AllocId,
        kind: MemoryKind,
        size: Size,
        align: Align,
    ) -> InterpResult<'tcx, AllocExtra<'tcx>> {
        if ecx.machine.tracked_alloc_ids.contains(&id) {
            ecx.emit_diagnostic(NonHaltingDiagnostic::CreatedAlloc(id, size, align, kind));
        }

        let borrow_tracker = ecx
            .machine
            .borrow_tracker
            .as_ref()
            .map(|bt| bt.borrow_mut().new_allocation(id, size, kind, &ecx.machine));

        let data_race = match &ecx.machine.data_race {
            GlobalDataRaceHandler::None => AllocDataRaceHandler::None,
            GlobalDataRaceHandler::Vclocks(data_race) =>
                AllocDataRaceHandler::Vclocks(
                    data_race::AllocState::new_allocation(
                        data_race,
                        &ecx.machine.threads,
                        size,
                        kind,
                        ecx.machine.current_span(),
                    ),
                    // Weak-memory tracking is per-allocation and optional.
                    data_race.weak_memory.then(weak_memory::AllocState::new_allocation),
                ),
            GlobalDataRaceHandler::Genmc(_genmc_ctx) => {
                AllocDataRaceHandler::Genmc
            }
        };

        // Leak backtraces only make sense for kinds that must not leak, and
        // only when the user asked for them.
        let backtrace = if kind.may_leak() || !ecx.machine.collect_leak_backtraces {
            None
        } else {
            Some(ecx.generate_stacktrace())
        };

        if matches!(kind, MemoryKind::Machine(kind) if kind.should_save_allocation_span()) {
            ecx.machine
                .allocation_spans
                .borrow_mut()
                .insert(id, (ecx.machine.current_span(), None));
        }

        interp_ok(AllocExtra { borrow_tracker, data_race, backtrace, sync: FxHashMap::default() })
    }
}
953
impl VisitProvenance for MiriMachine<'_> {
    fn visit_provenance(&self, visit: &mut VisitWith<'_>) {
        // Exhaustive destructuring: adding a field to `MiriMachine` forces a
        // decision here about whether it can carry provenance.
        #[rustfmt::skip]
        let MiriMachine {
            threads,
            thread_cpu_affinity: _,
            tls,
            env_vars,
            main_fn_ret_place,
            argc,
            argv,
            cmd_line,
            extern_statics,
            dirs,
            borrow_tracker,
            data_race,
            alloc_addresses,
            fds,
            epoll_interests:_,
            tcx: _,
            isolated_op: _,
            validation: _,
            monotonic_clock: _,
            layouts: _,
            static_roots: _,
            profiler: _,
            string_cache: _,
            exported_symbols_cache: _,
            backtrace_style: _,
            local_crates: _,
            rng: _,
            allocator: _,
            tracked_alloc_ids: _,
            track_alloc_accesses: _,
            check_alignment: _,
            cmpxchg_weak_failure_rate: _,
            preemption_rate: _,
            report_progress: _,
            basic_block_count: _,
            native_lib: _,
            gc_interval: _,
            since_gc: _,
            num_cpus: _,
            page_size: _,
            stack_addr: _,
            stack_size: _,
            collect_leak_backtraces: _,
            allocation_spans: _,
            symbolic_alignment: _,
            union_data_ranges: _,
            pthread_mutex_sanity: _,
            pthread_rwlock_sanity: _,
            pthread_condvar_sanity: _,
            sb_extern_type_warned: _,
            native_call_mem_warned: _,
            reject_in_isolation_warned: _,
            int2ptr_warned: _,
            mangle_internal_symbol_cache: _,
            force_intrinsic_fallback: _,
            float_nondet: _,
            float_rounding_error: _,
            short_fd_operations: _,
        } = self;

        threads.visit_provenance(visit);
        tls.visit_provenance(visit);
        env_vars.visit_provenance(visit);
        dirs.visit_provenance(visit);
        fds.visit_provenance(visit);
        data_race.visit_provenance(visit);
        borrow_tracker.visit_provenance(visit);
        alloc_addresses.visit_provenance(visit);
        main_fn_ret_place.visit_provenance(visit);
        argc.visit_provenance(visit);
        argv.visit_provenance(visit);
        cmd_line.visit_provenance(visit);
        for ptr in extern_statics.values() {
            ptr.visit_provenance(visit);
        }
    }
}
1035
/// The interpreter context, instantiated with Miri's machine.
pub type MiriInterpCx<'tcx> = InterpCx<'tcx, MiriMachine<'tcx>>;
1038
/// Helper trait that lets extension traits obtain the underlying `MiriInterpCx`
/// from `self` (see the blanket impl below).
pub trait MiriInterpCxExt<'tcx> {
    fn eval_context_ref<'a>(&'a self) -> &'a MiriInterpCx<'tcx>;
    fn eval_context_mut<'a>(&'a mut self) -> &'a mut MiriInterpCx<'tcx>;
}
// Trivial identity impl: a `MiriInterpCx` is its own evaluation context.
impl<'tcx> MiriInterpCxExt<'tcx> for MiriInterpCx<'tcx> {
    #[inline(always)]
    fn eval_context_ref(&self) -> &MiriInterpCx<'tcx> {
        self
    }
    #[inline(always)]
    fn eval_context_mut(&mut self) -> &mut MiriInterpCx<'tcx> {
        self
    }
}
1054
1055impl<'tcx> Machine<'tcx> for MiriMachine<'tcx> {
1057 type MemoryKind = MiriMemoryKind;
1058 type ExtraFnVal = DynSym;
1059
1060 type FrameExtra = FrameExtra<'tcx>;
1061 type AllocExtra = AllocExtra<'tcx>;
1062
1063 type Provenance = Provenance;
1064 type ProvenanceExtra = ProvenanceExtra;
1065 type Bytes = MiriAllocBytes;
1066
1067 type MemoryMap =
1068 MonoHashMap<AllocId, (MemoryKind, Allocation<Provenance, Self::AllocExtra, Self::Bytes>)>;
1069
1070 const GLOBAL_KIND: Option<MiriMemoryKind> = Some(MiriMemoryKind::Global);
1071
1072 const PANIC_ON_ALLOC_FAIL: bool = false;
1073
1074 #[inline(always)]
1075 fn enforce_alignment(ecx: &MiriInterpCx<'tcx>) -> bool {
1076 ecx.machine.check_alignment != AlignmentCheck::None
1077 }
1078
1079 #[inline(always)]
1080 fn alignment_check(
1081 ecx: &MiriInterpCx<'tcx>,
1082 alloc_id: AllocId,
1083 alloc_align: Align,
1084 alloc_kind: AllocKind,
1085 offset: Size,
1086 align: Align,
1087 ) -> Option<Misalignment> {
1088 if ecx.machine.check_alignment != AlignmentCheck::Symbolic {
1089 return None;
1091 }
1092 if alloc_kind != AllocKind::LiveData {
1093 return None;
1095 }
1096 let (promised_offset, promised_align) = ecx
1098 .machine
1099 .symbolic_alignment
1100 .borrow()
1101 .get(&alloc_id)
1102 .copied()
1103 .unwrap_or((Size::ZERO, alloc_align));
1104 if promised_align < align {
1105 Some(Misalignment { has: promised_align, required: align })
1107 } else {
1108 let distance = offset.bytes().wrapping_sub(promised_offset.bytes());
1110 if distance.is_multiple_of(align.bytes()) {
1112 None
1114 } else {
1115 let distance_pow2 = 1 << distance.trailing_zeros();
1117 Some(Misalignment {
1118 has: Align::from_bytes(distance_pow2).unwrap(),
1119 required: align,
1120 })
1121 }
1122 }
1123 }
1124
1125 #[inline(always)]
1126 fn enforce_validity(ecx: &MiriInterpCx<'tcx>, _layout: TyAndLayout<'tcx>) -> bool {
1127 ecx.machine.validation != ValidationMode::No
1128 }
1129 #[inline(always)]
1130 fn enforce_validity_recursively(
1131 ecx: &InterpCx<'tcx, Self>,
1132 _layout: TyAndLayout<'tcx>,
1133 ) -> bool {
1134 ecx.machine.validation == ValidationMode::Deep
1135 }
1136
1137 #[inline(always)]
1138 fn ignore_optional_overflow_checks(ecx: &MiriInterpCx<'tcx>) -> bool {
1139 !ecx.tcx.sess.overflow_checks()
1140 }
1141
1142 fn check_fn_target_features(
1143 ecx: &MiriInterpCx<'tcx>,
1144 instance: ty::Instance<'tcx>,
1145 ) -> InterpResult<'tcx> {
1146 let attrs = ecx.tcx.codegen_instance_attrs(instance.def);
1147 if attrs
1148 .target_features
1149 .iter()
1150 .any(|feature| !ecx.tcx.sess.target_features.contains(&feature.name))
1151 {
1152 let unavailable = attrs
1153 .target_features
1154 .iter()
1155 .filter(|&feature| {
1156 feature.kind != TargetFeatureKind::Implied
1157 && !ecx.tcx.sess.target_features.contains(&feature.name)
1158 })
1159 .fold(String::new(), |mut s, feature| {
1160 if !s.is_empty() {
1161 s.push_str(", ");
1162 }
1163 s.push_str(feature.name.as_str());
1164 s
1165 });
1166 let msg = format!(
1167 "calling a function that requires unavailable target features: {unavailable}"
1168 );
1169 if ecx.tcx.sess.target.is_like_wasm {
1172 throw_machine_stop!(TerminationInfo::Abort(msg));
1173 } else {
1174 throw_ub_format!("{msg}");
1175 }
1176 }
1177 interp_ok(())
1178 }
1179
1180 #[inline(always)]
1181 fn find_mir_or_eval_fn(
1182 ecx: &mut MiriInterpCx<'tcx>,
1183 instance: ty::Instance<'tcx>,
1184 abi: &FnAbi<'tcx, Ty<'tcx>>,
1185 args: &[FnArg<'tcx, Provenance>],
1186 dest: &PlaceTy<'tcx>,
1187 ret: Option<mir::BasicBlock>,
1188 unwind: mir::UnwindAction,
1189 ) -> InterpResult<'tcx, Option<(&'tcx mir::Body<'tcx>, ty::Instance<'tcx>)>> {
1190 if ecx.tcx.is_foreign_item(instance.def_id()) {
1192 let _trace = enter_trace_span!("emulate_foreign_item");
1193 let args = ecx.copy_fn_args(args); let link_name = Symbol::intern(ecx.tcx.symbol_name(instance).name);
1201 return ecx.emulate_foreign_item(link_name, abi, &args, dest, ret, unwind);
1202 }
1203
1204 let _trace = enter_trace_span!("load_mir");
1206 interp_ok(Some((ecx.load_mir(instance.def, None)?, instance)))
1207 }
1208
1209 #[inline(always)]
1210 fn call_extra_fn(
1211 ecx: &mut MiriInterpCx<'tcx>,
1212 fn_val: DynSym,
1213 abi: &FnAbi<'tcx, Ty<'tcx>>,
1214 args: &[FnArg<'tcx, Provenance>],
1215 dest: &PlaceTy<'tcx>,
1216 ret: Option<mir::BasicBlock>,
1217 unwind: mir::UnwindAction,
1218 ) -> InterpResult<'tcx> {
1219 let args = ecx.copy_fn_args(args); ecx.emulate_dyn_sym(fn_val, abi, &args, dest, ret, unwind)
1221 }
1222
1223 #[inline(always)]
1224 fn call_intrinsic(
1225 ecx: &mut MiriInterpCx<'tcx>,
1226 instance: ty::Instance<'tcx>,
1227 args: &[OpTy<'tcx>],
1228 dest: &PlaceTy<'tcx>,
1229 ret: Option<mir::BasicBlock>,
1230 unwind: mir::UnwindAction,
1231 ) -> InterpResult<'tcx, Option<ty::Instance<'tcx>>> {
1232 ecx.call_intrinsic(instance, args, dest, ret, unwind)
1233 }
1234
1235 #[inline(always)]
1236 fn assert_panic(
1237 ecx: &mut MiriInterpCx<'tcx>,
1238 msg: &mir::AssertMessage<'tcx>,
1239 unwind: mir::UnwindAction,
1240 ) -> InterpResult<'tcx> {
1241 ecx.assert_panic(msg, unwind)
1242 }
1243
1244 fn panic_nounwind(ecx: &mut InterpCx<'tcx, Self>, msg: &str) -> InterpResult<'tcx> {
1245 ecx.start_panic_nounwind(msg)
1246 }
1247
1248 fn unwind_terminate(
1249 ecx: &mut InterpCx<'tcx, Self>,
1250 reason: mir::UnwindTerminateReason,
1251 ) -> InterpResult<'tcx> {
1252 let panic = ecx.tcx.lang_items().get(reason.lang_item()).unwrap();
1254 let panic = ty::Instance::mono(ecx.tcx.tcx, panic);
1255 ecx.call_function(
1256 panic,
1257 ExternAbi::Rust,
1258 &[],
1259 None,
1260 ReturnContinuation::Goto { ret: None, unwind: mir::UnwindAction::Unreachable },
1261 )?;
1262 interp_ok(())
1263 }
1264
1265 #[inline(always)]
1266 fn binary_ptr_op(
1267 ecx: &MiriInterpCx<'tcx>,
1268 bin_op: mir::BinOp,
1269 left: &ImmTy<'tcx>,
1270 right: &ImmTy<'tcx>,
1271 ) -> InterpResult<'tcx, ImmTy<'tcx>> {
1272 ecx.binary_ptr_op(bin_op, left, right)
1273 }
1274
1275 #[inline(always)]
1276 fn generate_nan<F1: Float + FloatConvert<F2>, F2: Float>(
1277 ecx: &InterpCx<'tcx, Self>,
1278 inputs: &[F1],
1279 ) -> F2 {
1280 ecx.generate_nan(inputs)
1281 }
1282
1283 #[inline(always)]
1284 fn apply_float_nondet(
1285 ecx: &mut InterpCx<'tcx, Self>,
1286 val: ImmTy<'tcx>,
1287 ) -> InterpResult<'tcx, ImmTy<'tcx>> {
1288 crate::math::apply_random_float_error_to_imm(ecx, val, 4)
1289 }
1290
1291 #[inline(always)]
1292 fn equal_float_min_max<F: Float>(ecx: &MiriInterpCx<'tcx>, a: F, b: F) -> F {
1293 ecx.equal_float_min_max(a, b)
1294 }
1295
1296 #[inline(always)]
1297 fn float_fuse_mul_add(ecx: &mut InterpCx<'tcx, Self>) -> bool {
1298 ecx.machine.float_nondet && ecx.machine.rng.get_mut().random()
1299 }
1300
1301 #[inline(always)]
1302 fn ub_checks(ecx: &InterpCx<'tcx, Self>) -> InterpResult<'tcx, bool> {
1303 interp_ok(ecx.tcx.sess.ub_checks())
1304 }
1305
1306 #[inline(always)]
1307 fn contract_checks(ecx: &InterpCx<'tcx, Self>) -> InterpResult<'tcx, bool> {
1308 interp_ok(ecx.tcx.sess.contract_checks())
1309 }
1310
1311 #[inline(always)]
1312 fn thread_local_static_pointer(
1313 ecx: &mut MiriInterpCx<'tcx>,
1314 def_id: DefId,
1315 ) -> InterpResult<'tcx, StrictPointer> {
1316 ecx.get_or_create_thread_local_alloc(def_id)
1317 }
1318
    /// Resolve an `extern static` to the shim allocation Miri provides for it.
    ///
    /// The static is looked up by its link name in `machine.extern_statics`.
    /// If found, we verify that the size and alignment of the Rust-side
    /// declaration match the shim allocation, and error with a detailed
    /// message otherwise. Unknown extern statics are rejected as unsupported.
    fn extern_static_pointer(
        ecx: &MiriInterpCx<'tcx>,
        def_id: DefId,
    ) -> InterpResult<'tcx, StrictPointer> {
        let link_name = Symbol::intern(ecx.tcx.symbol_name(Instance::mono(*ecx.tcx, def_id)).name);
        if let Some(&ptr) = ecx.machine.extern_statics.get(&link_name) {
            // Shim pointers are created by Miri itself, so they always have
            // concrete provenance.
            let Provenance::Concrete { alloc_id, .. } = ptr.provenance else {
                panic!("extern_statics cannot contain wildcards")
            };
            let info = ecx.get_alloc_info(alloc_id);
            // Compare the layout of the declared type against the shim allocation.
            let def_ty = ecx.tcx.type_of(def_id).instantiate_identity();
            let extern_decl_layout =
                ecx.tcx.layout_of(ecx.typing_env().as_query_input(def_ty)).unwrap();
            if extern_decl_layout.size != info.size || extern_decl_layout.align.abi != info.align {
                throw_unsup_format!(
                    "extern static `{link_name}` has been declared as `{krate}::{name}` \
                    with a size of {decl_size} bytes and alignment of {decl_align} bytes, \
                    but Miri emulates it via an extern static shim \
                    with a size of {shim_size} bytes and alignment of {shim_align} bytes",
                    name = ecx.tcx.def_path_str(def_id),
                    krate = ecx.tcx.crate_name(def_id.krate),
                    decl_size = extern_decl_layout.size.bytes(),
                    decl_align = extern_decl_layout.align.bytes(),
                    shim_size = info.size.bytes(),
                    shim_align = info.align.bytes(),
                )
            }
            interp_ok(ptr)
        } else {
            throw_unsup_format!("extern static `{link_name}` is not supported by Miri",)
        }
    }
1354
    /// Create the `AllocExtra` for a freshly created (non-global) allocation.
    /// Global allocations instead go through `adjust_global_allocation`.
    fn init_local_allocation(
        ecx: &MiriInterpCx<'tcx>,
        id: AllocId,
        kind: MemoryKind,
        size: Size,
        align: Align,
    ) -> InterpResult<'tcx, Self::AllocExtra> {
        assert!(kind != MiriMemoryKind::Global.into());
        MiriMachine::init_allocation(ecx, id, kind, size, align)
    }
1365
    /// Turn a CTFE-provenance root pointer into a Miri pointer by attaching a
    /// borrow tag (from the borrow tracker, if enabled) and delegating to the
    /// address-assignment logic.
    fn adjust_alloc_root_pointer(
        ecx: &MiriInterpCx<'tcx>,
        ptr: interpret::Pointer<CtfeProvenance>,
        kind: Option<MemoryKind>,
    ) -> InterpResult<'tcx, interpret::Pointer<Provenance>> {
        let kind = kind.expect("we set our GLOBAL_KIND so this cannot be None");
        let alloc_id = ptr.provenance.alloc_id();
        // Sanity-check (debug builds only): thread-local and extern statics
        // must go through their dedicated hooks, never through here.
        if cfg!(debug_assertions) {
            match ecx.tcx.try_get_global_alloc(alloc_id) {
                Some(GlobalAlloc::Static(def_id)) if ecx.tcx.is_thread_local_static(def_id) => {
                    panic!("adjust_alloc_root_pointer called on thread-local static")
                }
                Some(GlobalAlloc::Static(def_id)) if ecx.tcx.is_foreign_item(def_id) => {
                    panic!("adjust_alloc_root_pointer called on extern static")
                }
                _ => {}
            }
        }
        // Without a borrow tracker, all pointers carry the default tag.
        let tag = if let Some(borrow_tracker) = &ecx.machine.borrow_tracker {
            borrow_tracker.borrow_mut().root_ptr_tag(alloc_id, &ecx.machine)
        } else {
            BorTag::default()
        };
        ecx.adjust_alloc_root_pointer(ptr, tag, kind)
    }
1394
    #[inline(always)]
    /// Handle an integer-to-pointer cast; delegates to the address-mapping logic.
    fn ptr_from_addr_cast(ecx: &MiriInterpCx<'tcx>, addr: u64) -> InterpResult<'tcx, Pointer> {
        ecx.ptr_from_addr_cast(addr)
    }
1400
    #[inline(always)]
    /// Mark the given provenance as exposed (pointer-to-integer cast);
    /// delegates to the address-mapping logic.
    fn expose_provenance(
        ecx: &InterpCx<'tcx, Self>,
        provenance: Self::Provenance,
    ) -> InterpResult<'tcx> {
        ecx.expose_provenance(provenance)
    }
1411
1412 fn ptr_get_alloc(
1424 ecx: &MiriInterpCx<'tcx>,
1425 ptr: StrictPointer,
1426 size: i64,
1427 ) -> Option<(AllocId, Size, Self::ProvenanceExtra)> {
1428 let rel = ecx.ptr_get_alloc(ptr, size);
1429
1430 rel.map(|(alloc_id, size)| {
1431 let tag = match ptr.provenance {
1432 Provenance::Concrete { tag, .. } => ProvenanceExtra::Concrete(tag),
1433 Provenance::Wildcard => ProvenanceExtra::Wildcard,
1434 };
1435 (alloc_id, size, tag)
1436 })
1437 }
1438
    /// Adjust a global allocation from the tcx for use by Miri: convert its
    /// bytes and embedded pointers to Miri's representation, then attach the
    /// machine-specific `AllocExtra`.
    fn adjust_global_allocation<'b>(
        ecx: &InterpCx<'tcx, Self>,
        id: AllocId,
        alloc: &'b Allocation,
    ) -> InterpResult<'tcx, Cow<'b, Allocation<Self::Provenance, Self::AllocExtra, Self::Bytes>>>
    {
        let alloc = alloc.adjust_from_tcx(
            &ecx.tcx,
            |bytes, align| ecx.get_global_alloc_bytes(id, bytes, align),
            |ptr| ecx.global_root_pointer(ptr),
        )?;
        let kind = MiriMemoryKind::Global.into();
        let extra = MiriMachine::init_allocation(ecx, id, kind, alloc.size(), alloc.align)?;
        interp_ok(Cow::Owned(alloc.with_extra(extra)))
    }
1462
    #[inline(always)]
    /// Hook invoked before any memory read: emits tracking diagnostics,
    /// informs the active data-race detector, and notifies the borrow tracker.
    fn before_memory_read(
        _tcx: TyCtxtAt<'tcx>,
        machine: &Self,
        alloc_extra: &AllocExtra<'tcx>,
        ptr: Pointer,
        (alloc_id, prov_extra): (AllocId, Self::ProvenanceExtra),
        range: AllocRange,
    ) -> InterpResult<'tcx> {
        // Report accesses to allocations the user asked to track.
        if machine.track_alloc_accesses && machine.tracked_alloc_ids.contains(&alloc_id) {
            machine
                .emit_diagnostic(NonHaltingDiagnostic::AccessedAlloc(alloc_id, AccessKind::Read));
        }
        // Forward the read to whichever data-race handling mode is active.
        match &machine.data_race {
            GlobalDataRaceHandler::None => {}
            GlobalDataRaceHandler::Genmc(genmc_ctx) =>
                genmc_ctx.memory_load(machine, ptr.addr(), range.size)?,
            GlobalDataRaceHandler::Vclocks(_data_race) => {
                let _trace = enter_trace_span!(data_race::before_memory_read);
                // A global vector-clock handler implies per-allocation
                // vector-clock state; anything else is a machine bug.
                let AllocDataRaceHandler::Vclocks(data_race, weak_memory) = &alloc_extra.data_race
                else {
                    unreachable!();
                };
                data_race.read(alloc_id, range, NaReadType::Read, None, machine)?;
                if let Some(weak_memory) = weak_memory {
                    weak_memory.memory_accessed(range, machine.data_race.as_vclocks_ref().unwrap());
                }
            }
        }
        // Borrow-tracker check runs after the data-race bookkeeping.
        if let Some(borrow_tracker) = &alloc_extra.borrow_tracker {
            borrow_tracker.before_memory_read(alloc_id, prov_extra, range, machine)?;
        }
        interp_ok(())
    }
1498
    #[inline(always)]
    /// Hook invoked before any memory write: emits tracking diagnostics,
    /// informs the active data-race detector, and notifies the borrow tracker.
    /// Mirrors `before_memory_read`, but with mutable access to the state.
    fn before_memory_write(
        _tcx: TyCtxtAt<'tcx>,
        machine: &mut Self,
        alloc_extra: &mut AllocExtra<'tcx>,
        ptr: Pointer,
        (alloc_id, prov_extra): (AllocId, Self::ProvenanceExtra),
        range: AllocRange,
    ) -> InterpResult<'tcx> {
        // Report accesses to allocations the user asked to track.
        if machine.track_alloc_accesses && machine.tracked_alloc_ids.contains(&alloc_id) {
            machine
                .emit_diagnostic(NonHaltingDiagnostic::AccessedAlloc(alloc_id, AccessKind::Write));
        }
        // Forward the write to whichever data-race handling mode is active.
        match &machine.data_race {
            GlobalDataRaceHandler::None => {}
            GlobalDataRaceHandler::Genmc(genmc_ctx) =>
                genmc_ctx.memory_store(machine, ptr.addr(), range.size)?,
            GlobalDataRaceHandler::Vclocks(_global_state) => {
                let _trace = enter_trace_span!(data_race::before_memory_write);
                // A global vector-clock handler implies per-allocation
                // vector-clock state; anything else is a machine bug.
                let AllocDataRaceHandler::Vclocks(data_race, weak_memory) =
                    &mut alloc_extra.data_race
                else {
                    unreachable!()
                };
                data_race.write(alloc_id, range, NaWriteType::Write, None, machine)?;
                if let Some(weak_memory) = weak_memory {
                    weak_memory.memory_accessed(range, machine.data_race.as_vclocks_ref().unwrap());
                }
            }
        }
        // Borrow-tracker check runs after the data-race bookkeeping.
        if let Some(borrow_tracker) = &mut alloc_extra.borrow_tracker {
            borrow_tracker.before_memory_write(alloc_id, prov_extra, range, machine)?;
        }
        interp_ok(())
    }
1534
    #[inline(always)]
    /// Hook invoked before an allocation is freed: emits tracking diagnostics,
    /// records the deallocation with the data-race detector (as a write over
    /// the whole allocation), notifies the borrow tracker, remembers the
    /// deallocation span for diagnostics, and releases the allocation's address.
    fn before_memory_deallocation(
        _tcx: TyCtxtAt<'tcx>,
        machine: &mut Self,
        alloc_extra: &mut AllocExtra<'tcx>,
        ptr: Pointer,
        (alloc_id, prove_extra): (AllocId, Self::ProvenanceExtra),
        size: Size,
        align: Align,
        kind: MemoryKind,
    ) -> InterpResult<'tcx> {
        if machine.tracked_alloc_ids.contains(&alloc_id) {
            machine.emit_diagnostic(NonHaltingDiagnostic::FreedAlloc(alloc_id));
        }
        match &machine.data_race {
            GlobalDataRaceHandler::None => {}
            GlobalDataRaceHandler::Genmc(genmc_ctx) =>
                genmc_ctx.handle_dealloc(machine, alloc_id, ptr.addr(), kind)?,
            GlobalDataRaceHandler::Vclocks(_global_state) => {
                let _trace = enter_trace_span!(data_race::before_memory_deallocation);
                let data_race = alloc_extra.data_race.as_vclocks_mut().unwrap();
                // Deallocation races with any concurrent access, so model it
                // as a write covering the entire allocation.
                data_race.write(
                    alloc_id,
                    alloc_range(Size::ZERO, size),
                    NaWriteType::Deallocate,
                    None,
                    machine,
                )?;
            }
        }
        if let Some(borrow_tracker) = &mut alloc_extra.borrow_tracker {
            borrow_tracker.before_memory_deallocation(alloc_id, prove_extra, size, machine)?;
        }
        // Remember where this allocation died, for use-after-free diagnostics.
        if let Some((_, deallocated_at)) = machine.allocation_spans.borrow_mut().get_mut(&alloc_id)
        {
            *deallocated_at = Some(machine.current_span());
        }
        machine.free_alloc_id(alloc_id, size, align, kind);
        interp_ok(())
    }
1575
1576 #[inline(always)]
1577 fn retag_ptr_value(
1578 ecx: &mut InterpCx<'tcx, Self>,
1579 kind: mir::RetagKind,
1580 val: &ImmTy<'tcx>,
1581 ) -> InterpResult<'tcx, ImmTy<'tcx>> {
1582 if ecx.machine.borrow_tracker.is_some() {
1583 ecx.retag_ptr_value(kind, val)
1584 } else {
1585 interp_ok(val.clone())
1586 }
1587 }
1588
1589 #[inline(always)]
1590 fn retag_place_contents(
1591 ecx: &mut InterpCx<'tcx, Self>,
1592 kind: mir::RetagKind,
1593 place: &PlaceTy<'tcx>,
1594 ) -> InterpResult<'tcx> {
1595 if ecx.machine.borrow_tracker.is_some() {
1596 ecx.retag_place_contents(kind, place)?;
1597 }
1598 interp_ok(())
1599 }
1600
    /// Protect an in-place function argument for the duration of the call.
    /// With a borrow tracker enabled, the place gets a protector; either way,
    /// the (possibly protected) place is then de-initialized so the callee
    /// cannot observe its previous contents.
    fn protect_in_place_function_argument(
        ecx: &mut InterpCx<'tcx, Self>,
        place: &MPlaceTy<'tcx>,
    ) -> InterpResult<'tcx> {
        let protected_place = if ecx.machine.borrow_tracker.is_some() {
            ecx.protect_place(place)?
        } else {
            place.clone()
        };
        ecx.write_uninit(&protected_place)?;
        interp_ok(())
    }
1621
    #[inline(always)]
    /// Build the `FrameExtra` for a freshly pushed stack frame.
    /// Starts a measureme profiling interval (if profiling is enabled),
    /// opens a new borrow-tracker frame, and sets up per-frame data-race state.
    fn init_frame(
        ecx: &mut InterpCx<'tcx, Self>,
        frame: Frame<'tcx, Provenance>,
    ) -> InterpResult<'tcx, Frame<'tcx, Provenance, FrameExtra<'tcx>>> {
        let timing = if let Some(profiler) = ecx.machine.profiler.as_ref() {
            // Intern the function name in the profiler's string table,
            // caching the interned id so repeated calls are cheap.
            let fn_name = frame.instance().to_string();
            let entry = ecx.machine.string_cache.entry(fn_name.clone());
            let name = entry.or_insert_with(|| profiler.alloc_string(&*fn_name));

            Some(profiler.start_recording_interval_event_detached(
                *name,
                measureme::EventId::from_label(*name),
                ecx.active_thread().to_u32(),
            ))
        } else {
            None
        };

        let borrow_tracker = ecx.machine.borrow_tracker.as_ref();

        let extra = FrameExtra {
            borrow_tracker: borrow_tracker.map(|bt| bt.borrow_mut().new_frame()),
            catch_unwind: None,
            timing,
            is_user_relevant: ecx.machine.is_user_relevant(&frame),
            // Per-frame data-race state only exists in vector-clock mode.
            data_race: ecx
                .machine
                .data_race
                .as_vclocks_ref()
                .map(|_| data_race::FrameState::default()),
        };

        interp_ok(frame.with_extra(extra))
    }
1658
    /// The call stack of the currently active thread.
    fn stack<'a>(
        ecx: &'a InterpCx<'tcx, Self>,
    ) -> &'a [Frame<'tcx, Self::Provenance, Self::FrameExtra>] {
        ecx.active_thread_stack()
    }
1664
    /// Mutable access to the call stack of the currently active thread.
    fn stack_mut<'a>(
        ecx: &'a mut InterpCx<'tcx, Self>,
    ) -> &'a mut Vec<Frame<'tcx, Self::Provenance, Self::FrameExtra>> {
        ecx.active_thread_stack_mut()
    }
1670
1671 fn before_terminator(ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
1672 ecx.machine.basic_block_count += 1u64; ecx.machine.since_gc += 1;
1674 if let Some(report_progress) = ecx.machine.report_progress {
1676 if ecx.machine.basic_block_count.is_multiple_of(u64::from(report_progress)) {
1677 ecx.emit_diagnostic(NonHaltingDiagnostic::ProgressReport {
1678 block_count: ecx.machine.basic_block_count,
1679 });
1680 }
1681 }
1682
1683 if ecx.machine.gc_interval > 0 && ecx.machine.since_gc >= ecx.machine.gc_interval {
1688 ecx.machine.since_gc = 0;
1689 ecx.run_provenance_gc();
1690 }
1691
1692 ecx.maybe_preempt_active_thread();
1695
1696 ecx.machine.monotonic_clock.tick();
1698
1699 interp_ok(())
1700 }
1701
1702 #[inline(always)]
1703 fn after_stack_push(ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
1704 if ecx.frame().extra.is_user_relevant {
1705 let stack_len = ecx.active_thread_stack().len();
1708 ecx.active_thread_mut().set_top_user_relevant_frame(stack_len - 1);
1709 }
1710 interp_ok(())
1711 }
1712
    /// Just before popping a frame: run borrow-tracker cleanup for the frame,
    /// and if it was the topmost user-relevant frame, recompute that marker.
    fn before_stack_pop(ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
        let frame = ecx.frame();
        if ecx.machine.borrow_tracker.is_some() {
            ecx.on_stack_pop(frame)?;
        }
        if frame.extra.is_user_relevant {
            // Skip the frame being popped (hence the `1`) when searching for
            // the next user-relevant frame below it.
            ecx.active_thread_mut().recompute_top_user_relevant_frame(1);
        }
        info!("Leaving {}", ecx.frame().instance());
        interp_ok(())
    }
1734
    #[inline(always)]
    /// After popping a frame: finish the frame's profiling interval and run
    /// unwind handling, then log where execution continues.
    fn after_stack_pop(
        ecx: &mut InterpCx<'tcx, Self>,
        frame: Frame<'tcx, Provenance, FrameExtra<'tcx>>,
        unwinding: bool,
    ) -> InterpResult<'tcx, ReturnAction> {
        let res = {
            let mut frame = frame;
            // Take the timing handle out before `frame.extra` is consumed below.
            let timing = frame.extra.timing.take();
            let res = ecx.handle_stack_pop_unwind(frame.extra, unwinding);
            if let Some(profiler) = ecx.machine.profiler.as_ref() {
                // When profiling is on, `init_frame` always created a timing
                // interval, so `timing` is `Some` here.
                profiler.finish_recording_interval_event(timing.unwrap());
            }
            res
        };
        if !ecx.active_thread_stack().is_empty() {
            info!("Continuing in {}", ecx.frame().instance());
        }
        res
    }
1758
    /// After reading a local: record the read with the frame's data-race
    /// state (vector-clock mode only; no-op otherwise).
    fn after_local_read(
        ecx: &InterpCx<'tcx, Self>,
        frame: &Frame<'tcx, Provenance, FrameExtra<'tcx>>,
        local: mir::Local,
    ) -> InterpResult<'tcx> {
        if let Some(data_race) = &frame.extra.data_race {
            let _trace = enter_trace_span!(data_race::after_local_read);
            data_race.local_read(local, &ecx.machine);
        }
        interp_ok(())
    }
1770
    /// After writing a local (or making its storage live): record the write
    /// with the frame's data-race state (vector-clock mode only).
    fn after_local_write(
        ecx: &mut InterpCx<'tcx, Self>,
        local: mir::Local,
        storage_live: bool,
    ) -> InterpResult<'tcx> {
        if let Some(data_race) = &ecx.frame().extra.data_race {
            let _trace = enter_trace_span!(data_race::after_local_write);
            data_race.local_write(local, storage_live, &ecx.machine);
        }
        interp_ok(())
    }
1782
1783 fn after_local_moved_to_memory(
1784 ecx: &mut InterpCx<'tcx, Self>,
1785 local: mir::Local,
1786 mplace: &MPlaceTy<'tcx>,
1787 ) -> InterpResult<'tcx> {
1788 let Some(Provenance::Concrete { alloc_id, .. }) = mplace.ptr().provenance else {
1789 panic!("after_local_allocated should only be called on fresh allocations");
1790 };
1791 let local_decl = &ecx.frame().body().local_decls[local];
1793 let span = local_decl.source_info.span;
1794 ecx.machine.allocation_spans.borrow_mut().insert(alloc_id, (span, None));
1795 let (alloc_info, machine) = ecx.get_alloc_extra_mut(alloc_id)?;
1797 if let Some(data_race) =
1798 &machine.threads.active_thread_stack().last().unwrap().extra.data_race
1799 {
1800 let _trace = enter_trace_span!(data_race::after_local_moved_to_memory);
1801 data_race.local_moved_to_memory(
1802 local,
1803 alloc_info.data_race.as_vclocks_mut().unwrap(),
1804 machine,
1805 );
1806 }
1807 interp_ok(())
1808 }
1809
    /// Pick the address salt for a global allocation. Functions that are
    /// non-generic and can never be duplicated by inlining get the unique
    /// CTFE salt (stable address); everything else gets a random salt so
    /// identical globals may end up at several distinct addresses.
    fn get_global_alloc_salt(
        ecx: &InterpCx<'tcx, Self>,
        instance: Option<ty::Instance<'tcx>>,
    ) -> usize {
        let unique = if let Some(instance) = instance {
            // Generic if any non-lifetime argument is present.
            let is_generic = instance
                .args
                .into_iter()
                .any(|arg| !matches!(arg.kind(), ty::GenericArgKind::Lifetime(_)));
            // Inlinable unless explicitly `#[inline(never)]` (and always when
            // cross-crate inlining is forced on).
            let can_be_inlined = matches!(
                ecx.tcx.sess.opts.unstable_opts.cross_crate_inline_threshold,
                InliningThreshold::Always
            ) || !matches!(
                ecx.tcx.codegen_instance_attrs(instance.def).inline,
                InlineAttr::Never
            );
            !is_generic && !can_be_inlined
        } else {
            false
        };
        if unique {
            CTFE_ALLOC_SALT
        } else {
            ecx.machine.rng.borrow_mut().random_range(0..ADDRS_PER_ANON_GLOBAL)
        }
    }
1850
    /// Return the (cached) data bytes of a union type, computing and caching
    /// the range on first request for a given type.
    fn cached_union_data_range<'e>(
        ecx: &'e mut InterpCx<'tcx, Self>,
        ty: Ty<'tcx>,
        compute_range: impl FnOnce() -> RangeSet,
    ) -> Cow<'e, RangeSet> {
        Cow::Borrowed(ecx.machine.union_data_ranges.entry(ty).or_insert_with(compute_range))
    }
1858
    /// Choose the allocator parameters for new allocations: the isolated
    /// allocator when one is configured on this machine, otherwise the
    /// global allocator.
    fn get_default_alloc_params(&self) -> <Self::Bytes as AllocBytes>::AllocParams {
        use crate::alloc::MiriAllocParams;

        match &self.allocator {
            Some(alloc) => MiriAllocParams::Isolated(alloc.clone()),
            None => MiriAllocParams::Global,
        }
    }
1867
    /// Enter a tracing span. With the "tracing" feature the span is actually
    /// created and entered; without it the closure is discarded and a unit
    /// value (which trivially implements `EnteredTraceSpan`) is returned.
    fn enter_trace_span(span: impl FnOnce() -> tracing::Span) -> impl EnteredTraceSpan {
        #[cfg(feature = "tracing")]
        {
            span().entered()
        }
        #[cfg(not(feature = "tracing"))]
        #[expect(clippy::unused_unit)]
        {
            let _ = span; ()
        }
    }
1880}
1881
/// A one-shot callback stored in the machine and invoked later with an
/// argument of type `T`. Consumes itself when called. Implementors must also
/// be visitable for provenance so the GC can see any pointers they capture.
pub trait MachineCallback<'tcx, T>: VisitProvenance {
    /// Invoke the callback, consuming it.
    fn call(
        self: Box<Self>,
        ecx: &mut InterpCx<'tcx, MiriMachine<'tcx>>,
        arg: T,
    ) -> InterpResult<'tcx>;
}
1891
/// Type-erased, boxed [`MachineCallback`]; the form in which callbacks are stored.
pub type DynMachineCallback<'tcx, T> = Box<dyn MachineCallback<'tcx, T> + 'tcx>;
1894
/// Create a boxed [`MachineCallback`] from a closure-like syntax.
///
/// `@capture<'tcx, ...>` lists the lifetimes, and the `{ name: Type, ... }`
/// block lists the variables captured by value into the generated struct.
/// The generated struct implements `VisitProvenance` (visiting every captured
/// field) and `MachineCallback` (destructuring the captures and running the
/// given body with `$this` bound to the interpreter and `$arg` to the argument).
#[macro_export]
macro_rules! callback {
    (@capture<$tcx:lifetime $(,)? $($lft:lifetime),*>
     { $($name:ident: $type:ty),* $(,)? }
     |$this:ident, $arg:ident: $arg_ty:ty| $body:expr $(,)?) => {{
        // Captures become fields; PhantomData ties the struct to `$tcx`.
        struct Callback<$tcx, $($lft),*> {
            $($name: $type,)*
            _phantom: std::marker::PhantomData<&$tcx ()>,
        }

        impl<$tcx, $($lft),*> VisitProvenance for Callback<$tcx, $($lft),*> {
            fn visit_provenance(&self, _visit: &mut VisitWith<'_>) {
                $(
                    self.$name.visit_provenance(_visit);
                )*
            }
        }

        impl<$tcx, $($lft),*> MachineCallback<$tcx, $arg_ty> for Callback<$tcx, $($lft),*> {
            fn call(
                self: Box<Self>,
                $this: &mut MiriInterpCx<$tcx>,
                $arg: $arg_ty
            ) -> InterpResult<$tcx> {
                #[allow(unused_variables)]
                let Callback { $($name,)* _phantom } = *self;
                $body
            }
        }

        Box::new(Callback {
            $($name,)*
            _phantom: std::marker::PhantomData
        })
    }};
}