1use std::borrow::Cow;
5use std::cell::{Cell, RefCell};
6use std::collections::BTreeMap;
7use std::path::Path;
8use std::rc::Rc;
9use std::{fmt, process};
10
11use rand::rngs::StdRng;
12use rand::{Rng, SeedableRng};
13use rustc_abi::{Align, ExternAbi, Size};
14use rustc_apfloat::{Float, FloatConvert};
15use rustc_ast::expand::allocator::{self, SpecialAllocatorMethod};
16use rustc_data_structures::either::Either;
17use rustc_data_structures::fx::{FxHashMap, FxHashSet};
18#[allow(unused)]
19use rustc_data_structures::static_assert_size;
20use rustc_hir::attrs::InlineAttr;
21use rustc_log::tracing;
22use rustc_middle::middle::codegen_fn_attrs::TargetFeatureKind;
23use rustc_middle::mir;
24use rustc_middle::query::TyCtxtAt;
25use rustc_middle::ty::layout::{
26 HasTyCtxt, HasTypingEnv, LayoutCx, LayoutError, LayoutOf, TyAndLayout,
27};
28use rustc_middle::ty::{self, Instance, Ty, TyCtxt};
29use rustc_session::config::InliningThreshold;
30use rustc_span::def_id::{CrateNum, DefId};
31use rustc_span::{Span, SpanData, Symbol};
32use rustc_symbol_mangling::mangle_internal_symbol;
33use rustc_target::callconv::FnAbi;
34use rustc_target::spec::{Arch, Os};
35
36use crate::alloc_addresses::EvalContextExt;
37use crate::concurrency::cpu_affinity::{self, CpuAffinityMask};
38use crate::concurrency::data_race::{self, NaReadType, NaWriteType};
39use crate::concurrency::sync::SyncObj;
40use crate::concurrency::{
41 AllocDataRaceHandler, GenmcCtx, GenmcEvalContextExt as _, GlobalDataRaceHandler, weak_memory,
42};
43use crate::*;
44
/// Smallest real-time signal number emulated by Miri. The value 34 matches
/// common Linux/glibc setups (glibc reserves the first few RT signals for
/// itself) — assumption based on typical targets; confirm against the signal shims.
pub const SIGRTMIN: i32 = 34;

/// Largest real-time signal number emulated by Miri.
pub const SIGRTMAX: i32 = 42;

/// How many distinct base addresses we hand out per anonymous global.
/// (Presumably to give repeated const evaluations distinct addresses — confirm
/// against the address assignment logic in `alloc_addresses`.)
const ADDRS_PER_ANON_GLOBAL: usize = 32;
59
/// The alignment-checking mode Miri runs with.
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum AlignmentCheck {
    /// Do not check alignment at all.
    None,
    /// Check alignment "symbolically", based on per-allocation (promised)
    /// alignment rather than the concrete base address
    /// (see `Machine::alignment_check` below).
    Symbolic,
    /// Check alignment of the concrete integer address.
    Int,
}
69
/// What to do with an operation that was rejected due to isolation.
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum RejectOpWith {
    /// Stop execution with an abort.
    Abort,

    /// Reject the operation without emitting any warning.
    NoWarning,

    /// Reject the operation and warn about it (including a backtrace).
    Warning,

    /// Reject the operation and warn about it, but without a backtrace.
    WarningWithoutBacktrace,
}
86
/// Whether operations that reach outside the sandbox (the host) are allowed
/// while isolation is enabled.
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum IsolatedOp {
    /// Reject the operation, using the given strategy to report it.
    Reject(RejectOpWith),

    /// Allow the operation (i.e., communicate with the host).
    Allow,
}
98
/// How backtraces in diagnostics should be rendered.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum BacktraceStyle {
    /// Abbreviated backtrace.
    Short,
    /// Full backtrace.
    Full,
    /// No backtrace at all.
    Off,
}
108
/// How much validity checking to perform (see `Machine::enforce_validity`).
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum ValidationMode {
    /// No validation.
    No,
    /// Validate the operand itself, but do not recurse into fields/elements.
    Shallow,
    /// Validate recursively.
    Deep,
}
118
/// How rounding error is applied to nondeterministic floating-point operations
/// (exact semantics live in the `math` module — confirm there).
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum FloatRoundingErrorMode {
    /// Apply a random error (the default nondeterministic behavior).
    Random,
    /// Apply no extra error.
    None,
    /// Always apply the maximum error.
    Max,
}
128
/// Extra per-stack-frame data stored by Miri.
pub struct FrameExtra<'tcx> {
    /// Borrow-tracker state for this frame, if borrow tracking is enabled.
    pub borrow_tracker: Option<borrow_tracker::FrameState>,

    /// If this frame is a `catch_unwind` frame, the data needed to resume
    /// after catching a panic.
    pub catch_unwind: Option<CatchUnwindData<'tcx>>,

    /// `measureme` timing for this frame, if profiling is enabled.
    /// Deliberately skipped by the `Debug` and `VisitProvenance` impls below.
    pub timing: Option<measureme::DetachedTiming>,

    /// Whether this frame is considered user-relevant for diagnostics.
    /// Exact encoding of this `u8` is determined at the sites that set it —
    /// TODO confirm (presumably 0 = not relevant).
    pub user_relevance: u8,

    /// Data-race detector state for this frame, if enabled.
    pub data_race: Option<data_race::FrameState>,
}
152
impl<'tcx> std::fmt::Debug for FrameExtra<'tcx> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Exhaustive destructuring so adding a field forces a decision here.
        // `timing` is intentionally omitted from the output.
        let FrameExtra { borrow_tracker, catch_unwind, timing: _, user_relevance, data_race } =
            self;
        // NOTE(review): the name printed here ("FrameData") does not match the
        // type name `FrameExtra` — presumably historical; consider aligning them.
        f.debug_struct("FrameData")
            .field("borrow_tracker", borrow_tracker)
            .field("catch_unwind", catch_unwind)
            .field("user_relevance", user_relevance)
            .field("data_race", data_race)
            .finish()
    }
}
166
impl VisitProvenance for FrameExtra<'_> {
    fn visit_provenance(&self, visit: &mut VisitWith<'_>) {
        // Exhaustive destructuring: only `catch_unwind` and `borrow_tracker`
        // can hold provenance; the remaining fields are explicitly ignored.
        let FrameExtra { catch_unwind, borrow_tracker, timing: _, user_relevance: _, data_race: _ } =
            self;

        catch_unwind.visit_provenance(visit);
        borrow_tracker.visit_provenance(visit);
    }
}
176
/// Extra memory kinds, beyond the ones the interpreter core knows about.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum MiriMemoryKind {
    /// Memory from the Rust heap (`__rust_alloc` and friends).
    Rust,
    /// Memory from Miri's own bare-metal heap shim (`miri_alloc`).
    Miri,
    /// Memory from the C heap (`malloc`).
    C,
    /// Memory from the Windows heap (`HeapAlloc`).
    WinHeap,
    /// Windows "local" memory (`LocalAlloc`-style).
    WinLocal,
    /// Memory managed by the machine itself (allowed to leak, see `may_leak`).
    Machine,
    /// Memory allocated by the language runtime (must be freed before exit).
    Runtime,
    /// Globals of the program (statics and consts).
    Global,
    /// Memory backing an extern static shim.
    ExternStatic,
    /// Memory backing a thread-local static.
    Tls,
    /// Memory mapped directly by the program (`mmap`).
    Mmap,
}
208
impl From<MiriMemoryKind> for MemoryKind {
    /// Wraps a Miri-specific kind into the interpreter's `MemoryKind`.
    #[inline(always)]
    fn from(kind: MiriMemoryKind) -> MemoryKind {
        MemoryKind::Machine(kind)
    }
}
215
impl MayLeak for MiriMemoryKind {
    /// Whether memory of this kind may remain allocated at program exit
    /// without the leak checker complaining.
    #[inline(always)]
    fn may_leak(self) -> bool {
        use self::MiriMemoryKind::*;
        // Exhaustive on purpose: a newly added kind must make an explicit choice here.
        match self {
            Rust | Miri | C | WinHeap | WinLocal | Runtime => false,
            Machine | Global | ExternStatic | Tls | Mmap => true,
        }
    }
}
226
impl MiriMemoryKind {
    /// Whether we should record the span where an allocation of this kind was
    /// created, for use in diagnostics (see `allocation_spans` on the machine).
    fn should_save_allocation_span(self) -> bool {
        use self::MiriMemoryKind::*;
        // Exhaustive on purpose: spans are only tracked for user-visible allocations.
        match self {
            Rust | Miri | C | WinHeap | WinLocal | Mmap => true,
            Machine | Global | ExternStatic | Tls | Runtime => false,
        }
    }
}
239
240impl fmt::Display for MiriMemoryKind {
241 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
242 use self::MiriMemoryKind::*;
243 match self {
244 Rust => write!(f, "Rust heap"),
245 Miri => write!(f, "Miri bare-metal heap"),
246 C => write!(f, "C heap"),
247 WinHeap => write!(f, "Windows heap"),
248 WinLocal => write!(f, "Windows local memory"),
249 Machine => write!(f, "machine-managed memory"),
250 Runtime => write!(f, "language runtime memory"),
251 Global => write!(f, "global (static or const)"),
252 ExternStatic => write!(f, "extern static"),
253 Tls => write!(f, "thread-local static"),
254 Mmap => write!(f, "mmap"),
255 }
256 }
257}
258
259pub type MemoryKind = interpret::MemoryKind<MiriMemoryKind>;
260
/// Pointer provenance.
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub enum Provenance {
    /// Regular ("concrete") provenance: we know the allocation the pointer
    /// belongs to and its borrow-tracker tag.
    Concrete {
        alloc_id: AllocId,
        /// Borrow-tracker tag.
        tag: BorTag,
    },
    /// Wildcard provenance: the exact origin is unknown, so accesses through
    /// such a pointer must be checked against all "exposed" allocations
    /// (presumably — see the wildcard handling in the access checks).
    Wildcard,
}

/// The "extra" part of a pointer's provenance as stored inside allocations —
/// everything except the allocation id (which memory tracks separately).
#[derive(Copy, Clone, PartialEq)]
pub enum ProvenanceExtra {
    Concrete(BorTag),
    Wildcard,
}
300
// Guard against accidentally growing these pervasive types on 64-bit hosts;
// they appear in every pointer/scalar the interpreter handles.
#[cfg(target_pointer_width = "64")]
static_assert_size!(StrictPointer, 24);
#[cfg(target_pointer_width = "64")]
static_assert_size!(Scalar, 32);
309
310impl fmt::Debug for Provenance {
311 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
312 match self {
313 Provenance::Concrete { alloc_id, tag } => {
314 if f.alternate() {
316 write!(f, "[{alloc_id:#?}]")?;
317 } else {
318 write!(f, "[{alloc_id:?}]")?;
319 }
320 write!(f, "{tag:?}")?;
322 }
323 Provenance::Wildcard => {
324 write!(f, "[wildcard]")?;
325 }
326 }
327 Ok(())
328 }
329}
330
331impl interpret::Provenance for Provenance {
332 const OFFSET_IS_ADDR: bool = true;
334
335 const WILDCARD: Option<Self> = Some(Provenance::Wildcard);
337
338 fn get_alloc_id(self) -> Option<AllocId> {
339 match self {
340 Provenance::Concrete { alloc_id, .. } => Some(alloc_id),
341 Provenance::Wildcard => None,
342 }
343 }
344
345 fn fmt(ptr: &interpret::Pointer<Self>, f: &mut fmt::Formatter<'_>) -> fmt::Result {
346 let (prov, addr) = ptr.into_raw_parts(); write!(f, "{:#x}", addr.bytes())?;
348 if f.alternate() {
349 write!(f, "{prov:#?}")?;
350 } else {
351 write!(f, "{prov:?}")?;
352 }
353 Ok(())
354 }
355}
356
357impl fmt::Debug for ProvenanceExtra {
358 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
359 match self {
360 ProvenanceExtra::Concrete(pid) => write!(f, "{pid:?}"),
361 ProvenanceExtra::Wildcard => write!(f, "<wildcard>"),
362 }
363 }
364}
365
366impl ProvenanceExtra {
367 pub fn and_then<T>(self, f: impl FnOnce(BorTag) -> Option<T>) -> Option<T> {
368 match self {
369 ProvenanceExtra::Concrete(pid) => f(pid),
370 ProvenanceExtra::Wildcard => None,
371 }
372 }
373}
374
/// Extra per-allocation data stored by Miri.
#[derive(Debug)]
pub struct AllocExtra<'tcx> {
    /// Borrow-tracker state for this allocation, if enabled.
    pub borrow_tracker: Option<borrow_tracker::AllocState>,
    /// Data-race detection state for this allocation.
    pub data_race: AllocDataRaceHandler,
    /// Backtrace captured at allocation time, for leak reports. Only `Some`
    /// when `collect_leak_backtraces` is enabled and the kind may not leak
    /// (see `init_allocation` below).
    pub backtrace: Option<Vec<FrameInfo<'tcx>>>,
    /// Synchronization objects (mutexes etc.) living inside this allocation,
    /// keyed by their offset within the allocation.
    pub sync_objs: BTreeMap<Size, Box<dyn SyncObj>>,
}

impl<'tcx> Clone for AllocExtra<'tcx> {
    /// A `Clone` impl is required by trait bounds, but Miri allocations must
    /// never actually be cloned — so this unconditionally panics.
    fn clone(&self) -> Self {
        panic!("our allocations should never be cloned");
    }
}

impl VisitProvenance for AllocExtra<'_> {
    fn visit_provenance(&self, visit: &mut VisitWith<'_>) {
        // Exhaustive destructuring so that new fields must be considered here.
        let AllocExtra { borrow_tracker, data_race, backtrace: _, sync_objs: _ } = self;

        borrow_tracker.visit_provenance(visit);
        data_race.visit_provenance(visit);
    }
}
412
413pub struct PrimitiveLayouts<'tcx> {
415 pub unit: TyAndLayout<'tcx>,
416 pub i8: TyAndLayout<'tcx>,
417 pub i16: TyAndLayout<'tcx>,
418 pub i32: TyAndLayout<'tcx>,
419 pub i64: TyAndLayout<'tcx>,
420 pub i128: TyAndLayout<'tcx>,
421 pub isize: TyAndLayout<'tcx>,
422 pub u8: TyAndLayout<'tcx>,
423 pub u16: TyAndLayout<'tcx>,
424 pub u32: TyAndLayout<'tcx>,
425 pub u64: TyAndLayout<'tcx>,
426 pub u128: TyAndLayout<'tcx>,
427 pub usize: TyAndLayout<'tcx>,
428 pub bool: TyAndLayout<'tcx>,
429 pub mut_raw_ptr: TyAndLayout<'tcx>, pub const_raw_ptr: TyAndLayout<'tcx>, }
432
impl<'tcx> PrimitiveLayouts<'tcx> {
    /// Computes all the primitive layouts once, up front.
    fn new(layout_cx: LayoutCx<'tcx>) -> Result<Self, &'tcx LayoutError<'tcx>> {
        let tcx = layout_cx.tcx();
        // Raw pointers to `()` serve as the canonical thin-pointer layouts.
        let mut_raw_ptr = Ty::new_mut_ptr(tcx, tcx.types.unit);
        let const_raw_ptr = Ty::new_imm_ptr(tcx, tcx.types.unit);
        Ok(Self {
            unit: layout_cx.layout_of(tcx.types.unit)?,
            i8: layout_cx.layout_of(tcx.types.i8)?,
            i16: layout_cx.layout_of(tcx.types.i16)?,
            i32: layout_cx.layout_of(tcx.types.i32)?,
            i64: layout_cx.layout_of(tcx.types.i64)?,
            i128: layout_cx.layout_of(tcx.types.i128)?,
            isize: layout_cx.layout_of(tcx.types.isize)?,
            u8: layout_cx.layout_of(tcx.types.u8)?,
            u16: layout_cx.layout_of(tcx.types.u16)?,
            u32: layout_cx.layout_of(tcx.types.u32)?,
            u64: layout_cx.layout_of(tcx.types.u64)?,
            u128: layout_cx.layout_of(tcx.types.u128)?,
            usize: layout_cx.layout_of(tcx.types.usize)?,
            bool: layout_cx.layout_of(tcx.types.bool)?,
            mut_raw_ptr: layout_cx.layout_of(mut_raw_ptr)?,
            const_raw_ptr: layout_cx.layout_of(const_raw_ptr)?,
        })
    }

    /// Returns the layout of the unsigned integer type with the given size,
    /// if one exists.
    pub fn uint(&self, size: Size) -> Option<TyAndLayout<'tcx>> {
        match size.bits() {
            8 => Some(self.u8),
            16 => Some(self.u16),
            32 => Some(self.u32),
            64 => Some(self.u64),
            128 => Some(self.u128),
            _ => None,
        }
    }

    /// Returns the layout of the signed integer type with the given size,
    /// if one exists.
    pub fn int(&self, size: Size) -> Option<TyAndLayout<'tcx>> {
        match size.bits() {
            8 => Some(self.i8),
            16 => Some(self.i16),
            32 => Some(self.i32),
            64 => Some(self.i64),
            128 => Some(self.i128),
            _ => None,
        }
    }
}
480
/// The machine itself: all the global state of a Miri execution.
pub struct MiriMachine<'tcx> {
    /// The compiler's type context.
    pub tcx: TyCtxt<'tcx>,

    /// Global borrow-tracker state (Stacked/Tree Borrows), if enabled.
    pub borrow_tracker: Option<borrow_tracker::GlobalState>,

    /// Data-race handling backend: vector clocks, GenMC, or none.
    pub data_race: GlobalDataRaceHandler,

    /// Maps `AllocId`s to base addresses and back.
    pub alloc_addresses: alloc_addresses::GlobalState,

    /// Environment variables exposed to the interpreted program.
    pub(crate) env_vars: EnvVars<'tcx>,

    /// Return place of the `main` function, once execution has started.
    pub(crate) main_fn_ret_place: Option<MPlaceTy<'tcx>>,

    /// Lazily allocated program arguments (`argc`/`argv`) and the combined
    /// command line (presumably Windows-style — confirm at the shims using it).
    pub(crate) argc: Option<Pointer>,
    pub(crate) argv: Option<Pointer>,
    pub(crate) cmd_line: Option<Pointer>,

    /// Thread-local storage data.
    pub(crate) tls: TlsData<'tcx>,

    /// What to do with operations that would communicate with the host while
    /// isolation is enabled.
    pub(crate) isolated_op: IsolatedOp,

    /// How much validity checking to perform.
    pub(crate) validation: ValidationMode,

    /// Emulated file descriptors.
    pub(crate) fds: shims::FdTable,
    /// Emulated directory handles.
    pub(crate) dirs: shims::DirTable,

    /// Epoll interests registered by the interpreted program.
    pub(crate) epoll_interests: shims::EpollInterestTable,

    /// The emulated monotonic clock.
    pub(crate) monotonic_clock: MonotonicClock,

    /// All threads of the interpreted program.
    pub(crate) threads: ThreadManager<'tcx>,

    /// Manages blocking I/O operations.
    pub(crate) blocking_io: BlockingIoManager,

    /// Per-thread CPU affinity mask; only populated on targets with affinity
    /// APIs (see `MiriMachine::new`).
    pub(crate) thread_cpu_affinity: FxHashMap<ThreadId, CpuAffinityMask>,

    /// Precomputed layouts of primitive types.
    pub(crate) layouts: PrimitiveLayouts<'tcx>,

    /// Allocations treated as roots for the leak checker.
    pub(crate) static_roots: Vec<AllocId>,

    /// `measureme` profiler, if `measureme_out` was configured.
    profiler: Option<measureme::Profiler>,
    /// Cache of interned profiler strings.
    string_cache: FxHashMap<String, measureme::StringId>,

    /// Cache of symbol-name → instance lookups for exported symbols.
    pub(crate) exported_symbols_cache: FxHashMap<Symbol, Option<Instance<'tcx>>>,

    /// How to render backtraces in diagnostics.
    pub(crate) backtrace_style: BacktraceStyle,

    /// Crates considered "user-relevant" for diagnostics: the local crate plus
    /// whatever `MIRI_LOCAL_CRATES` / the config names (see
    /// `get_user_relevant_crates`).
    pub(crate) user_relevant_crates: Vec<CrateNum>,

    /// Pointers for the extern-static shims, keyed by link name.
    pub(crate) extern_statics: FxHashMap<Symbol, StrictPointer>,

    /// The machine's RNG; seeded from `config.seed` (0 if unset), so runs are
    /// deterministic by default.
    pub(crate) rng: RefCell<StdRng>,

    /// Separate isolated allocator; only created when native libraries are
    /// loaded (see `MiriMachine::new`).
    pub(crate) allocator: Option<Rc<RefCell<crate::alloc::isolated_alloc::IsolatedAlloc>>>,

    /// Allocation ids for which diagnostics should be emitted.
    pub(crate) tracked_alloc_ids: FxHashSet<AllocId>,
    /// Whether accesses to tracked allocations should also be reported
    /// (assumption based on the name — confirm at the access hooks).
    track_alloc_accesses: bool,

    /// The alignment-checking mode in use.
    pub(crate) check_alignment: AlignmentCheck,

    /// Probability of spurious `compare_exchange_weak` failures.
    pub(crate) cmpxchg_weak_failure_rate: f64,

    /// Probability of preempting the active thread.
    pub(crate) preemption_rate: f64,

    /// If `Some`, report progress periodically; counted in basic blocks
    /// (presumably — confirm at the progress-reporting site).
    pub(crate) report_progress: Option<u32>,
    /// Number of basic blocks executed so far.
    pub(crate) basic_block_count: u64,

    /// Loaded native libraries (handle + path), when native-lib support is on.
    #[cfg(all(feature = "native-lib", unix))]
    pub native_lib: Vec<(libloading::Library, std::path::PathBuf)>,
    /// Without native-lib support this is necessarily empty (element type `!`).
    #[cfg(not(all(feature = "native-lib", unix)))]
    pub native_lib: Vec<!>,
    /// Leaked cell used to exchange state with native-lib machinery
    /// (assumption based on the name — confirm in the native-lib code).
    #[cfg(all(feature = "native-lib", unix))]
    pub native_lib_ecx_interchange: &'static Cell<usize>,

    /// Run the provenance GC every N basic blocks.
    pub(crate) gc_interval: u32,
    /// Basic blocks executed since the last GC run.
    pub(crate) since_gc: u32,

    /// Number of CPUs the emulated machine reports.
    pub(crate) num_cpus: u32,

    /// Emulated page size, stack base address, and stack size; defaults depend
    /// on the target (see `MiriMachine::new`).
    pub(crate) page_size: u64,
    pub(crate) stack_addr: u64,
    pub(crate) stack_size: u64,

    /// Whether to capture a backtrace at allocation time for leak reports.
    pub(crate) collect_leak_backtraces: bool,

    /// Allocation span and optional deallocation span for each tracked allocation.
    pub(crate) allocation_spans: RefCell<FxHashMap<AllocId, (Span, Option<Span>)>>,

    /// Promised symbolic alignment per allocation, as an (offset, align) pair;
    /// consumed by `Machine::alignment_check` below.
    pub(crate) symbolic_alignment: RefCell<FxHashMap<AllocId, (Size, Align)>>,

    /// Cache of data ranges for union types.
    union_data_ranges: FxHashMap<Ty<'tcx>, RangeSet>,

    /// One-shot sanity flags for the pthread sync shims (assumption based on
    /// the names — confirm at their use sites).
    pub(crate) pthread_mutex_sanity: Cell<bool>,
    pub(crate) pthread_rwlock_sanity: Cell<bool>,
    pub(crate) pthread_condvar_sanity: Cell<bool>,

    /// Maps allocator-shim symbol names to either a forwarding symbol or a
    /// special allocator method (see `allocator_shim_symbols`).
    pub(crate) allocator_shim_symbols: FxHashMap<Symbol, Either<Symbol, SpecialAllocatorMethod>>,
    /// Cache for `mangle_internal_symbol` results.
    pub(crate) mangle_internal_symbol_cache: FxHashMap<&'static str, String>,

    /// Always use the intrinsic fallback body instead of Miri's implementation.
    pub force_intrinsic_fallback: bool,

    /// Whether floating-point operations may behave nondeterministically.
    pub float_nondet: bool,
    /// How rounding error is applied to float operations.
    pub float_rounding_error: FloatRoundingErrorMode,

    /// Whether file-descriptor reads/writes may return short counts.
    pub short_fd_operations: bool,
}
664
665impl<'tcx> MiriMachine<'tcx> {
    /// Creates a fresh machine from the given configuration.
    ///
    /// `genmc_ctx` must be `Some` exactly when `config.genmc_config` is set
    /// (it is unconditionally unwrapped in that case).
    pub(crate) fn new(
        config: &MiriConfig,
        layout_cx: LayoutCx<'tcx>,
        genmc_ctx: Option<Rc<GenmcCtx>>,
    ) -> Self {
        let tcx = layout_cx.tcx();
        let user_relevant_crates = Self::get_user_relevant_crates(tcx, config);
        let layouts =
            PrimitiveLayouts::new(layout_cx).expect("Couldn't get layouts of primitive types");
        // Set up the `measureme` profiler if requested. The file name includes
        // the PID so concurrent runs do not clobber each other's output.
        let profiler = config.measureme_out.as_ref().map(|out| {
            let crate_name =
                tcx.sess.opts.crate_name.clone().unwrap_or_else(|| "unknown-crate".to_string());
            let pid = process::id();
            let filename = format!("{crate_name}-{pid:07}");
            let path = Path::new(out).join(filename);
            measureme::Profiler::new(path).expect("Couldn't create `measureme` profiler")
        });
        // Deterministic by default: an unset seed means seed 0.
        let rng = StdRng::seed_from_u64(config.seed.unwrap_or(0));
        let borrow_tracker = config.borrow_tracker.map(|bt| bt.instantiate_global_state(config));
        // Pick the data-race backend; GenMC takes precedence over the
        // vector-clock detector.
        let data_race = if config.genmc_config.is_some() {
            GlobalDataRaceHandler::Genmc(genmc_ctx.unwrap())
        } else if config.data_race_detector {
            GlobalDataRaceHandler::Vclocks(Box::new(data_race::GlobalState::new(config)))
        } else {
            GlobalDataRaceHandler::None
        };
        // Choose a page size matching the target, unless the config overrides it.
        let page_size = if let Some(page_size) = config.page_size {
            page_size
        } else {
            let target = &tcx.sess.target;
            match target.arch {
                Arch::Wasm32 | Arch::Wasm64 => 64 * 1024,
                Arch::AArch64 => {
                    if target.is_like_darwin {
                        // Apple aarch64 systems use 16k pages.
                        16 * 1024
                    } else {
                        4 * 1024
                    }
                }
                _ => 4 * 1024,
            }
        };
        // Place and size the emulated stack; smaller values on sub-32-bit targets.
        let stack_addr = if tcx.pointer_size().bits() < 32 { page_size } else { page_size * 32 };
        let stack_size =
            if tcx.pointer_size().bits() < 32 { page_size * 4 } else { page_size * 16 };
        assert!(
            usize::try_from(config.num_cpus).unwrap() <= cpu_affinity::MAX_CPUS,
            "miri only supports up to {} CPUs, but {} were configured",
            cpu_affinity::MAX_CPUS,
            config.num_cpus
        );
        let threads = ThreadManager::new(config);
        let mut thread_cpu_affinity = FxHashMap::default();
        // CPU affinity is only tracked on targets where the relevant APIs exist.
        if matches!(&tcx.sess.target.os, Os::Linux | Os::FreeBsd | Os::Android) {
            thread_cpu_affinity
                .insert(threads.active_thread(), CpuAffinityMask::new(&layout_cx, config.num_cpus));
        }
        let blocking_io = BlockingIoManager::new(config.isolated_op == IsolatedOp::Allow)
            .expect("Couldn't create poll instance");
        let alloc_addresses =
            RefCell::new(alloc_addresses::GlobalStateInner::new(config, stack_addr, tcx));
        MiriMachine {
            tcx,
            borrow_tracker,
            data_race,
            alloc_addresses,
            env_vars: EnvVars::default(),
            main_fn_ret_place: None,
            argc: None,
            argv: None,
            cmd_line: None,
            tls: TlsData::default(),
            isolated_op: config.isolated_op,
            validation: config.validation,
            fds: shims::FdTable::init(config.mute_stdout_stderr),
            epoll_interests: shims::EpollInterestTable::new(),
            dirs: Default::default(),
            layouts,
            threads,
            thread_cpu_affinity,
            blocking_io,
            static_roots: Vec::new(),
            profiler,
            string_cache: Default::default(),
            exported_symbols_cache: FxHashMap::default(),
            backtrace_style: config.backtrace_style,
            user_relevant_crates,
            extern_statics: FxHashMap::default(),
            rng: RefCell::new(rng),
            // The isolated allocator is only needed when native libs are in play.
            allocator: (!config.native_lib.is_empty())
                .then(|| Rc::new(RefCell::new(crate::alloc::isolated_alloc::IsolatedAlloc::new()))),
            tracked_alloc_ids: config.tracked_alloc_ids.clone(),
            track_alloc_accesses: config.track_alloc_accesses,
            check_alignment: config.check_alignment,
            cmpxchg_weak_failure_rate: config.cmpxchg_weak_failure_rate,
            preemption_rate: config.preemption_rate,
            report_progress: config.report_progress,
            basic_block_count: 0,
            monotonic_clock: MonotonicClock::new(config.isolated_op == IsolatedOp::Allow),
            #[cfg(all(feature = "native-lib", unix))]
            native_lib: config.native_lib.iter().map(|lib_file_path| {
                let host_triple = rustc_session::config::host_tuple();
                let target_triple = tcx.sess.opts.target_triple.tuple();
                // Check if host target == the session target.
                if host_triple != target_triple {
                    panic!(
                        "calling native C functions in linked .so file requires host and target to be the same: \
                        host={host_triple}, target={target_triple}",
                    );
                }
                (
                    // SAFETY (as per libloading): loading a library runs its
                    // initializers; we trust the user-supplied library here.
                    unsafe {
                        libloading::Library::new(lib_file_path)
                            .expect("failed to read specified extern shared object file")
                    },
                    lib_file_path.clone(),
                )
            }).collect(),
            #[cfg(all(feature = "native-lib", unix))]
            native_lib_ecx_interchange: Box::leak(Box::new(Cell::new(0))),
            #[cfg(not(all(feature = "native-lib", unix)))]
            native_lib: config.native_lib.iter().map(|_| {
                panic!("calling functions from native libraries via FFI is not supported in this build of Miri")
            }).collect(),
            gc_interval: config.gc_interval,
            since_gc: 0,
            num_cpus: config.num_cpus,
            page_size,
            stack_addr,
            stack_size,
            collect_leak_backtraces: config.collect_leak_backtraces,
            allocation_spans: RefCell::new(FxHashMap::default()),
            symbolic_alignment: RefCell::new(FxHashMap::default()),
            union_data_ranges: FxHashMap::default(),
            pthread_mutex_sanity: Cell::new(false),
            pthread_rwlock_sanity: Cell::new(false),
            pthread_condvar_sanity: Cell::new(false),
            allocator_shim_symbols: Self::allocator_shim_symbols(tcx),
            mangle_internal_symbol_cache: Default::default(),
            force_intrinsic_fallback: config.force_intrinsic_fallback,
            float_nondet: config.float_nondet,
            float_rounding_error: config.float_rounding_error,
            short_fd_operations: config.short_fd_operations,
        }
    }
830
    /// Computes the mapping for the symbols of the allocator shim: each shim
    /// symbol maps either to the default-method symbol it forwards to, or to a
    /// special allocator method Miri implements itself.
    fn allocator_shim_symbols(
        tcx: TyCtxt<'tcx>,
    ) -> FxHashMap<Symbol, Either<Symbol, SpecialAllocatorMethod>> {
        use rustc_codegen_ssa::base::allocator_shim_contents;

        // No allocator means no shim symbols.
        let Some(kind) = tcx.allocator_kind(()) else {
            return Default::default();
        };
        let methods = allocator_shim_contents(tcx, kind);
        let mut symbols = FxHashMap::default();
        for method in methods {
            let from_name = Symbol::intern(&mangle_internal_symbol(
                tcx,
                &allocator::global_fn_name(method.name),
            ));
            let to = match method.special {
                Some(special) => Either::Right(special),
                None =>
                    Either::Left(Symbol::intern(&mangle_internal_symbol(
                        tcx,
                        &allocator::default_fn_name(method.name),
                    ))),
            };
            // Each method name must be unique; duplicates indicate a bug.
            symbols.try_insert(from_name, to).unwrap();
        }
        symbols
    }
860
861 fn get_user_relevant_crates(tcx: TyCtxt<'_>, config: &MiriConfig) -> Vec<CrateNum> {
864 let local_crate_names = std::env::var("MIRI_LOCAL_CRATES")
867 .map(|crates| crates.split(',').map(|krate| krate.to_string()).collect::<Vec<_>>())
868 .unwrap_or_default();
869 let mut local_crates = Vec::new();
870 for &crate_num in tcx.crates(()) {
871 let name = tcx.crate_name(crate_num);
872 let name = name.as_str();
873 if local_crate_names
874 .iter()
875 .chain(&config.user_relevant_crates)
876 .any(|local_name| local_name == name)
877 {
878 local_crates.push(crate_num);
879 }
880 }
881 local_crates
882 }
883
    /// Second-stage initialization that needs a fully built interpreter
    /// context: environment variables, extern statics, and the main thread.
    pub(crate) fn late_init(
        ecx: &mut MiriInterpCx<'tcx>,
        config: &MiriConfig,
        on_main_stack_empty: StackEmptyCallback<'tcx>,
    ) -> InterpResult<'tcx> {
        EnvVars::init(ecx, config)?;
        MiriMachine::init_extern_statics(ecx)?;
        ThreadManager::init(ecx, on_main_stack_empty);
        interp_ok(())
    }

    /// Registers the pointer backing an extern static under its link name.
    /// Panics if the pointer has no provenance or the name was already added.
    pub(crate) fn add_extern_static(ecx: &mut MiriInterpCx<'tcx>, name: &str, ptr: Pointer) {
        let ptr = ptr.into_pointer_or_addr().unwrap();
        ecx.machine.extern_statics.try_insert(Symbol::intern(name), ptr).unwrap();
    }

    /// Whether the machine may communicate with the host (isolation disabled).
    pub(crate) fn communicate(&self) -> bool {
        self.isolated_op == IsolatedOp::Allow
    }

    /// Whether `instance` comes from a user-relevant crate (the local crate or
    /// one listed via `MIRI_LOCAL_CRATES`/config).
    pub(crate) fn is_local(&self, instance: ty::Instance<'tcx>) -> bool {
        let def_id = instance.def_id();
        def_id.is_local() || self.user_relevant_crates.contains(&def_id.krate)
    }

    /// Called on abnormal termination: drop the profiler so its data is
    /// flushed to disk before the process dies.
    pub(crate) fn handle_abnormal_termination(&mut self) {
        drop(self.profiler.take());
    }

    /// The emulated page size as an `Align`. Panics if `page_size` is not a
    /// power of two (it always is, given how it is computed in `new`).
    pub(crate) fn page_align(&self) -> Align {
        Align::from_bytes(self.page_size).unwrap()
    }

    /// The span where the given allocation was created, if we recorded one.
    pub(crate) fn allocated_span(&self, alloc_id: AllocId) -> Option<SpanData> {
        self.allocation_spans
            .borrow()
            .get(&alloc_id)
            .map(|(allocated, _deallocated)| allocated.data())
    }

    /// The span where the given allocation was freed, if we recorded one.
    pub(crate) fn deallocated_span(&self, alloc_id: AllocId) -> Option<SpanData> {
        self.allocation_spans
            .borrow()
            .get(&alloc_id)
            .and_then(|(_allocated, deallocated)| *deallocated)
            .map(Span::data)
    }
938
    /// Builds the `AllocExtra` for a newly created allocation, notifying all
    /// the instrumentation subsystems (tracking diagnostics, borrow tracker,
    /// data-race detection, leak backtraces, allocation spans).
    fn init_allocation(
        ecx: &MiriInterpCx<'tcx>,
        id: AllocId,
        kind: MemoryKind,
        size: Size,
        align: Align,
    ) -> InterpResult<'tcx, AllocExtra<'tcx>> {
        // Emit a diagnostic if the user asked to track this allocation id.
        if ecx.machine.tracked_alloc_ids.contains(&id) {
            ecx.emit_diagnostic(NonHaltingDiagnostic::TrackingAlloc(id, size, align));
        }

        let borrow_tracker = ecx
            .machine
            .borrow_tracker
            .as_ref()
            .map(|bt| bt.borrow_mut().new_allocation(id, size, kind, &ecx.machine));

        let data_race = match &ecx.machine.data_race {
            GlobalDataRaceHandler::None => AllocDataRaceHandler::None,
            GlobalDataRaceHandler::Vclocks(data_race) =>
                AllocDataRaceHandler::Vclocks(
                    data_race::AllocState::new_allocation(
                        data_race,
                        &ecx.machine.threads,
                        size,
                        kind,
                        ecx.machine.current_user_relevant_span(),
                    ),
                    // Weak-memory state is only allocated when the emulation is on.
                    data_race.weak_memory.then(weak_memory::AllocState::new_allocation),
                ),
            GlobalDataRaceHandler::Genmc(_genmc_ctx) => {
                AllocDataRaceHandler::Genmc
            }
        };

        // Only collect a backtrace for allocations that could appear in a leak
        // report, and only if the user asked for leak backtraces.
        let backtrace = if kind.may_leak() || !ecx.machine.collect_leak_backtraces {
            None
        } else {
            Some(ecx.generate_stacktrace())
        };

        // Remember where user-visible allocations were created, for diagnostics.
        if matches!(kind, MemoryKind::Machine(kind) if kind.should_save_allocation_span()) {
            ecx.machine
                .allocation_spans
                .borrow_mut()
                .insert(id, (ecx.machine.current_user_relevant_span(), None));
        }

        interp_ok(AllocExtra {
            borrow_tracker,
            data_race,
            backtrace,
            sync_objs: BTreeMap::default(),
        })
    }
999}
1000
impl VisitProvenance for MiriMachine<'_> {
    fn visit_provenance(&self, visit: &mut VisitWith<'_>) {
        // Exhaustive destructuring of the whole machine: adding a field forces
        // an explicit decision here on whether it can contain provenance.
        #[rustfmt::skip]
        let MiriMachine {
            threads,
            thread_cpu_affinity: _,
            tls,
            env_vars,
            main_fn_ret_place,
            argc,
            argv,
            cmd_line,
            extern_statics,
            dirs,
            borrow_tracker,
            data_race,
            alloc_addresses,
            fds,
            blocking_io:_,
            epoll_interests:_,
            tcx: _,
            isolated_op: _,
            validation: _,
            monotonic_clock: _,
            layouts: _,
            static_roots: _,
            profiler: _,
            string_cache: _,
            exported_symbols_cache: _,
            backtrace_style: _,
            user_relevant_crates: _,
            rng: _,
            allocator: _,
            tracked_alloc_ids: _,
            track_alloc_accesses: _,
            check_alignment: _,
            cmpxchg_weak_failure_rate: _,
            preemption_rate: _,
            report_progress: _,
            basic_block_count: _,
            native_lib: _,
            #[cfg(all(feature = "native-lib", unix))]
            native_lib_ecx_interchange: _,
            gc_interval: _,
            since_gc: _,
            num_cpus: _,
            page_size: _,
            stack_addr: _,
            stack_size: _,
            collect_leak_backtraces: _,
            allocation_spans: _,
            symbolic_alignment: _,
            union_data_ranges: _,
            pthread_mutex_sanity: _,
            pthread_rwlock_sanity: _,
            pthread_condvar_sanity: _,
            allocator_shim_symbols: _,
            mangle_internal_symbol_cache: _,
            force_intrinsic_fallback: _,
            float_nondet: _,
            float_rounding_error: _,
            short_fd_operations: _,
        } = self;

        threads.visit_provenance(visit);
        tls.visit_provenance(visit);
        env_vars.visit_provenance(visit);
        dirs.visit_provenance(visit);
        fds.visit_provenance(visit);
        data_race.visit_provenance(visit);
        borrow_tracker.visit_provenance(visit);
        alloc_addresses.visit_provenance(visit);
        main_fn_ret_place.visit_provenance(visit);
        argc.visit_provenance(visit);
        argv.visit_provenance(visit);
        cmd_line.visit_provenance(visit);
        for ptr in extern_statics.values() {
            ptr.visit_provenance(visit);
        }
    }
}
1082
/// The interpreter context, instantiated with Miri's machine.
pub type MiriInterpCx<'tcx> = InterpCx<'tcx, MiriMachine<'tcx>>;

/// A little trait for uniform access to the interpreter context; useful as a
/// supertrait for extension traits so they work on `&self`/`&mut self`.
pub trait MiriInterpCxExt<'tcx> {
    fn eval_context_ref<'a>(&'a self) -> &'a MiriInterpCx<'tcx>;
    fn eval_context_mut<'a>(&'a mut self) -> &'a mut MiriInterpCx<'tcx>;
}
impl<'tcx> MiriInterpCxExt<'tcx> for MiriInterpCx<'tcx> {
    #[inline(always)]
    fn eval_context_ref(&self) -> &MiriInterpCx<'tcx> {
        self
    }
    #[inline(always)]
    fn eval_context_mut(&mut self) -> &mut MiriInterpCx<'tcx> {
        self
    }
}
1101
/// Machine hook implementations: this is how Miri customizes the interpreter.
impl<'tcx> Machine<'tcx> for MiriMachine<'tcx> {
    /// Miri's extra memory kinds.
    type MemoryKind = MiriMemoryKind;
    /// "Function values" without a MIR body, i.e. shim symbols.
    type ExtraFnVal = DynSym;

    type FrameExtra = FrameExtra<'tcx>;
    type AllocExtra = AllocExtra<'tcx>;

    type Provenance = Provenance;
    type ProvenanceExtra = ProvenanceExtra;
    /// Custom byte storage for allocations.
    type Bytes = MiriAllocBytes;

    /// The memory map; keyed by allocation id.
    type MemoryMap =
        MonoHashMap<AllocId, (MemoryKind, Allocation<Provenance, Self::AllocExtra, Self::Bytes>)>;

    /// Globals are allocated with the `Global` kind.
    const GLOBAL_KIND: Option<MiriMemoryKind> = Some(MiriMemoryKind::Global);

    /// Allocation failure is surfaced as an interpreter error, not a panic.
    const PANIC_ON_ALLOC_FAIL: bool = false;
1120
    #[inline(always)]
    fn enforce_alignment(ecx: &MiriInterpCx<'tcx>) -> bool {
        // Alignment is enforced unless checking is disabled entirely.
        ecx.machine.check_alignment != AlignmentCheck::None
    }

    /// Supplies "symbolic" alignment information: an allocation may have a
    /// promised (offset, align) pair (stored in `symbolic_alignment`) that is
    /// used instead of its natural alignment.
    #[inline(always)]
    fn alignment_check(
        ecx: &MiriInterpCx<'tcx>,
        alloc_id: AllocId,
        alloc_align: Align,
        alloc_kind: AllocKind,
        offset: Size,
        align: Align,
    ) -> Option<Misalignment> {
        // Only the symbolic mode uses this hook.
        if ecx.machine.check_alignment != AlignmentCheck::Symbolic {
            return None;
        }
        // Promised alignments only apply to live data allocations.
        if alloc_kind != AllocKind::LiveData {
            return None;
        }
        // Default: offset 0 is aligned to the allocation's own alignment.
        let (promised_offset, promised_align) = ecx
            .machine
            .symbolic_alignment
            .borrow()
            .get(&alloc_id)
            .copied()
            .unwrap_or((Size::ZERO, alloc_align));
        if promised_align < align {
            // The allocation cannot possibly satisfy the required alignment.
            Some(Misalignment { has: promised_align, required: align })
        } else {
            // Check the distance from the promised (aligned) offset.
            let distance = offset.bytes().wrapping_sub(promised_offset.bytes());
            if distance.is_multiple_of(align.bytes()) {
                None
            } else {
                // Report the largest power of two dividing `distance` as the
                // alignment we actually have.
                let distance_pow2 = 1 << distance.trailing_zeros();
                Some(Misalignment {
                    has: Align::from_bytes(distance_pow2).unwrap(),
                    required: align,
                })
            }
        }
    }
1171
    #[inline(always)]
    fn enforce_validity(ecx: &MiriInterpCx<'tcx>, _layout: TyAndLayout<'tcx>) -> bool {
        // Validate unless validation is disabled entirely.
        ecx.machine.validation != ValidationMode::No
    }
    #[inline(always)]
    fn enforce_validity_recursively(
        ecx: &InterpCx<'tcx, Self>,
        _layout: TyAndLayout<'tcx>,
    ) -> bool {
        // Recurse into fields/elements only in "deep" validation mode.
        ecx.machine.validation == ValidationMode::Deep
    }

    #[inline(always)]
    fn ignore_optional_overflow_checks(ecx: &MiriInterpCx<'tcx>) -> bool {
        // Optional overflow checks follow the session's overflow-check setting.
        !ecx.tcx.sess.overflow_checks()
    }
1188
    /// Ensures the callee's required target features are actually available;
    /// calling a function with unavailable features is UB (or, on WASM-like
    /// targets, an abort — presumably because the engine would reject the
    /// module; confirm upstream rationale).
    fn check_fn_target_features(
        ecx: &MiriInterpCx<'tcx>,
        instance: ty::Instance<'tcx>,
    ) -> InterpResult<'tcx> {
        let attrs = ecx.tcx.codegen_instance_attrs(instance.def);
        if attrs
            .target_features
            .iter()
            .any(|feature| !ecx.tcx.sess.target_features.contains(&feature.name))
        {
            // Build a comma-separated list of the missing, non-implied features.
            let unavailable = attrs
                .target_features
                .iter()
                .filter(|&feature| {
                    feature.kind != TargetFeatureKind::Implied
                        && !ecx.tcx.sess.target_features.contains(&feature.name)
                })
                .fold(String::new(), |mut s, feature| {
                    if !s.is_empty() {
                        s.push_str(", ");
                    }
                    s.push_str(feature.name.as_str());
                    s
                });
            let msg = format!(
                "calling a function that requires unavailable target features: {unavailable}"
            );
            if ecx.tcx.sess.target.is_like_wasm {
                throw_machine_stop!(TerminationInfo::Abort(msg));
            } else {
                throw_ub_format!("{msg}");
            }
        }
        interp_ok(())
    }
1226
    /// Resolves a function call: foreign items go through the shim machinery,
    /// GenMC may intercept certain functions, and everything else is executed
    /// as regular MIR.
    #[inline(always)]
    fn find_mir_or_eval_fn(
        ecx: &mut MiriInterpCx<'tcx>,
        instance: ty::Instance<'tcx>,
        abi: &FnAbi<'tcx, Ty<'tcx>>,
        args: &[FnArg<'tcx>],
        dest: &PlaceTy<'tcx>,
        ret: Option<mir::BasicBlock>,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx, Option<(&'tcx mir::Body<'tcx>, ty::Instance<'tcx>)>> {
        // Foreign items have no MIR body; emulate them via their link name.
        if ecx.tcx.is_foreign_item(instance.def_id()) {
            let _trace = enter_trace_span!("emulate_foreign_item");
            // Shims do not support in-place argument passing, so copy the args out.
            let args = MiriInterpCx::copy_fn_args(args);
            let link_name = Symbol::intern(ecx.tcx.symbol_name(instance).name);
            return ecx.emulate_foreign_item(link_name, abi, &args, dest, ret, unwind);
        }

        // In GenMC mode, some functions are handled by GenMC itself.
        if ecx.machine.data_race.as_genmc_ref().is_some()
            && ecx.genmc_intercept_function(instance, args, dest)?
        {
            ecx.return_to_block(ret)?;
            return interp_ok(None);
        }

        // Otherwise, run the function's MIR.
        let _trace = enter_trace_span!("load_mir");
        interp_ok(Some((ecx.load_mir(instance.def, None)?, instance)))
    }
1262
    /// Calls a "function value" without a MIR body, i.e. a dynamic shim symbol.
    #[inline(always)]
    fn call_extra_fn(
        ecx: &mut MiriInterpCx<'tcx>,
        fn_val: DynSym,
        abi: &FnAbi<'tcx, Ty<'tcx>>,
        args: &[FnArg<'tcx>],
        dest: &PlaceTy<'tcx>,
        ret: Option<mir::BasicBlock>,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx> {
        // Shims do not support in-place argument passing, so copy the args out.
        let args = MiriInterpCx::copy_fn_args(args);
        ecx.emulate_dyn_sym(fn_val, abi, &args, dest, ret, unwind)
    }

    /// Forwards intrinsic calls to Miri's intrinsic implementation.
    #[inline(always)]
    fn call_intrinsic(
        ecx: &mut MiriInterpCx<'tcx>,
        instance: ty::Instance<'tcx>,
        args: &[OpTy<'tcx>],
        dest: &PlaceTy<'tcx>,
        ret: Option<mir::BasicBlock>,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx, Option<ty::Instance<'tcx>>> {
        ecx.call_intrinsic(instance, args, dest, ret, unwind)
    }
1288
    /// Forwards MIR `Assert` terminator failures to Miri's panic machinery.
    #[inline(always)]
    fn assert_panic(
        ecx: &mut MiriInterpCx<'tcx>,
        msg: &mir::AssertMessage<'tcx>,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx> {
        ecx.assert_panic(msg, unwind)
    }

    /// Starts a non-unwinding panic with the given message.
    fn panic_nounwind(ecx: &mut InterpCx<'tcx, Self>, msg: &str) -> InterpResult<'tcx> {
        ecx.start_panic_nounwind(msg)
    }

    /// Handles `UnwindTerminate`: calls the corresponding panic lang item,
    /// which never returns (unwinding out of it is marked unreachable).
    fn unwind_terminate(
        ecx: &mut InterpCx<'tcx, Self>,
        reason: mir::UnwindTerminateReason,
    ) -> InterpResult<'tcx> {
        let panic = ecx.tcx.lang_items().get(reason.lang_item()).unwrap();
        let panic = ty::Instance::mono(ecx.tcx.tcx, panic);
        ecx.call_function(
            panic,
            ExternAbi::Rust,
            &[],
            None,
            ReturnContinuation::Goto { ret: None, unwind: mir::UnwindAction::Unreachable },
        )?;
        interp_ok(())
    }
1318
1319 #[inline(always)]
1320 fn binary_ptr_op(
1321 ecx: &MiriInterpCx<'tcx>,
1322 bin_op: mir::BinOp,
1323 left: &ImmTy<'tcx>,
1324 right: &ImmTy<'tcx>,
1325 ) -> InterpResult<'tcx, ImmTy<'tcx>> {
1326 ecx.binary_ptr_op(bin_op, left, right)
1327 }
1328
    /// Produces the NaN result for a float operation with the given inputs;
    /// delegates to the machine's NaN generation.
    #[inline(always)]
    fn generate_nan<F1: Float + FloatConvert<F2>, F2: Float>(
        ecx: &InterpCx<'tcx, Self>,
        inputs: &[F1],
    ) -> F2 {
        ecx.generate_nan(inputs)
    }
1336
    /// Applies a random floating-point error to the given immediate.
    /// The `4` is the error-magnitude parameter forwarded to the helper;
    /// see `crate::math` for its exact meaning (not visible here).
    #[inline(always)]
    fn apply_float_nondet(
        ecx: &mut InterpCx<'tcx, Self>,
        val: ImmTy<'tcx>,
    ) -> InterpResult<'tcx, ImmTy<'tcx>> {
        crate::math::apply_random_float_error_to_imm(ecx, val, 4)
    }
1344
    /// Chooses the result of a float `min`/`max` when both operands compare
    /// equal; delegates to the machine.
    #[inline(always)]
    fn equal_float_min_max<F: Float>(ecx: &MiriInterpCx<'tcx>, a: F, b: F) -> F {
        ecx.equal_float_min_max(a, b)
    }
1349
    /// Decides whether a multiply-add may be fused. With nondeterministic
    /// floats enabled, this is a per-call coin flip from the machine RNG.
    #[inline(always)]
    fn float_fuse_mul_add(ecx: &InterpCx<'tcx, Self>) -> bool {
        ecx.machine.float_nondet && ecx.machine.rng.borrow_mut().random()
    }
1354
    /// Determines whether MIR runtime checks (e.g. debug assertions) are
    /// enabled, based on the current session settings.
    #[inline(always)]
    fn runtime_checks(
        ecx: &InterpCx<'tcx, Self>,
        r: mir::RuntimeChecks,
    ) -> InterpResult<'tcx, bool> {
        interp_ok(r.value(ecx.tcx.sess))
    }
1362
    /// Returns a pointer to the per-thread allocation backing a thread-local
    /// static, creating the allocation on first access for this thread.
    #[inline(always)]
    fn thread_local_static_pointer(
        ecx: &mut MiriInterpCx<'tcx>,
        def_id: DefId,
    ) -> InterpResult<'tcx, StrictPointer> {
        ecx.get_or_create_thread_local_alloc(def_id)
    }
1370
    /// Returns the pointer backing an `extern` static.
    ///
    /// Looks the static up by its link name in the machine's `extern_statics`
    /// table (populated with Miri-provided shims). If found, cross-checks that
    /// the declared size and alignment match the shim's, erroring out on a
    /// mismatch; unknown extern statics are rejected as unsupported.
    fn extern_static_pointer(
        ecx: &MiriInterpCx<'tcx>,
        def_id: DefId,
    ) -> InterpResult<'tcx, StrictPointer> {
        let link_name = Symbol::intern(ecx.tcx.symbol_name(Instance::mono(*ecx.tcx, def_id)).name);
        if let Some(&ptr) = ecx.machine.extern_statics.get(&link_name) {
            // The table only ever stores pointers with concrete provenance.
            let Provenance::Concrete { alloc_id, .. } = ptr.provenance else {
                panic!("extern_statics cannot contain wildcards")
            };
            let info = ecx.get_alloc_info(alloc_id);
            // Layout of the extern declaration as seen by the program.
            let def_ty = ecx.tcx.type_of(def_id).instantiate_identity();
            let extern_decl_layout =
                ecx.tcx.layout_of(ecx.typing_env().as_query_input(def_ty)).unwrap();
            if extern_decl_layout.size != info.size || extern_decl_layout.align.abi != info.align {
                throw_unsup_format!(
                    "extern static `{link_name}` has been declared as `{krate}::{name}` \
                    with a size of {decl_size} bytes and alignment of {decl_align} bytes, \
                    but Miri emulates it via an extern static shim \
                    with a size of {shim_size} bytes and alignment of {shim_align} bytes",
                    name = ecx.tcx.def_path_str(def_id),
                    krate = ecx.tcx.crate_name(def_id.krate),
                    decl_size = extern_decl_layout.size.bytes(),
                    decl_align = extern_decl_layout.align.bytes(),
                    shim_size = info.size.bytes(),
                    shim_align = info.align.bytes(),
                )
            }
            interp_ok(ptr)
        } else {
            throw_unsup_format!("extern static `{link_name}` is not supported by Miri",)
        }
    }
1406
    /// Creates the Miri-specific extra state for a freshly created non-global
    /// allocation. Global allocations take the `adjust_global_allocation` path
    /// instead, hence the assertion.
    fn init_local_allocation(
        ecx: &MiriInterpCx<'tcx>,
        id: AllocId,
        kind: MemoryKind,
        size: Size,
        align: Align,
    ) -> InterpResult<'tcx, Self::AllocExtra> {
        assert!(kind != MiriMemoryKind::Global.into());
        MiriMachine::init_allocation(ecx, id, kind, size, align)
    }
1417
1418 fn adjust_alloc_root_pointer(
1419 ecx: &MiriInterpCx<'tcx>,
1420 ptr: interpret::Pointer<CtfeProvenance>,
1421 kind: Option<MemoryKind>,
1422 ) -> InterpResult<'tcx, interpret::Pointer<Provenance>> {
1423 let kind = kind.expect("we set our GLOBAL_KIND so this cannot be None");
1424 let alloc_id = ptr.provenance.alloc_id();
1425 if cfg!(debug_assertions) {
1426 match ecx.tcx.try_get_global_alloc(alloc_id) {
1428 Some(GlobalAlloc::Static(def_id)) if ecx.tcx.is_thread_local_static(def_id) => {
1429 panic!("adjust_alloc_root_pointer called on thread-local static")
1430 }
1431 Some(GlobalAlloc::Static(def_id)) if ecx.tcx.is_foreign_item(def_id) => {
1432 panic!("adjust_alloc_root_pointer called on extern static")
1433 }
1434 _ => {}
1435 }
1436 }
1437 let tag = if let Some(borrow_tracker) = &ecx.machine.borrow_tracker {
1439 borrow_tracker.borrow_mut().root_ptr_tag(alloc_id, &ecx.machine)
1440 } else {
1441 BorTag::default()
1443 };
1444 ecx.adjust_alloc_root_pointer(ptr, tag, kind)
1445 }
1446
    /// Handles an integer-to-pointer cast; delegates to the machine's
    /// address-to-pointer logic.
    #[inline(always)]
    fn ptr_from_addr_cast(ecx: &MiriInterpCx<'tcx>, addr: u64) -> InterpResult<'tcx, Pointer> {
        ecx.ptr_from_addr_cast(addr)
    }
1452
    /// Marks the given provenance as exposed (for pointer-to-integer casts);
    /// delegates to the machine.
    #[inline(always)]
    fn expose_provenance(
        ecx: &InterpCx<'tcx, Self>,
        provenance: Self::Provenance,
    ) -> InterpResult<'tcx> {
        ecx.expose_provenance(provenance)
    }
1463
1464 fn ptr_get_alloc(
1476 ecx: &MiriInterpCx<'tcx>,
1477 ptr: StrictPointer,
1478 size: i64,
1479 ) -> Option<(AllocId, Size, Self::ProvenanceExtra)> {
1480 let rel = ecx.ptr_get_alloc(ptr, size);
1481
1482 rel.map(|(alloc_id, size)| {
1483 let tag = match ptr.provenance {
1484 Provenance::Concrete { tag, .. } => ProvenanceExtra::Concrete(tag),
1485 Provenance::Wildcard => ProvenanceExtra::Wildcard,
1486 };
1487 (alloc_id, size, tag)
1488 })
1489 }
1490
    /// Adjusts a global allocation coming from the `tcx` into Miri's format:
    /// converts the bytes and embedded pointers, then attaches freshly
    /// initialized Miri extra state (using the `Global` memory kind).
    fn adjust_global_allocation<'b>(
        ecx: &InterpCx<'tcx, Self>,
        id: AllocId,
        alloc: &'b Allocation,
    ) -> InterpResult<'tcx, Cow<'b, Allocation<Self::Provenance, Self::AllocExtra, Self::Bytes>>>
    {
        let alloc = alloc.adjust_from_tcx(
            &ecx.tcx,
            |bytes, align| ecx.get_global_alloc_bytes(id, bytes, align),
            |ptr| ecx.global_root_pointer(ptr),
        )?;
        let kind = MiriMemoryKind::Global.into();
        let extra = MiriMachine::init_allocation(ecx, id, kind, alloc.size(), alloc.align)?;
        interp_ok(Cow::Owned(alloc.with_extra(extra)))
    }
1514
    /// Hook run before every memory read.
    ///
    /// In sequence: emits a diagnostic for tracked allocations, informs the
    /// active data-race detector (GenMC or vector clocks), notifies the borrow
    /// tracker, and lets synchronization objects in the accessed range react.
    #[inline(always)]
    fn before_memory_read(
        _tcx: TyCtxtAt<'tcx>,
        machine: &Self,
        alloc_extra: &AllocExtra<'tcx>,
        ptr: Pointer,
        (alloc_id, prov_extra): (AllocId, Self::ProvenanceExtra),
        range: AllocRange,
    ) -> InterpResult<'tcx> {
        // Report the access if the user asked to track this allocation.
        if machine.track_alloc_accesses && machine.tracked_alloc_ids.contains(&alloc_id) {
            machine.emit_diagnostic(NonHaltingDiagnostic::AccessedAlloc(
                alloc_id,
                range,
                borrow_tracker::AccessKind::Read,
            ));
        }
        match &machine.data_race {
            GlobalDataRaceHandler::None => {}
            GlobalDataRaceHandler::Genmc(genmc_ctx) =>
                genmc_ctx.memory_load(machine, ptr.addr(), range.size)?,
            GlobalDataRaceHandler::Vclocks(_data_race) => {
                let _trace = enter_trace_span!(data_race::before_memory_read);
                // A global vector-clock handler implies a per-allocation one.
                let AllocDataRaceHandler::Vclocks(data_race, _weak_memory) = &alloc_extra.data_race
                else {
                    unreachable!();
                };
                data_race.read_non_atomic(alloc_id, range, NaReadType::Read, None, machine)?;
            }
        }
        if let Some(borrow_tracker) = &alloc_extra.borrow_tracker {
            borrow_tracker.before_memory_read(alloc_id, prov_extra, range, machine)?;
        }
        // Let sync objects overlapping the accessed range observe the read.
        for (_offset, obj) in alloc_extra.sync_objs.range(range.start..range.end()) {
            obj.on_access(concurrency::sync::AccessKind::Read)?;
        }

        interp_ok(())
    }
1555
    /// Hook run before every memory write.
    ///
    /// Mirrors `before_memory_read`, plus: weak-memory buffers observe the
    /// non-atomic write, and sync objects that request it are deleted when
    /// overwritten.
    #[inline(always)]
    fn before_memory_write(
        _tcx: TyCtxtAt<'tcx>,
        machine: &mut Self,
        alloc_extra: &mut AllocExtra<'tcx>,
        ptr: Pointer,
        (alloc_id, prov_extra): (AllocId, Self::ProvenanceExtra),
        range: AllocRange,
    ) -> InterpResult<'tcx> {
        // Report the access if the user asked to track this allocation.
        if machine.track_alloc_accesses && machine.tracked_alloc_ids.contains(&alloc_id) {
            machine.emit_diagnostic(NonHaltingDiagnostic::AccessedAlloc(
                alloc_id,
                range,
                borrow_tracker::AccessKind::Write,
            ));
        }
        match &machine.data_race {
            GlobalDataRaceHandler::None => {}
            GlobalDataRaceHandler::Genmc(genmc_ctx) =>
                genmc_ctx.memory_store(machine, ptr.addr(), range.size)?,
            GlobalDataRaceHandler::Vclocks(_global_state) => {
                let _trace = enter_trace_span!(data_race::before_memory_write);
                // A global vector-clock handler implies a per-allocation one.
                let AllocDataRaceHandler::Vclocks(data_race, weak_memory) =
                    &mut alloc_extra.data_race
                else {
                    unreachable!()
                };
                data_race.write_non_atomic(alloc_id, range, NaWriteType::Write, None, machine)?;
                if let Some(weak_memory) = weak_memory {
                    weak_memory
                        .non_atomic_write(range, machine.data_race.as_vclocks_ref().unwrap());
                }
            }
        }
        if let Some(borrow_tracker) = &mut alloc_extra.borrow_tracker {
            borrow_tracker.before_memory_write(alloc_id, prov_extra, range, machine)?;
        }
        if !alloc_extra.sync_objs.is_empty() {
            // Notify overlapping sync objects; collect the ones to delete first
            // since we cannot remove from the map while iterating it.
            let mut to_delete = vec![];
            for (offset, obj) in alloc_extra.sync_objs.range(range.start..range.end()) {
                obj.on_access(concurrency::sync::AccessKind::Write)?;
                if obj.delete_on_write() {
                    to_delete.push(*offset);
                }
            }
            for offset in to_delete {
                alloc_extra.sync_objs.remove(&offset);
            }
        }
        interp_ok(())
    }
1609
    /// Hook run before an allocation is deallocated.
    ///
    /// Emits a diagnostic for tracked allocations, treats the deallocation as
    /// a write of the whole allocation for data-race purposes, notifies the
    /// borrow tracker and all sync objects, records the deallocation site, and
    /// finally releases the allocation's base address.
    #[inline(always)]
    fn before_memory_deallocation(
        _tcx: TyCtxtAt<'tcx>,
        machine: &mut Self,
        alloc_extra: &mut AllocExtra<'tcx>,
        ptr: Pointer,
        (alloc_id, prove_extra): (AllocId, Self::ProvenanceExtra),
        size: Size,
        align: Align,
        kind: MemoryKind,
    ) -> InterpResult<'tcx> {
        if machine.tracked_alloc_ids.contains(&alloc_id) {
            machine.emit_diagnostic(NonHaltingDiagnostic::FreedAlloc(alloc_id));
        }
        match &machine.data_race {
            GlobalDataRaceHandler::None => {}
            GlobalDataRaceHandler::Genmc(genmc_ctx) =>
                genmc_ctx.handle_dealloc(machine, alloc_id, ptr.addr(), kind)?,
            GlobalDataRaceHandler::Vclocks(_global_state) => {
                let _trace = enter_trace_span!(data_race::before_memory_deallocation);
                let data_race = alloc_extra.data_race.as_vclocks_mut().unwrap();
                // A deallocation races with any concurrent access, so model it
                // as a write covering the entire allocation.
                data_race.write_non_atomic(
                    alloc_id,
                    alloc_range(Size::ZERO, size),
                    NaWriteType::Deallocate,
                    None,
                    machine,
                )?;
            }
        }
        if let Some(borrow_tracker) = &mut alloc_extra.borrow_tracker {
            borrow_tracker.before_memory_deallocation(alloc_id, prove_extra, size, machine)?;
        }
        // All sync objects in this allocation observe the deallocation.
        for obj in alloc_extra.sync_objs.values() {
            obj.on_access(concurrency::sync::AccessKind::Dealloc)?;
        }
        // Remember where the allocation was freed, for later diagnostics.
        if let Some((_, deallocated_at)) = machine.allocation_spans.borrow_mut().get_mut(&alloc_id)
        {
            *deallocated_at = Some(machine.current_user_relevant_span());
        }
        machine.free_alloc_id(alloc_id, size, align, kind);
        interp_ok(())
    }
1655
1656 #[inline(always)]
1657 fn retag_ptr_value(
1658 ecx: &mut InterpCx<'tcx, Self>,
1659 kind: mir::RetagKind,
1660 val: &ImmTy<'tcx>,
1661 ) -> InterpResult<'tcx, ImmTy<'tcx>> {
1662 if ecx.machine.borrow_tracker.is_some() {
1663 ecx.retag_ptr_value(kind, val)
1664 } else {
1665 interp_ok(val.clone())
1666 }
1667 }
1668
1669 #[inline(always)]
1670 fn retag_place_contents(
1671 ecx: &mut InterpCx<'tcx, Self>,
1672 kind: mir::RetagKind,
1673 place: &PlaceTy<'tcx>,
1674 ) -> InterpResult<'tcx> {
1675 if ecx.machine.borrow_tracker.is_some() {
1676 ecx.retag_place_contents(kind, place)?;
1677 }
1678 interp_ok(())
1679 }
1680
    /// Hook for in-place function arguments: protects the place through the
    /// borrow tracker (if enabled) and then resets its contents to
    /// uninitialized via `write_uninit`.
    fn protect_in_place_function_argument(
        ecx: &mut InterpCx<'tcx, Self>,
        place: &MPlaceTy<'tcx>,
    ) -> InterpResult<'tcx> {
        let protected_place = if ecx.machine.borrow_tracker.is_some() {
            ecx.protect_place(place)?
        } else {
            // Without a borrow tracker there is nothing to protect; use the
            // place as-is.
            place.clone()
        };
        ecx.write_uninit(&protected_place)?;
        interp_ok(())
    }
1701
    /// Builds Miri's `FrameExtra` for a freshly pushed stack frame.
    #[inline(always)]
    fn init_frame(
        ecx: &mut InterpCx<'tcx, Self>,
        frame: Frame<'tcx, Provenance>,
    ) -> InterpResult<'tcx, Frame<'tcx, Provenance, FrameExtra<'tcx>>> {
        // If measureme profiling is enabled, start an interval timer for this
        // call. Function-name strings are cached so each distinct name is
        // allocated in the profiler only once.
        let timing = if let Some(profiler) = ecx.machine.profiler.as_ref() {
            let fn_name = frame.instance().to_string();
            let entry = ecx.machine.string_cache.entry(fn_name.clone());
            let name = entry.or_insert_with(|| profiler.alloc_string(&*fn_name));

            Some(profiler.start_recording_interval_event_detached(
                *name,
                measureme::EventId::from_label(*name),
                ecx.active_thread().to_u32(),
            ))
        } else {
            None
        };

        let borrow_tracker = ecx.machine.borrow_tracker.as_ref();

        let extra = FrameExtra {
            borrow_tracker: borrow_tracker.map(|bt| bt.borrow_mut().new_frame()),
            catch_unwind: None,
            timing,
            user_relevance: ecx.machine.user_relevance(&frame),
            // Per-frame data-race state is only needed with the vector-clock
            // detector.
            data_race: ecx
                .machine
                .data_race
                .as_vclocks_ref()
                .map(|_| data_race::FrameState::default()),
        };

        interp_ok(frame.with_extra(extra))
    }
1738
    /// Returns the call stack of the currently active thread.
    fn stack<'a>(
        ecx: &'a InterpCx<'tcx, Self>,
    ) -> &'a [Frame<'tcx, Self::Provenance, Self::FrameExtra>] {
        ecx.active_thread_stack()
    }
1744
    /// Returns the call stack of the currently active thread, mutably.
    fn stack_mut<'a>(
        ecx: &'a mut InterpCx<'tcx, Self>,
    ) -> &'a mut Vec<Frame<'tcx, Self::Provenance, Self::FrameExtra>> {
        ecx.active_thread_stack_mut()
    }
1750
1751 fn before_terminator(ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
1752 ecx.machine.basic_block_count += 1u64; ecx.machine.since_gc += 1;
1754 if let Some(report_progress) = ecx.machine.report_progress {
1756 if ecx.machine.basic_block_count.is_multiple_of(u64::from(report_progress)) {
1757 ecx.emit_diagnostic(NonHaltingDiagnostic::ProgressReport {
1758 block_count: ecx.machine.basic_block_count,
1759 });
1760 }
1761 }
1762
1763 if ecx.machine.gc_interval > 0 && ecx.machine.since_gc >= ecx.machine.gc_interval {
1768 ecx.machine.since_gc = 0;
1769 ecx.run_provenance_gc();
1770 }
1771
1772 ecx.maybe_preempt_active_thread();
1775
1776 ecx.machine.monotonic_clock.tick();
1778
1779 interp_ok(())
1780 }
1781
    /// Hook run after a frame is pushed: if the new frame is at least as
    /// user-relevant as the current top user-relevant frame, it becomes the
    /// new top user-relevant frame.
    #[inline(always)]
    fn after_stack_push(ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
        if ecx.frame().extra.user_relevance >= ecx.active_thread_ref().current_user_relevance() {
            // The new frame is the last one on the stack.
            let stack_len = ecx.active_thread_stack().len();
            ecx.active_thread_mut().set_top_user_relevant_frame(stack_len - 1);
        }
        interp_ok(())
    }
1792
    /// Hook run just before a frame is popped, while it is still on the stack.
    fn before_stack_pop(ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
        let frame = ecx.frame();
        // Let the borrow tracker run its end-of-frame actions.
        if ecx.machine.borrow_tracker.is_some() {
            ecx.on_stack_pop(frame)?;
        }
        // If the frame being popped is the topmost user-relevant one, the next
        // user-relevant frame must be recomputed, skipping the one frame that
        // is about to disappear.
        if ecx
            .active_thread_ref()
            .top_user_relevant_frame()
            .expect("there should always be a most relevant frame for a non-empty stack")
            == ecx.frame_idx()
        {
            ecx.active_thread_mut().recompute_top_user_relevant_frame(1);
        }
        info!("Leaving {}", ecx.frame().instance());
        interp_ok(())
    }
1819
    /// Hook run after a frame has been popped (possibly while unwinding).
    #[inline(always)]
    fn after_stack_pop(
        ecx: &mut InterpCx<'tcx, Self>,
        frame: Frame<'tcx, Provenance, FrameExtra<'tcx>>,
        unwinding: bool,
    ) -> InterpResult<'tcx, ReturnAction> {
        let res = {
            let mut frame = frame;
            // Take the profiler timer out before the frame extra is consumed.
            let timing = frame.extra.timing.take();
            // Let Miri's unwinding logic (e.g. `catch_unwind`) process the
            // popped frame.
            let res = ecx.handle_stack_pop_unwind(frame.extra, unwinding);
            if let Some(profiler) = ecx.machine.profiler.as_ref() {
                profiler.finish_recording_interval_event(timing.unwrap());
            }
            res
        };
        if !ecx.active_thread_stack().is_empty() {
            info!("Continuing in {}", ecx.frame().instance());
        }
        res
    }
1843
    /// Hook run after a local variable is read; informs the per-frame
    /// data-race state (vector-clock detector only).
    fn after_local_read(
        ecx: &InterpCx<'tcx, Self>,
        frame: &Frame<'tcx, Provenance, FrameExtra<'tcx>>,
        local: mir::Local,
    ) -> InterpResult<'tcx> {
        if let Some(data_race) = &frame.extra.data_race {
            let _trace = enter_trace_span!(data_race::after_local_read);
            data_race.local_read(local, &ecx.machine);
        }
        interp_ok(())
    }
1855
    /// Hook run after a local variable is written (including when its storage
    /// becomes live); informs the per-frame data-race state.
    fn after_local_write(
        ecx: &mut InterpCx<'tcx, Self>,
        local: mir::Local,
        storage_live: bool,
    ) -> InterpResult<'tcx> {
        if let Some(data_race) = &ecx.frame().extra.data_race {
            let _trace = enter_trace_span!(data_race::after_local_write);
            data_race.local_write(local, storage_live, &ecx.machine);
        }
        interp_ok(())
    }
1867
    /// Hook run when a local's value gets moved into an actual (fresh) memory
    /// allocation.
    fn after_local_moved_to_memory(
        ecx: &mut InterpCx<'tcx, Self>,
        local: mir::Local,
        mplace: &MPlaceTy<'tcx>,
    ) -> InterpResult<'tcx> {
        let Some(Provenance::Concrete { alloc_id, .. }) = mplace.ptr().provenance else {
            panic!("after_local_allocated should only be called on fresh allocations");
        };
        // Record the local's declaration span as the allocation's creation
        // site (no deallocation span yet).
        let local_decl = &ecx.frame().body().local_decls[local];
        let span = local_decl.source_info.span;
        ecx.machine.allocation_spans.borrow_mut().insert(alloc_id, (span, None));
        // Transfer the data-race state tracked for the local onto the new
        // allocation (vector-clock detector only).
        let (alloc_info, machine) = ecx.get_alloc_extra_mut(alloc_id)?;
        if let Some(data_race) =
            &machine.threads.active_thread_stack().last().unwrap().extra.data_race
        {
            let _trace = enter_trace_span!(data_race::after_local_moved_to_memory);
            data_race.local_moved_to_memory(
                local,
                alloc_info.data_race.as_vclocks_mut().unwrap(),
                machine,
            );
        }
        interp_ok(())
    }
1894
1895 fn get_global_alloc_salt(
1896 ecx: &InterpCx<'tcx, Self>,
1897 instance: Option<ty::Instance<'tcx>>,
1898 ) -> usize {
1899 let unique = if let Some(instance) = instance {
1900 let is_generic = instance
1913 .args
1914 .into_iter()
1915 .any(|arg| !matches!(arg.kind(), ty::GenericArgKind::Lifetime(_)));
1916 let can_be_inlined = matches!(
1917 ecx.tcx.sess.opts.unstable_opts.cross_crate_inline_threshold,
1918 InliningThreshold::Always
1919 ) || !matches!(
1920 ecx.tcx.codegen_instance_attrs(instance.def).inline,
1921 InlineAttr::Never
1922 );
1923 !is_generic && !can_be_inlined
1924 } else {
1925 false
1927 };
1928 if unique {
1930 CTFE_ALLOC_SALT
1931 } else {
1932 ecx.machine.rng.borrow_mut().random_range(0..ADDRS_PER_ANON_GLOBAL)
1933 }
1934 }
1935
    /// Memoizes the "data range" of a union type: computed once per type via
    /// `compute_range` and then served from the machine-level cache.
    fn cached_union_data_range<'e>(
        ecx: &'e mut InterpCx<'tcx, Self>,
        ty: Ty<'tcx>,
        compute_range: impl FnOnce() -> RangeSet,
    ) -> Cow<'e, RangeSet> {
        Cow::Borrowed(ecx.machine.union_data_ranges.entry(ty).or_insert_with(compute_range))
    }
1943
1944 fn get_default_alloc_params(&self) -> <Self::Bytes as AllocBytes>::AllocParams {
1945 use crate::alloc::MiriAllocParams;
1946
1947 match &self.allocator {
1948 Some(alloc) => MiriAllocParams::Isolated(alloc.clone()),
1949 None => MiriAllocParams::Global,
1950 }
1951 }
1952
1953 fn enter_trace_span(span: impl FnOnce() -> tracing::Span) -> impl EnteredTraceSpan {
1954 #[cfg(feature = "tracing")]
1955 {
1956 span().entered()
1957 }
1958 #[cfg(not(feature = "tracing"))]
1959 #[expect(clippy::unused_unit)]
1960 {
1961 let _ = span; ()
1963 }
1964 }
1965}
1966
/// A machine-level callback that can be stored now and invoked later with a
/// `&mut` interpreter context and an argument of type `T`.
///
/// Implementors must also be `VisitProvenance` so any provenance captured by
/// the callback can be reported (e.g. to the provenance GC).
pub trait MachineCallback<'tcx, T>: VisitProvenance {
    /// Invokes — and consumes — the callback.
    fn call(
        self: Box<Self>,
        ecx: &mut InterpCx<'tcx, MiriMachine<'tcx>>,
        arg: T,
    ) -> InterpResult<'tcx>;
}
1976
/// A boxed, type-erased [`MachineCallback`].
pub type DynMachineCallback<'tcx, T> = Box<dyn MachineCallback<'tcx, T> + 'tcx>;
1979
/// Constructs a boxed [`MachineCallback`] from closure-like syntax.
///
/// `@capture<'tcx, ...>` lists the lifetimes and the variables (with types)
/// that the callback captures by value; `|this, arg: Ty| body` is the code to
/// run when the callback is invoked with `this: &mut MiriInterpCx` and the
/// argument. All captured variables automatically participate in
/// `visit_provenance`.
#[macro_export]
macro_rules! callback {
    (@capture<$tcx:lifetime $(,)? $($lft:lifetime),*>
        { $($name:ident: $type:ty),* $(,)? }
        |$this:ident, $arg:ident: $arg_ty:ty| $body:expr $(,)?) => {{
        // Ad-hoc struct holding the captured state; the PhantomData ties it to
        // the interpreter lifetime even when nothing else uses it.
        struct Callback<$tcx, $($lft),*> {
            $($name: $type,)*
            _phantom: std::marker::PhantomData<&$tcx ()>,
        }

        // Forward provenance visiting to every captured field.
        impl<$tcx, $($lft),*> VisitProvenance for Callback<$tcx, $($lft),*> {
            fn visit_provenance(&self, _visit: &mut VisitWith<'_>) {
                $(
                    self.$name.visit_provenance(_visit);
                )*
            }
        }

        impl<$tcx, $($lft),*> MachineCallback<$tcx, $arg_ty> for Callback<$tcx, $($lft),*> {
            fn call(
                self: Box<Self>,
                $this: &mut MiriInterpCx<$tcx>,
                $arg: $arg_ty
            ) -> InterpResult<$tcx> {
                // Unpack the captures into locals so `$body` can use them by name.
                #[allow(unused_variables)]
                let Callback { $($name,)* _phantom } = *self;
                $body
            }
        }

        Box::new(Callback {
            $($name,)*
            _phantom: std::marker::PhantomData
        })
    }};
}