1use std::any::Any;
5use std::borrow::Cow;
6use std::cell::{Cell, RefCell};
7use std::path::Path;
8use std::rc::Rc;
9use std::{fmt, process};
10
11use rand::rngs::StdRng;
12use rand::{Rng, SeedableRng};
13use rustc_abi::{Align, ExternAbi, Size};
14use rustc_apfloat::{Float, FloatConvert};
15use rustc_ast::expand::allocator::{self, SpecialAllocatorMethod};
16use rustc_data_structures::either::Either;
17use rustc_data_structures::fx::{FxHashMap, FxHashSet};
18#[allow(unused)]
19use rustc_data_structures::static_assert_size;
20use rustc_hir::attrs::InlineAttr;
21use rustc_log::tracing;
22use rustc_middle::middle::codegen_fn_attrs::TargetFeatureKind;
23use rustc_middle::mir;
24use rustc_middle::query::TyCtxtAt;
25use rustc_middle::ty::layout::{
26 HasTyCtxt, HasTypingEnv, LayoutCx, LayoutError, LayoutOf, TyAndLayout,
27};
28use rustc_middle::ty::{self, Instance, Ty, TyCtxt};
29use rustc_session::config::InliningThreshold;
30use rustc_span::def_id::{CrateNum, DefId};
31use rustc_span::{Span, SpanData, Symbol};
32use rustc_symbol_mangling::mangle_internal_symbol;
33use rustc_target::callconv::FnAbi;
34
35use crate::alloc_addresses::EvalContextExt;
36use crate::concurrency::cpu_affinity::{self, CpuAffinityMask};
37use crate::concurrency::data_race::{self, NaReadType, NaWriteType};
38use crate::concurrency::{
39 AllocDataRaceHandler, GenmcCtx, GenmcEvalContextExt as _, GlobalDataRaceHandler, weak_memory,
40};
41use crate::*;
42
/// First POSIX real-time signal number emulated by Miri (matches the common Linux value).
pub const SIGRTMIN: i32 = 34;

/// Last POSIX real-time signal number emulated by Miri.
/// NOTE(review): deliberately small — presumably below any platform's real
/// SIGRTMAX so programs cannot rely on the full range; confirm against shims.
pub const SIGRTMAX: i32 = 42;

/// How many addresses are handed out per anonymous global allocation
/// (used by the address assignment logic; see `alloc_addresses`).
const ADDRS_PER_ANON_GLOBAL: usize = 32;
57
/// Which pointer-alignment checking strategy to use.
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum AlignmentCheck {
    /// Do not check alignment at all (see `enforce_alignment`).
    None,
    /// Check alignment "symbolically": track a promised (offset, align) per
    /// allocation instead of looking at the concrete base address
    /// (see `alignment_check` below, which only runs in this mode).
    Symbolic,
    /// Check alignment on the actual physical integer address.
    Int,
}
67
/// How a rejected operation (one not permitted in isolation mode) is reported.
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum RejectOpWith {
    /// Stop execution when the op is rejected.
    Abort,

    /// Reject the op without printing any warning.
    NoWarning,

    /// Reject the op and print a warning about it.
    Warning,

    /// Like `Warning`, but without a backtrace.
    WarningWithoutBacktrace,
}
84
/// Whether operations that communicate with the host are allowed.
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum IsolatedOp {
    /// Isolation is enabled: reject the op, reporting it as specified
    /// by the contained `RejectOpWith`.
    Reject(RejectOpWith),

    /// Isolation is disabled: allow the op.
    Allow,
}
96
/// How much backtrace to print in diagnostics.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum BacktraceStyle {
    /// Print a shortened backtrace (user-relevant frames only, presumably).
    Short,
    /// Print the full backtrace.
    Full,
    /// Print no backtrace.
    Off,
}
106
/// How deeply values are validity-checked (see `enforce_validity{,_recursively}`).
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum ValidationMode {
    /// No validity checking.
    No,
    /// Validate values, but do not recurse through references.
    Shallow,
    /// Validate values recursively.
    Deep,
}
116
/// How floating-point rounding error is injected into operation results.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum FloatRoundingErrorMode {
    /// Apply a random error (see `apply_random_float_error_to_imm` in `math`).
    Random,
    /// Do not perturb results.
    None,
    /// Always apply the maximum error — NOTE(review): semantics inferred from
    /// the name; confirm against the `math` module.
    Max,
}
126
/// Extra per-stack-frame data Miri attaches to the interpreter's frames.
pub struct FrameExtra<'tcx> {
    /// Borrow-tracker (e.g. Stacked/Tree Borrows) state for this frame, if enabled.
    pub borrow_tracker: Option<borrow_tracker::FrameState>,

    /// Set when this frame is a `catch_unwind`-style frame: the data needed to
    /// handle a panic unwinding into it.
    pub catch_unwind: Option<CatchUnwindData<'tcx>>,

    /// `measureme` timing for this frame, if profiling is enabled.
    pub timing: Option<measureme::DetachedTiming>,

    /// Whether this frame counts as "user-relevant" for diagnostics.
    /// NOTE(review): the criteria are decided where frames are pushed, not here.
    pub is_user_relevant: bool,

    /// Data-race detector state for this frame, if the vclock-based detector is enabled.
    pub data_race: Option<data_race::FrameState>,
}
151
152impl<'tcx> std::fmt::Debug for FrameExtra<'tcx> {
153 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
154 let FrameExtra { borrow_tracker, catch_unwind, timing: _, is_user_relevant, data_race } =
156 self;
157 f.debug_struct("FrameData")
158 .field("borrow_tracker", borrow_tracker)
159 .field("catch_unwind", catch_unwind)
160 .field("is_user_relevant", is_user_relevant)
161 .field("data_race", data_race)
162 .finish()
163 }
164}
165
impl VisitProvenance for FrameExtra<'_> {
    /// Report all provenance held by this frame's extra data to `visit`.
    fn visit_provenance(&self, visit: &mut VisitWith<'_>) {
        // Exhaustive destructuring so that new fields must explicitly opt in
        // or out of provenance visiting.
        let FrameExtra {
            catch_unwind,
            borrow_tracker,
            timing: _,
            is_user_relevant: _,
            data_race: _,
        } = self;

        catch_unwind.visit_provenance(visit);
        borrow_tracker.visit_provenance(visit);
    }
}
180
/// Extra memory kinds, beyond the interpreter's built-in ones, that Miri distinguishes.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum MiriMemoryKind {
    /// Rust heap allocations (`__rust_alloc` etc.).
    Rust,
    /// The "Miri bare-metal heap" (see `Display` below).
    Miri,
    /// C heap allocations (`malloc` etc.).
    C,
    /// Windows `HeapAlloc` memory.
    WinHeap,
    /// Windows "local" memory (`LocalAlloc`).
    WinLocal,
    /// Machine-managed memory; may leak (see `MayLeak` below).
    Machine,
    /// Memory allocated by the language runtime.
    Runtime,
    /// Globals (statics and consts).
    Global,
    /// Shims for extern statics.
    ExternStatic,
    /// Thread-local statics.
    Tls,
    /// `mmap`ed memory.
    Mmap,
}
212
213impl From<MiriMemoryKind> for MemoryKind {
214 #[inline(always)]
215 fn from(kind: MiriMemoryKind) -> MemoryKind {
216 MemoryKind::Machine(kind)
217 }
218}
219
220impl MayLeak for MiriMemoryKind {
221 #[inline(always)]
222 fn may_leak(self) -> bool {
223 use self::MiriMemoryKind::*;
224 match self {
225 Rust | Miri | C | WinHeap | WinLocal | Runtime => false,
226 Machine | Global | ExternStatic | Tls | Mmap => true,
227 }
228 }
229}
230
231impl MiriMemoryKind {
232 fn should_save_allocation_span(self) -> bool {
234 use self::MiriMemoryKind::*;
235 match self {
236 Rust | Miri | C | WinHeap | WinLocal | Mmap => true,
238 Machine | Global | ExternStatic | Tls | Runtime => false,
240 }
241 }
242}
243
244impl fmt::Display for MiriMemoryKind {
245 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
246 use self::MiriMemoryKind::*;
247 match self {
248 Rust => write!(f, "Rust heap"),
249 Miri => write!(f, "Miri bare-metal heap"),
250 C => write!(f, "C heap"),
251 WinHeap => write!(f, "Windows heap"),
252 WinLocal => write!(f, "Windows local memory"),
253 Machine => write!(f, "machine-managed memory"),
254 Runtime => write!(f, "language runtime memory"),
255 Global => write!(f, "global (static or const)"),
256 ExternStatic => write!(f, "extern static"),
257 Tls => write!(f, "thread-local static"),
258 Mmap => write!(f, "mmap"),
259 }
260 }
261}
262
/// The interpreter's memory-kind type, instantiated with Miri's extra kinds.
pub type MemoryKind = interpret::MemoryKind<MiriMemoryKind>;
264
/// Pointer provenance as tracked by Miri.
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub enum Provenance {
    /// Provenance referring to a concrete allocation, together with a
    /// borrow-tracker tag.
    Concrete {
        alloc_id: AllocId,
        /// Borrow-tracker tag for this pointer.
        tag: BorTag,
    },
    /// Wildcard provenance: could point to any exposed allocation.
    /// NOTE(review): presumably produced by int-to-ptr casts — confirm in
    /// `alloc_addresses`.
    Wildcard,
}
297
/// The "extra" part of a pointer's provenance once the allocation id has been
/// resolved: just the borrow tag, or wildcard.
#[derive(Copy, Clone, PartialEq)]
pub enum ProvenanceExtra {
    /// A concrete borrow-tracker tag.
    Concrete(BorTag),
    /// Wildcard: no specific tag known.
    Wildcard,
}
304
// These types occur all over the interpreter; make sure they do not
// accidentally grow (sizes in bytes, on 64-bit hosts only).
#[cfg(target_pointer_width = "64")]
static_assert_size!(StrictPointer, 24);
#[cfg(target_pointer_width = "64")]
static_assert_size!(Scalar, 32);
312
313impl fmt::Debug for Provenance {
314 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
315 match self {
316 Provenance::Concrete { alloc_id, tag } => {
317 if f.alternate() {
319 write!(f, "[{alloc_id:#?}]")?;
320 } else {
321 write!(f, "[{alloc_id:?}]")?;
322 }
323 write!(f, "{tag:?}")?;
325 }
326 Provenance::Wildcard => {
327 write!(f, "[wildcard]")?;
328 }
329 }
330 Ok(())
331 }
332}
333
334impl interpret::Provenance for Provenance {
335 const OFFSET_IS_ADDR: bool = true;
337
338 const WILDCARD: Option<Self> = Some(Provenance::Wildcard);
340
341 fn get_alloc_id(self) -> Option<AllocId> {
342 match self {
343 Provenance::Concrete { alloc_id, .. } => Some(alloc_id),
344 Provenance::Wildcard => None,
345 }
346 }
347
348 fn fmt(ptr: &interpret::Pointer<Self>, f: &mut fmt::Formatter<'_>) -> fmt::Result {
349 let (prov, addr) = ptr.into_raw_parts(); write!(f, "{:#x}", addr.bytes())?;
351 if f.alternate() {
352 write!(f, "{prov:#?}")?;
353 } else {
354 write!(f, "{prov:?}")?;
355 }
356 Ok(())
357 }
358
359 fn join(left: Self, right: Self) -> Option<Self> {
360 match (left, right) {
361 (
363 Provenance::Concrete { alloc_id: left_alloc, tag: left_tag },
364 Provenance::Concrete { alloc_id: right_alloc, tag: right_tag },
365 ) if left_alloc == right_alloc && left_tag == right_tag => Some(left),
366 (Provenance::Wildcard, o) | (o, Provenance::Wildcard) => Some(o),
369 _ => None,
371 }
372 }
373}
374
375impl fmt::Debug for ProvenanceExtra {
376 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
377 match self {
378 ProvenanceExtra::Concrete(pid) => write!(f, "{pid:?}"),
379 ProvenanceExtra::Wildcard => write!(f, "<wildcard>"),
380 }
381 }
382}
383
384impl ProvenanceExtra {
385 pub fn and_then<T>(self, f: impl FnOnce(BorTag) -> Option<T>) -> Option<T> {
386 match self {
387 ProvenanceExtra::Concrete(pid) => f(pid),
388 ProvenanceExtra::Wildcard => None,
389 }
390 }
391}
392
/// Extra per-allocation data Miri attaches to the interpreter's allocations.
#[derive(Debug)]
pub struct AllocExtra<'tcx> {
    /// Borrow-tracker state for this allocation, if enabled.
    pub borrow_tracker: Option<borrow_tracker::AllocState>,
    /// Data-race detector state for this allocation.
    pub data_race: AllocDataRaceHandler,
    /// Backtrace captured at allocation time, used for leak reports
    /// (only filled when the kind may not leak; see `init_allocation`).
    pub backtrace: Option<Vec<FrameInfo<'tcx>>>,
    /// Synchronization objects (mutexes etc.) stored inside this allocation,
    /// keyed by offset. Values are type-erased; the shims downcast them.
    pub sync: FxHashMap<Size, Box<dyn Any>>,
}
413
impl<'tcx> Clone for AllocExtra<'tcx> {
    // A `Clone` impl is required by the interpreter's `Allocation` type, but
    // Miri's allocations must never actually be cloned, so this panics.
    fn clone(&self) -> Self {
        panic!("our allocations should never be cloned");
    }
}
421
impl VisitProvenance for AllocExtra<'_> {
    /// Report all provenance held by this allocation's extra data.
    fn visit_provenance(&self, visit: &mut VisitWith<'_>) {
        // Exhaustive destructuring: new fields must explicitly opt in or out.
        let AllocExtra { borrow_tracker, data_race, backtrace: _, sync: _ } = self;

        borrow_tracker.visit_provenance(visit);
        data_race.visit_provenance(visit);
    }
}
430
/// Precomputed layouts of primitive types, cached so the interpreter does not
/// have to repeatedly query them (see `PrimitiveLayouts::new`).
pub struct PrimitiveLayouts<'tcx> {
    pub unit: TyAndLayout<'tcx>,
    pub i8: TyAndLayout<'tcx>,
    pub i16: TyAndLayout<'tcx>,
    pub i32: TyAndLayout<'tcx>,
    pub i64: TyAndLayout<'tcx>,
    pub i128: TyAndLayout<'tcx>,
    pub isize: TyAndLayout<'tcx>,
    pub u8: TyAndLayout<'tcx>,
    pub u16: TyAndLayout<'tcx>,
    pub u32: TyAndLayout<'tcx>,
    pub u64: TyAndLayout<'tcx>,
    pub u128: TyAndLayout<'tcx>,
    pub usize: TyAndLayout<'tcx>,
    pub bool: TyAndLayout<'tcx>,
    // Raw-pointer layouts, both built over `*mut ()` / `*const ()`.
    pub mut_raw_ptr: TyAndLayout<'tcx>, pub const_raw_ptr: TyAndLayout<'tcx>, }
450
451impl<'tcx> PrimitiveLayouts<'tcx> {
452 fn new(layout_cx: LayoutCx<'tcx>) -> Result<Self, &'tcx LayoutError<'tcx>> {
453 let tcx = layout_cx.tcx();
454 let mut_raw_ptr = Ty::new_mut_ptr(tcx, tcx.types.unit);
455 let const_raw_ptr = Ty::new_imm_ptr(tcx, tcx.types.unit);
456 Ok(Self {
457 unit: layout_cx.layout_of(tcx.types.unit)?,
458 i8: layout_cx.layout_of(tcx.types.i8)?,
459 i16: layout_cx.layout_of(tcx.types.i16)?,
460 i32: layout_cx.layout_of(tcx.types.i32)?,
461 i64: layout_cx.layout_of(tcx.types.i64)?,
462 i128: layout_cx.layout_of(tcx.types.i128)?,
463 isize: layout_cx.layout_of(tcx.types.isize)?,
464 u8: layout_cx.layout_of(tcx.types.u8)?,
465 u16: layout_cx.layout_of(tcx.types.u16)?,
466 u32: layout_cx.layout_of(tcx.types.u32)?,
467 u64: layout_cx.layout_of(tcx.types.u64)?,
468 u128: layout_cx.layout_of(tcx.types.u128)?,
469 usize: layout_cx.layout_of(tcx.types.usize)?,
470 bool: layout_cx.layout_of(tcx.types.bool)?,
471 mut_raw_ptr: layout_cx.layout_of(mut_raw_ptr)?,
472 const_raw_ptr: layout_cx.layout_of(const_raw_ptr)?,
473 })
474 }
475
476 pub fn uint(&self, size: Size) -> Option<TyAndLayout<'tcx>> {
477 match size.bits() {
478 8 => Some(self.u8),
479 16 => Some(self.u16),
480 32 => Some(self.u32),
481 64 => Some(self.u64),
482 128 => Some(self.u128),
483 _ => None,
484 }
485 }
486
487 pub fn int(&self, size: Size) -> Option<TyAndLayout<'tcx>> {
488 match size.bits() {
489 8 => Some(self.i8),
490 16 => Some(self.i16),
491 32 => Some(self.i32),
492 64 => Some(self.i64),
493 128 => Some(self.i128),
494 _ => None,
495 }
496 }
497}
498
/// The machine itself: all global state of the Miri interpreter.
pub struct MiriMachine<'tcx> {
    /// The type context.
    pub tcx: TyCtxt<'tcx>,

    /// Global borrow-tracker state, if a borrow tracker is enabled.
    pub borrow_tracker: Option<borrow_tracker::GlobalState>,

    /// Global data-race handling (vclocks, GenMC, or none).
    pub data_race: GlobalDataRaceHandler,

    /// Global state for base-address assignment of allocations.
    pub alloc_addresses: alloc_addresses::GlobalState,

    /// The emulated environment variables.
    pub(crate) env_vars: EnvVars<'tcx>,

    /// Return place of the `main` function, if already set up.
    pub(crate) main_fn_ret_place: Option<MPlaceTy<'tcx>>,

    /// Pointers to the program's emulated `argc`/`argv`, and the Windows-style
    /// command line, once allocated.
    pub(crate) argc: Option<Pointer>,
    pub(crate) argv: Option<Pointer>,
    pub(crate) cmd_line: Option<Pointer>,

    /// Thread-local storage data.
    pub(crate) tls: TlsData<'tcx>,

    /// Whether host-communicating operations are allowed or rejected.
    pub(crate) isolated_op: IsolatedOp,

    /// How deeply values are validity-checked.
    pub(crate) validation: ValidationMode,

    /// Emulated file descriptors and open directories.
    pub(crate) fds: shims::FdTable,
    pub(crate) dirs: shims::DirTable,

    /// Registered epoll interests.
    pub(crate) epoll_interests: shims::EpollInterestTable,

    /// The emulated monotonic clock.
    pub(crate) monotonic_clock: MonotonicClock,

    /// All threads and their state.
    pub(crate) threads: ThreadManager<'tcx>,

    /// Per-thread CPU affinity masks (only populated on some targets; see `new`).
    pub(crate) thread_cpu_affinity: FxHashMap<ThreadId, CpuAffinityMask>,

    /// Cached layouts of primitive types.
    pub(crate) layouts: PrimitiveLayouts<'tcx>,

    /// Allocations that are always considered roots for leak checking.
    pub(crate) static_roots: Vec<AllocId>,

    /// `measureme` profiler and its string-interning cache, if profiling is on.
    profiler: Option<measureme::Profiler>,
    string_cache: FxHashMap<String, measureme::StringId>,

    /// Cache of resolved exported symbols.
    pub(crate) exported_symbols_cache: FxHashMap<Symbol, Option<Instance<'tcx>>>,

    /// How much backtrace to print in diagnostics.
    pub(crate) backtrace_style: BacktraceStyle,

    /// Crates considered "local" for diagnostics (see `is_local`).
    pub(crate) local_crates: Vec<CrateNum>,

    /// Pointers for the shims of extern statics, keyed by link name.
    extern_statics: FxHashMap<Symbol, StrictPointer>,

    /// The random number generator, seeded from the `-Zmiri-seed` config.
    pub(crate) rng: RefCell<StdRng>,

    /// Separate allocator used when native libraries are loaded (see `new`).
    pub(crate) allocator: Option<Rc<RefCell<crate::alloc::isolated_alloc::IsolatedAlloc>>>,

    /// Allocation ids whose creation/access should be reported, and whether
    /// accesses are tracked at all.
    tracked_alloc_ids: FxHashSet<AllocId>,
    track_alloc_accesses: bool,

    /// Which alignment-checking strategy to use.
    pub(crate) check_alignment: AlignmentCheck,

    /// Probability that a weak compare-exchange spuriously fails.
    pub(crate) cmpxchg_weak_failure_rate: f64,

    /// Probability of preempting the active thread at a preemption point.
    pub(crate) preemption_rate: f64,

    /// If set, report progress every N basic blocks; and the running count.
    pub(crate) report_progress: Option<u32>,
    pub(crate) basic_block_count: u64,

    /// Loaded native libraries (host `dlopen` handles plus their paths).
    /// In builds without native-lib support this is an empty `Vec<!>`.
    #[cfg(all(unix, feature = "native-lib"))]
    pub native_lib: Vec<(libloading::Library, std::path::PathBuf)>,
    #[cfg(not(all(unix, feature = "native-lib")))]
    pub native_lib: Vec<!>,

    /// Provenance GC interval (in basic blocks) and blocks since the last GC.
    pub(crate) gc_interval: u32,
    pub(crate) since_gc: u32,

    /// Number of CPUs the emulated machine reports.
    pub(crate) num_cpus: u32,

    /// Emulated page size, and the base address/size of the emulated stack.
    pub(crate) page_size: u64,
    pub(crate) stack_addr: u64,
    pub(crate) stack_size: u64,

    /// Whether to capture a backtrace at allocation time for leak reports.
    pub(crate) collect_leak_backtraces: bool,

    /// Spans where allocations were created and (optionally) deallocated.
    pub(crate) allocation_spans: RefCell<FxHashMap<AllocId, (Span, Option<Span>)>>,

    /// Promised (offset, alignment) for symbolic alignment checks.
    pub(crate) symbolic_alignment: RefCell<FxHashMap<AllocId, (Size, Align)>>,

    /// Cached union data ranges (used for data-byte computations, presumably —
    /// the consumer is not visible in this file).
    union_data_ranges: FxHashMap<Ty<'tcx>, RangeSet>,

    /// One-shot flags for pthread sanity warnings.
    pub(crate) pthread_mutex_sanity: Cell<bool>,
    pub(crate) pthread_rwlock_sanity: Cell<bool>,
    pub(crate) pthread_condvar_sanity: Cell<bool>,

    /// Mapping of allocator-shim symbols to their implementation (default
    /// symbol or special method), plus a cache for mangled internal symbols.
    pub(crate) allocator_shim_symbols: FxHashMap<Symbol, Either<Symbol, SpecialAllocatorMethod>>,
    pub(crate) mangle_internal_symbol_cache: FxHashMap<&'static str, String>,

    /// Force use of fallback bodies for intrinsics.
    pub force_intrinsic_fallback: bool,

    /// Floating-point nondeterminism settings.
    pub float_nondet: bool,
    pub float_rounding_error: FloatRoundingErrorMode,

    /// Whether reads/writes may operate on fewer bytes than requested.
    pub short_fd_operations: bool,
}
676
677impl<'tcx> MiriMachine<'tcx> {
    /// Create the machine from the given configuration.
    /// `genmc_ctx` must be `Some` iff GenMC mode is configured.
    pub(crate) fn new(
        config: &MiriConfig,
        layout_cx: LayoutCx<'tcx>,
        genmc_ctx: Option<Rc<GenmcCtx>>,
    ) -> Self {
        let tcx = layout_cx.tcx();
        let local_crates = helpers::get_local_crates(tcx);
        let layouts =
            PrimitiveLayouts::new(layout_cx).expect("Couldn't get layouts of primitive types");
        // Set up a `measureme` profiler writing to `<out>/<crate>-<pid>`, if requested.
        let profiler = config.measureme_out.as_ref().map(|out| {
            let crate_name =
                tcx.sess.opts.crate_name.clone().unwrap_or_else(|| "unknown-crate".to_string());
            let pid = process::id();
            // Pad the PID so concurrent runs produce lexically sortable names.
            let filename = format!("{crate_name}-{pid:07}");
            let path = Path::new(out).join(filename);
            measureme::Profiler::new(path).expect("Couldn't create `measureme` profiler")
        });
        // Deterministic RNG: an unset seed behaves like seed 0.
        let rng = StdRng::seed_from_u64(config.seed.unwrap_or(0));
        let borrow_tracker = config.borrow_tracker.map(|bt| bt.instantiate_global_state(config));
        // GenMC mode takes precedence over the vclock-based race detector.
        let data_race = if config.genmc_config.is_some() {
            GlobalDataRaceHandler::Genmc(genmc_ctx.unwrap())
        } else if config.data_race_detector {
            GlobalDataRaceHandler::Vclocks(Box::new(data_race::GlobalState::new(config)))
        } else {
            GlobalDataRaceHandler::None
        };
        // Page size: explicit config, else a per-target default
        // (wasm: 64 KiB; Apple aarch64: 16 KiB; everything else: 4 KiB).
        let page_size = if let Some(page_size) = config.page_size {
            page_size
        } else {
            let target = &tcx.sess.target;
            match target.arch.as_ref() {
                "wasm32" | "wasm64" => 64 * 1024, "aarch64" => {
                    if target.options.vendor.as_ref() == "apple" {
                        16 * 1024
                    } else {
                        4 * 1024
                    }
                }
                _ => 4 * 1024,
            }
        };
        // Place and size the emulated stack; smaller on 16-bit-pointer targets.
        let stack_addr = if tcx.pointer_size().bits() < 32 { page_size } else { page_size * 32 };
        let stack_size =
            if tcx.pointer_size().bits() < 32 { page_size * 4 } else { page_size * 16 };
        assert!(
            usize::try_from(config.num_cpus).unwrap() <= cpu_affinity::MAX_CPUS,
            "miri only supports up to {} CPUs, but {} were configured",
            cpu_affinity::MAX_CPUS,
            config.num_cpus
        );
        let threads = ThreadManager::new(config);
        let mut thread_cpu_affinity = FxHashMap::default();
        // Only OSes with affinity shims get an initial mask for the main thread.
        if matches!(&*tcx.sess.target.os, "linux" | "freebsd" | "android") {
            thread_cpu_affinity
                .insert(threads.active_thread(), CpuAffinityMask::new(&layout_cx, config.num_cpus));
        }
        let alloc_addresses =
            RefCell::new(alloc_addresses::GlobalStateInner::new(config, stack_addr, tcx));
        MiriMachine {
            tcx,
            borrow_tracker,
            data_race,
            alloc_addresses,
            env_vars: EnvVars::default(),
            main_fn_ret_place: None,
            argc: None,
            argv: None,
            cmd_line: None,
            tls: TlsData::default(),
            isolated_op: config.isolated_op,
            validation: config.validation,
            fds: shims::FdTable::init(config.mute_stdout_stderr),
            epoll_interests: shims::EpollInterestTable::new(),
            dirs: Default::default(),
            layouts,
            threads,
            thread_cpu_affinity,
            static_roots: Vec::new(),
            profiler,
            string_cache: Default::default(),
            exported_symbols_cache: FxHashMap::default(),
            backtrace_style: config.backtrace_style,
            local_crates,
            extern_statics: FxHashMap::default(),
            rng: RefCell::new(rng),
            // The isolated allocator is only needed when native libraries are in play.
            allocator: (!config.native_lib.is_empty())
                .then(|| Rc::new(RefCell::new(crate::alloc::isolated_alloc::IsolatedAlloc::new()))),
            tracked_alloc_ids: config.tracked_alloc_ids.clone(),
            track_alloc_accesses: config.track_alloc_accesses,
            check_alignment: config.check_alignment,
            cmpxchg_weak_failure_rate: config.cmpxchg_weak_failure_rate,
            preemption_rate: config.preemption_rate,
            report_progress: config.report_progress,
            basic_block_count: 0,
            monotonic_clock: MonotonicClock::new(config.isolated_op == IsolatedOp::Allow),
            // Loading a native library requires host == target: the code runs
            // directly on the host.
            #[cfg(all(unix, feature = "native-lib"))]
            native_lib: config.native_lib.iter().map(|lib_file_path| {
                let host_triple = rustc_session::config::host_tuple();
                let target_triple = tcx.sess.opts.target_triple.tuple();
                if host_triple != target_triple {
                    panic!(
                        "calling native C functions in linked .so file requires host and target to be the same: \
                        host={host_triple}, target={target_triple}",
                    );
                }
                (
                    unsafe {
                        libloading::Library::new(lib_file_path)
                            .expect("failed to read specified extern shared object file")
                    },
                    lib_file_path.clone(),
                )
            }).collect(),
            #[cfg(not(all(unix, feature = "native-lib")))]
            native_lib: config.native_lib.iter().map(|_| {
                panic!("calling functions from native libraries via FFI is not supported in this build of Miri")
            }).collect(),
            gc_interval: config.gc_interval,
            since_gc: 0,
            num_cpus: config.num_cpus,
            page_size,
            stack_addr,
            stack_size,
            collect_leak_backtraces: config.collect_leak_backtraces,
            allocation_spans: RefCell::new(FxHashMap::default()),
            symbolic_alignment: RefCell::new(FxHashMap::default()),
            union_data_ranges: FxHashMap::default(),
            pthread_mutex_sanity: Cell::new(false),
            pthread_rwlock_sanity: Cell::new(false),
            pthread_condvar_sanity: Cell::new(false),
            allocator_shim_symbols: Self::allocator_shim_symbols(tcx),
            mangle_internal_symbol_cache: Default::default(),
            force_intrinsic_fallback: config.force_intrinsic_fallback,
            float_nondet: config.float_nondet,
            float_rounding_error: config.float_rounding_error,
            short_fd_operations: config.short_fd_operations,
        }
    }
837
    /// Compute the mapping from allocator-shim symbol names (e.g. the mangled
    /// `__rust_alloc` family) to what they resolve to: either the default
    /// implementation's symbol or a special allocator method.
    fn allocator_shim_symbols(
        tcx: TyCtxt<'tcx>,
    ) -> FxHashMap<Symbol, Either<Symbol, SpecialAllocatorMethod>> {
        use rustc_codegen_ssa::base::allocator_shim_contents;

        // No allocator kind means no shim is needed (e.g. no-std without alloc).
        let Some(kind) = tcx.allocator_kind(()) else {
            return Default::default();
        };
        let methods = allocator_shim_contents(tcx, kind);
        let mut symbols = FxHashMap::default();
        for method in methods {
            let from_name = Symbol::intern(&mangle_internal_symbol(
                tcx,
                &allocator::global_fn_name(method.name),
            ));
            let to = match method.special {
                Some(special) => Either::Right(special),
                None =>
                    Either::Left(Symbol::intern(&mangle_internal_symbol(
                        tcx,
                        &allocator::default_fn_name(method.name),
                    ))),
            };
            // Each shim symbol must be unique; duplicate insertion is a bug.
            symbols.try_insert(from_name, to).unwrap();
        }
        symbols
    }
867
    /// Initialization steps that need a fully built interpreter context
    /// (environment variables, extern-static shims, the thread manager).
    pub(crate) fn late_init(
        ecx: &mut MiriInterpCx<'tcx>,
        config: &MiriConfig,
        on_main_stack_empty: StackEmptyCallback<'tcx>,
    ) -> InterpResult<'tcx> {
        EnvVars::init(ecx, config)?;
        MiriMachine::init_extern_statics(ecx)?;
        ThreadManager::init(ecx, on_main_stack_empty);
        interp_ok(())
    }
878
    /// Register the shim pointer for an extern static under the given link name.
    /// Panics if `ptr` has no provenance or the name was already registered.
    pub(crate) fn add_extern_static(ecx: &mut MiriInterpCx<'tcx>, name: &str, ptr: Pointer) {
        // This got just allocated, so there definitely is a pointer here.
        let ptr = ptr.into_pointer_or_addr().unwrap();
        ecx.machine.extern_statics.try_insert(Symbol::intern(name), ptr).unwrap();
    }
884
885 pub(crate) fn communicate(&self) -> bool {
886 self.isolated_op == IsolatedOp::Allow
887 }
888
889 pub(crate) fn is_local(&self, frame: &FrameInfo<'_>) -> bool {
891 let def_id = frame.instance.def_id();
892 def_id.is_local() || self.local_crates.contains(&def_id.krate)
893 }
894
    /// Called when the interpreter is going to shut down abnormally.
    /// Dropping the profiler flushes its data to disk so that even aborted
    /// runs leave usable `measureme` output.
    pub(crate) fn handle_abnormal_termination(&mut self) {
        drop(self.profiler.take());
    }
903
    /// The emulated page size as an `Align`.
    /// Panics if the configured page size is not a power of two.
    pub(crate) fn page_align(&self) -> Align {
        Align::from_bytes(self.page_size).unwrap()
    }
907
908 pub(crate) fn allocated_span(&self, alloc_id: AllocId) -> Option<SpanData> {
909 self.allocation_spans
910 .borrow()
911 .get(&alloc_id)
912 .map(|(allocated, _deallocated)| allocated.data())
913 }
914
915 pub(crate) fn deallocated_span(&self, alloc_id: AllocId) -> Option<SpanData> {
916 self.allocation_spans
917 .borrow()
918 .get(&alloc_id)
919 .and_then(|(_allocated, deallocated)| *deallocated)
920 .map(Span::data)
921 }
922
    /// Build the `AllocExtra` for a freshly created allocation: notify
    /// trackers, set up borrow-tracker and data-race state, and optionally
    /// capture a leak backtrace and the allocation span.
    fn init_allocation(
        ecx: &MiriInterpCx<'tcx>,
        id: AllocId,
        kind: MemoryKind,
        size: Size,
        align: Align,
    ) -> InterpResult<'tcx, AllocExtra<'tcx>> {
        // Emit a diagnostic if this id is on the tracked list.
        if ecx.machine.tracked_alloc_ids.contains(&id) {
            ecx.emit_diagnostic(NonHaltingDiagnostic::CreatedAlloc(id, size, align, kind));
        }

        let borrow_tracker = ecx
            .machine
            .borrow_tracker
            .as_ref()
            .map(|bt| bt.borrow_mut().new_allocation(id, size, kind, &ecx.machine));

        let data_race = match &ecx.machine.data_race {
            GlobalDataRaceHandler::None => AllocDataRaceHandler::None,
            GlobalDataRaceHandler::Vclocks(data_race) =>
                AllocDataRaceHandler::Vclocks(
                    data_race::AllocState::new_allocation(
                        data_race,
                        &ecx.machine.threads,
                        size,
                        kind,
                        ecx.machine.current_user_relevant_span(),
                    ),
                    // Weak-memory state is only tracked when that feature is on.
                    data_race.weak_memory.then(weak_memory::AllocState::new_allocation),
                ),
            GlobalDataRaceHandler::Genmc(_genmc_ctx) => {
                AllocDataRaceHandler::Genmc
            }
        };

        // Only capture a backtrace for allocations that could be reported as
        // leaks, and only if the user asked for leak backtraces.
        let backtrace = if kind.may_leak() || !ecx.machine.collect_leak_backtraces {
            None
        } else {
            Some(ecx.generate_stacktrace())
        };

        // Remember where this allocation was created, for later diagnostics.
        if matches!(kind, MemoryKind::Machine(kind) if kind.should_save_allocation_span()) {
            ecx.machine
                .allocation_spans
                .borrow_mut()
                .insert(id, (ecx.machine.current_user_relevant_span(), None));
        }

        interp_ok(AllocExtra { borrow_tracker, data_race, backtrace, sync: FxHashMap::default() })
    }
978}
979
impl VisitProvenance for MiriMachine<'_> {
    /// Report all provenance held in global machine state, so the provenance
    /// GC can treat it as roots.
    fn visit_provenance(&self, visit: &mut VisitWith<'_>) {
        // Exhaustive destructuring: adding a field to `MiriMachine` forces an
        // explicit decision here on whether it can carry provenance.
        #[rustfmt::skip]
        let MiriMachine {
            threads,
            thread_cpu_affinity: _,
            tls,
            env_vars,
            main_fn_ret_place,
            argc,
            argv,
            cmd_line,
            extern_statics,
            dirs,
            borrow_tracker,
            data_race,
            alloc_addresses,
            fds,
            epoll_interests:_,
            tcx: _,
            isolated_op: _,
            validation: _,
            monotonic_clock: _,
            layouts: _,
            static_roots: _,
            profiler: _,
            string_cache: _,
            exported_symbols_cache: _,
            backtrace_style: _,
            local_crates: _,
            rng: _,
            allocator: _,
            tracked_alloc_ids: _,
            track_alloc_accesses: _,
            check_alignment: _,
            cmpxchg_weak_failure_rate: _,
            preemption_rate: _,
            report_progress: _,
            basic_block_count: _,
            native_lib: _,
            gc_interval: _,
            since_gc: _,
            num_cpus: _,
            page_size: _,
            stack_addr: _,
            stack_size: _,
            collect_leak_backtraces: _,
            allocation_spans: _,
            symbolic_alignment: _,
            union_data_ranges: _,
            pthread_mutex_sanity: _,
            pthread_rwlock_sanity: _,
            pthread_condvar_sanity: _,
            allocator_shim_symbols: _,
            mangle_internal_symbol_cache: _,
            force_intrinsic_fallback: _,
            float_nondet: _,
            float_rounding_error: _,
            short_fd_operations: _,
        } = self;

        threads.visit_provenance(visit);
        tls.visit_provenance(visit);
        env_vars.visit_provenance(visit);
        dirs.visit_provenance(visit);
        fds.visit_provenance(visit);
        data_race.visit_provenance(visit);
        borrow_tracker.visit_provenance(visit);
        alloc_addresses.visit_provenance(visit);
        main_fn_ret_place.visit_provenance(visit);
        argc.visit_provenance(visit);
        argv.visit_provenance(visit);
        cmd_line.visit_provenance(visit);
        for ptr in extern_statics.values() {
            ptr.visit_provenance(visit);
        }
    }
}
1058
/// The interpreter context type used throughout Miri.
pub type MiriInterpCx<'tcx> = InterpCx<'tcx, MiriMachine<'tcx>>;
1061
/// Uniform access to the interpreter context; lets extension traits be
/// written once and used on anything that can yield a `MiriInterpCx`.
pub trait MiriInterpCxExt<'tcx> {
    fn eval_context_ref<'a>(&'a self) -> &'a MiriInterpCx<'tcx>;
    fn eval_context_mut<'a>(&'a mut self) -> &'a mut MiriInterpCx<'tcx>;
}
1067impl<'tcx> MiriInterpCxExt<'tcx> for MiriInterpCx<'tcx> {
1068 #[inline(always)]
1069 fn eval_context_ref(&self) -> &MiriInterpCx<'tcx> {
1070 self
1071 }
1072 #[inline(always)]
1073 fn eval_context_mut(&mut self) -> &mut MiriInterpCx<'tcx> {
1074 self
1075 }
1076}
1077
1078impl<'tcx> Machine<'tcx> for MiriMachine<'tcx> {
    // Miri's instantiations of the interpreter's machine hooks.
    type MemoryKind = MiriMemoryKind;
    // Extra callable values: dynamic symbols resolved at runtime.
    type ExtraFnVal = DynSym;

    type FrameExtra = FrameExtra<'tcx>;
    type AllocExtra = AllocExtra<'tcx>;

    type Provenance = Provenance;
    type ProvenanceExtra = ProvenanceExtra;
    // Allocation bytes with Miri's custom backing storage.
    type Bytes = MiriAllocBytes;

    // Memory map that allows reading while the whole map is mutably borrowed.
    type MemoryMap =
        MonoHashMap<AllocId, (MemoryKind, Allocation<Provenance, Self::AllocExtra, Self::Bytes>)>;

    // Statics and consts live in `Global` memory.
    const GLOBAL_KIND: Option<MiriMemoryKind> = Some(MiriMemoryKind::Global);

    // Report allocation failure as an interpreter error, do not panic.
    const PANIC_ON_ALLOC_FAIL: bool = false;
1096
1097 #[inline(always)]
1098 fn enforce_alignment(ecx: &MiriInterpCx<'tcx>) -> bool {
1099 ecx.machine.check_alignment != AlignmentCheck::None
1100 }
1101
    /// Machine hook for custom alignment checking; implements "symbolic"
    /// alignment, where each allocation can have a promised (offset, align)
    /// instead of being judged by its concrete base address.
    #[inline(always)]
    fn alignment_check(
        ecx: &MiriInterpCx<'tcx>,
        alloc_id: AllocId,
        alloc_align: Align,
        alloc_kind: AllocKind,
        offset: Size,
        align: Align,
    ) -> Option<Misalignment> {
        // Only the symbolic mode overrides the interpreter's default check.
        if ecx.machine.check_alignment != AlignmentCheck::Symbolic {
            return None;
        }
        if alloc_kind != AllocKind::LiveData {
            return None;
        }
        // Without an explicit promise, the allocation is aligned to
        // `alloc_align` at offset 0.
        let (promised_offset, promised_align) = ecx
            .machine
            .symbolic_alignment
            .borrow()
            .get(&alloc_id)
            .copied()
            .unwrap_or((Size::ZERO, alloc_align));
        if promised_align < align {
            // The allocation as a whole cannot satisfy the requirement.
            Some(Misalignment { has: promised_align, required: align })
        } else {
            // Distance from the promised-aligned point; wrapping keeps this
            // correct even if `offset < promised_offset`.
            let distance = offset.bytes().wrapping_sub(promised_offset.bytes());
            if distance.is_multiple_of(align.bytes()) {
                None
            } else {
                // The actual alignment is the largest power of two dividing
                // the distance.
                let distance_pow2 = 1 << distance.trailing_zeros();
                Some(Misalignment {
                    has: Align::from_bytes(distance_pow2).unwrap(),
                    required: align,
                })
            }
        }
    }
1147
    /// Validity checking is on unless the mode is `No`.
    #[inline(always)]
    fn enforce_validity(ecx: &MiriInterpCx<'tcx>, _layout: TyAndLayout<'tcx>) -> bool {
        ecx.machine.validation != ValidationMode::No
    }
    /// Recursive validation only in `Deep` mode.
    #[inline(always)]
    fn enforce_validity_recursively(
        ecx: &InterpCx<'tcx, Self>,
        _layout: TyAndLayout<'tcx>,
    ) -> bool {
        ecx.machine.validation == ValidationMode::Deep
    }
1159
    /// Optional (debug-assertion-style) overflow checks follow the session's
    /// overflow-check setting.
    #[inline(always)]
    fn ignore_optional_overflow_checks(ecx: &MiriInterpCx<'tcx>) -> bool {
        !ecx.tcx.sess.overflow_checks()
    }
1164
    /// Error out when calling a function that requires target features the
    /// session does not have. On wasm-like targets this aborts (matching how
    /// such calls would behave there); elsewhere it is UB.
    fn check_fn_target_features(
        ecx: &MiriInterpCx<'tcx>,
        instance: ty::Instance<'tcx>,
    ) -> InterpResult<'tcx> {
        let attrs = ecx.tcx.codegen_instance_attrs(instance.def);
        if attrs
            .target_features
            .iter()
            .any(|feature| !ecx.tcx.sess.target_features.contains(&feature.name))
        {
            // Build a comma-separated list of the missing, non-implied features.
            let unavailable = attrs
                .target_features
                .iter()
                .filter(|&feature| {
                    feature.kind != TargetFeatureKind::Implied
                        && !ecx.tcx.sess.target_features.contains(&feature.name)
                })
                .fold(String::new(), |mut s, feature| {
                    if !s.is_empty() {
                        s.push_str(", ");
                    }
                    s.push_str(feature.name.as_str());
                    s
                });
            let msg = format!(
                "calling a function that requires unavailable target features: {unavailable}"
            );
            // On wasm targets this is a trap/abort rather than UB.
            if ecx.tcx.sess.target.is_like_wasm {
                throw_machine_stop!(TerminationInfo::Abort(msg));
            } else {
                throw_ub_format!("{msg}");
            }
        }
        interp_ok(())
    }
1202
    /// Resolve a function call: foreign items go to the shims, GenMC may
    /// intercept some calls, everything else loads MIR for interpretation.
    #[inline(always)]
    fn find_mir_or_eval_fn(
        ecx: &mut MiriInterpCx<'tcx>,
        instance: ty::Instance<'tcx>,
        abi: &FnAbi<'tcx, Ty<'tcx>>,
        args: &[FnArg<'tcx>],
        dest: &PlaceTy<'tcx>,
        ret: Option<mir::BasicBlock>,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx, Option<(&'tcx mir::Body<'tcx>, ty::Instance<'tcx>)>> {
        // Foreign items (no Rust body) are emulated via the shim machinery.
        if ecx.tcx.is_foreign_item(instance.def_id()) {
            let _trace = enter_trace_span!("emulate_foreign_item");
            let args = ecx.copy_fn_args(args); let link_name = Symbol::intern(ecx.tcx.symbol_name(instance).name);
            return ecx.emulate_foreign_item(link_name, abi, &args, dest, ret, unwind);
        }

        // In GenMC mode, some functions are handled by GenMC itself.
        if ecx.machine.data_race.as_genmc_ref().is_some()
            && ecx.genmc_intercept_function(instance, args, dest)?
        {
            ecx.return_to_block(ret)?;
            return interp_ok(None);
        }

        // Otherwise, load and interpret the MIR body.
        let _trace = enter_trace_span!("load_mir");
        interp_ok(Some((ecx.load_mir(instance.def, None)?, instance)))
    }
1238
    /// Call a function represented by a dynamic symbol (`DynSym`), via the
    /// shim machinery.
    #[inline(always)]
    fn call_extra_fn(
        ecx: &mut MiriInterpCx<'tcx>,
        fn_val: DynSym,
        abi: &FnAbi<'tcx, Ty<'tcx>>,
        args: &[FnArg<'tcx>],
        dest: &PlaceTy<'tcx>,
        ret: Option<mir::BasicBlock>,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx> {
        let args = ecx.copy_fn_args(args); ecx.emulate_dyn_sym(fn_val, abi, &args, dest, ret, unwind)
    }
1252
    /// Delegate intrinsic calls to Miri's intrinsic shims.
    #[inline(always)]
    fn call_intrinsic(
        ecx: &mut MiriInterpCx<'tcx>,
        instance: ty::Instance<'tcx>,
        args: &[OpTy<'tcx>],
        dest: &PlaceTy<'tcx>,
        ret: Option<mir::BasicBlock>,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx, Option<ty::Instance<'tcx>>> {
        ecx.call_intrinsic(instance, args, dest, ret, unwind)
    }
1264
    /// Delegate `Assert` terminator failures to the panic machinery.
    #[inline(always)]
    fn assert_panic(
        ecx: &mut MiriInterpCx<'tcx>,
        msg: &mir::AssertMessage<'tcx>,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx> {
        ecx.assert_panic(msg, unwind)
    }
1273
    /// Start a non-unwinding panic with the given message.
    fn panic_nounwind(ecx: &mut InterpCx<'tcx, Self>, msg: &str) -> InterpResult<'tcx> {
        ecx.start_panic_nounwind(msg)
    }
1277
    /// Unwinding hit a terminating frame: call the corresponding lang-item
    /// panic handler (which itself must not unwind).
    fn unwind_terminate(
        ecx: &mut InterpCx<'tcx, Self>,
        reason: mir::UnwindTerminateReason,
    ) -> InterpResult<'tcx> {
        let panic = ecx.tcx.lang_items().get(reason.lang_item()).unwrap();
        let panic = ty::Instance::mono(ecx.tcx.tcx, panic);
        ecx.call_function(
            panic,
            ExternAbi::Rust,
            &[],
            None,
            // Unwinding out of the handler itself is unreachable.
            ReturnContinuation::Goto { ret: None, unwind: mir::UnwindAction::Unreachable },
        )?;
        interp_ok(())
    }
1294
    /// Delegate pointer binary operations to Miri's operator shims.
    #[inline(always)]
    fn binary_ptr_op(
        ecx: &MiriInterpCx<'tcx>,
        bin_op: mir::BinOp,
        left: &ImmTy<'tcx>,
        right: &ImmTy<'tcx>,
    ) -> InterpResult<'tcx, ImmTy<'tcx>> {
        ecx.binary_ptr_op(bin_op, left, right)
    }
1304
    /// Generate the (possibly nondeterministic) NaN produced by a float
    /// operation on the given inputs.
    #[inline(always)]
    fn generate_nan<F1: Float + FloatConvert<F2>, F2: Float>(
        ecx: &InterpCx<'tcx, Self>,
        inputs: &[F1],
    ) -> F2 {
        ecx.generate_nan(inputs)
    }
1312
    /// Apply floating-point nondeterminism (a small relative error) to an
    /// operation result. The `4` is the error magnitude parameter passed to
    /// the math helper (in units of ULP, presumably — see `crate::math`).
    #[inline(always)]
    fn apply_float_nondet(
        ecx: &mut InterpCx<'tcx, Self>,
        val: ImmTy<'tcx>,
    ) -> InterpResult<'tcx, ImmTy<'tcx>> {
        crate::math::apply_random_float_error_to_imm(ecx, val, 4)
    }
1320
    /// Which of two equal values `min`/`max` returns (nondeterministic choice
    /// is delegated to the machine).
    #[inline(always)]
    fn equal_float_min_max<F: Float>(ecx: &MiriInterpCx<'tcx>, a: F, b: F) -> F {
        ecx.equal_float_min_max(a, b)
    }
1325
    /// Nondeterministically decide whether `a * b + c` gets fused into an FMA
    /// (only when float nondeterminism is enabled).
    #[inline(always)]
    fn float_fuse_mul_add(ecx: &mut InterpCx<'tcx, Self>) -> bool {
        ecx.machine.float_nondet && ecx.machine.rng.get_mut().random()
    }
1330
    /// Whether `ub_checks` (library UB assertions) are enabled this session.
    #[inline(always)]
    fn ub_checks(ecx: &InterpCx<'tcx, Self>) -> InterpResult<'tcx, bool> {
        interp_ok(ecx.tcx.sess.ub_checks())
    }
1335
    /// Whether contract checks are enabled this session.
    #[inline(always)]
    fn contract_checks(ecx: &InterpCx<'tcx, Self>) -> InterpResult<'tcx, bool> {
        interp_ok(ecx.tcx.sess.contract_checks())
    }
1340
    /// Returns the pointer for a thread-local static, creating the per-thread
    /// allocation on first access.
    #[inline(always)]
    fn thread_local_static_pointer(
        ecx: &mut MiriInterpCx<'tcx>,
        def_id: DefId,
    ) -> InterpResult<'tcx, StrictPointer> {
        ecx.get_or_create_thread_local_alloc(def_id)
    }
1348
1349 fn extern_static_pointer(
1350 ecx: &MiriInterpCx<'tcx>,
1351 def_id: DefId,
1352 ) -> InterpResult<'tcx, StrictPointer> {
1353 let link_name = Symbol::intern(ecx.tcx.symbol_name(Instance::mono(*ecx.tcx, def_id)).name);
1354 if let Some(&ptr) = ecx.machine.extern_statics.get(&link_name) {
1355 let Provenance::Concrete { alloc_id, .. } = ptr.provenance else {
1359 panic!("extern_statics cannot contain wildcards")
1360 };
1361 let info = ecx.get_alloc_info(alloc_id);
1362 let def_ty = ecx.tcx.type_of(def_id).instantiate_identity();
1363 let extern_decl_layout =
1364 ecx.tcx.layout_of(ecx.typing_env().as_query_input(def_ty)).unwrap();
1365 if extern_decl_layout.size != info.size || extern_decl_layout.align.abi != info.align {
1366 throw_unsup_format!(
1367 "extern static `{link_name}` has been declared as `{krate}::{name}` \
1368 with a size of {decl_size} bytes and alignment of {decl_align} bytes, \
1369 but Miri emulates it via an extern static shim \
1370 with a size of {shim_size} bytes and alignment of {shim_align} bytes",
1371 name = ecx.tcx.def_path_str(def_id),
1372 krate = ecx.tcx.crate_name(def_id.krate),
1373 decl_size = extern_decl_layout.size.bytes(),
1374 decl_align = extern_decl_layout.align.bytes(),
1375 shim_size = info.size.bytes(),
1376 shim_align = info.align.bytes(),
1377 )
1378 }
1379 interp_ok(ptr)
1380 } else {
1381 throw_unsup_format!("extern static `{link_name}` is not supported by Miri",)
1382 }
1383 }
1384
    /// Creates the `AllocExtra` for a newly created local (non-global) allocation.
    fn init_local_allocation(
        ecx: &MiriInterpCx<'tcx>,
        id: AllocId,
        kind: MemoryKind,
        size: Size,
        align: Align,
    ) -> InterpResult<'tcx, Self::AllocExtra> {
        // Globals are handled by `adjust_global_allocation`, never here.
        assert!(kind != MiriMemoryKind::Global.into());
        MiriMachine::init_allocation(ecx, id, kind, size, align)
    }
1395
1396 fn adjust_alloc_root_pointer(
1397 ecx: &MiriInterpCx<'tcx>,
1398 ptr: interpret::Pointer<CtfeProvenance>,
1399 kind: Option<MemoryKind>,
1400 ) -> InterpResult<'tcx, interpret::Pointer<Provenance>> {
1401 let kind = kind.expect("we set our GLOBAL_KIND so this cannot be None");
1402 let alloc_id = ptr.provenance.alloc_id();
1403 if cfg!(debug_assertions) {
1404 match ecx.tcx.try_get_global_alloc(alloc_id) {
1406 Some(GlobalAlloc::Static(def_id)) if ecx.tcx.is_thread_local_static(def_id) => {
1407 panic!("adjust_alloc_root_pointer called on thread-local static")
1408 }
1409 Some(GlobalAlloc::Static(def_id)) if ecx.tcx.is_foreign_item(def_id) => {
1410 panic!("adjust_alloc_root_pointer called on extern static")
1411 }
1412 _ => {}
1413 }
1414 }
1415 let tag = if let Some(borrow_tracker) = &ecx.machine.borrow_tracker {
1417 borrow_tracker.borrow_mut().root_ptr_tag(alloc_id, &ecx.machine)
1418 } else {
1419 BorTag::default()
1421 };
1422 ecx.adjust_alloc_root_pointer(ptr, tag, kind)
1423 }
1424
    /// Handles an integer-to-pointer cast; delegated to Miri's address machinery.
    #[inline(always)]
    fn ptr_from_addr_cast(ecx: &MiriInterpCx<'tcx>, addr: u64) -> InterpResult<'tcx, Pointer> {
        ecx.ptr_from_addr_cast(addr)
    }
1430
    /// Marks the given provenance as exposed (pointer-to-integer cast);
    /// delegated to Miri's address machinery.
    #[inline(always)]
    fn expose_provenance(
        ecx: &InterpCx<'tcx, Self>,
        provenance: Self::Provenance,
    ) -> InterpResult<'tcx> {
        ecx.expose_provenance(provenance)
    }
1441
1442 fn ptr_get_alloc(
1454 ecx: &MiriInterpCx<'tcx>,
1455 ptr: StrictPointer,
1456 size: i64,
1457 ) -> Option<(AllocId, Size, Self::ProvenanceExtra)> {
1458 let rel = ecx.ptr_get_alloc(ptr, size);
1459
1460 rel.map(|(alloc_id, size)| {
1461 let tag = match ptr.provenance {
1462 Provenance::Concrete { tag, .. } => ProvenanceExtra::Concrete(tag),
1463 Provenance::Wildcard => ProvenanceExtra::Wildcard,
1464 };
1465 (alloc_id, size, tag)
1466 })
1467 }
1468
1469 fn adjust_global_allocation<'b>(
1478 ecx: &InterpCx<'tcx, Self>,
1479 id: AllocId,
1480 alloc: &'b Allocation,
1481 ) -> InterpResult<'tcx, Cow<'b, Allocation<Self::Provenance, Self::AllocExtra, Self::Bytes>>>
1482 {
1483 let alloc = alloc.adjust_from_tcx(
1484 &ecx.tcx,
1485 |bytes, align| ecx.get_global_alloc_bytes(id, bytes, align),
1486 |ptr| ecx.global_root_pointer(ptr),
1487 )?;
1488 let kind = MiriMemoryKind::Global.into();
1489 let extra = MiriMachine::init_allocation(ecx, id, kind, alloc.size(), alloc.align)?;
1490 interp_ok(Cow::Owned(alloc.with_extra(extra)))
1491 }
1492
    /// Hook run before every memory read: emits tracking diagnostics, informs the
    /// data-race handler, and lets the borrow tracker validate the access.
    #[inline(always)]
    fn before_memory_read(
        _tcx: TyCtxtAt<'tcx>,
        machine: &Self,
        alloc_extra: &AllocExtra<'tcx>,
        ptr: Pointer,
        (alloc_id, prov_extra): (AllocId, Self::ProvenanceExtra),
        range: AllocRange,
    ) -> InterpResult<'tcx> {
        // Report the access if the user asked to track this allocation.
        if machine.track_alloc_accesses && machine.tracked_alloc_ids.contains(&alloc_id) {
            machine
                .emit_diagnostic(NonHaltingDiagnostic::AccessedAlloc(alloc_id, AccessKind::Read));
        }
        // Notify whichever data-race handler is active.
        match &machine.data_race {
            GlobalDataRaceHandler::None => {}
            GlobalDataRaceHandler::Genmc(genmc_ctx) =>
                genmc_ctx.memory_load(machine, ptr.addr(), range.size)?,
            GlobalDataRaceHandler::Vclocks(_data_race) => {
                let _trace = enter_trace_span!(data_race::before_memory_read);
                // A vector-clock global handler implies vector-clock per-alloc state.
                let AllocDataRaceHandler::Vclocks(data_race, weak_memory) = &alloc_extra.data_race
                else {
                    unreachable!();
                };
                data_race.read(alloc_id, range, NaReadType::Read, None, machine)?;
                if let Some(weak_memory) = weak_memory {
                    weak_memory.memory_accessed(range, machine.data_race.as_vclocks_ref().unwrap());
                }
            }
        }
        // Borrow tracking also gets to see (and possibly reject) the read.
        if let Some(borrow_tracker) = &alloc_extra.borrow_tracker {
            borrow_tracker.before_memory_read(alloc_id, prov_extra, range, machine)?;
        }
        interp_ok(())
    }
1528
    /// Hook run before every memory write: emits tracking diagnostics, informs the
    /// data-race handler, and lets the borrow tracker validate the access.
    #[inline(always)]
    fn before_memory_write(
        _tcx: TyCtxtAt<'tcx>,
        machine: &mut Self,
        alloc_extra: &mut AllocExtra<'tcx>,
        ptr: Pointer,
        (alloc_id, prov_extra): (AllocId, Self::ProvenanceExtra),
        range: AllocRange,
    ) -> InterpResult<'tcx> {
        // Report the access if the user asked to track this allocation.
        if machine.track_alloc_accesses && machine.tracked_alloc_ids.contains(&alloc_id) {
            machine
                .emit_diagnostic(NonHaltingDiagnostic::AccessedAlloc(alloc_id, AccessKind::Write));
        }
        // Notify whichever data-race handler is active.
        match &machine.data_race {
            GlobalDataRaceHandler::None => {}
            GlobalDataRaceHandler::Genmc(genmc_ctx) =>
                genmc_ctx.memory_store(machine, ptr.addr(), range.size)?,
            GlobalDataRaceHandler::Vclocks(_global_state) => {
                let _trace = enter_trace_span!(data_race::before_memory_write);
                // A vector-clock global handler implies vector-clock per-alloc state.
                let AllocDataRaceHandler::Vclocks(data_race, weak_memory) =
                    &mut alloc_extra.data_race
                else {
                    unreachable!()
                };
                data_race.write(alloc_id, range, NaWriteType::Write, None, machine)?;
                if let Some(weak_memory) = weak_memory {
                    weak_memory.memory_accessed(range, machine.data_race.as_vclocks_ref().unwrap());
                }
            }
        }
        // Borrow tracking also gets to see (and possibly reject) the write.
        if let Some(borrow_tracker) = &mut alloc_extra.borrow_tracker {
            borrow_tracker.before_memory_write(alloc_id, prov_extra, range, machine)?;
        }
        interp_ok(())
    }
1564
    /// Hook run before an allocation is freed: emits tracking diagnostics, treats
    /// the deallocation as a write for data-race purposes, informs the borrow
    /// tracker, records the deallocation span, and releases the address mapping.
    #[inline(always)]
    fn before_memory_deallocation(
        _tcx: TyCtxtAt<'tcx>,
        machine: &mut Self,
        alloc_extra: &mut AllocExtra<'tcx>,
        ptr: Pointer,
        (alloc_id, prove_extra): (AllocId, Self::ProvenanceExtra),
        size: Size,
        align: Align,
        kind: MemoryKind,
    ) -> InterpResult<'tcx> {
        if machine.tracked_alloc_ids.contains(&alloc_id) {
            machine.emit_diagnostic(NonHaltingDiagnostic::FreedAlloc(alloc_id));
        }
        match &machine.data_race {
            GlobalDataRaceHandler::None => {}
            GlobalDataRaceHandler::Genmc(genmc_ctx) =>
                genmc_ctx.handle_dealloc(machine, alloc_id, ptr.addr(), kind)?,
            GlobalDataRaceHandler::Vclocks(_global_state) => {
                let _trace = enter_trace_span!(data_race::before_memory_deallocation);
                // Deallocation races with any concurrent access, so model it as a
                // write covering the whole allocation.
                let data_race = alloc_extra.data_race.as_vclocks_mut().unwrap();
                data_race.write(
                    alloc_id,
                    alloc_range(Size::ZERO, size),
                    NaWriteType::Deallocate,
                    None,
                    machine,
                )?;
            }
        }
        if let Some(borrow_tracker) = &mut alloc_extra.borrow_tracker {
            borrow_tracker.before_memory_deallocation(alloc_id, prove_extra, size, machine)?;
        }
        // Remember where the allocation was freed, for use-after-free diagnostics.
        if let Some((_, deallocated_at)) = machine.allocation_spans.borrow_mut().get_mut(&alloc_id)
        {
            *deallocated_at = Some(machine.current_user_relevant_span());
        }
        machine.free_alloc_id(alloc_id, size, align, kind);
        interp_ok(())
    }
1605
1606 #[inline(always)]
1607 fn retag_ptr_value(
1608 ecx: &mut InterpCx<'tcx, Self>,
1609 kind: mir::RetagKind,
1610 val: &ImmTy<'tcx>,
1611 ) -> InterpResult<'tcx, ImmTy<'tcx>> {
1612 if ecx.machine.borrow_tracker.is_some() {
1613 ecx.retag_ptr_value(kind, val)
1614 } else {
1615 interp_ok(val.clone())
1616 }
1617 }
1618
1619 #[inline(always)]
1620 fn retag_place_contents(
1621 ecx: &mut InterpCx<'tcx, Self>,
1622 kind: mir::RetagKind,
1623 place: &PlaceTy<'tcx>,
1624 ) -> InterpResult<'tcx> {
1625 if ecx.machine.borrow_tracker.is_some() {
1626 ecx.retag_place_contents(kind, place)?;
1627 }
1628 interp_ok(())
1629 }
1630
1631 fn protect_in_place_function_argument(
1632 ecx: &mut InterpCx<'tcx, Self>,
1633 place: &MPlaceTy<'tcx>,
1634 ) -> InterpResult<'tcx> {
1635 let protected_place = if ecx.machine.borrow_tracker.is_some() {
1638 ecx.protect_place(place)?
1639 } else {
1640 place.clone()
1642 };
1643 ecx.write_uninit(&protected_place)?;
1648 interp_ok(())
1650 }
1651
    /// Attaches Miri's per-frame state (`FrameExtra`) to a freshly pushed frame:
    /// profiler timing, borrow-tracker frame state, user-relevance, and
    /// data-race frame state.
    #[inline(always)]
    fn init_frame(
        ecx: &mut InterpCx<'tcx, Self>,
        frame: Frame<'tcx, Provenance>,
    ) -> InterpResult<'tcx, Frame<'tcx, Provenance, FrameExtra<'tcx>>> {
        // When profiling is enabled, start a measureme interval for this call.
        let timing = if let Some(profiler) = ecx.machine.profiler.as_ref() {
            let fn_name = frame.instance().to_string();
            // Cache interned profiler strings so each function name is only
            // allocated in the profiler once.
            let entry = ecx.machine.string_cache.entry(fn_name.clone());
            let name = entry.or_insert_with(|| profiler.alloc_string(&*fn_name));

            Some(profiler.start_recording_interval_event_detached(
                *name,
                measureme::EventId::from_label(*name),
                ecx.active_thread().to_u32(),
            ))
        } else {
            None
        };

        let borrow_tracker = ecx.machine.borrow_tracker.as_ref();

        let extra = FrameExtra {
            borrow_tracker: borrow_tracker.map(|bt| bt.borrow_mut().new_frame()),
            catch_unwind: None,
            timing,
            is_user_relevant: ecx.machine.is_user_relevant(&frame),
            // Per-frame data-race state is only needed by the vector-clock handler.
            data_race: ecx
                .machine
                .data_race
                .as_vclocks_ref()
                .map(|_| data_race::FrameState::default()),
        };

        interp_ok(frame.with_extra(extra))
    }
1688
    /// Returns the call stack of the currently active thread.
    fn stack<'a>(
        ecx: &'a InterpCx<'tcx, Self>,
    ) -> &'a [Frame<'tcx, Self::Provenance, Self::FrameExtra>] {
        ecx.active_thread_stack()
    }
1694
    /// Returns the call stack of the currently active thread, mutably.
    fn stack_mut<'a>(
        ecx: &'a mut InterpCx<'tcx, Self>,
    ) -> &'a mut Vec<Frame<'tcx, Self::Provenance, Self::FrameExtra>> {
        ecx.active_thread_stack_mut()
    }
1700
1701 fn before_terminator(ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
1702 ecx.machine.basic_block_count += 1u64; ecx.machine.since_gc += 1;
1704 if let Some(report_progress) = ecx.machine.report_progress {
1706 if ecx.machine.basic_block_count.is_multiple_of(u64::from(report_progress)) {
1707 ecx.emit_diagnostic(NonHaltingDiagnostic::ProgressReport {
1708 block_count: ecx.machine.basic_block_count,
1709 });
1710 }
1711 }
1712
1713 if ecx.machine.gc_interval > 0 && ecx.machine.since_gc >= ecx.machine.gc_interval {
1718 ecx.machine.since_gc = 0;
1719 ecx.run_provenance_gc();
1720 }
1721
1722 ecx.maybe_preempt_active_thread();
1725
1726 ecx.machine.monotonic_clock.tick();
1728
1729 interp_ok(())
1730 }
1731
1732 #[inline(always)]
1733 fn after_stack_push(ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
1734 if ecx.frame().extra.is_user_relevant {
1735 let stack_len = ecx.active_thread_stack().len();
1738 ecx.active_thread_mut().set_top_user_relevant_frame(stack_len - 1);
1739 }
1740 interp_ok(())
1741 }
1742
    /// Hook run just before a frame is popped: releases borrow-tracker frame
    /// state and keeps the "topmost user-relevant frame" bookkeeping up to date.
    fn before_stack_pop(ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
        let frame = ecx.frame();
        if ecx.machine.borrow_tracker.is_some() {
            ecx.on_stack_pop(frame)?;
        }
        if frame.extra.is_user_relevant {
            // The topmost user-relevant frame is going away; recompute it.
            // NOTE(review): the argument `1` presumably skips the frame currently
            // being popped — confirm against `recompute_top_user_relevant_frame`.
            ecx.active_thread_mut().recompute_top_user_relevant_frame(1);
        }
        info!("Leaving {}", ecx.frame().instance());
        interp_ok(())
    }
1764
    /// Hook run after a frame was popped: finishes the profiler interval started
    /// in `init_frame` and runs the unwind/catch_unwind handling for the frame.
    #[inline(always)]
    fn after_stack_pop(
        ecx: &mut InterpCx<'tcx, Self>,
        frame: Frame<'tcx, Provenance, FrameExtra<'tcx>>,
        unwinding: bool,
    ) -> InterpResult<'tcx, ReturnAction> {
        let res = {
            let mut frame = frame;
            // Take the timing handle out before `frame.extra` is consumed below.
            let timing = frame.extra.timing.take();
            let res = ecx.handle_stack_pop_unwind(frame.extra, unwinding);
            if let Some(profiler) = ecx.machine.profiler.as_ref() {
                // With a profiler active, `init_frame` always stored a timing handle.
                profiler.finish_recording_interval_event(timing.unwrap());
            }
            res
        };
        if !ecx.active_thread_stack().is_empty() {
            info!("Continuing in {}", ecx.frame().instance());
        }
        res
    }
1788
    /// Hook run after reading a local variable (one that still lives in a
    /// "virtual register"): informs the data-race detector, if active.
    fn after_local_read(
        ecx: &InterpCx<'tcx, Self>,
        frame: &Frame<'tcx, Provenance, FrameExtra<'tcx>>,
        local: mir::Local,
    ) -> InterpResult<'tcx> {
        if let Some(data_race) = &frame.extra.data_race {
            let _trace = enter_trace_span!(data_race::after_local_read);
            data_race.local_read(local, &ecx.machine);
        }
        interp_ok(())
    }
1800
    /// Hook run after writing a local variable (one that still lives in a
    /// "virtual register"): informs the data-race detector, if active.
    fn after_local_write(
        ecx: &mut InterpCx<'tcx, Self>,
        local: mir::Local,
        storage_live: bool,
    ) -> InterpResult<'tcx> {
        if let Some(data_race) = &ecx.frame().extra.data_race {
            let _trace = enter_trace_span!(data_race::after_local_write);
            data_race.local_write(local, storage_live, &ecx.machine);
        }
        interp_ok(())
    }
1812
1813 fn after_local_moved_to_memory(
1814 ecx: &mut InterpCx<'tcx, Self>,
1815 local: mir::Local,
1816 mplace: &MPlaceTy<'tcx>,
1817 ) -> InterpResult<'tcx> {
1818 let Some(Provenance::Concrete { alloc_id, .. }) = mplace.ptr().provenance else {
1819 panic!("after_local_allocated should only be called on fresh allocations");
1820 };
1821 let local_decl = &ecx.frame().body().local_decls[local];
1823 let span = local_decl.source_info.span;
1824 ecx.machine.allocation_spans.borrow_mut().insert(alloc_id, (span, None));
1825 let (alloc_info, machine) = ecx.get_alloc_extra_mut(alloc_id)?;
1827 if let Some(data_race) =
1828 &machine.threads.active_thread_stack().last().unwrap().extra.data_race
1829 {
1830 let _trace = enter_trace_span!(data_race::after_local_moved_to_memory);
1831 data_race.local_moved_to_memory(
1832 local,
1833 alloc_info.data_race.as_vclocks_mut().unwrap(),
1834 machine,
1835 );
1836 }
1837 interp_ok(())
1838 }
1839
1840 fn get_global_alloc_salt(
1841 ecx: &InterpCx<'tcx, Self>,
1842 instance: Option<ty::Instance<'tcx>>,
1843 ) -> usize {
1844 let unique = if let Some(instance) = instance {
1845 let is_generic = instance
1858 .args
1859 .into_iter()
1860 .any(|arg| !matches!(arg.kind(), ty::GenericArgKind::Lifetime(_)));
1861 let can_be_inlined = matches!(
1862 ecx.tcx.sess.opts.unstable_opts.cross_crate_inline_threshold,
1863 InliningThreshold::Always
1864 ) || !matches!(
1865 ecx.tcx.codegen_instance_attrs(instance.def).inline,
1866 InlineAttr::Never
1867 );
1868 !is_generic && !can_be_inlined
1869 } else {
1870 false
1872 };
1873 if unique {
1875 CTFE_ALLOC_SALT
1876 } else {
1877 ecx.machine.rng.borrow_mut().random_range(0..ADDRS_PER_ANON_GLOBAL)
1878 }
1879 }
1880
    /// Returns the (cached) data bytes of a union type, computing and memoizing
    /// the range on first request for each type.
    fn cached_union_data_range<'e>(
        ecx: &'e mut InterpCx<'tcx, Self>,
        ty: Ty<'tcx>,
        compute_range: impl FnOnce() -> RangeSet,
    ) -> Cow<'e, RangeSet> {
        Cow::Borrowed(ecx.machine.union_data_ranges.entry(ty).or_insert_with(compute_range))
    }
1888
1889 fn get_default_alloc_params(&self) -> <Self::Bytes as AllocBytes>::AllocParams {
1890 use crate::alloc::MiriAllocParams;
1891
1892 match &self.allocator {
1893 Some(alloc) => MiriAllocParams::Isolated(alloc.clone()),
1894 None => MiriAllocParams::Global,
1895 }
1896 }
1897
    /// Enters a tracing span, but only when built with the "tracing" feature;
    /// otherwise the closure is never evaluated and this is a no-op.
    fn enter_trace_span(span: impl FnOnce() -> tracing::Span) -> impl EnteredTraceSpan {
        #[cfg(feature = "tracing")]
        {
            span().entered()
        }
        #[cfg(not(feature = "tracing"))]
        #[expect(clippy::unused_unit)]
        {
            // Discard the unevaluated closure; the unit value acts as the no-op span.
            let _ = span; ()
        }
    }
1910}
1911
/// A callback the machine stores now and invokes at a later point in the
/// execution, with full access to the interpreter. Callbacks must report the
/// provenance they capture (via `VisitProvenance`) so the GC can see it.
pub trait MachineCallback<'tcx, T>: VisitProvenance {
    /// Invokes (and consumes) the callback. `arg` carries the call-site payload.
    fn call(
        self: Box<Self>,
        ecx: &mut InterpCx<'tcx, MiriMachine<'tcx>>,
        arg: T,
    ) -> InterpResult<'tcx>;
}
1921
/// A type-erased, boxed [`MachineCallback`]; see the [`callback!`] macro for
/// the usual way to construct one.
pub type DynMachineCallback<'tcx, T> = Box<dyn MachineCallback<'tcx, T> + 'tcx>;
1924
/// Builds a boxed [`MachineCallback`]: declares an ad-hoc struct holding the
/// listed captures, implements `VisitProvenance` by visiting each capture, and
/// implements `MachineCallback::call` to run `$body` with the captures, the
/// interpreter (`$this`), and the argument (`$arg`) in scope.
#[macro_export]
macro_rules! callback {
    (@capture<$tcx:lifetime $(,)? $($lft:lifetime),*>
        { $($name:ident: $type:ty),* $(,)? }
        |$this:ident, $arg:ident: $arg_ty:ty| $body:expr $(,)?) => {{
        // The capture struct; `_phantom` ties down the `$tcx` lifetime parameter
        // even when no capture mentions it.
        struct Callback<$tcx, $($lft),*> {
            $($name: $type,)*
            _phantom: std::marker::PhantomData<&$tcx ()>,
        }

        // Report every captured value's provenance to the GC.
        impl<$tcx, $($lft),*> VisitProvenance for Callback<$tcx, $($lft),*> {
            fn visit_provenance(&self, _visit: &mut VisitWith<'_>) {
                $(
                    self.$name.visit_provenance(_visit);
                )*
            }
        }

        impl<$tcx, $($lft),*> MachineCallback<$tcx, $arg_ty> for Callback<$tcx, $($lft),*> {
            fn call(
                self: Box<Self>,
                $this: &mut MiriInterpCx<$tcx>,
                $arg: $arg_ty
            ) -> InterpResult<$tcx> {
                // Destructure the captures so `$body` can use them by name.
                #[allow(unused_variables)]
                let Callback { $($name,)* _phantom } = *self;
                $body
            }
        }

        Box::new(Callback {
            $($name,)*
            _phantom: std::marker::PhantomData
        })
    }};
}