1use std::any::Any;
5use std::borrow::Cow;
6use std::cell::{Cell, RefCell};
7use std::path::Path;
8use std::rc::Rc;
9use std::{fmt, process};
10
11use rand::rngs::StdRng;
12use rand::{Rng, SeedableRng};
13use rustc_abi::{Align, ExternAbi, Size};
14use rustc_apfloat::{Float, FloatConvert};
15use rustc_hir::attrs::InlineAttr;
16use rustc_data_structures::fx::{FxHashMap, FxHashSet};
17#[allow(unused)]
18use rustc_data_structures::static_assert_size;
19use rustc_middle::mir;
20use rustc_middle::query::TyCtxtAt;
21use rustc_middle::ty::layout::{
22 HasTyCtxt, HasTypingEnv, LayoutCx, LayoutError, LayoutOf, TyAndLayout,
23};
24use rustc_middle::ty::{self, Instance, Ty, TyCtxt};
25use rustc_session::config::InliningThreshold;
26use rustc_span::def_id::{CrateNum, DefId};
27use rustc_span::{Span, SpanData, Symbol};
28use rustc_target::callconv::FnAbi;
29
30use crate::alloc_addresses::EvalContextExt;
31use crate::concurrency::cpu_affinity::{self, CpuAffinityMask};
32use crate::concurrency::data_race::{self, NaReadType, NaWriteType};
33use crate::concurrency::{AllocDataRaceHandler, GenmcCtx, GlobalDataRaceHandler, weak_memory};
34use crate::*;
35
/// Lowest real-time signal number Miri reports as available.
/// NOTE(review): presumably mirrors the runtime `SIGRTMIN` of a glibc Linux
/// system — confirm against the shims that use it.
pub const SIGRTMIN: i32 = 34;

/// Highest real-time signal number Miri reports as available.
/// NOTE(review): deliberately smaller than Linux's 64? — confirm.
pub const SIGRTMAX: i32 = 42;

/// Number of addresses reserved per anonymous global allocation.
/// NOTE(review): exact use lives in `alloc_addresses` — confirm there.
const ADDRS_PER_ANON_GLOBAL: usize = 32;
/// Extra per-stack-frame data Miri attaches to every interpreter frame.
pub struct FrameExtra<'tcx> {
    /// Borrow-tracker state for this frame, if borrow tracking is enabled.
    pub borrow_tracker: Option<borrow_tracker::FrameState>,

    /// Data needed to handle a panic unwinding into this frame, if this is a
    /// `catch_unwind`-style frame (see `CatchUnwindData`).
    pub catch_unwind: Option<CatchUnwindData<'tcx>>,

    /// `measureme` timing handle for this frame, if profiling is enabled.
    /// Intentionally omitted from the `Debug` impl below.
    pub timing: Option<measureme::DetachedTiming>,

    /// Whether this frame is considered user-relevant.
    /// NOTE(review): the exact criterion is determined elsewhere — confirm.
    pub is_user_relevant: bool,

    /// Data-race detector state for this frame, if the detector is enabled.
    pub data_race: Option<data_race::FrameState>,
}
75
76impl<'tcx> std::fmt::Debug for FrameExtra<'tcx> {
77 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
78 let FrameExtra { borrow_tracker, catch_unwind, timing: _, is_user_relevant, data_race } =
80 self;
81 f.debug_struct("FrameData")
82 .field("borrow_tracker", borrow_tracker)
83 .field("catch_unwind", catch_unwind)
84 .field("is_user_relevant", is_user_relevant)
85 .field("data_race", data_race)
86 .finish()
87 }
88}
89
90impl VisitProvenance for FrameExtra<'_> {
91 fn visit_provenance(&self, visit: &mut VisitWith<'_>) {
92 let FrameExtra {
93 catch_unwind,
94 borrow_tracker,
95 timing: _,
96 is_user_relevant: _,
97 data_race: _,
98 } = self;
99
100 catch_unwind.visit_provenance(visit);
101 borrow_tracker.visit_provenance(visit);
102 }
103}
104
/// Memory kinds specific to Miri, extending the interpreter's built-in set.
/// The user-facing names below are taken from the `fmt::Display` impl.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum MiriMemoryKind {
    /// "Rust heap".
    Rust,
    /// "Miri bare-metal heap".
    Miri,
    /// "C heap".
    C,
    /// "Windows heap".
    WinHeap,
    /// "Windows local memory".
    WinLocal,
    /// "machine-managed memory" (allowed to leak, see `MayLeak`).
    Machine,
    /// "language runtime memory" (must be freed; not allowed to leak).
    Runtime,
    /// "global (static or const)".
    Global,
    /// "extern static".
    ExternStatic,
    /// "thread-local static".
    Tls,
    /// "mmap"-ed memory.
    Mmap,
}
136
impl From<MiriMemoryKind> for MemoryKind {
    #[inline(always)]
    fn from(kind: MiriMemoryKind) -> MemoryKind {
        // Miri's own kinds live inside the interpreter's `Machine` variant.
        MemoryKind::Machine(kind)
    }
}
143
144impl MayLeak for MiriMemoryKind {
145 #[inline(always)]
146 fn may_leak(self) -> bool {
147 use self::MiriMemoryKind::*;
148 match self {
149 Rust | Miri | C | WinHeap | WinLocal | Runtime => false,
150 Machine | Global | ExternStatic | Tls | Mmap => true,
151 }
152 }
153}
154
155impl MiriMemoryKind {
156 fn should_save_allocation_span(self) -> bool {
158 use self::MiriMemoryKind::*;
159 match self {
160 Rust | Miri | C | WinHeap | WinLocal | Mmap => true,
162 Machine | Global | ExternStatic | Tls | Runtime => false,
164 }
165 }
166}
167
168impl fmt::Display for MiriMemoryKind {
169 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
170 use self::MiriMemoryKind::*;
171 match self {
172 Rust => write!(f, "Rust heap"),
173 Miri => write!(f, "Miri bare-metal heap"),
174 C => write!(f, "C heap"),
175 WinHeap => write!(f, "Windows heap"),
176 WinLocal => write!(f, "Windows local memory"),
177 Machine => write!(f, "machine-managed memory"),
178 Runtime => write!(f, "language runtime memory"),
179 Global => write!(f, "global (static or const)"),
180 ExternStatic => write!(f, "extern static"),
181 Tls => write!(f, "thread-local static"),
182 Mmap => write!(f, "mmap"),
183 }
184 }
185}
186
/// The interpreter's memory-kind type, instantiated with Miri's extra kinds.
pub type MemoryKind = interpret::MemoryKind<MiriMemoryKind>;
188
/// Pointer provenance as tracked by Miri.
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub enum Provenance {
    /// Fully tracked provenance: the allocation this pointer derives from,
    /// plus a borrow-tracker tag.
    Concrete {
        alloc_id: AllocId,
        /// Tag used by the borrow tracker.
        tag: BorTag,
    },
    /// Provenance that is not tracked exactly (this is the machine's
    /// `WILDCARD`, see the `interpret::Provenance` impl below).
    Wildcard,
}
221
/// The per-pointer "extra" provenance carried alongside an `AllocId` once the
/// allocation is known: either a concrete borrow tag, or wildcard.
#[derive(Copy, Clone, PartialEq)]
pub enum ProvenanceExtra {
    Concrete(BorTag),
    Wildcard,
}
228
// Compile-time guards against accidental size growth of these hot types
// on 64-bit hosts.
#[cfg(target_pointer_width = "64")]
static_assert_size!(StrictPointer, 24);
#[cfg(target_pointer_width = "64")]
static_assert_size!(Scalar, 32);
236
237impl fmt::Debug for Provenance {
238 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
239 match self {
240 Provenance::Concrete { alloc_id, tag } => {
241 if f.alternate() {
243 write!(f, "[{alloc_id:#?}]")?;
244 } else {
245 write!(f, "[{alloc_id:?}]")?;
246 }
247 write!(f, "{tag:?}")?;
249 }
250 Provenance::Wildcard => {
251 write!(f, "[wildcard]")?;
252 }
253 }
254 Ok(())
255 }
256}
257
258impl interpret::Provenance for Provenance {
259 const OFFSET_IS_ADDR: bool = true;
261
262 const WILDCARD: Option<Self> = Some(Provenance::Wildcard);
264
265 fn get_alloc_id(self) -> Option<AllocId> {
266 match self {
267 Provenance::Concrete { alloc_id, .. } => Some(alloc_id),
268 Provenance::Wildcard => None,
269 }
270 }
271
272 fn fmt(ptr: &interpret::Pointer<Self>, f: &mut fmt::Formatter<'_>) -> fmt::Result {
273 let (prov, addr) = ptr.into_raw_parts(); write!(f, "{:#x}", addr.bytes())?;
275 if f.alternate() {
276 write!(f, "{prov:#?}")?;
277 } else {
278 write!(f, "{prov:?}")?;
279 }
280 Ok(())
281 }
282
283 fn join(left: Option<Self>, right: Option<Self>) -> Option<Self> {
284 match (left, right) {
285 (
287 Some(Provenance::Concrete { alloc_id: left_alloc, tag: left_tag }),
288 Some(Provenance::Concrete { alloc_id: right_alloc, tag: right_tag }),
289 ) if left_alloc == right_alloc && left_tag == right_tag => left,
290 (Some(Provenance::Wildcard), o) | (o, Some(Provenance::Wildcard)) => o,
293 _ => None,
295 }
296 }
297}
298
299impl fmt::Debug for ProvenanceExtra {
300 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
301 match self {
302 ProvenanceExtra::Concrete(pid) => write!(f, "{pid:?}"),
303 ProvenanceExtra::Wildcard => write!(f, "<wildcard>"),
304 }
305 }
306}
307
308impl ProvenanceExtra {
309 pub fn and_then<T>(self, f: impl FnOnce(BorTag) -> Option<T>) -> Option<T> {
310 match self {
311 ProvenanceExtra::Concrete(pid) => f(pid),
312 ProvenanceExtra::Wildcard => None,
313 }
314 }
315}
316
/// Extra state Miri attaches to every allocation.
#[derive(Debug)]
pub struct AllocExtra<'tcx> {
    /// Borrow-tracker state for this allocation, if borrow tracking is enabled.
    pub borrow_tracker: Option<borrow_tracker::AllocState>,
    /// Data-race detection state for this allocation.
    pub data_race: AllocDataRaceHandler,
    /// Backtrace captured when the allocation was created; only populated for
    /// leak-reportable allocations when leak backtraces are enabled
    /// (see `MiriMachine::init_allocation`).
    pub backtrace: Option<Vec<FrameInfo<'tcx>>>,
    /// Type-erased synchronization-object state, keyed by `Size`
    /// (presumably the offset within the allocation — TODO confirm).
    pub sync: FxHashMap<Size, Box<dyn Any>>,
}
337
impl<'tcx> Clone for AllocExtra<'tcx> {
    // This impl exists only to satisfy a `Clone` bound somewhere else
    // (presumably on `Allocation` — confirm); actually cloning Miri's
    // allocation state is a bug, hence the unconditional panic.
    fn clone(&self) -> Self {
        panic!("our allocations should never be cloned");
    }
}
345
346impl VisitProvenance for AllocExtra<'_> {
347 fn visit_provenance(&self, visit: &mut VisitWith<'_>) {
348 let AllocExtra { borrow_tracker, data_race, backtrace: _, sync: _ } = self;
349
350 borrow_tracker.visit_provenance(visit);
351 data_race.visit_provenance(visit);
352 }
353}
354
355pub struct PrimitiveLayouts<'tcx> {
357 pub unit: TyAndLayout<'tcx>,
358 pub i8: TyAndLayout<'tcx>,
359 pub i16: TyAndLayout<'tcx>,
360 pub i32: TyAndLayout<'tcx>,
361 pub i64: TyAndLayout<'tcx>,
362 pub i128: TyAndLayout<'tcx>,
363 pub isize: TyAndLayout<'tcx>,
364 pub u8: TyAndLayout<'tcx>,
365 pub u16: TyAndLayout<'tcx>,
366 pub u32: TyAndLayout<'tcx>,
367 pub u64: TyAndLayout<'tcx>,
368 pub u128: TyAndLayout<'tcx>,
369 pub usize: TyAndLayout<'tcx>,
370 pub bool: TyAndLayout<'tcx>,
371 pub mut_raw_ptr: TyAndLayout<'tcx>, pub const_raw_ptr: TyAndLayout<'tcx>, }
374
375impl<'tcx> PrimitiveLayouts<'tcx> {
376 fn new(layout_cx: LayoutCx<'tcx>) -> Result<Self, &'tcx LayoutError<'tcx>> {
377 let tcx = layout_cx.tcx();
378 let mut_raw_ptr = Ty::new_mut_ptr(tcx, tcx.types.unit);
379 let const_raw_ptr = Ty::new_imm_ptr(tcx, tcx.types.unit);
380 Ok(Self {
381 unit: layout_cx.layout_of(tcx.types.unit)?,
382 i8: layout_cx.layout_of(tcx.types.i8)?,
383 i16: layout_cx.layout_of(tcx.types.i16)?,
384 i32: layout_cx.layout_of(tcx.types.i32)?,
385 i64: layout_cx.layout_of(tcx.types.i64)?,
386 i128: layout_cx.layout_of(tcx.types.i128)?,
387 isize: layout_cx.layout_of(tcx.types.isize)?,
388 u8: layout_cx.layout_of(tcx.types.u8)?,
389 u16: layout_cx.layout_of(tcx.types.u16)?,
390 u32: layout_cx.layout_of(tcx.types.u32)?,
391 u64: layout_cx.layout_of(tcx.types.u64)?,
392 u128: layout_cx.layout_of(tcx.types.u128)?,
393 usize: layout_cx.layout_of(tcx.types.usize)?,
394 bool: layout_cx.layout_of(tcx.types.bool)?,
395 mut_raw_ptr: layout_cx.layout_of(mut_raw_ptr)?,
396 const_raw_ptr: layout_cx.layout_of(const_raw_ptr)?,
397 })
398 }
399
400 pub fn uint(&self, size: Size) -> Option<TyAndLayout<'tcx>> {
401 match size.bits() {
402 8 => Some(self.u8),
403 16 => Some(self.u16),
404 32 => Some(self.u32),
405 64 => Some(self.u64),
406 128 => Some(self.u128),
407 _ => None,
408 }
409 }
410
411 pub fn int(&self, size: Size) -> Option<TyAndLayout<'tcx>> {
412 match size.bits() {
413 8 => Some(self.i8),
414 16 => Some(self.i16),
415 32 => Some(self.i32),
416 64 => Some(self.i64),
417 128 => Some(self.i128),
418 _ => None,
419 }
420 }
421}
422
/// The machine state of the Miri interpreter.
pub struct MiriMachine<'tcx> {
    /// The compiler's global context.
    pub tcx: TyCtxt<'tcx>,

    /// Global borrow-tracker state, if borrow tracking is enabled.
    pub borrow_tracker: Option<borrow_tracker::GlobalState>,

    /// Global data-race detection state (vector clocks, GenMC, or none).
    pub data_race: GlobalDataRaceHandler,

    /// Global state for base-address assignment of allocations.
    pub alloc_addresses: alloc_addresses::GlobalState,

    /// The emulated environment variables.
    pub(crate) env_vars: EnvVars<'tcx>,

    /// Return place of the `main` function, once set up.
    pub(crate) main_fn_ret_place: Option<MPlaceTy<'tcx>>,

    /// Pointers to the program arguments; `cmd_line` is presumably the
    /// Windows-style single command line — TODO confirm against the setup code.
    pub(crate) argc: Option<Pointer>,
    pub(crate) argv: Option<Pointer>,
    pub(crate) cmd_line: Option<Pointer>,

    /// Emulated thread-local storage.
    pub(crate) tls: TlsData<'tcx>,

    /// Policy for operations that require host communication (see `communicate`).
    pub(crate) isolated_op: IsolatedOp,

    /// How much validity checking to perform (see `enforce_validity*`).
    pub(crate) validation: ValidationMode,

    /// Emulated file descriptors and open directories.
    pub(crate) fds: shims::FdTable,
    pub(crate) dirs: shims::DirTable,

    /// State for the `epoll` shims.
    pub(crate) epoll_interests: shims::EpollInterestTable,

    /// The emulated monotonic clock.
    pub(crate) monotonic_clock: MonotonicClock,

    /// All threads of the interpreted program.
    pub(crate) threads: ThreadManager<'tcx>,

    /// Per-thread CPU affinity masks (only populated on targets that support
    /// affinity, see `MiriMachine::new`).
    pub(crate) thread_cpu_affinity: FxHashMap<ThreadId, CpuAffinityMask>,

    /// Precomputed layouts of primitive types.
    pub(crate) layouts: PrimitiveLayouts<'tcx>,

    /// Extra allocations treated as roots
    /// (NOTE(review): presumably for leak checking / provenance GC — confirm).
    pub(crate) static_roots: Vec<AllocId>,

    /// `measureme` profiler and its string-interning cache, if enabled.
    profiler: Option<measureme::Profiler>,
    string_cache: FxHashMap<String, measureme::StringId>,

    /// Cache of lookups of exported symbols.
    pub(crate) exported_symbols_cache: FxHashMap<Symbol, Option<Instance<'tcx>>>,

    /// How to render backtraces in diagnostics.
    pub(crate) backtrace_style: BacktraceStyle,

    /// Crates considered "local" for diagnostics (see `is_local`).
    pub(crate) local_crates: Vec<CrateNum>,

    /// Maps extern-static names to the pointers backing their shims
    /// (see `add_extern_static`).
    extern_statics: FxHashMap<Symbol, StrictPointer>,

    /// The RNG driving all of Miri's randomness; seeded from the config.
    pub(crate) rng: RefCell<StdRng>,

    /// Allocator used to isolate Miri's memory; only created when native
    /// libraries are loaded (see `MiriMachine::new`).
    pub(crate) allocator: Option<Rc<RefCell<crate::alloc::isolated_alloc::IsolatedAlloc>>>,

    /// Allocation ids for which creation (and accesses, if enabled) should be
    /// reported as diagnostics.
    tracked_alloc_ids: FxHashSet<AllocId>,
    track_alloc_accesses: bool,

    /// Which kind of alignment checking to perform.
    pub(crate) check_alignment: AlignmentCheck,

    /// Probability that a weak compare-exchange spuriously fails.
    pub(crate) cmpxchg_weak_failure_rate: f64,

    /// Probability of preempting the active thread.
    pub(crate) preemption_rate: f64,

    /// If `Some`, report progress every that-many basic blocks.
    pub(crate) report_progress: Option<u32>,
    /// Number of basic blocks executed so far.
    pub(crate) basic_block_count: u64,

    /// Loaded native libraries (only available on unix with the
    /// `native-lib` feature; the fallback `Vec<!>` is necessarily empty).
    #[cfg(all(unix, feature = "native-lib"))]
    pub native_lib: Vec<(libloading::Library, std::path::PathBuf)>,
    #[cfg(not(all(unix, feature = "native-lib")))]
    pub native_lib: Vec<!>,

    /// Provenance-GC interval and basic blocks since the last GC run.
    pub(crate) gc_interval: u32,
    pub(crate) since_gc: u32,

    /// Number of CPUs the emulated machine reports.
    pub(crate) num_cpus: u32,

    /// Emulated page size and the address/size of the emulated stack
    /// (chosen in `MiriMachine::new`).
    pub(crate) page_size: u64,
    pub(crate) stack_addr: u64,
    pub(crate) stack_size: u64,

    /// Whether to capture a backtrace at allocation time for leak reports.
    pub(crate) collect_leak_backtraces: bool,

    /// For each tracked allocation: the span where it was created and, once
    /// freed, the span where it was deallocated.
    pub(crate) allocation_spans: RefCell<FxHashMap<AllocId, (Span, Option<Span>)>>,

    /// Alignment "promised" for an allocation via symbolic alignment, as an
    /// (offset, align) pair (see the symbolic-alignment check).
    pub(crate) symbolic_alignment: RefCell<FxHashMap<AllocId, (Size, Align)>>,

    /// Cache of data ranges for union types
    /// (NOTE(review): populated elsewhere — confirm semantics there).
    union_data_ranges: FxHashMap<Ty<'tcx>, RangeSet>,

    /// One-shot sanity flags for the pthread sync shims.
    pub(crate) pthread_mutex_sanity: Cell<bool>,
    pub(crate) pthread_rwlock_sanity: Cell<bool>,
    pub(crate) pthread_condvar_sanity: Cell<bool>,

    /// Deduplication state for various one-time warnings.
    pub(crate) sb_extern_type_warned: Cell<bool>,
    #[allow(unused)]
    pub(crate) native_call_mem_warned: Cell<bool>,
    pub(crate) reject_in_isolation_warned: RefCell<FxHashSet<String>>,
    pub(crate) int2ptr_warned: RefCell<FxHashSet<Span>>,

    /// Cache for mangled internal symbol names.
    pub(crate) mangle_internal_symbol_cache: FxHashMap<&'static str, String>,

    /// Whether intrinsics must always use their fallback body.
    pub force_intrinsic_fallback: bool,

    /// Float non-determinism / rounding-error emulation toggles.
    pub float_nondet: bool,
    pub float_rounding_error: bool,
}
603
impl<'tcx> MiriMachine<'tcx> {
    /// Creates a fresh machine from `config`.
    ///
    /// `genmc_ctx` is unwrapped whenever `config.genmc_config` is set, so the
    /// caller must pass `Some` exactly in GenMC mode.
    pub(crate) fn new(
        config: &MiriConfig,
        layout_cx: LayoutCx<'tcx>,
        genmc_ctx: Option<Rc<GenmcCtx>>,
    ) -> Self {
        let tcx = layout_cx.tcx();
        let local_crates = helpers::get_local_crates(tcx);
        let layouts =
            PrimitiveLayouts::new(layout_cx).expect("Couldn't get layouts of primitive types");
        // Set up `measureme` profiling if an output directory was configured.
        let profiler = config.measureme_out.as_ref().map(|out| {
            let crate_name =
                tcx.sess.opts.crate_name.clone().unwrap_or_else(|| "unknown-crate".to_string());
            let pid = process::id();
            // Zero-padded pid so filenames have a uniform width.
            let filename = format!("{crate_name}-{pid:07}");
            let path = Path::new(out).join(filename);
            measureme::Profiler::new(path).expect("Couldn't create `measureme` profiler")
        });
        // Deterministic RNG; an unset seed defaults to 0.
        let rng = StdRng::seed_from_u64(config.seed.unwrap_or(0));
        let borrow_tracker = config.borrow_tracker.map(|bt| bt.instantiate_global_state(config));
        // Data-race handling: GenMC takes precedence over the vector-clock
        // detector; otherwise it is disabled entirely.
        let data_race = if config.genmc_config.is_some() {
            GlobalDataRaceHandler::Genmc(genmc_ctx.unwrap())
        } else if config.data_race_detector {
            GlobalDataRaceHandler::Vclocks(Box::new(data_race::GlobalState::new(config)))
        } else {
            GlobalDataRaceHandler::None
        };
        // Pick a target-appropriate emulated page size unless one was configured.
        let page_size = if let Some(page_size) = config.page_size {
            page_size
        } else {
            let target = &tcx.sess.target;
            match target.arch.as_ref() {
                "wasm32" | "wasm64" => 64 * 1024, "aarch64" => {
                    if target.options.vendor.as_ref() == "apple" {
                        16 * 1024
                    } else {
                        4 * 1024
                    }
                }
                _ => 4 * 1024,
            }
        };
        // Emulated stack address/size scale with the page size; targets with
        // pointers narrower than 32 bits get smaller values.
        let stack_addr = if tcx.pointer_size().bits() < 32 { page_size } else { page_size * 32 };
        let stack_size =
            if tcx.pointer_size().bits() < 32 { page_size * 4 } else { page_size * 16 };
        assert!(
            usize::try_from(config.num_cpus).unwrap() <= cpu_affinity::MAX_CPUS,
            "miri only supports up to {} CPUs, but {} were configured",
            cpu_affinity::MAX_CPUS,
            config.num_cpus
        );
        let threads = ThreadManager::new(config);
        let mut thread_cpu_affinity = FxHashMap::default();
        // On targets with CPU-affinity support, give the main thread a
        // default affinity mask covering all configured CPUs.
        if matches!(&*tcx.sess.target.os, "linux" | "freebsd" | "android") {
            thread_cpu_affinity
                .insert(threads.active_thread(), CpuAffinityMask::new(&layout_cx, config.num_cpus));
        }
        MiriMachine {
            tcx,
            borrow_tracker,
            data_race,
            alloc_addresses: RefCell::new(alloc_addresses::GlobalStateInner::new(config, stack_addr)),
            env_vars: EnvVars::default(),
            main_fn_ret_place: None,
            argc: None,
            argv: None,
            cmd_line: None,
            tls: TlsData::default(),
            isolated_op: config.isolated_op,
            validation: config.validation,
            fds: shims::FdTable::init(config.mute_stdout_stderr),
            epoll_interests: shims::EpollInterestTable::new(),
            dirs: Default::default(),
            layouts,
            threads,
            thread_cpu_affinity,
            static_roots: Vec::new(),
            profiler,
            string_cache: Default::default(),
            exported_symbols_cache: FxHashMap::default(),
            backtrace_style: config.backtrace_style,
            local_crates,
            extern_statics: FxHashMap::default(),
            rng: RefCell::new(rng),
            // The isolated allocator is only needed when native libraries are
            // in play.
            allocator: if !config.native_lib.is_empty() {
                Some(Rc::new(RefCell::new(crate::alloc::isolated_alloc::IsolatedAlloc::new())))
            } else { None },
            tracked_alloc_ids: config.tracked_alloc_ids.clone(),
            track_alloc_accesses: config.track_alloc_accesses,
            check_alignment: config.check_alignment,
            cmpxchg_weak_failure_rate: config.cmpxchg_weak_failure_rate,
            preemption_rate: config.preemption_rate,
            report_progress: config.report_progress,
            basic_block_count: 0,
            monotonic_clock: MonotonicClock::new(config.isolated_op == IsolatedOp::Allow),
            // Native libraries can only actually be loaded on unix with the
            // `native-lib` feature; the fallback panics per configured lib.
            #[cfg(all(unix, feature = "native-lib"))]
            native_lib: config.native_lib.iter().map(|lib_file_path| {
                let host_triple = rustc_session::config::host_tuple();
                let target_triple = tcx.sess.opts.target_triple.tuple();
                if host_triple != target_triple {
                    panic!(
                        "calling native C functions in linked .so file requires host and target to be the same: \
                        host={host_triple}, target={target_triple}",
                    );
                }
                (
                    unsafe {
                        libloading::Library::new(lib_file_path)
                            .expect("failed to read specified extern shared object file")
                    },
                    lib_file_path.clone(),
                )
            }).collect(),
            #[cfg(not(all(unix, feature = "native-lib")))]
            native_lib: config.native_lib.iter().map(|_| {
                panic!("calling functions from native libraries via FFI is not supported in this build of Miri")
            }).collect(),
            gc_interval: config.gc_interval,
            since_gc: 0,
            num_cpus: config.num_cpus,
            page_size,
            stack_addr,
            stack_size,
            collect_leak_backtraces: config.collect_leak_backtraces,
            allocation_spans: RefCell::new(FxHashMap::default()),
            symbolic_alignment: RefCell::new(FxHashMap::default()),
            union_data_ranges: FxHashMap::default(),
            pthread_mutex_sanity: Cell::new(false),
            pthread_rwlock_sanity: Cell::new(false),
            pthread_condvar_sanity: Cell::new(false),
            sb_extern_type_warned: Cell::new(false),
            native_call_mem_warned: Cell::new(false),
            reject_in_isolation_warned: Default::default(),
            int2ptr_warned: Default::default(),
            mangle_internal_symbol_cache: Default::default(),
            force_intrinsic_fallback: config.force_intrinsic_fallback,
            float_nondet: config.float_nondet,
            float_rounding_error: config.float_rounding_error,
        }
    }

    /// Initialization that needs a fully constructed interpreter context:
    /// environment variables, extern-static shims, and the thread manager.
    pub(crate) fn late_init(
        ecx: &mut MiriInterpCx<'tcx>,
        config: &MiriConfig,
        on_main_stack_empty: StackEmptyCallback<'tcx>,
    ) -> InterpResult<'tcx> {
        EnvVars::init(ecx, config)?;
        MiriMachine::init_extern_statics(ecx)?;
        ThreadManager::init(ecx, on_main_stack_empty);
        interp_ok(())
    }

    /// Registers `ptr` as backing the extern static `name`.
    /// Panics if the pointer carries no provenance or `name` was already
    /// registered.
    pub(crate) fn add_extern_static(ecx: &mut MiriInterpCx<'tcx>, name: &str, ptr: Pointer) {
        let ptr = ptr.into_pointer_or_addr().unwrap();
        ecx.machine.extern_statics.try_insert(Symbol::intern(name), ptr).unwrap();
    }

    /// Whether the machine may communicate with the host (isolation disabled).
    pub(crate) fn communicate(&self) -> bool {
        self.isolated_op == IsolatedOp::Allow
    }

    /// Whether `frame` belongs to a crate that is considered "local".
    pub(crate) fn is_local(&self, frame: &FrameInfo<'_>) -> bool {
        let def_id = frame.instance.def_id();
        def_id.is_local() || self.local_crates.contains(&def_id.krate)
    }

    /// Called on abnormal termination. Drops the profiler eagerly so its
    /// data gets written out rather than being lost
    /// (NOTE(review): dropping presumably finalizes the profile — confirm).
    pub(crate) fn handle_abnormal_termination(&mut self) {
        drop(self.profiler.take());
    }

    /// The emulated page size as an `Align`.
    pub(crate) fn page_align(&self) -> Align {
        Align::from_bytes(self.page_size).unwrap()
    }

    /// The span where `alloc_id` was allocated, if it was recorded.
    pub(crate) fn allocated_span(&self, alloc_id: AllocId) -> Option<SpanData> {
        self.allocation_spans
            .borrow()
            .get(&alloc_id)
            .map(|(allocated, _deallocated)| allocated.data())
    }

    /// The span where `alloc_id` was deallocated, if it was recorded.
    pub(crate) fn deallocated_span(&self, alloc_id: AllocId) -> Option<SpanData> {
        self.allocation_spans
            .borrow()
            .get(&alloc_id)
            .and_then(|(_allocated, deallocated)| *deallocated)
            .map(Span::data)
    }

    /// Builds the `AllocExtra` for a newly created allocation, wiring up the
    /// borrow tracker and data-race state and recording diagnostics/spans.
    fn init_allocation(
        ecx: &MiriInterpCx<'tcx>,
        id: AllocId,
        kind: MemoryKind,
        size: Size,
        align: Align,
    ) -> InterpResult<'tcx, AllocExtra<'tcx>> {
        // Report creation if the user asked to track this allocation id.
        if ecx.machine.tracked_alloc_ids.contains(&id) {
            ecx.emit_diagnostic(NonHaltingDiagnostic::CreatedAlloc(id, size, align, kind));
        }

        let borrow_tracker = ecx
            .machine
            .borrow_tracker
            .as_ref()
            .map(|bt| bt.borrow_mut().new_allocation(id, size, kind, &ecx.machine));

        let data_race = match &ecx.machine.data_race {
            GlobalDataRaceHandler::None => AllocDataRaceHandler::None,
            GlobalDataRaceHandler::Vclocks(data_race) =>
                AllocDataRaceHandler::Vclocks(
                    data_race::AllocState::new_allocation(
                        data_race,
                        &ecx.machine.threads,
                        size,
                        kind,
                        ecx.machine.current_span(),
                    ),
                    // Weak-memory state only exists when that feature is on.
                    data_race.weak_memory.then(weak_memory::AllocState::new_allocation),
                ),
            GlobalDataRaceHandler::Genmc(_genmc_ctx) => {
                AllocDataRaceHandler::Genmc
            }
        };

        // Only capture a backtrace for allocations that could be reported as
        // leaks, and only if leak backtraces were requested.
        let backtrace = if kind.may_leak() || !ecx.machine.collect_leak_backtraces {
            None
        } else {
            Some(ecx.generate_stacktrace())
        };

        // Remember the allocation site of user-relevant kinds for diagnostics.
        if matches!(kind, MemoryKind::Machine(kind) if kind.should_save_allocation_span()) {
            ecx.machine
                .allocation_spans
                .borrow_mut()
                .insert(id, (ecx.machine.current_span(), None));
        }

        interp_ok(AllocExtra { borrow_tracker, data_race, backtrace, sync: FxHashMap::default() })
    }
}
877
impl VisitProvenance for MiriMachine<'_> {
    fn visit_provenance(&self, visit: &mut VisitWith<'_>) {
        // Exhaustively destructure the machine so that adding a field forces
        // an explicit decision about whether it can hold provenance that the
        // GC must see. All `_` fields are deliberately not visited.
        #[rustfmt::skip]
        let MiriMachine {
            threads,
            thread_cpu_affinity: _,
            tls,
            env_vars,
            main_fn_ret_place,
            argc,
            argv,
            cmd_line,
            extern_statics,
            dirs,
            borrow_tracker,
            data_race,
            alloc_addresses,
            fds,
            epoll_interests:_,
            tcx: _,
            isolated_op: _,
            validation: _,
            monotonic_clock: _,
            layouts: _,
            static_roots: _,
            profiler: _,
            string_cache: _,
            exported_symbols_cache: _,
            backtrace_style: _,
            local_crates: _,
            rng: _,
            allocator: _,
            tracked_alloc_ids: _,
            track_alloc_accesses: _,
            check_alignment: _,
            cmpxchg_weak_failure_rate: _,
            preemption_rate: _,
            report_progress: _,
            basic_block_count: _,
            native_lib: _,
            gc_interval: _,
            since_gc: _,
            num_cpus: _,
            page_size: _,
            stack_addr: _,
            stack_size: _,
            collect_leak_backtraces: _,
            allocation_spans: _,
            symbolic_alignment: _,
            union_data_ranges: _,
            pthread_mutex_sanity: _,
            pthread_rwlock_sanity: _,
            pthread_condvar_sanity: _,
            sb_extern_type_warned: _,
            native_call_mem_warned: _,
            reject_in_isolation_warned: _,
            int2ptr_warned: _,
            mangle_internal_symbol_cache: _,
            force_intrinsic_fallback: _,
            float_nondet: _,
            float_rounding_error: _,
        } = self;

        threads.visit_provenance(visit);
        tls.visit_provenance(visit);
        env_vars.visit_provenance(visit);
        dirs.visit_provenance(visit);
        fds.visit_provenance(visit);
        data_race.visit_provenance(visit);
        borrow_tracker.visit_provenance(visit);
        alloc_addresses.visit_provenance(visit);
        main_fn_ret_place.visit_provenance(visit);
        argc.visit_provenance(visit);
        argv.visit_provenance(visit);
        cmd_line.visit_provenance(visit);
        // `extern_statics` values are pointers, so visit each one.
        for ptr in extern_statics.values() {
            ptr.visit_provenance(visit);
        }
    }
}
958
/// The interpreter context type used throughout Miri.
pub type MiriInterpCx<'tcx> = InterpCx<'tcx, MiriMachine<'tcx>>;
961
/// Accessor trait providing uniform `&`/`&mut` access to the `MiriInterpCx`;
/// the only impl in sight is the identity one below (presumably used by
/// extension traits — confirm at the use sites).
pub trait MiriInterpCxExt<'tcx> {
    fn eval_context_ref<'a>(&'a self) -> &'a MiriInterpCx<'tcx>;
    fn eval_context_mut<'a>(&'a mut self) -> &'a mut MiriInterpCx<'tcx>;
}
967impl<'tcx> MiriInterpCxExt<'tcx> for MiriInterpCx<'tcx> {
968 #[inline(always)]
969 fn eval_context_ref(&self) -> &MiriInterpCx<'tcx> {
970 self
971 }
972 #[inline(always)]
973 fn eval_context_mut(&mut self) -> &mut MiriInterpCx<'tcx> {
974 self
975 }
976}
977
978impl<'tcx> Machine<'tcx> for MiriMachine<'tcx> {
980 type MemoryKind = MiriMemoryKind;
981 type ExtraFnVal = DynSym;
982
983 type FrameExtra = FrameExtra<'tcx>;
984 type AllocExtra = AllocExtra<'tcx>;
985
986 type Provenance = Provenance;
987 type ProvenanceExtra = ProvenanceExtra;
988 type Bytes = MiriAllocBytes;
989
990 type MemoryMap =
991 MonoHashMap<AllocId, (MemoryKind, Allocation<Provenance, Self::AllocExtra, Self::Bytes>)>;
992
993 const GLOBAL_KIND: Option<MiriMemoryKind> = Some(MiriMemoryKind::Global);
994
995 const PANIC_ON_ALLOC_FAIL: bool = false;
996
997 #[inline(always)]
998 fn enforce_alignment(ecx: &MiriInterpCx<'tcx>) -> bool {
999 ecx.machine.check_alignment != AlignmentCheck::None
1000 }
1001
1002 #[inline(always)]
1003 fn alignment_check(
1004 ecx: &MiriInterpCx<'tcx>,
1005 alloc_id: AllocId,
1006 alloc_align: Align,
1007 alloc_kind: AllocKind,
1008 offset: Size,
1009 align: Align,
1010 ) -> Option<Misalignment> {
1011 if ecx.machine.check_alignment != AlignmentCheck::Symbolic {
1012 return None;
1014 }
1015 if alloc_kind != AllocKind::LiveData {
1016 return None;
1018 }
1019 let (promised_offset, promised_align) = ecx
1021 .machine
1022 .symbolic_alignment
1023 .borrow()
1024 .get(&alloc_id)
1025 .copied()
1026 .unwrap_or((Size::ZERO, alloc_align));
1027 if promised_align < align {
1028 Some(Misalignment { has: promised_align, required: align })
1030 } else {
1031 let distance = offset.bytes().wrapping_sub(promised_offset.bytes());
1033 if distance.is_multiple_of(align.bytes()) {
1035 None
1037 } else {
1038 let distance_pow2 = 1 << distance.trailing_zeros();
1040 Some(Misalignment {
1041 has: Align::from_bytes(distance_pow2).unwrap(),
1042 required: align,
1043 })
1044 }
1045 }
1046 }
1047
1048 #[inline(always)]
1049 fn enforce_validity(ecx: &MiriInterpCx<'tcx>, _layout: TyAndLayout<'tcx>) -> bool {
1050 ecx.machine.validation != ValidationMode::No
1051 }
1052 #[inline(always)]
1053 fn enforce_validity_recursively(
1054 ecx: &InterpCx<'tcx, Self>,
1055 _layout: TyAndLayout<'tcx>,
1056 ) -> bool {
1057 ecx.machine.validation == ValidationMode::Deep
1058 }
1059
1060 #[inline(always)]
1061 fn ignore_optional_overflow_checks(ecx: &MiriInterpCx<'tcx>) -> bool {
1062 !ecx.tcx.sess.overflow_checks()
1063 }
1064
1065 fn check_fn_target_features(
1066 ecx: &MiriInterpCx<'tcx>,
1067 instance: ty::Instance<'tcx>,
1068 ) -> InterpResult<'tcx> {
1069 let attrs = ecx.tcx.codegen_instance_attrs(instance.def);
1070 if attrs
1071 .target_features
1072 .iter()
1073 .any(|feature| !ecx.tcx.sess.target_features.contains(&feature.name))
1074 {
1075 let unavailable = attrs
1076 .target_features
1077 .iter()
1078 .filter(|&feature| {
1079 !feature.implied && !ecx.tcx.sess.target_features.contains(&feature.name)
1080 })
1081 .fold(String::new(), |mut s, feature| {
1082 if !s.is_empty() {
1083 s.push_str(", ");
1084 }
1085 s.push_str(feature.name.as_str());
1086 s
1087 });
1088 let msg = format!(
1089 "calling a function that requires unavailable target features: {unavailable}"
1090 );
1091 if ecx.tcx.sess.target.is_like_wasm {
1094 throw_machine_stop!(TerminationInfo::Abort(msg));
1095 } else {
1096 throw_ub_format!("{msg}");
1097 }
1098 }
1099 interp_ok(())
1100 }
1101
1102 #[inline(always)]
1103 fn find_mir_or_eval_fn(
1104 ecx: &mut MiriInterpCx<'tcx>,
1105 instance: ty::Instance<'tcx>,
1106 abi: &FnAbi<'tcx, Ty<'tcx>>,
1107 args: &[FnArg<'tcx, Provenance>],
1108 dest: &PlaceTy<'tcx>,
1109 ret: Option<mir::BasicBlock>,
1110 unwind: mir::UnwindAction,
1111 ) -> InterpResult<'tcx, Option<(&'tcx mir::Body<'tcx>, ty::Instance<'tcx>)>> {
1112 if ecx.tcx.is_foreign_item(instance.def_id()) {
1114 let args = ecx.copy_fn_args(args); let link_name = Symbol::intern(ecx.tcx.symbol_name(instance).name);
1122 return ecx.emulate_foreign_item(link_name, abi, &args, dest, ret, unwind);
1123 }
1124
1125 interp_ok(Some((ecx.load_mir(instance.def, None)?, instance)))
1127 }
1128
1129 #[inline(always)]
1130 fn call_extra_fn(
1131 ecx: &mut MiriInterpCx<'tcx>,
1132 fn_val: DynSym,
1133 abi: &FnAbi<'tcx, Ty<'tcx>>,
1134 args: &[FnArg<'tcx, Provenance>],
1135 dest: &PlaceTy<'tcx>,
1136 ret: Option<mir::BasicBlock>,
1137 unwind: mir::UnwindAction,
1138 ) -> InterpResult<'tcx> {
1139 let args = ecx.copy_fn_args(args); ecx.emulate_dyn_sym(fn_val, abi, &args, dest, ret, unwind)
1141 }
1142
1143 #[inline(always)]
1144 fn call_intrinsic(
1145 ecx: &mut MiriInterpCx<'tcx>,
1146 instance: ty::Instance<'tcx>,
1147 args: &[OpTy<'tcx>],
1148 dest: &PlaceTy<'tcx>,
1149 ret: Option<mir::BasicBlock>,
1150 unwind: mir::UnwindAction,
1151 ) -> InterpResult<'tcx, Option<ty::Instance<'tcx>>> {
1152 ecx.call_intrinsic(instance, args, dest, ret, unwind)
1153 }
1154
1155 #[inline(always)]
1156 fn assert_panic(
1157 ecx: &mut MiriInterpCx<'tcx>,
1158 msg: &mir::AssertMessage<'tcx>,
1159 unwind: mir::UnwindAction,
1160 ) -> InterpResult<'tcx> {
1161 ecx.assert_panic(msg, unwind)
1162 }
1163
1164 fn panic_nounwind(ecx: &mut InterpCx<'tcx, Self>, msg: &str) -> InterpResult<'tcx> {
1165 ecx.start_panic_nounwind(msg)
1166 }
1167
1168 fn unwind_terminate(
1169 ecx: &mut InterpCx<'tcx, Self>,
1170 reason: mir::UnwindTerminateReason,
1171 ) -> InterpResult<'tcx> {
1172 let panic = ecx.tcx.lang_items().get(reason.lang_item()).unwrap();
1174 let panic = ty::Instance::mono(ecx.tcx.tcx, panic);
1175 ecx.call_function(
1176 panic,
1177 ExternAbi::Rust,
1178 &[],
1179 None,
1180 ReturnContinuation::Goto { ret: None, unwind: mir::UnwindAction::Unreachable },
1181 )?;
1182 interp_ok(())
1183 }
1184
1185 #[inline(always)]
1186 fn binary_ptr_op(
1187 ecx: &MiriInterpCx<'tcx>,
1188 bin_op: mir::BinOp,
1189 left: &ImmTy<'tcx>,
1190 right: &ImmTy<'tcx>,
1191 ) -> InterpResult<'tcx, ImmTy<'tcx>> {
1192 ecx.binary_ptr_op(bin_op, left, right)
1193 }
1194
1195 #[inline(always)]
1196 fn generate_nan<F1: Float + FloatConvert<F2>, F2: Float>(
1197 ecx: &InterpCx<'tcx, Self>,
1198 inputs: &[F1],
1199 ) -> F2 {
1200 ecx.generate_nan(inputs)
1201 }
1202
1203 #[inline(always)]
1204 fn apply_float_nondet(
1205 ecx: &mut InterpCx<'tcx, Self>,
1206 val: ImmTy<'tcx>,
1207 ) -> InterpResult<'tcx, ImmTy<'tcx>> {
1208 crate::math::apply_random_float_error_to_imm(ecx, val, 2 )
1209 }
1210
1211 #[inline(always)]
1212 fn equal_float_min_max<F: Float>(ecx: &MiriInterpCx<'tcx>, a: F, b: F) -> F {
1213 ecx.equal_float_min_max(a, b)
1214 }
1215
1216 #[inline(always)]
1217 fn ub_checks(ecx: &InterpCx<'tcx, Self>) -> InterpResult<'tcx, bool> {
1218 interp_ok(ecx.tcx.sess.ub_checks())
1219 }
1220
1221 #[inline(always)]
1222 fn contract_checks(ecx: &InterpCx<'tcx, Self>) -> InterpResult<'tcx, bool> {
1223 interp_ok(ecx.tcx.sess.contract_checks())
1224 }
1225
1226 #[inline(always)]
1227 fn thread_local_static_pointer(
1228 ecx: &mut MiriInterpCx<'tcx>,
1229 def_id: DefId,
1230 ) -> InterpResult<'tcx, StrictPointer> {
1231 ecx.get_or_create_thread_local_alloc(def_id)
1232 }
1233
1234 fn extern_static_pointer(
1235 ecx: &MiriInterpCx<'tcx>,
1236 def_id: DefId,
1237 ) -> InterpResult<'tcx, StrictPointer> {
1238 let link_name = Symbol::intern(ecx.tcx.symbol_name(Instance::mono(*ecx.tcx, def_id)).name);
1239 if let Some(&ptr) = ecx.machine.extern_statics.get(&link_name) {
1240 let Provenance::Concrete { alloc_id, .. } = ptr.provenance else {
1244 panic!("extern_statics cannot contain wildcards")
1245 };
1246 let info = ecx.get_alloc_info(alloc_id);
1247 let def_ty = ecx.tcx.type_of(def_id).instantiate_identity();
1248 let extern_decl_layout =
1249 ecx.tcx.layout_of(ecx.typing_env().as_query_input(def_ty)).unwrap();
1250 if extern_decl_layout.size != info.size || extern_decl_layout.align.abi != info.align {
1251 throw_unsup_format!(
1252 "extern static `{link_name}` has been declared as `{krate}::{name}` \
1253 with a size of {decl_size} bytes and alignment of {decl_align} bytes, \
1254 but Miri emulates it via an extern static shim \
1255 with a size of {shim_size} bytes and alignment of {shim_align} bytes",
1256 name = ecx.tcx.def_path_str(def_id),
1257 krate = ecx.tcx.crate_name(def_id.krate),
1258 decl_size = extern_decl_layout.size.bytes(),
1259 decl_align = extern_decl_layout.align.abi.bytes(),
1260 shim_size = info.size.bytes(),
1261 shim_align = info.align.bytes(),
1262 )
1263 }
1264 interp_ok(ptr)
1265 } else {
1266 throw_unsup_format!("extern static `{link_name}` is not supported by Miri",)
1267 }
1268 }
1269
1270 fn init_local_allocation(
1271 ecx: &MiriInterpCx<'tcx>,
1272 id: AllocId,
1273 kind: MemoryKind,
1274 size: Size,
1275 align: Align,
1276 ) -> InterpResult<'tcx, Self::AllocExtra> {
1277 assert!(kind != MiriMemoryKind::Global.into());
1278 MiriMachine::init_allocation(ecx, id, kind, size, align)
1279 }
1280
    fn adjust_alloc_root_pointer(
        ecx: &MiriInterpCx<'tcx>,
        ptr: interpret::Pointer<CtfeProvenance>,
        kind: Option<MemoryKind>,
    ) -> InterpResult<'tcx, interpret::Pointer<Provenance>> {
        let kind = kind.expect("we set our GLOBAL_KIND so this cannot be None");
        let alloc_id = ptr.provenance.alloc_id();
        // Thread-local and extern statics have dedicated lookup paths
        // (`thread_local_static_pointer` / `extern_static_pointer`), so they must
        // never reach this function. Only verify that in debug builds, since the
        // `try_get_global_alloc` lookup is not free.
        if cfg!(debug_assertions) {
            match ecx.tcx.try_get_global_alloc(alloc_id) {
                Some(GlobalAlloc::Static(def_id)) if ecx.tcx.is_thread_local_static(def_id) => {
                    panic!("adjust_alloc_root_pointer called on thread-local static")
                }
                Some(GlobalAlloc::Static(def_id)) if ecx.tcx.is_foreign_item(def_id) => {
                    panic!("adjust_alloc_root_pointer called on extern static")
                }
                _ => {}
            }
        }
        // Ask the borrow tracker (if enabled) for the tag of a fresh root pointer
        // into this allocation.
        let tag = if let Some(borrow_tracker) = &ecx.machine.borrow_tracker {
            borrow_tracker.borrow_mut().root_ptr_tag(alloc_id, &ecx.machine)
        } else {
            // With borrow tracking disabled the tag is never inspected, so any
            // value will do.
            BorTag::default()
        };
        ecx.adjust_alloc_root_pointer(ptr, tag, kind)
    }
1309
    #[inline(always)]
    fn ptr_from_addr_cast(ecx: &MiriInterpCx<'tcx>, addr: u64) -> InterpResult<'tcx, Pointer> {
        // Int-to-pointer cast: forward to the `alloc_addresses` helper of the same name.
        ecx.ptr_from_addr_cast(addr)
    }
1315
    #[inline(always)]
    fn expose_provenance(
        ecx: &InterpCx<'tcx, Self>,
        provenance: Self::Provenance,
    ) -> InterpResult<'tcx> {
        // Pointer-to-int cast exposed this provenance: forward to the machine-level helper.
        ecx.expose_provenance(provenance)
    }
1326
1327 fn ptr_get_alloc(
1339 ecx: &MiriInterpCx<'tcx>,
1340 ptr: StrictPointer,
1341 size: i64,
1342 ) -> Option<(AllocId, Size, Self::ProvenanceExtra)> {
1343 let rel = ecx.ptr_get_alloc(ptr, size);
1344
1345 rel.map(|(alloc_id, size)| {
1346 let tag = match ptr.provenance {
1347 Provenance::Concrete { tag, .. } => ProvenanceExtra::Concrete(tag),
1348 Provenance::Wildcard => ProvenanceExtra::Wildcard,
1349 };
1350 (alloc_id, size, tag)
1351 })
1352 }
1353
    fn adjust_global_allocation<'b>(
        ecx: &InterpCx<'tcx, Self>,
        id: AllocId,
        alloc: &'b Allocation,
    ) -> InterpResult<'tcx, Cow<'b, Allocation<Self::Provenance, Self::AllocExtra, Self::Bytes>>>
    {
        // Convert the tcx allocation into Miri's representation: copy the bytes
        // into Miri-managed storage and adjust every embedded pointer so it
        // carries Miri provenance.
        let alloc = alloc.adjust_from_tcx(
            &ecx.tcx,
            |bytes, align| ecx.get_global_alloc_bytes(id, bytes, align),
            |ptr| ecx.global_root_pointer(ptr),
        )?;
        // Attach the machine-specific per-allocation state.
        let kind = MiriMemoryKind::Global.into();
        let extra = MiriMachine::init_allocation(ecx, id, kind, alloc.size(), alloc.align)?;
        interp_ok(Cow::Owned(alloc.with_extra(extra)))
    }
1377
    #[inline(always)]
    fn before_memory_read(
        _tcx: TyCtxtAt<'tcx>,
        machine: &Self,
        alloc_extra: &AllocExtra<'tcx>,
        ptr: Pointer,
        (alloc_id, prov_extra): (AllocId, Self::ProvenanceExtra),
        range: AllocRange,
    ) -> InterpResult<'tcx> {
        // Emit a diagnostic if the user asked to track accesses to this allocation.
        if machine.track_alloc_accesses && machine.tracked_alloc_ids.contains(&alloc_id) {
            machine
                .emit_diagnostic(NonHaltingDiagnostic::AccessedAlloc(alloc_id, AccessKind::Read));
        }
        // Inform whichever concurrency backend is active about the read.
        match &machine.data_race {
            GlobalDataRaceHandler::None => {}
            GlobalDataRaceHandler::Genmc(genmc_ctx) =>
                genmc_ctx.memory_load(machine, ptr.addr(), range.size)?,
            GlobalDataRaceHandler::Vclocks(_data_race) => {
                // With the vector-clock backend, the global handler and the
                // per-allocation handler are always in sync.
                let AllocDataRaceHandler::Vclocks(data_race, weak_memory) = &alloc_extra.data_race
                else {
                    unreachable!();
                };
                data_race.read(alloc_id, range, NaReadType::Read, None, machine)?;
                if let Some(weak_memory) = weak_memory {
                    weak_memory.memory_accessed(range, machine.data_race.as_vclocks_ref().unwrap());
                }
            }
        }
        // Borrow tracker hook, if enabled for this allocation.
        if let Some(borrow_tracker) = &alloc_extra.borrow_tracker {
            borrow_tracker.before_memory_read(alloc_id, prov_extra, range, machine)?;
        }
        interp_ok(())
    }
1412
    #[inline(always)]
    fn before_memory_write(
        _tcx: TyCtxtAt<'tcx>,
        machine: &mut Self,
        alloc_extra: &mut AllocExtra<'tcx>,
        ptr: Pointer,
        (alloc_id, prov_extra): (AllocId, Self::ProvenanceExtra),
        range: AllocRange,
    ) -> InterpResult<'tcx> {
        // Emit a diagnostic if the user asked to track accesses to this allocation.
        if machine.track_alloc_accesses && machine.tracked_alloc_ids.contains(&alloc_id) {
            machine
                .emit_diagnostic(NonHaltingDiagnostic::AccessedAlloc(alloc_id, AccessKind::Write));
        }
        // Inform whichever concurrency backend is active about the write.
        match &machine.data_race {
            GlobalDataRaceHandler::None => {}
            GlobalDataRaceHandler::Genmc(genmc_ctx) => {
                genmc_ctx.memory_store(machine, ptr.addr(), range.size)?;
            }
            GlobalDataRaceHandler::Vclocks(_global_state) => {
                // With the vector-clock backend, the global handler and the
                // per-allocation handler are always in sync.
                let AllocDataRaceHandler::Vclocks(data_race, weak_memory) =
                    &mut alloc_extra.data_race
                else {
                    unreachable!()
                };
                data_race.write(alloc_id, range, NaWriteType::Write, None, machine)?;
                if let Some(weak_memory) = weak_memory {
                    weak_memory.memory_accessed(range, machine.data_race.as_vclocks_ref().unwrap());
                }
            }
        }
        // Borrow tracker hook, if enabled for this allocation.
        if let Some(borrow_tracker) = &mut alloc_extra.borrow_tracker {
            borrow_tracker.before_memory_write(alloc_id, prov_extra, range, machine)?;
        }
        interp_ok(())
    }
1448
    #[inline(always)]
    fn before_memory_deallocation(
        _tcx: TyCtxtAt<'tcx>,
        machine: &mut Self,
        alloc_extra: &mut AllocExtra<'tcx>,
        ptr: Pointer,
        (alloc_id, prove_extra): (AllocId, Self::ProvenanceExtra),
        size: Size,
        align: Align,
        kind: MemoryKind,
    ) -> InterpResult<'tcx> {
        // Emit a diagnostic if the user asked to track this allocation.
        if machine.tracked_alloc_ids.contains(&alloc_id) {
            machine.emit_diagnostic(NonHaltingDiagnostic::FreedAlloc(alloc_id));
        }
        // Inform the concurrency backend; the vector-clock backend models a
        // deallocation as a write covering the entire allocation.
        match &machine.data_race {
            GlobalDataRaceHandler::None => {}
            GlobalDataRaceHandler::Genmc(genmc_ctx) =>
                genmc_ctx.handle_dealloc(machine, ptr.addr(), size, align, kind)?,
            GlobalDataRaceHandler::Vclocks(_global_state) => {
                let data_race = alloc_extra.data_race.as_vclocks_mut().unwrap();
                data_race.write(
                    alloc_id,
                    alloc_range(Size::ZERO, size),
                    NaWriteType::Deallocate,
                    None,
                    machine,
                )?;
            }
        }
        // Borrow tracker hook, if enabled for this allocation.
        if let Some(borrow_tracker) = &mut alloc_extra.borrow_tracker {
            borrow_tracker.before_memory_deallocation(alloc_id, prove_extra, size, machine)?;
        }
        // Remember where the allocation was freed, for later diagnostics.
        if let Some((_, deallocated_at)) = machine.allocation_spans.borrow_mut().get_mut(&alloc_id)
        {
            *deallocated_at = Some(machine.current_span());
        }
        // Release the machine-side bookkeeping (e.g. the reserved address range).
        machine.free_alloc_id(alloc_id, size, align, kind);
        interp_ok(())
    }
1488
1489 #[inline(always)]
1490 fn retag_ptr_value(
1491 ecx: &mut InterpCx<'tcx, Self>,
1492 kind: mir::RetagKind,
1493 val: &ImmTy<'tcx>,
1494 ) -> InterpResult<'tcx, ImmTy<'tcx>> {
1495 if ecx.machine.borrow_tracker.is_some() {
1496 ecx.retag_ptr_value(kind, val)
1497 } else {
1498 interp_ok(val.clone())
1499 }
1500 }
1501
1502 #[inline(always)]
1503 fn retag_place_contents(
1504 ecx: &mut InterpCx<'tcx, Self>,
1505 kind: mir::RetagKind,
1506 place: &PlaceTy<'tcx>,
1507 ) -> InterpResult<'tcx> {
1508 if ecx.machine.borrow_tracker.is_some() {
1509 ecx.retag_place_contents(kind, place)?;
1510 }
1511 interp_ok(())
1512 }
1513
1514 fn protect_in_place_function_argument(
1515 ecx: &mut InterpCx<'tcx, Self>,
1516 place: &MPlaceTy<'tcx>,
1517 ) -> InterpResult<'tcx> {
1518 let protected_place = if ecx.machine.borrow_tracker.is_some() {
1521 ecx.protect_place(place)?
1522 } else {
1523 place.clone()
1525 };
1526 ecx.write_uninit(&protected_place)?;
1531 interp_ok(())
1533 }
1534
    #[inline(always)]
    fn init_frame(
        ecx: &mut InterpCx<'tcx, Self>,
        frame: Frame<'tcx, Provenance>,
    ) -> InterpResult<'tcx, Frame<'tcx, Provenance, FrameExtra<'tcx>>> {
        // If measureme profiling is enabled, start a detached timing event for this call.
        let timing = if let Some(profiler) = ecx.machine.profiler.as_ref() {
            let fn_name = frame.instance().to_string();
            // Cache the profiler string so repeated calls to the same function
            // do not re-allocate it in the profiler.
            let entry = ecx.machine.string_cache.entry(fn_name.clone());
            let name = entry.or_insert_with(|| profiler.alloc_string(&*fn_name));

            Some(profiler.start_recording_interval_event_detached(
                *name,
                measureme::EventId::from_label(*name),
                ecx.active_thread().to_u32(),
            ))
        } else {
            None
        };

        let borrow_tracker = ecx.machine.borrow_tracker.as_ref();

        // Assemble the machine-specific per-frame state.
        let extra = FrameExtra {
            borrow_tracker: borrow_tracker.map(|bt| bt.borrow_mut().new_frame()),
            catch_unwind: None,
            timing,
            is_user_relevant: ecx.machine.is_user_relevant(&frame),
            // Per-frame data race state is only needed by the vector-clock backend.
            data_race: ecx
                .machine
                .data_race
                .as_vclocks_ref()
                .map(|_| data_race::FrameState::default()),
        };

        interp_ok(frame.with_extra(extra))
    }
1571
    fn stack<'a>(
        ecx: &'a InterpCx<'tcx, Self>,
    ) -> &'a [Frame<'tcx, Self::Provenance, Self::FrameExtra>] {
        // Miri keeps one call stack per thread; expose the active thread's stack.
        ecx.active_thread_stack()
    }
1577
    fn stack_mut<'a>(
        ecx: &'a mut InterpCx<'tcx, Self>,
    ) -> &'a mut Vec<Frame<'tcx, Self::Provenance, Self::FrameExtra>> {
        // Mutable counterpart of `stack`: the active thread's call stack.
        ecx.active_thread_stack_mut()
    }
1583
1584 fn before_terminator(ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
1585 ecx.machine.basic_block_count += 1u64; ecx.machine.since_gc += 1;
1587 if let Some(report_progress) = ecx.machine.report_progress {
1589 if ecx.machine.basic_block_count.is_multiple_of(u64::from(report_progress)) {
1590 ecx.emit_diagnostic(NonHaltingDiagnostic::ProgressReport {
1591 block_count: ecx.machine.basic_block_count,
1592 });
1593 }
1594 }
1595
1596 if ecx.machine.gc_interval > 0 && ecx.machine.since_gc >= ecx.machine.gc_interval {
1601 ecx.machine.since_gc = 0;
1602 ecx.run_provenance_gc();
1603 }
1604
1605 ecx.maybe_preempt_active_thread();
1608
1609 ecx.machine.monotonic_clock.tick();
1611
1612 interp_ok(())
1613 }
1614
1615 #[inline(always)]
1616 fn after_stack_push(ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
1617 if ecx.frame().extra.is_user_relevant {
1618 let stack_len = ecx.active_thread_stack().len();
1621 ecx.active_thread_mut().set_top_user_relevant_frame(stack_len - 1);
1622 }
1623 interp_ok(())
1624 }
1625
    fn before_stack_pop(ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
        let frame = ecx.frame();
        // Let the borrow tracker perform its end-of-frame work (e.g. protectors).
        if ecx.machine.borrow_tracker.is_some() {
            ecx.on_stack_pop(frame)?;
        }
        // If the frame being popped was the topmost user-relevant one, recompute
        // the new top. The `1` presumably skips the frame that is about to go
        // away — confirm against `recompute_top_user_relevant_frame`.
        if frame.extra.is_user_relevant {
            ecx.active_thread_mut().recompute_top_user_relevant_frame(1);
        }
        info!("Leaving {}", ecx.frame().instance());
        interp_ok(())
    }
1647
    #[inline(always)]
    fn after_stack_pop(
        ecx: &mut InterpCx<'tcx, Self>,
        frame: Frame<'tcx, Provenance, FrameExtra<'tcx>>,
        unwinding: bool,
    ) -> InterpResult<'tcx, ReturnAction> {
        let res = {
            let mut frame = frame;
            // Take the timing handle out first, since `handle_stack_pop_unwind`
            // consumes `frame.extra`.
            let timing = frame.extra.timing.take();
            let res = ecx.handle_stack_pop_unwind(frame.extra, unwinding);
            if let Some(profiler) = ecx.machine.profiler.as_ref() {
                // When a profiler is active, `init_frame` gave every frame a
                // timing, so this `unwrap` cannot fail.
                profiler.finish_recording_interval_event(timing.unwrap());
            }
            res
        };
        if !ecx.active_thread_stack().is_empty() {
            info!("Continuing in {}", ecx.frame().instance());
        }
        res
    }
1671
1672 fn after_local_read(
1673 ecx: &InterpCx<'tcx, Self>,
1674 frame: &Frame<'tcx, Provenance, FrameExtra<'tcx>>,
1675 local: mir::Local,
1676 ) -> InterpResult<'tcx> {
1677 if let Some(data_race) = &frame.extra.data_race {
1678 data_race.local_read(local, &ecx.machine);
1679 }
1680 interp_ok(())
1681 }
1682
1683 fn after_local_write(
1684 ecx: &mut InterpCx<'tcx, Self>,
1685 local: mir::Local,
1686 storage_live: bool,
1687 ) -> InterpResult<'tcx> {
1688 if let Some(data_race) = &ecx.frame().extra.data_race {
1689 data_race.local_write(local, storage_live, &ecx.machine);
1690 }
1691 interp_ok(())
1692 }
1693
    fn after_local_moved_to_memory(
        ecx: &mut InterpCx<'tcx, Self>,
        local: mir::Local,
        mplace: &MPlaceTy<'tcx>,
    ) -> InterpResult<'tcx> {
        // The allocation backing a freshly spilled local is always concrete.
        let Some(Provenance::Concrete { alloc_id, .. }) = mplace.ptr().provenance else {
            panic!("after_local_allocated should only be called on fresh allocations");
        };
        // Record the local's declaration span as the allocation site, for diagnostics.
        let local_decl = &ecx.frame().body().local_decls[local];
        let span = local_decl.source_info.span;
        ecx.machine.allocation_spans.borrow_mut().insert(alloc_id, (span, None));
        // Transfer the local's data race state onto the new allocation.
        // `get_alloc_extra_mut` splits the borrow so we can access both the
        // allocation extra and the machine at the same time.
        let (alloc_info, machine) = ecx.get_alloc_extra_mut(alloc_id)?;
        if let Some(data_race) =
            &machine.threads.active_thread_stack().last().unwrap().extra.data_race
        {
            data_race.local_moved_to_memory(
                local,
                alloc_info.data_race.as_vclocks_mut().unwrap(),
                machine,
            );
        }
        interp_ok(())
    }
1719
1720 fn get_global_alloc_salt(
1721 ecx: &InterpCx<'tcx, Self>,
1722 instance: Option<ty::Instance<'tcx>>,
1723 ) -> usize {
1724 let unique = if let Some(instance) = instance {
1725 let is_generic = instance
1738 .args
1739 .into_iter()
1740 .any(|arg| !matches!(arg.kind(), ty::GenericArgKind::Lifetime(_)));
1741 let can_be_inlined = matches!(
1742 ecx.tcx.sess.opts.unstable_opts.cross_crate_inline_threshold,
1743 InliningThreshold::Always
1744 ) || !matches!(
1745 ecx.tcx.codegen_instance_attrs(instance.def).inline,
1746 InlineAttr::Never
1747 );
1748 !is_generic && !can_be_inlined
1749 } else {
1750 false
1752 };
1753 if unique {
1755 CTFE_ALLOC_SALT
1756 } else {
1757 ecx.machine.rng.borrow_mut().random_range(0..ADDRS_PER_ANON_GLOBAL)
1758 }
1759 }
1760
    fn cached_union_data_range<'e>(
        ecx: &'e mut InterpCx<'tcx, Self>,
        ty: Ty<'tcx>,
        compute_range: impl FnOnce() -> RangeSet,
    ) -> Cow<'e, RangeSet> {
        // Memoize the data range per union type; `compute_range` only runs on a cache miss.
        Cow::Borrowed(ecx.machine.union_data_ranges.entry(ty).or_insert_with(compute_range))
    }
1768
1769 fn get_default_alloc_params(&self) -> <Self::Bytes as AllocBytes>::AllocParams {
1770 use crate::alloc::MiriAllocParams;
1771
1772 match &self.allocator {
1773 Some(alloc) => MiriAllocParams::Isolated(alloc.clone()),
1774 None => MiriAllocParams::Global,
1775 }
1776 }
1777
    fn enter_trace_span(span: impl FnOnce() -> tracing::Span) -> impl EnteredTraceSpan {
        // With the "tracing" feature, construct and enter the span; the returned
        // guard exits it on drop.
        #[cfg(feature = "tracing")]
        {
            span().entered()
        }
        // Without the feature, never even build the span; `()` serves as the
        // no-op guard.
        #[cfg(not(feature = "tracing"))]
        #[expect(clippy::unused_unit)]
        {
            let _ = span; ()
        }
    }
1790}
1791
/// A callback that can be stored in the machine state and invoked at a later
/// point with mutable access to the interpreter. Implementors must report any
/// provenance they capture via the `VisitProvenance` supertrait.
pub trait MachineCallback<'tcx, T>: VisitProvenance {
    /// Invokes and consumes the callback. `arg` carries operation-specific data
    /// supplied at invocation time.
    fn call(
        self: Box<Self>,
        ecx: &mut InterpCx<'tcx, MiriMachine<'tcx>>,
        arg: T,
    ) -> InterpResult<'tcx>;
}
1801
/// Owned, type-erased [`MachineCallback`]; see the `callback!` macro for a
/// convenient way to construct one.
pub type DynMachineCallback<'tcx, T> = Box<dyn MachineCallback<'tcx, T> + 'tcx>;
1804
/// Builds a `DynMachineCallback`: the listed variables are moved into an ad-hoc
/// struct, and `$body` runs when the callback is invoked with `$this` (the
/// interpreter) and `$arg` in scope.
#[macro_export]
macro_rules! callback {
    (@capture<$tcx:lifetime $(,)? $($lft:lifetime),*>
        { $($name:ident: $type:ty),* $(,)? }
     |$this:ident, $arg:ident: $arg_ty:ty| $body:expr $(,)?) => {{
        // Ad-hoc struct holding the captured state.
        struct Callback<$tcx, $($lft),*> {
            $($name: $type,)*
            _phantom: std::marker::PhantomData<&$tcx ()>,
        }

        // Forward provenance visiting to every capture, so any provenance held
        // by the callback is visible (e.g. to the provenance GC).
        impl<$tcx, $($lft),*> VisitProvenance for Callback<$tcx, $($lft),*> {
            fn visit_provenance(&self, _visit: &mut VisitWith<'_>) {
                $(
                    self.$name.visit_provenance(_visit);
                )*
            }
        }

        impl<$tcx, $($lft),*> MachineCallback<$tcx, $arg_ty> for Callback<$tcx, $($lft),*> {
            fn call(
                self: Box<Self>,
                $this: &mut MiriInterpCx<$tcx>,
                $arg: $arg_ty
            ) -> InterpResult<$tcx> {
                // Unpack the captures into local bindings for `$body`.
                #[allow(unused_variables)]
                let Callback { $($name,)* _phantom } = *self;
                $body
            }
        }

        Box::new(Callback {
            $($name,)*
            _phantom: std::marker::PhantomData
        })
    }};
}