miri/helpers.rs

1use std::num::NonZero;
2use std::sync::Mutex;
3use std::time::Duration;
4use std::{cmp, iter};
5
6use rand::RngCore;
7use rustc_abi::{Align, ExternAbi, FieldIdx, FieldsShape, Size, Variants};
8use rustc_apfloat::Float;
9use rustc_hash::FxHashSet;
10use rustc_hir::Safety;
11use rustc_hir::def::{DefKind, Namespace};
12use rustc_hir::def_id::{CRATE_DEF_INDEX, CrateNum, DefId, LOCAL_CRATE};
13use rustc_index::IndexVec;
14use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags;
15use rustc_middle::middle::dependency_format::Linkage;
16use rustc_middle::middle::exported_symbols::ExportedSymbol;
17use rustc_middle::ty::layout::{LayoutOf, MaybeResult, TyAndLayout};
18use rustc_middle::ty::{self, IntTy, Ty, TyCtxt, UintTy};
19use rustc_session::config::CrateType;
20use rustc_span::{Span, Symbol};
21use rustc_symbol_mangling::mangle_internal_symbol;
22
23use crate::*;
24
25/// Gets an instance for a path.
26///
27/// A `None` namespace indicates we are looking for a module.
28fn try_resolve_did(tcx: TyCtxt<'_>, path: &[&str], namespace: Option<Namespace>) -> Option<DefId> {
29    let _trace = enter_trace_span!("try_resolve_did", ?path);
30
31    /// Yield all children of the given item that have the given name.
32    fn find_children<'tcx: 'a, 'a>(
33        tcx: TyCtxt<'tcx>,
34        item: DefId,
35        name: &'a str,
36    ) -> impl Iterator<Item = DefId> + 'a {
37        let name = Symbol::intern(name);
38        tcx.module_children(item)
39            .iter()
40            .filter(move |item| item.ident.name == name)
41            .map(move |item| item.res.def_id())
42    }
43
44    // Take apart the path: leading crate, a sequence of modules, and potentially a final item.
45    let (&crate_name, path) = path.split_first().expect("paths must have at least one segment");
46    let (modules, item) = if let Some(namespace) = namespace {
47        let (&item_name, modules) =
48            path.split_last().expect("non-module paths must have at least 2 segments");
49        (modules, Some((item_name, namespace)))
50    } else {
51        (path, None)
52    };
53
54    // There may be more than one crate with this name. We try them all.
55    // (This is particularly relevant when running `std` tests as then there are two `std` crates:
56    // the one in the sysroot and the one locally built by `cargo test`.)
57    // FIXME: can we prefer the one from the sysroot?
58    'crates: for krate in
59        tcx.crates(()).iter().filter(|&&krate| tcx.crate_name(krate).as_str() == crate_name)
60    {
61        let mut cur_item = DefId { krate: *krate, index: CRATE_DEF_INDEX };
62        // Go over the modules.
63        for &segment in modules {
64            let Some(next_item) = find_children(tcx, cur_item, segment)
65                .find(|item| tcx.def_kind(item) == DefKind::Mod)
66            else {
67                continue 'crates;
68            };
69            cur_item = next_item;
70        }
71        // Finally, look up the desired item in this module, if any.
72        match item {
73            Some((item_name, namespace)) => {
74                let Some(item) = find_children(tcx, cur_item, item_name)
75                    .find(|item| tcx.def_kind(item).ns() == Some(namespace))
76                else {
77                    continue 'crates;
78                };
79                return Some(item);
80            }
81            None => {
82                // Just return the module.
83                return Some(cur_item);
84            }
85        }
86    }
87    // Item not found in any of the crates with the right name.
88    None
89}
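// Illustrative sketch (not part of the original file): how a concrete call splits a path.
// The paths below are only examples of the calling convention.
//
//     // Item lookup: crate "std", modules ["sync"], item ("Mutex", TypeNS).
//     let mutex_did = try_resolve_did(tcx, &["std", "sync", "Mutex"], Some(Namespace::TypeNS));
//     // Module lookup: a `None` namespace resolves the whole remainder as a module path.
//     let sync_mod = try_resolve_did(tcx, &["std", "sync"], None);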
90
91/// Gets an instance for a path; fails gracefully if the path does not exist.
92pub fn try_resolve_path<'tcx>(
93    tcx: TyCtxt<'tcx>,
94    path: &[&str],
95    namespace: Namespace,
96) -> Option<ty::Instance<'tcx>> {
97    let did = try_resolve_did(tcx, path, Some(namespace))?;
98    Some(ty::Instance::mono(tcx, did))
99}
100
101/// Gets an instance for a path.
102#[track_caller]
103pub fn resolve_path<'tcx>(
104    tcx: TyCtxt<'tcx>,
105    path: &[&str],
106    namespace: Namespace,
107) -> ty::Instance<'tcx> {
108    try_resolve_path(tcx, path, namespace)
109        .unwrap_or_else(|| panic!("failed to find required Rust item: {path:?}"))
110}
111
112/// Gets the layout of a type at a path.
113#[track_caller]
114pub fn path_ty_layout<'tcx>(cx: &impl LayoutOf<'tcx>, path: &[&str]) -> TyAndLayout<'tcx> {
115    let ty = resolve_path(cx.tcx(), path, Namespace::TypeNS).ty(cx.tcx(), cx.typing_env());
116    cx.layout_of(ty).to_result().ok().unwrap()
117}
118
119/// Call `f` for each exported symbol.
120pub fn iter_exported_symbols<'tcx>(
121    tcx: TyCtxt<'tcx>,
122    mut f: impl FnMut(CrateNum, DefId) -> InterpResult<'tcx>,
123) -> InterpResult<'tcx> {
124    // First, the symbols in the local crate. We can't use `exported_symbols` here as that
125    // skips `#[used]` statics (since `reachable_set` skips them in binary crates).
126    // So we walk all HIR items ourselves instead.
127    let crate_items = tcx.hir_crate_items(());
128    for def_id in crate_items.definitions() {
129        let exported = tcx.def_kind(def_id).has_codegen_attrs() && {
130            let codegen_attrs = tcx.codegen_fn_attrs(def_id);
131            codegen_attrs.contains_extern_indicator()
132                || codegen_attrs.flags.contains(CodegenFnAttrFlags::USED_COMPILER)
133                || codegen_attrs.flags.contains(CodegenFnAttrFlags::USED_LINKER)
134        };
135        if exported {
136            f(LOCAL_CRATE, def_id.into())?;
137        }
138    }
139
140    // Next, all our dependencies.
141    // `dependency_formats` includes all the transitive information needed to link a crate,
142    // which is what we need here since we need to dig out `exported_symbols` from all transitive
143    // dependencies.
144    let dependency_formats = tcx.dependency_formats(());
145    // Find the dependencies of the executable we are running.
146    let dependency_format = dependency_formats
147        .get(&CrateType::Executable)
148        .expect("interpreting a non-executable crate");
149    for cnum in dependency_format
150        .iter_enumerated()
151        .filter_map(|(num, &linkage)| (linkage != Linkage::NotLinked).then_some(num))
152    {
153        if cnum == LOCAL_CRATE {
154            continue; // Already handled above
155        }
156
157        // We can ignore `_export_info` here: we are a Rust crate, and everything is exported
158        // from a Rust crate.
159        for &(symbol, _export_info) in tcx.exported_non_generic_symbols(cnum) {
160            if let ExportedSymbol::NonGeneric(def_id) = symbol {
161                f(cnum, def_id)?;
162            }
163        }
164    }
165    interp_ok(())
166}
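// Illustrative sketch (assumption: this only demonstrates the closure shape, it is not actual
// Miri code): collect the names of every exported symbol visible to the interpreted program.
//
//     let mut names = Vec::new();
//     iter_exported_symbols(tcx, |_cnum, def_id| {
//         names.push(tcx.def_path_str(def_id));
//         interp_ok(())
//     })?;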
167
168/// Convert a softfloat type to its corresponding hostfloat type.
169pub trait ToHost {
170    type HostFloat;
171    fn to_host(self) -> Self::HostFloat;
172}
173
174/// Convert a hostfloat type to its corresponding softfloat type.
175pub trait ToSoft {
176    type SoftFloat;
177    fn to_soft(self) -> Self::SoftFloat;
178}
179
180impl ToHost for rustc_apfloat::ieee::Double {
181    type HostFloat = f64;
182
183    fn to_host(self) -> Self::HostFloat {
184        f64::from_bits(self.to_bits().try_into().unwrap())
185    }
186}
187
188impl ToSoft for f64 {
189    type SoftFloat = rustc_apfloat::ieee::Double;
190
191    fn to_soft(self) -> Self::SoftFloat {
192        Float::from_bits(self.to_bits().into())
193    }
194}
195
196impl ToHost for rustc_apfloat::ieee::Single {
197    type HostFloat = f32;
198
199    fn to_host(self) -> Self::HostFloat {
200        f32::from_bits(self.to_bits().try_into().unwrap())
201    }
202}
203
204impl ToSoft for f32 {
205    type SoftFloat = rustc_apfloat::ieee::Single;
206
207    fn to_soft(self) -> Self::SoftFloat {
208        Float::from_bits(self.to_bits().into())
209    }
210}
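// Illustrative sketch: these conversions only reinterpret the bit pattern, so a
// host -> soft -> host round trip is bit-exact.
//
//     let soft = 1.5f64.to_soft(); // rustc_apfloat::ieee::Double
//     assert_eq!(soft.to_host(), 1.5f64);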
211
212impl<'tcx> EvalContextExt<'tcx> for crate::MiriInterpCx<'tcx> {}
213pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> {
214    /// Checks if the given crate/module exists.
215    fn have_module(&self, path: &[&str]) -> bool {
216        try_resolve_did(*self.eval_context_ref().tcx, path, None).is_some()
217    }
218
219    /// Evaluates the global at the specified path and returns its place.
220    fn eval_path(&self, path: &[&str]) -> MPlaceTy<'tcx> {
221        let this = self.eval_context_ref();
222        let instance = resolve_path(*this.tcx, path, Namespace::ValueNS);
223        // We don't give a span -- this isn't actually used directly by the program anyway.
224        this.eval_global(instance).unwrap_or_else(|err| {
225            panic!("failed to evaluate required Rust item: {path:?}\n{err:?}")
226        })
227    }
228    fn eval_path_scalar(&self, path: &[&str]) -> Scalar {
229        let this = self.eval_context_ref();
230        let val = this.eval_path(path);
231        this.read_scalar(&val)
232            .unwrap_or_else(|err| panic!("failed to read required Rust item: {path:?}\n{err:?}"))
233    }
234
235    /// Helper function to get a `libc` constant as a `Scalar`.
236    fn eval_libc(&self, name: &str) -> Scalar {
237        if self.eval_context_ref().tcx.sess.target.os == "windows" {
238            panic!(
239                "`libc` crate is not reliably available on Windows targets; Miri should not use it there"
240            );
241        }
242        self.eval_path_scalar(&["libc", name])
243    }
244
245    /// Helper function to get a `libc` constant as an `i32`.
246    fn eval_libc_i32(&self, name: &str) -> i32 {
247        // TODO: Cache the result.
248        self.eval_libc(name).to_i32().unwrap_or_else(|_err| {
249            panic!("required libc item has unexpected type (not `i32`): {name}")
250        })
251    }
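    // Illustrative sketch of use in a shim (the constant name is just an example):
    //
    //     let einval = this.eval_libc_i32("EINVAL");
    //     // ...then compare a guest-supplied value against `einval`, or return it as an errno.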
252
253    /// Helper function to get a `libc` constant as a `u32`.
254    fn eval_libc_u32(&self, name: &str) -> u32 {
255        // TODO: Cache the result.
256        self.eval_libc(name).to_u32().unwrap_or_else(|_err| {
257            panic!("required libc item has unexpected type (not `u32`): {name}")
258        })
259    }
260
261    /// Helper function to get a `libc` constant as a `u64`.
262    fn eval_libc_u64(&self, name: &str) -> u64 {
263        // TODO: Cache the result.
264        self.eval_libc(name).to_u64().unwrap_or_else(|_err| {
265            panic!("required libc item has unexpected type (not `u64`): {name}")
266        })
267    }
268
269    /// Helper function to get a `windows` constant as a `Scalar`.
270    fn eval_windows(&self, module: &str, name: &str) -> Scalar {
271        self.eval_context_ref().eval_path_scalar(&["std", "sys", "pal", "windows", module, name])
272    }
273
274    /// Helper function to get a `windows` constant as a `u32`.
275    fn eval_windows_u32(&self, module: &str, name: &str) -> u32 {
276        // TODO: Cache the result.
277        self.eval_windows(module, name).to_u32().unwrap_or_else(|_err| {
278            panic!("required Windows item has unexpected type (not `u32`): {module}::{name}")
279        })
280    }
281
282    /// Helper function to get a `windows` constant as a `u64`.
283    fn eval_windows_u64(&self, module: &str, name: &str) -> u64 {
284        // TODO: Cache the result.
285        self.eval_windows(module, name).to_u64().unwrap_or_else(|_err| {
286            panic!("required Windows item has unexpected type (not `u64`): {module}::{name}")
287        })
288    }
289
290    /// Helper function to get the `TyAndLayout` of a `libc` type
291    fn libc_ty_layout(&self, name: &str) -> TyAndLayout<'tcx> {
292        let this = self.eval_context_ref();
293        if this.tcx.sess.target.os == "windows" {
294            panic!(
295                "`libc` crate is not reliably available on Windows targets; Miri should not use it there"
296            );
297        }
298        path_ty_layout(this, &["libc", name])
299    }
300
301    /// Helper function to get the `TyAndLayout` of a `windows` type
302    fn windows_ty_layout(&self, name: &str) -> TyAndLayout<'tcx> {
303        let this = self.eval_context_ref();
304        path_ty_layout(this, &["std", "sys", "pal", "windows", "c", name])
305    }
306
307    /// Helper function to get the `TyAndLayout` of an array whose element type is the given `libc` type.
308    fn libc_array_ty_layout(&self, name: &str, size: u64) -> TyAndLayout<'tcx> {
309        let this = self.eval_context_ref();
310        let elem_ty_layout = this.libc_ty_layout(name);
311        let array_ty = Ty::new_array(*this.tcx, elem_ty_layout.ty, size);
312        this.layout_of(array_ty).unwrap()
313    }
314
315    /// Project to the given *named* field (which must be a struct or union type).
316    fn try_project_field_named<P: Projectable<'tcx, Provenance>>(
317        &self,
318        base: &P,
319        name: &str,
320    ) -> InterpResult<'tcx, Option<P>> {
321        let this = self.eval_context_ref();
322        let adt = base.layout().ty.ty_adt_def().unwrap();
323        for (idx, field) in adt.non_enum_variant().fields.iter_enumerated() {
324            if field.name.as_str() == name {
325                return interp_ok(Some(this.project_field(base, idx)?));
326            }
327        }
328        interp_ok(None)
329    }
330
331    /// Project to the given *named* field (which must be a struct or union type).
332    fn project_field_named<P: Projectable<'tcx, Provenance>>(
333        &self,
334        base: &P,
335        name: &str,
336    ) -> InterpResult<'tcx, P> {
337        interp_ok(
338            self.try_project_field_named(base, name)?
339                .unwrap_or_else(|| bug!("no field named {} in type {}", name, base.layout().ty)),
340        )
341    }
342
343    /// Write an int of the appropriate size to `dest`. The target type may be signed or unsigned;
344    /// we try to do the right thing either way. `i128` can fit all integer types except for `u128`, so
345    /// this method is fine for almost all integer types.
346    fn write_int(
347        &mut self,
348        i: impl Into<i128>,
349        dest: &impl Writeable<'tcx, Provenance>,
350    ) -> InterpResult<'tcx> {
351        assert!(
352            dest.layout().backend_repr.is_scalar(),
353            "write_int on non-scalar type {}",
354            dest.layout().ty
355        );
356        let val = if dest.layout().backend_repr.is_signed() {
357            Scalar::from_int(i, dest.layout().size)
358        } else {
359            // `unwrap` can only fail here if `i` is negative
360            Scalar::from_uint(u128::try_from(i.into()).unwrap(), dest.layout().size)
361        };
362        self.eval_context_mut().write_scalar(val, dest)
363    }
364
365    /// Write the first N fields of the given place.
366    fn write_int_fields(
367        &mut self,
368        values: &[i128],
369        dest: &impl Writeable<'tcx, Provenance>,
370    ) -> InterpResult<'tcx> {
371        let this = self.eval_context_mut();
372        for (idx, &val) in values.iter().enumerate() {
373            let idx = FieldIdx::from_usize(idx);
374            let field = this.project_field(dest, idx)?;
375            this.write_int(val, &field)?;
376        }
377        interp_ok(())
378    }
379
380    /// Write the given fields of the given place.
381    fn write_int_fields_named(
382        &mut self,
383        values: &[(&str, i128)],
384        dest: &impl Writeable<'tcx, Provenance>,
385    ) -> InterpResult<'tcx> {
386        let this = self.eval_context_mut();
387        for &(name, val) in values.iter() {
388            let field = this.project_field_named(dest, name)?;
389            this.write_int(val, &field)?;
390        }
391        interp_ok(())
392    }
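    // Illustrative sketch (the field names are examples and depend on the target struct):
    // fill a guest `timespec`-like struct field by field.
    //
    //     this.write_int_fields_named(
    //         &[("tv_sec", 1), ("tv_nsec", 500_000_000)],
    //         &timespec_place,
    //     )?;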
393
394    /// Write a 0 of the appropriate size to `dest`.
395    fn write_null(&mut self, dest: &impl Writeable<'tcx, Provenance>) -> InterpResult<'tcx> {
396        self.write_int(0, dest)
397    }
398
399    /// Test if this pointer equals 0.
400    fn ptr_is_null(&self, ptr: Pointer) -> InterpResult<'tcx, bool> {
401        interp_ok(ptr.addr().bytes() == 0)
402    }
403
404    /// Generate some random bytes, and write them to `dest`.
405    fn gen_random(&mut self, ptr: Pointer, len: u64) -> InterpResult<'tcx> {
406        // Some programs pass in a null pointer and a length of 0
407        // to their platform's random-generation function (e.g. getrandom())
408        // on Linux. For compatibility with these programs, we don't perform
409        // any additional checks - it's okay if the pointer is invalid,
410        // since we wouldn't actually be writing to it.
411        if len == 0 {
412            return interp_ok(());
413        }
414        let this = self.eval_context_mut();
415
416        let mut data = vec![0; usize::try_from(len).unwrap()];
417
418        if this.machine.communicate() {
419            // Fill the buffer using the host's rng.
420            getrandom::fill(&mut data)
421                .map_err(|err| err_unsup_format!("host getrandom failed: {}", err))?;
422        } else {
423            let rng = this.machine.rng.get_mut();
424            rng.fill_bytes(&mut data);
425        }
426
427        this.write_bytes_ptr(ptr, data.iter().copied())
428    }
429
430    /// Call a function: Push the stack frame and pass the arguments.
431    /// For now, arguments must be scalars (so that the caller does not have to know the layout).
432    ///
433    /// If you do not provide a return place, a dangling zero-sized place will be created
434    /// for your convenience. This is only valid if the return type is `()`.
435    fn call_function(
436        &mut self,
437        f: ty::Instance<'tcx>,
438        caller_abi: ExternAbi,
439        args: &[ImmTy<'tcx>],
440        dest: Option<&MPlaceTy<'tcx>>,
441        cont: ReturnContinuation,
442    ) -> InterpResult<'tcx> {
443        let this = self.eval_context_mut();
444
445        // Get MIR.
446        let mir = this.load_mir(f.def, None)?;
447        let dest = match dest {
448            Some(dest) => dest.clone(),
449            None => MPlaceTy::fake_alloc_zst(this.machine.layouts.unit),
450        };
451
452        // Construct a function pointer type representing the caller perspective.
453        let sig = this.tcx.mk_fn_sig(
454            args.iter().map(|a| a.layout.ty),
455            dest.layout.ty,
456            /*c_variadic*/ false,
457            Safety::Safe,
458            caller_abi,
459        );
460        let caller_fn_abi = this.fn_abi_of_fn_ptr(ty::Binder::dummy(sig), ty::List::empty())?;
461
462        // This will also show proper errors if there is any ABI mismatch.
463        this.init_stack_frame(
464            f,
465            mir,
466            caller_fn_abi,
467            &args.iter().map(|a| FnArg::Copy(a.clone().into())).collect::<Vec<_>>(),
468            /*with_caller_location*/ false,
469            &dest.into(),
470            cont,
471        )
472    }
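    // Illustrative sketch of a call with one scalar argument (`instance`, `arg` and `cont` are
    // placeholders; see `ReturnContinuation` for the actual continuation variants):
    //
    //     this.call_function(
    //         instance,
    //         ExternAbi::Rust,
    //         &[ImmTy::from_scalar(arg, this.machine.layouts.i32)],
    //         None, // only valid because the callee returns `()`
    //         cont,
    //     )?;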
473
474    /// Visits the memory covered by `place`, sensitive to freezing: the second parameter
475    /// of `action` will be `true` if the visited range is frozen, and `false` if it is
476    /// inside an `UnsafeCell`. The range passed to `action` is relative to `place`.
477    fn visit_freeze_sensitive(
478        &self,
479        place: &MPlaceTy<'tcx>,
480        size: Size,
481        mut action: impl FnMut(AllocRange, bool) -> InterpResult<'tcx>,
482    ) -> InterpResult<'tcx> {
483        let this = self.eval_context_ref();
484        trace!("visit_frozen(place={:?}, size={:?})", *place, size);
485        debug_assert_eq!(
486            size,
487            this.size_and_align_of_val(place)?
488                .map(|(size, _)| size)
489                .unwrap_or_else(|| place.layout.size)
490        );
491        // Store how far we proceeded into the place so far. Everything to the left of
492        // this offset has already been handled, in the sense that the frozen parts
493        // have had `action` called on them.
494        let start_addr = place.ptr().addr();
495        let mut cur_addr = start_addr;
496        // Called when we detect an `UnsafeCell` at the given offset and size.
497        // Calls `action` and advances `cur_addr`.
498        let mut unsafe_cell_action = |unsafe_cell_ptr: &Pointer, unsafe_cell_size: Size| {
499            // We assume that we are given the fields in increasing offset order,
500            // and nothing else changes.
501            let unsafe_cell_addr = unsafe_cell_ptr.addr();
502            assert!(unsafe_cell_addr >= cur_addr);
503            let frozen_size = unsafe_cell_addr - cur_addr;
504            // Everything between `cur_addr` and this `UnsafeCell` is frozen.
505            if frozen_size != Size::ZERO {
506                action(alloc_range(cur_addr - start_addr, frozen_size), /*frozen*/ true)?;
507            }
508            cur_addr += frozen_size;
509            // This `UnsafeCell` is NOT frozen.
510            if unsafe_cell_size != Size::ZERO {
511                action(
512                    alloc_range(cur_addr - start_addr, unsafe_cell_size),
513                    /*frozen*/ false,
514                )?;
515            }
516            cur_addr += unsafe_cell_size;
517            // Done
518            interp_ok(())
519        };
520        // Run a visitor
521        {
522            let mut visitor = UnsafeCellVisitor {
523                ecx: this,
524                unsafe_cell_action: |place| {
525                    trace!("unsafe_cell_action on {:?}", place.ptr());
526                    // We need a size to go on.
527                    let unsafe_cell_size = this
528                        .size_and_align_of_val(place)?
529                        .map(|(size, _)| size)
530                        // for extern types, just cover what we can
531                        .unwrap_or_else(|| place.layout.size);
532                    // Now handle this `UnsafeCell`, unless it is empty.
533                    if unsafe_cell_size != Size::ZERO {
534                        unsafe_cell_action(&place.ptr(), unsafe_cell_size)
535                    } else {
536                        interp_ok(())
537                    }
538                },
539            };
540            visitor.visit_value(place)?;
541        }
542        // The part between the last `UnsafeCell` and the end of the place is also frozen.
543        // So pretend there is a 0-sized `UnsafeCell` at the end.
544        unsafe_cell_action(&place.ptr().wrapping_offset(size, this), Size::ZERO)?;
545        // Done!
546        return interp_ok(());
547
548        /// Visiting the memory covered by a `MemPlace`, being aware of
549        /// whether we are inside an `UnsafeCell` or not.
550        struct UnsafeCellVisitor<'ecx, 'tcx, F>
551        where
552            F: FnMut(&MPlaceTy<'tcx>) -> InterpResult<'tcx>,
553        {
554            ecx: &'ecx MiriInterpCx<'tcx>,
555            unsafe_cell_action: F,
556        }
557
558        impl<'ecx, 'tcx, F> ValueVisitor<'tcx, MiriMachine<'tcx>> for UnsafeCellVisitor<'ecx, 'tcx, F>
559        where
560            F: FnMut(&MPlaceTy<'tcx>) -> InterpResult<'tcx>,
561        {
562            type V = MPlaceTy<'tcx>;
563
564            #[inline(always)]
565            fn ecx(&self) -> &MiriInterpCx<'tcx> {
566                self.ecx
567            }
568
569            fn aggregate_field_iter(
570                memory_index: &IndexVec<FieldIdx, u32>,
571            ) -> impl Iterator<Item = FieldIdx> + 'static {
572                let inverse_memory_index = memory_index.invert_bijective_mapping();
573                inverse_memory_index.into_iter()
574            }
575
576            // Hook to detect `UnsafeCell`.
577            fn visit_value(&mut self, v: &MPlaceTy<'tcx>) -> InterpResult<'tcx> {
578                trace!("UnsafeCellVisitor: {:?} {:?}", *v, v.layout.ty);
579                let is_unsafe_cell = match v.layout.ty.kind() {
580                    ty::Adt(adt, _) =>
581                        Some(adt.did()) == self.ecx.tcx.lang_items().unsafe_cell_type(),
582                    _ => false,
583                };
584                if is_unsafe_cell {
585                    // We do not have to recurse further, this is an `UnsafeCell`.
586                    (self.unsafe_cell_action)(v)
587                } else if self.ecx.type_is_freeze(v.layout.ty) {
588                    // This is `Freeze`, there cannot be an `UnsafeCell`
589                    interp_ok(())
590                } else if matches!(v.layout.fields, FieldsShape::Union(..)) {
591                    // A (non-frozen) union. We fall back to whatever the type says.
592                    (self.unsafe_cell_action)(v)
593                } else {
594                    // We want to not actually read from memory for this visit. So, before
595                    // walking this value, we have to make sure it is not a
596                    // `Variants::Multiple`.
597                    // FIXME: the current logic here is layout-dependent, so enums with
598                    // multiple variants where all but 1 are uninhabited will be recursed into.
599                    // Is that truly what we want?
600                    match v.layout.variants {
601                        Variants::Multiple { .. } => {
602                            // A multi-variant enum, or coroutine, or so.
603                            // Treat this like a union: without reading from memory,
604                            // we cannot determine the variant we are in. Reading from
605                            // memory would be subject to Stacked Borrows rules, leading
606                            // to all sorts of "funny" recursion.
607                            // We only end up here if the type is *not* freeze, so we just call the
608                            // `UnsafeCell` action.
609                            (self.unsafe_cell_action)(v)
610                        }
611                        Variants::Single { .. } | Variants::Empty => {
612                            // Proceed further, try to find where exactly that `UnsafeCell`
613                            // is hiding.
614                            self.walk_value(v)
615                        }
616                    }
617                }
618            }
619
620            fn visit_union(
621                &mut self,
622                _v: &MPlaceTy<'tcx>,
623                _fields: NonZero<usize>,
624            ) -> InterpResult<'tcx> {
625                bug!("we should have already handled unions in `visit_value`")
626            }
627        }
628    }
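    // Illustrative sketch of how a caller consumes this visitor (the closure body is a
    // placeholder):
    //
    //     this.visit_freeze_sensitive(&place, size, |range, frozen| {
    //         // `range` is relative to `place`; `frozen` is false exactly for `UnsafeCell` data.
    //         interp_ok(())
    //     })?;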
629
630    /// Helper function used inside the shims of foreign functions to check that isolation is
631    /// disabled. It returns an error using the `name` of the foreign function if this is not the
632    /// case.
633    fn check_no_isolation(&self, name: &str) -> InterpResult<'tcx> {
634        if !self.eval_context_ref().machine.communicate() {
635            self.reject_in_isolation(name, RejectOpWith::Abort)?;
636        }
637        interp_ok(())
638    }
639
640    /// Helper function used inside the shims of foreign functions which reject the op
641    /// when isolation is enabled. It is used to print a warning/backtrace about the rejection.
642    fn reject_in_isolation(&self, op_name: &str, reject_with: RejectOpWith) -> InterpResult<'tcx> {
643        let this = self.eval_context_ref();
644        match reject_with {
645            RejectOpWith::Abort => isolation_abort_error(op_name),
646            RejectOpWith::WarningWithoutBacktrace => {
647                // Deduplicate these warnings *by shim* (not by span)
648                static DEDUP: Mutex<FxHashSet<String>> =
649                    Mutex::new(FxHashSet::with_hasher(rustc_hash::FxBuildHasher));
650                let mut emitted_warnings = DEDUP.lock().unwrap();
651                if !emitted_warnings.contains(op_name) {
652                    // First time we are seeing this.
653                    emitted_warnings.insert(op_name.to_owned());
654                    this.tcx
655                        .dcx()
656                        .warn(format!("{op_name} was made to return an error due to isolation"));
657                }
658
659                interp_ok(())
660            }
661            RejectOpWith::Warning => {
662                this.emit_diagnostic(NonHaltingDiagnostic::RejectedIsolatedOp(op_name.to_string()));
663                interp_ok(())
664            }
665            RejectOpWith::NoWarning => interp_ok(()), // no warning
666        }
667    }
668
669    /// Helper function used inside the shims of foreign functions to assert that the target OS
670    /// is `target_os`. It panics showing a message with the `name` of the foreign function
671    /// if this is not the case.
672    fn assert_target_os(&self, target_os: &str, name: &str) {
673        assert_eq!(
674            self.eval_context_ref().tcx.sess.target.os,
675            target_os,
676            "`{name}` is only available on the `{target_os}` target OS",
677        )
678    }
679
680    /// Helper function used inside shims of foreign functions to check that the target OS
681    /// is one of `target_oses`. It returns an error containing the `name` of the foreign function
682    /// in a message if this is not the case.
683    fn check_target_os(&self, target_oses: &[&str], name: Symbol) -> InterpResult<'tcx> {
684        let target_os = self.eval_context_ref().tcx.sess.target.os.as_ref();
685        if !target_oses.contains(&target_os) {
686            throw_unsup_format!("`{name}` is not supported on {target_os}");
687        }
688        interp_ok(())
689    }
690
691    /// Helper function used inside the shims of foreign functions to assert that the target OS
692    /// is part of the UNIX family. It panics showing a message with the `name` of the foreign function
693    /// if this is not the case.
694    fn assert_target_os_is_unix(&self, name: &str) {
695        assert!(self.target_os_is_unix(), "`{name}` is only available for unix targets");
696    }
697
698    fn target_os_is_unix(&self) -> bool {
699        self.eval_context_ref().tcx.sess.target.families.iter().any(|f| f == "unix")
700    }
701
702    /// Dereference a pointer operand to a place using `layout` instead of the pointer's declared type
703    fn deref_pointer_as(
704        &self,
705        op: &impl Projectable<'tcx, Provenance>,
706        layout: TyAndLayout<'tcx>,
707    ) -> InterpResult<'tcx, MPlaceTy<'tcx>> {
708        let this = self.eval_context_ref();
709        let ptr = this.read_pointer(op)?;
710        interp_ok(this.ptr_to_mplace(ptr, layout))
711    }
712
713    /// Calculates the MPlaceTy given the offset and layout of an access on an operand
714    fn deref_pointer_and_offset(
715        &self,
716        op: &impl Projectable<'tcx, Provenance>,
717        offset: u64,
718        base_layout: TyAndLayout<'tcx>,
719        value_layout: TyAndLayout<'tcx>,
720    ) -> InterpResult<'tcx, MPlaceTy<'tcx>> {
721        let this = self.eval_context_ref();
722        let op_place = this.deref_pointer_as(op, base_layout)?;
723        let offset = Size::from_bytes(offset);
724
725        // Ensure that the access is within bounds.
726        assert!(base_layout.size >= offset + value_layout.size);
727        let value_place = op_place.offset(offset, value_layout, this)?;
728        interp_ok(value_place)
729    }
730
731    fn deref_pointer_and_read(
732        &self,
733        op: &impl Projectable<'tcx, Provenance>,
734        offset: u64,
735        base_layout: TyAndLayout<'tcx>,
736        value_layout: TyAndLayout<'tcx>,
737    ) -> InterpResult<'tcx, Scalar> {
738        let this = self.eval_context_ref();
739        let value_place = this.deref_pointer_and_offset(op, offset, base_layout, value_layout)?;
740        this.read_scalar(&value_place)
741    }
742
743    fn deref_pointer_and_write(
744        &mut self,
745        op: &impl Projectable<'tcx, Provenance>,
746        offset: u64,
747        value: impl Into<Scalar>,
748        base_layout: TyAndLayout<'tcx>,
749        value_layout: TyAndLayout<'tcx>,
750    ) -> InterpResult<'tcx, ()> {
751        let this = self.eval_context_mut();
752        let value_place = this.deref_pointer_and_offset(op, offset, base_layout, value_layout)?;
753        this.write_scalar(value, &value_place)
754    }
755
756    /// Parse a `timespec` struct and return it as a `std::time::Duration`. It returns `None`
757    /// if the value in the `timespec` struct is invalid. Some libc functions will return
758    /// `EINVAL` in this case.
759    fn read_timespec(&mut self, tp: &MPlaceTy<'tcx>) -> InterpResult<'tcx, Option<Duration>> {
760        let this = self.eval_context_mut();
761        let seconds_place = this.project_field(tp, FieldIdx::ZERO)?;
762        let seconds_scalar = this.read_scalar(&seconds_place)?;
763        let seconds = seconds_scalar.to_target_isize(this)?;
764        let nanoseconds_place = this.project_field(tp, FieldIdx::ONE)?;
765        let nanoseconds_scalar = this.read_scalar(&nanoseconds_place)?;
766        let nanoseconds = nanoseconds_scalar.to_target_isize(this)?;
767
768        interp_ok(
769            try {
770                // tv_sec must be non-negative.
771                let seconds: u64 = seconds.try_into().ok()?;
772                // tv_nsec must be non-negative.
773                let nanoseconds: u32 = nanoseconds.try_into().ok()?;
774                if nanoseconds >= 1_000_000_000 {
775                    // tv_nsec must not be greater than 999,999,999.
776                    None?
777                }
778                Duration::new(seconds, nanoseconds)
779            },
780        )
781    }
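    // Worked example (sketch): a guest `timespec` of `{ tv_sec: 1, tv_nsec: 500_000_000 }`
    // yields `Some(Duration::new(1, 500_000_000))`, i.e. 1.5 seconds. A negative `tv_sec`,
    // a negative `tv_nsec`, or `tv_nsec >= 1_000_000_000` all yield `None`.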
782
783    /// Read bytes from a byte slice.
784    fn read_byte_slice<'a>(&'a self, slice: &ImmTy<'tcx>) -> InterpResult<'tcx, &'a [u8]>
785    where
786        'tcx: 'a,
787    {
788        let this = self.eval_context_ref();
789        let (ptr, len) = slice.to_scalar_pair();
790        let ptr = ptr.to_pointer(this)?;
791        let len = len.to_target_usize(this)?;
792        let bytes = this.read_bytes_ptr_strip_provenance(ptr, Size::from_bytes(len))?;
793        interp_ok(bytes)
794    }
795
796    /// Read a sequence of bytes until the first null terminator.
797    fn read_c_str<'a>(&'a self, ptr: Pointer) -> InterpResult<'tcx, &'a [u8]>
798    where
799        'tcx: 'a,
800    {
801        let this = self.eval_context_ref();
802        let size1 = Size::from_bytes(1);
803
804        // Step 1: determine the length.
805        let mut len = Size::ZERO;
806        loop {
807            // FIXME: We are re-getting the allocation each time around the loop.
808            // Would be nice if we could somehow "extend" an existing AllocRange.
809            let alloc = this.get_ptr_alloc(ptr.wrapping_offset(len, this), size1)?.unwrap(); // not a ZST, so we will get a result
810            let byte = alloc.read_integer(alloc_range(Size::ZERO, size1))?.to_u8()?;
811            if byte == 0 {
812                break;
813            } else {
814                len += size1;
815            }
816        }
817
818        // Step 2: get the bytes.
819        this.read_bytes_ptr_strip_provenance(ptr, len)
820    }
821
822    /// Helper function to write a sequence of bytes with an added null-terminator, which is what
823    /// the Unix APIs usually handle. This function returns `Ok((false, length))` without trying
824    /// to write if `size` is not large enough to fit the contents of `c_str` plus a null
825    /// terminator. It returns `Ok((true, length))` if the writing process was successful. The
826    /// string length returned does include the null terminator.
827    fn write_c_str(
828        &mut self,
829        c_str: &[u8],
830        ptr: Pointer,
831        size: u64,
832    ) -> InterpResult<'tcx, (bool, u64)> {
833        // If `size` is smaller than or equal to `c_str.len()`, writing `c_str` plus the required null
834        // terminator to memory using the `ptr` pointer would cause an out-of-bounds access.
835        let string_length = u64::try_from(c_str.len()).unwrap();
836        let string_length = string_length.strict_add(1);
837        if size < string_length {
838            return interp_ok((false, string_length));
839        }
840        self.eval_context_mut()
841            .write_bytes_ptr(ptr, c_str.iter().copied().chain(iter::once(0u8)))?;
842        interp_ok((true, string_length))
843    }
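    // Worked example (sketch): writing `b"hi"` requires 3 bytes including the null terminator.
    // With `size == 3` this stores `68 69 00` and returns `(true, 3)`; with `size == 2` nothing
    // is written and `(false, 3)` is returned, so the caller can report the required size.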
844
845    /// Helper function to read a sequence of unsigned integers of the given size and alignment
846    /// until the first null terminator.
847    fn read_c_str_with_char_size<T>(
848        &self,
849        mut ptr: Pointer,
850        size: Size,
851        align: Align,
852    ) -> InterpResult<'tcx, Vec<T>>
853    where
854        T: TryFrom<u128>,
855        <T as TryFrom<u128>>::Error: std::fmt::Debug,
856    {
857        assert_ne!(size, Size::ZERO);
858
859        let this = self.eval_context_ref();
860
861        this.check_ptr_align(ptr, align)?;
862
863        let mut wchars = Vec::new();
864        loop {
865            // FIXME: We are re-getting the allocation each time around the loop.
866            // Would be nice if we could somehow "extend" an existing AllocRange.
867            let alloc = this.get_ptr_alloc(ptr, size)?.unwrap(); // not a ZST, so we will get a result
868            let wchar_int = alloc.read_integer(alloc_range(Size::ZERO, size))?.to_bits(size)?;
869            if wchar_int == 0 {
870                break;
871            } else {
872                wchars.push(wchar_int.try_into().unwrap());
873                ptr = ptr.wrapping_offset(size, this);
874            }
875        }
876
877        interp_ok(wchars)
878    }
879
880    /// Read a sequence of u16 until the first null terminator.
881    fn read_wide_str(&self, ptr: Pointer) -> InterpResult<'tcx, Vec<u16>> {
882        self.read_c_str_with_char_size(ptr, Size::from_bytes(2), Align::from_bytes(2).unwrap())
883    }
884
885    /// Helper function to write a sequence of u16 with an added 0x0000-terminator, which is what
886    /// the Windows APIs usually handle. This function returns `Ok((false, length))` without trying
887    /// to write if `size` is not large enough to fit the contents of `os_string` plus a null
888    /// terminator. It returns `Ok((true, length))` if the writing process was successful. The
889    /// string length returned does include the null terminator. Length is measured in units of
890    /// `u16`.
891    fn write_wide_str(
892        &mut self,
893        wide_str: &[u16],
894        ptr: Pointer,
895        size: u64,
896    ) -> InterpResult<'tcx, (bool, u64)> {
897        // If `size` is smaller than or equal to `wide_str.len()`, writing `wide_str` plus the required
898        // 0x0000 terminator to memory would cause an out-of-bounds access.
899        let string_length = u64::try_from(wide_str.len()).unwrap();
900        let string_length = string_length.strict_add(1);
901        if size < string_length {
902            return interp_ok((false, string_length));
903        }
904
905        // Store the UTF-16 string.
906        let size2 = Size::from_bytes(2);
907        let this = self.eval_context_mut();
908        this.check_ptr_align(ptr, Align::from_bytes(2).unwrap())?;
909        let mut alloc = this.get_ptr_alloc_mut(ptr, size2 * string_length)?.unwrap(); // not a ZST, so we will get a result
910        for (offset, wchar) in wide_str.iter().copied().chain(iter::once(0x0000)).enumerate() {
911            let offset = u64::try_from(offset).unwrap();
912            alloc.write_scalar(alloc_range(size2 * offset, size2), Scalar::from_u16(wchar))?;
913        }
914        interp_ok((true, string_length))
915    }
916
917    /// Read a sequence of wchar_t until the first null terminator.
918    /// Always returns a `Vec<u32>` no matter the size of `wchar_t`.
919    fn read_wchar_t_str(&self, ptr: Pointer) -> InterpResult<'tcx, Vec<u32>> {
920        let this = self.eval_context_ref();
921        let wchar_t = if this.tcx.sess.target.os == "windows" {
922            // We don't have libc on Windows so we have to hard-code the type ourselves.
923            this.machine.layouts.u16
924        } else {
925            this.libc_ty_layout("wchar_t")
926        };
927        self.read_c_str_with_char_size(ptr, wchar_t.size, wchar_t.align.abi)
928    }
929
930    fn frame_in_std(&self) -> bool {
931        let this = self.eval_context_ref();
932        let frame = this.frame();
933        // Make an attempt to get at the instance of the function this is inlined from.
934        let instance: Option<_> = try {
935            let scope = frame.current_source_info()?.scope;
936            let inlined_parent = frame.body().source_scopes[scope].inlined_parent_scope?;
937            let source = &frame.body().source_scopes[inlined_parent];
938            source.inlined.expect("inlined_parent_scope points to scope without inline info").0
939        };
940        // Fall back to the instance of the function itself.
941        let instance = instance.unwrap_or(frame.instance());
942        // Now check the crate it is in. We could try to be clever here and e.g. check if this is
943        // the same crate as `start_fn`, but that would not work for running std tests in Miri, so
944        // we'd need some more hacks anyway. So we just check the name of the crate. If someone
945        // calls their crate `std` then we'll just let them keep the pieces.
946        let frame_crate = this.tcx.def_path(instance.def_id()).krate;
947        let crate_name = this.tcx.crate_name(frame_crate);
948        let crate_name = crate_name.as_str();
949        // On miri-test-libstd, the name of the crate is different.
950        crate_name == "std" || crate_name == "std_miri_test"
951    }
952
953    /// Mark a machine allocation that was just created as immutable.
954    fn mark_immutable(&mut self, mplace: &MPlaceTy<'tcx>) {
955        let this = self.eval_context_mut();
956        // This got just allocated, so there definitely is a pointer here.
957        let provenance = mplace.ptr().into_pointer_or_addr().unwrap().provenance;
958        this.alloc_mark_immutable(provenance.get_alloc_id().unwrap()).unwrap();
959    }
960
961    /// Returns an integer type that is twice as wide as `ty`.
962    fn get_twice_wide_int_ty(&self, ty: Ty<'tcx>) -> Ty<'tcx> {
963        let this = self.eval_context_ref();
964        match ty.kind() {
965            // Unsigned
966            ty::Uint(UintTy::U8) => this.tcx.types.u16,
967            ty::Uint(UintTy::U16) => this.tcx.types.u32,
968            ty::Uint(UintTy::U32) => this.tcx.types.u64,
969            ty::Uint(UintTy::U64) => this.tcx.types.u128,
970            // Signed
971            ty::Int(IntTy::I8) => this.tcx.types.i16,
972            ty::Int(IntTy::I16) => this.tcx.types.i32,
973            ty::Int(IntTy::I32) => this.tcx.types.i64,
974            ty::Int(IntTy::I64) => this.tcx.types.i128,
975            _ => span_bug!(this.cur_span(), "unexpected type: {ty:?}"),
976        }
977    }
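    // Illustrative sketch: the doubled type is handy for emulating widening arithmetic without
    // overflow, e.g. computing the high half of a 32-bit multiplication in 64 bits.
    //
    //     let wide_ty = this.get_twice_wide_int_ty(this.tcx.types.u32); // u64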
978
979    /// Checks that target feature `target_feature` is enabled.
980    ///
981    /// If not enabled, emits an UB error that states that the feature is
982    /// required by `intrinsic`.
983    fn expect_target_feature_for_intrinsic(
984        &self,
985        intrinsic: Symbol,
986        target_feature: &str,
987    ) -> InterpResult<'tcx, ()> {
988        let this = self.eval_context_ref();
989        if !this.tcx.sess.unstable_target_features.contains(&Symbol::intern(target_feature)) {
990            throw_ub_format!(
991                "attempted to call intrinsic `{intrinsic}` that requires missing target feature {target_feature}"
992            );
993        }
994        interp_ok(())
995    }
996
997    /// Look up an array of immediates from any linker sections matching the provided predicate.
998    fn lookup_link_section(
999        &mut self,
1000        include_name: impl Fn(&str) -> bool,
1001    ) -> InterpResult<'tcx, Vec<ImmTy<'tcx>>> {
1002        let this = self.eval_context_mut();
1003        let tcx = this.tcx.tcx;
1004
1005        let mut array = vec![];
1006
1007        iter_exported_symbols(tcx, |_cnum, def_id| {
1008            let attrs = tcx.codegen_fn_attrs(def_id);
1009            let Some(link_section) = attrs.link_section else {
1010                return interp_ok(());
1011            };
1012            if include_name(link_section.as_str()) {
1013                let instance = ty::Instance::mono(tcx, def_id);
1014                let const_val = this.eval_global(instance).unwrap_or_else(|err| {
1015                    panic!(
1016                        "failed to evaluate static in required link_section: {def_id:?}\n{err:?}"
1017                    )
1018                });
1019                match const_val.layout.ty.kind() {
1020                    ty::FnPtr(..) => {
1021                        array.push(this.read_immediate(&const_val)?);
1022                    }
1023                    ty::Array(elem_ty, _) if matches!(elem_ty.kind(), ty::FnPtr(..)) => {
1024                        let mut elems = this.project_array_fields(&const_val)?;
1025                        while let Some((_idx, elem)) = elems.next(this)? {
1026                            array.push(this.read_immediate(&elem)?);
1027                        }
1028                    }
1029                    _ =>
1030                        throw_unsup_format!(
1031                            "only function pointers and arrays of function pointers are supported in well-known linker sections"
1032                        ),
1033                }
1034            }
1035            interp_ok(())
1036        })?;
1037
1038        interp_ok(array)
1039    }
1040
1041    fn mangle_internal_symbol<'a>(&'a mut self, name: &'static str) -> &'a str
1042    where
1043        'tcx: 'a,
1044    {
1045        let this = self.eval_context_mut();
1046        let tcx = *this.tcx;
1047        this.machine
1048            .mangle_internal_symbol_cache
1049            .entry(name)
1050            .or_insert_with(|| mangle_internal_symbol(tcx, name))
1051    }
1052}
1053
1054impl<'tcx> MiriMachine<'tcx> {
1055    /// Get the current span in the topmost function which is workspace-local and not
1056    /// `#[track_caller]`.
1057    /// This function is backed by a cache, and can be assumed to be very fast.
1058    /// It will work even when the stack is empty.
1059    pub fn current_user_relevant_span(&self) -> Span {
1060        self.threads.active_thread_ref().current_user_relevant_span()
1061    }
1062
1063    /// Returns the span of the *caller* of the current operation, again
1064    /// walking down the stack to find the closest frame in a local crate, if the caller of the
1065    /// current operation is not in a local crate.
1066    /// This is useful when we are processing something which occurs on function-entry and we want
1067    /// to point at the call to the function, not the function definition generally.
1068    pub fn caller_span(&self) -> Span {
1069        // We need to go down at least to the caller (len - 2), or however
1070        // far we have to go to find a frame in a local crate which is also not #[track_caller].
1071        let frame_idx = self.top_user_relevant_frame().unwrap();
1072        let frame_idx = cmp::min(frame_idx, self.stack().len().saturating_sub(2));
1073        self.stack()[frame_idx].current_span()
1074    }
1075
1076    fn stack(&self) -> &[Frame<'tcx, Provenance, machine::FrameExtra<'tcx>>] {
1077        self.threads.active_thread_stack()
1078    }
1079
1080    fn top_user_relevant_frame(&self) -> Option<usize> {
1081        self.threads.active_thread_ref().top_user_relevant_frame()
1082    }
1083
1084    /// This is the source of truth for the `user_relevance` flag in our `FrameExtra`.
1085    pub fn user_relevance(&self, frame: &Frame<'tcx, Provenance>) -> u8 {
1086        if frame.instance().def.requires_caller_location(self.tcx) {
1087            return 0;
1088        }
1089        if self.is_local(frame.instance()) {
1090            u8::MAX
1091        } else {
1092            // A non-relevant frame, but at least it doesn't require a caller location, so
1093            // better than nothing.
1094            1
1095        }
1096    }
1097}
1098
1099pub fn isolation_abort_error<'tcx>(name: &str) -> InterpResult<'tcx> {
1100    throw_machine_stop!(TerminationInfo::UnsupportedInIsolation(format!(
1101        "{name} not available when isolation is enabled",
1102    )))
1103}
1104
1105pub(crate) fn bool_to_simd_element(b: bool, size: Size) -> Scalar {
1106    // SIMD uses all-1 as pattern for "true". In two's complement,
1107    // -1 has all its bits set to one and `from_int` will truncate or
1108    // sign-extend it to `size` as required.
1109    let val = if b { -1 } else { 0 };
1110    Scalar::from_int(val, size)
1111}
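// Worked example (sketch): `bool_to_simd_element(true, Size::from_bits(32))` yields a scalar
// with all 32 bits set (0xFFFF_FFFF), while `false` yields an all-zero scalar of the same size.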
1112
1113/// Check whether an operation that writes to a target buffer was successful,
1114/// and select the return value accordingly.
1115/// Local helper function to be used in Windows shims.
1116pub(crate) fn windows_check_buffer_size((success, len): (bool, u64)) -> u32 {
1117    if success {
1118        // If the function succeeds, the return value is the number of characters stored in the target buffer,
1119        // not including the terminating null character.
1120        u32::try_from(len.strict_sub(1)).unwrap()
1121    } else {
1122        // If the target buffer was not large enough to hold the data, the return value is the buffer size, in characters,
1123        // required to hold the string and its terminating null character.
1124        u32::try_from(len).unwrap()
1125    }
1126}
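// Worked example (sketch): combined with `write_wide_str`, successfully writing a 2-character
// string returns `(true, 3)` (the length includes the terminator), so this helper returns 2;
// on failure it returns the full required size, 3, matching the Windows API convention.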
1127
1128/// We don't support 16-bit systems, so let's have ergonomic conversion from `u32` to `usize`.
1129pub trait ToUsize {
1130    fn to_usize(self) -> usize;
1131}
1132
1133impl ToUsize for u32 {
1134    fn to_usize(self) -> usize {
1135        self.try_into().unwrap()
1136    }
1137}
1138
1139/// Similarly, a maximum address size of `u64` is widely assumed here, so let's have ergonomic
1140/// conversion from `usize` to `u64`.
1141pub trait ToU64 {
1142    fn to_u64(self) -> u64;
1143}
1144
1145impl ToU64 for usize {
1146    fn to_u64(self) -> u64 {
1147        self.try_into().unwrap()
1148    }
1149}
1150
1151/// Enters a [tracing::info_span] only if the "tracing" feature is enabled, otherwise does nothing.
1152/// This calls [rustc_const_eval::enter_trace_span] with [MiriMachine] as the first argument, which
1153/// will in turn call [MiriMachine::enter_trace_span], which takes care of determining at compile
1154/// time whether to trace or not (and supposedly the call is compiled out if tracing is disabled).
1155/// Look at [rustc_const_eval::enter_trace_span] for complete documentation, examples and tips.
1156#[macro_export]
1157macro_rules! enter_trace_span {
1158    ($($tt:tt)*) => {
1159        rustc_const_eval::enter_trace_span!($crate::MiriMachine<'static>, $($tt)*)
1160    };
1161}
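// Illustrative use, taken from `try_resolve_did` earlier in this file:
//
//     let _trace = enter_trace_span!("try_resolve_did", ?path);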