// rustc_const_eval/interpret/intern.rs
//! This module specifies the type based interner for constants.
//!
//! After a const evaluation has computed a value, before we destroy the const evaluator's session
//! memory, we need to extract all memory allocations to the global memory pool so they stay around.
//!
//! In principle, this is not very complicated: we recursively walk the final value, follow all the
//! pointers, and move all reachable allocations to the global `tcx` memory. The only complication
//! is picking the right mutability: the outermost allocation generally has a clear mutability, but
//! what about the other allocations it points to that have also been created with this value? We
//! don't want to do guesswork here. The rules are: `static`, `const`, and promoted can only create
//! immutable allocations that way. `static mut` can be initialized with expressions like `&mut 42`,
//! so all inner allocations are marked mutable. Some of them could potentially be made immutable,
//! but that would require relying on type information, and given how many ways Rust has to lie
//! about type information, we want to avoid doing that.
15
16use hir::def::DefKind;
17use rustc_ast::Mutability;
18use rustc_data_structures::fx::{FxHashSet, FxIndexMap};
19use rustc_hir as hir;
20use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrs;
21use rustc_middle::mir::interpret::{ConstAllocation, CtfeProvenance, InterpResult};
22use rustc_middle::query::TyCtxtAt;
23use rustc_middle::span_bug;
24use rustc_middle::ty::layout::TyAndLayout;
25use rustc_span::def_id::LocalDefId;
26use rustc_span::sym;
27use tracing::{instrument, trace};
28
29use super::{
30 AllocId, Allocation, InterpCx, MPlaceTy, Machine, MemoryKind, PlaceTy, err_ub, interp_ok,
31};
32use crate::const_eval;
33use crate::errors::NestedStaticInThreadLocal;
34
/// A machine usable for compile-time interning: its memory map is keyed by `AllocId`
/// with CTFE provenance, it carries no extra function values, frame state, or
/// per-allocation state, and (via `HasStaticRootDefId`) it can report which `static`
/// item is currently being evaluated so nested allocations can be interned as
/// nested statics.
pub trait CompileTimeMachine<'tcx, T> = Machine<
        'tcx,
        MemoryKind = T,
        Provenance = CtfeProvenance,
        ExtraFnVal = !,
        FrameExtra = (),
        AllocExtra = (),
        MemoryMap = FxIndexMap<AllocId, (MemoryKind<T>, Allocation)>,
    > + HasStaticRootDefId;
44
/// Access to the `static` item (if any) whose initializer is currently being evaluated.
/// Interning uses this to decide whether inner allocations become nested statics of
/// that item or plain global memory.
pub trait HasStaticRootDefId {
    /// Returns the `DefId` of the static item that is currently being evaluated.
    /// Used for interning to be able to handle nested allocations.
    fn static_def_id(&self) -> Option<LocalDefId>;
}
50
51impl HasStaticRootDefId for const_eval::CompileTimeMachine<'_> {
52 fn static_def_id(&self) -> Option<LocalDefId> {
53 Some(self.static_root_ids?.1)
54 }
55}
56
/// Intern an allocation. Returns `Err` if the allocation does not exist in the local memory.
///
/// `mutability` can be used to force immutable interning: if it is `Mutability::Not`, the
/// allocation is interned immutably; if it is `Mutability::Mut`, then the allocation *must be*
/// already mutable (as a sanity check).
///
/// Returns an iterator over all relocations referred to by this allocation.
fn intern_shallow<'rt, 'tcx, T, M: CompileTimeMachine<'tcx, T>>(
    ecx: &'rt mut InterpCx<'tcx, M>,
    alloc_id: AllocId,
    mutability: Mutability,
) -> Result<impl Iterator<Item = CtfeProvenance> + 'tcx, ()> {
    trace!("intern_shallow {:?}", alloc_id);
    // remove allocation -- from here on it lives in global memory, not the local map.
    // A missing entry means the pointer is dangling; the caller decides how to report that.
    // FIXME(#120456) - is `swap_remove` correct?
    let Some((_kind, mut alloc)) = ecx.memory.alloc_map.swap_remove(&alloc_id) else {
        return Err(());
    };
    // Set allocation mutability as appropriate. This is used by LLVM to put things into
    // read-only memory, and also by Miri when evaluating other globals that
    // access this one.
    match mutability {
        Mutability::Not => {
            alloc.mutability = Mutability::Not;
        }
        Mutability::Mut => {
            // This must be already mutable, we won't "un-freeze" allocations ever.
            assert_eq!(alloc.mutability, Mutability::Mut);
        }
    }
    // link the alloc id to the actual allocation. If we are inside a `static`'s
    // initializer, the allocation becomes a nested static of that item; otherwise it is
    // plain global memory.
    let alloc = ecx.tcx.mk_const_alloc(alloc);
    if let Some(static_id) = ecx.machine.static_def_id() {
        intern_as_new_static(ecx.tcx, static_id, alloc_id, alloc);
    } else {
        ecx.tcx.set_alloc_id_memory(alloc_id, alloc);
    }
    // Hand back the provenance of all pointers stored in this allocation, so the caller
    // can continue the recursive walk.
    Ok(alloc.0.0.provenance().ptrs().iter().map(|&(_, prov)| prov))
}
96
/// Creates a new `DefId` and feeds all the right queries to make this `DefId`
/// appear as if it were a user-written `static` (though it has no HIR).
///
/// `alloc_id` is linked to the freshly created definition, so the allocation is owned
/// by that synthetic nested static of `static_id`.
fn intern_as_new_static<'tcx>(
    tcx: TyCtxtAt<'tcx>,
    static_id: LocalDefId,
    alloc_id: AllocId,
    alloc: ConstAllocation<'tcx>,
) {
    // Create a fresh definition nested under the parent static; the new static's
    // mutability mirrors the allocation's mutability.
    let feed = tcx.create_def(
        static_id,
        sym::nested,
        DefKind::Static { safety: hir::Safety::Safe, mutability: alloc.0.mutability, nested: true },
    );
    tcx.set_nested_alloc_id_static(alloc_id, feed.def_id());

    // Nested allocations inside a thread-local static are reported as an error.
    if tcx.is_thread_local_static(static_id.into()) {
        tcx.dcx().emit_err(NestedStaticInThreadLocal { span: tcx.def_span(static_id) });
    }

    // These do not inherit the codegen attrs of the parent static allocation, since
    // it doesn't make sense for them to inherit their `#[no_mangle]` and `#[link_name = ..]`
    // and the like.
    feed.codegen_fn_attrs(CodegenFnAttrs::new());

    // Feed the queries that a user-written static would have answered from its HIR,
    // reusing the parent's generics/predicates/span where applicable.
    feed.eval_static_initializer(Ok(alloc));
    feed.generics_of(tcx.generics_of(static_id).clone());
    feed.def_ident_span(tcx.def_ident_span(static_id));
    feed.explicit_predicates_of(tcx.explicit_predicates_of(static_id));
    feed.feed_hir();
}
127
/// How a constant value should be interned.
#[derive(Copy, Clone, Debug, PartialEq, Hash, Eq)]
pub enum InternKind {
    /// The `mutability` of the static, ignoring the type which may have interior mutability.
    Static(hir::Mutability),
    /// A `const` item
    Constant,
    /// A value lifted to a static by promotion; interned fully immutably.
    Promoted,
}
137
/// The ways recursive interning can fail. These are recorded during the walk and
/// returned at the end rather than aborting interning immediately.
#[derive(Debug)]
pub enum InternResult {
    /// A mutable pointer was found in a context where inner allocations must be immutable.
    FoundBadMutablePointer,
    /// A pointer referred to an allocation that exists neither locally nor in `tcx`.
    FoundDanglingPointer,
}
143
/// Intern `ret` and everything it references.
///
/// This *cannot raise an interpreter error*. Doing so is left to validation, which
/// tracks where in the value we are and thus can show much better error messages.
///
/// For `InternKind::Static` the root allocation will not be interned, but must be handled by the caller.
#[instrument(level = "debug", skip(ecx))]
pub fn intern_const_alloc_recursive<'tcx, M: CompileTimeMachine<'tcx, const_eval::MemoryKind>>(
    ecx: &mut InterpCx<'tcx, M>,
    intern_kind: InternKind,
    ret: &MPlaceTy<'tcx>,
) -> Result<(), InternResult> {
    // We are interning recursively, and for mutability we are distinguishing the "root" allocation
    // that we are starting in, and all other allocations that we are encountering recursively.
    let (base_mutability, inner_mutability, is_static) = match intern_kind {
        InternKind::Constant | InternKind::Promoted => {
            // Completely immutable. Interning anything mutably here can only lead to unsoundness,
            // since all consts are conceptually independent values but share the same underlying
            // memory.
            (Mutability::Not, Mutability::Not, false)
        }
        InternKind::Static(Mutability::Not) => {
            (
                // Outermost allocation is mutable if `!Freeze`.
                if ret.layout.ty.is_freeze(*ecx.tcx, ecx.typing_env) {
                    Mutability::Not
                } else {
                    Mutability::Mut
                },
                // Inner allocations are never mutable. They can only arise via the "tail
                // expression" / "outer scope" rule, and we treat them consistently with `const`.
                Mutability::Not,
                true,
            )
        }
        InternKind::Static(Mutability::Mut) => {
            // Just make everything mutable. We accept code like
            // `static mut X = &mut [42]`, so even inner allocations need to be mutable.
            (Mutability::Mut, Mutability::Mut, true)
        }
    };

    // Intern the base allocation, and initialize todo list for recursive interning.
    let base_alloc_id = ret.ptr().provenance.unwrap().alloc_id();
    trace!(?base_alloc_id, ?base_mutability);
    // First we intern the base allocation, as it requires a different mutability.
    // This gives us the initial set of nested allocations, which will then all be processed
    // recursively in the loop below.
    let mut todo: Vec<_> = if is_static {
        // Do not steal the root allocation, we need it later to create the return value of `eval_static_initializer`.
        // But still change its mutability to match the requested one.
        let alloc = ecx.memory.alloc_map.get_mut(&base_alloc_id).unwrap();
        alloc.1.mutability = base_mutability;
        alloc.1.provenance().ptrs().iter().map(|&(_, prov)| prov).collect()
    } else {
        intern_shallow(ecx, base_alloc_id, base_mutability).unwrap().collect()
    };
    // We need to distinguish "has just been interned" from "was already in `tcx`",
    // so we track this in a separate set.
    let mut just_interned: FxHashSet<_> = std::iter::once(base_alloc_id).collect();
    // Whether we encountered a bad mutable pointer.
    // We want to first report "dangling" and then "mutable", so we need to delay reporting these
    // errors.
    let mut result = Ok(());

    // Keep interning as long as there are things to intern.
    // We show errors if there are dangling pointers, or mutable pointers in immutable contexts
    // (i.e., everything except for `static mut`). When these errors affect references, it is
    // unfortunate that we show these errors here and not during validation, since validation can
    // show much nicer errors. However, we do need these checks to be run on all pointers, including
    // raw pointers, so we cannot rely on validation to catch them -- and since interning runs
    // before validation, and interning doesn't know the type of anything, this means we can't show
    // better errors. Maybe we should consider doing validation before interning in the future.
    while let Some(prov) = todo.pop() {
        trace!(?prov);
        let alloc_id = prov.alloc_id();

        if base_alloc_id == alloc_id && is_static {
            // This is a pointer to the static itself. It's ok for a static to refer to itself,
            // even mutably. Whether that mutable pointer is legal at all is checked in validation.
            // See tests/ui/statics/recursive_interior_mut.rs for how such a situation can occur.
            // We also already collected all the nested allocations, so there's no need to do that again.
            continue;
        }

        // Ensure that this is derived from a shared reference. Crucially, we check this *before*
        // checking whether the `alloc_id` has already been interned. The point of this check is to
        // ensure that when there are multiple pointers to the same allocation, they are *all*
        // derived from a shared reference. Therefore it would be bad if we only checked the first
        // pointer to any given allocation.
        // (It is likely not possible to actually have multiple pointers to the same allocation,
        // so alternatively we could also check that and ICE if there are multiple such pointers.)
        // See <https://github.com/rust-lang/rust/pull/128543> for why we are checking for "shared
        // reference" and not "immutable", i.e., for why we are allowing interior-mutable shared
        // references: they can actually be created in safe code while pointing to apparently
        // "immutable" values, via promotion or tail expression lifetime extension of
        // `&None::<Cell<T>>`.
        // We also exclude promoteds from this as `&mut []` can be promoted, which is a mutable
        // reference pointing to an immutable (zero-sized) allocation. We rely on the promotion
        // analysis not screwing up to ensure that it is sound to intern promoteds as immutable.
        if intern_kind != InternKind::Promoted
            && inner_mutability == Mutability::Not
            && !prov.shared_ref()
        {
            let is_already_global = ecx.tcx.try_get_global_alloc(alloc_id).is_some();
            if is_already_global && !just_interned.contains(&alloc_id) {
                // This is a pointer to some memory from another constant. We encounter mutable
                // pointers to such memory since we do not always track immutability through
                // these "global" pointers. Allowing them is harmless; the point of these checks
                // during interning is to justify why we intern the *new* allocations immutably,
                // so we can completely ignore existing allocations.
                // We can also skip the rest of this loop iteration, since after all it is already
                // interned.
                continue;
            }
            // If this is a dangling pointer, that's actually fine -- the problematic case is
            // when there is memory there that someone might expect to be mutable, but we make it immutable.
            let dangling = !is_already_global && !ecx.memory.alloc_map.contains_key(&alloc_id);
            if !dangling {
                // Found a mutable reference inside a const where inner allocations should be
                // immutable. This should have been rejected earlier, so reaching this point is a
                // compiler bug -- unless the check was deliberately unleashed.
                if !ecx.tcx.sess.opts.unstable_opts.unleash_the_miri_inside_of_you {
                    span_bug!(
                        ecx.tcx.span,
                        "the static const safety checks accepted mutable references they should not have accepted"
                    );
                }
                // Prefer dangling pointer errors over mutable pointer errors
                if result.is_ok() {
                    result = Err(InternResult::FoundBadMutablePointer);
                }
            }
        }
        if ecx.tcx.try_get_global_alloc(alloc_id).is_some() {
            // Already interned.
            debug_assert!(!ecx.memory.alloc_map.contains_key(&alloc_id));
            continue;
        }
        // We always intern with `inner_mutability`, and furthermore we ensured above that if
        // that is "immutable", then there are *no* mutable pointers anywhere in the newly
        // interned memory -- justifying that we can indeed intern immutably. However this also
        // means we can *not* easily intern immutably here if `prov.immutable()` is true and
        // `inner_mutability` is `Mut`: there might be other pointers to that allocation, and
        // we'd have to somehow check that they are *all* immutable before deciding that this
        // allocation can be made immutable. In the future we could consider analyzing all
        // pointers before deciding which allocations can be made immutable; but for now we are
        // okay with losing some potential for immutability here. This can anyway only affect
        // `static mut`.
        just_interned.insert(alloc_id);
        match intern_shallow(ecx, alloc_id, inner_mutability) {
            // Any pointers inside the freshly interned allocation still need to be visited.
            Ok(nested) => todo.extend(nested),
            Err(()) => {
                // The allocation is in neither the local map nor `tcx`: a dangling pointer.
                ecx.tcx.dcx().delayed_bug("found dangling pointer during const interning");
                result = Err(InternResult::FoundDanglingPointer);
            }
        }
    }
    result
}
303
304/// Intern `ret`. This function assumes that `ret` references no other allocation.
305#[instrument(level = "debug", skip(ecx))]
306pub fn intern_const_alloc_for_constprop<'tcx, T, M: CompileTimeMachine<'tcx, T>>(
307 ecx: &mut InterpCx<'tcx, M>,
308 alloc_id: AllocId,
309) -> InterpResult<'tcx, ()> {
310 if ecx.tcx.try_get_global_alloc(alloc_id).is_some() {
311 // The constant is already in global memory. Do nothing.
312 return interp_ok(());
313 }
314 // Move allocation to `tcx`.
315 if let Some(_) =
316 (intern_shallow(ecx, alloc_id, Mutability::Not).map_err(|()| err_ub!(DeadLocal))?).next()
317 {
318 // We are not doing recursive interning, so we don't currently support provenance.
319 // (If this assertion ever triggers, we should just implement a
320 // proper recursive interning loop -- or just call `intern_const_alloc_recursive`.
321 panic!("`intern_const_alloc_for_constprop` called on allocation with nested provenance")
322 }
323 interp_ok(())
324}
325
impl<'tcx, M: super::intern::CompileTimeMachine<'tcx, !>> InterpCx<'tcx, M> {
    /// A helper function that allocates memory for the layout given and gives you access to mutate
    /// it. Once your own mutation code is done, the backing `Allocation` is removed from the
    /// current `Memory` and interned as read-only into the global memory.
    ///
    /// Returns the `AllocId` of the interned allocation. Panics if `f` leaves behind any
    /// pointers to allocations that are not already interned.
    pub fn intern_with_temp_alloc(
        &mut self,
        layout: TyAndLayout<'tcx>,
        f: impl FnOnce(&mut InterpCx<'tcx, M>, &PlaceTy<'tcx, M::Provenance>) -> InterpResult<'tcx, ()>,
    ) -> InterpResult<'tcx, AllocId> {
        // `allocate` picks a fresh AllocId that we will associate with its data below.
        let dest = self.allocate(layout, MemoryKind::Stack)?;
        f(self, &dest.clone().into())?;
        let alloc_id = dest.ptr().provenance.unwrap().alloc_id(); // this was just allocated, it must have provenance
        // Intern immutably; `unwrap` is fine since we allocated this id just above, so it
        // must still be in the local memory map.
        for prov in intern_shallow(self, alloc_id, Mutability::Not).unwrap() {
            // We are not doing recursive interning, so we don't currently support provenance.
            // (If this assertion ever triggers, we should just implement a
            // proper recursive interning loop -- or just call `intern_const_alloc_recursive`.
            if self.tcx.try_get_global_alloc(prov.alloc_id()).is_none() {
                panic!("`intern_with_temp_alloc` with nested allocations");
            }
        }
        interp_ok(alloc_id)
    }
}
349}