// rustc_const_eval/interpret/intern.rs
1//! This module specifies the type based interner for constants.
2//!
3//! After a const evaluation has computed a value, before we destroy the const evaluator's session
4//! memory, we need to extract all memory allocations to the global memory pool so they stay around.
5//!
6//! In principle, this is not very complicated: we recursively walk the final value, follow all the
7//! pointers, and move all reachable allocations to the global `tcx` memory. The only complication
8//! is picking the right mutability: the outermost allocation generally has a clear mutability, but
9//! what about the other allocations it points to that have also been created with this value? We
10//! don't want to do guesswork here. The rules are: `static`, `const`, and promoted can only create
11//! immutable allocations that way. `static mut` can be initialized with expressions like `&mut 42`,
12//! so all inner allocations are marked mutable. Some of them could potentially be made immutable,
13//! but that would require relying on type information, and given how many ways Rust has to lie
14//! about type information, we want to avoid doing that.
15
16use hir::def::DefKind;
17use rustc_ast::Mutability;
18use rustc_data_structures::fx::{FxHashSet, FxIndexMap};
19use rustc_hir as hir;
20use rustc_hir::definitions::{DefPathData, DisambiguatorState};
21use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrs;
22use rustc_middle::mir::interpret::{ConstAllocation, CtfeProvenance, InterpResult};
23use rustc_middle::query::TyCtxtAt;
24use rustc_middle::span_bug;
25use rustc_middle::ty::layout::TyAndLayout;
26use rustc_span::def_id::LocalDefId;
27use tracing::{instrument, trace};
28
29use super::{AllocId, Allocation, InterpCx, MPlaceTy, Machine, MemoryKind, PlaceTy, interp_ok};
30use crate::const_eval::DummyMachine;
31use crate::{const_eval, errors};
32
/// The machine configuration the interner requires: a CTFE machine whose memory map,
/// provenance, and extra-state types match what the interning code below manipulates,
/// and which can report the `static` item (if any) that is the root of the current
/// evaluation (via `HasStaticRootDefId`).
pub trait CompileTimeMachine<'tcx> = Machine<
        'tcx,
        MemoryKind = const_eval::MemoryKind,
        Provenance = CtfeProvenance,
        ExtraFnVal = !,
        FrameExtra = (),
        AllocExtra = (),
        MemoryMap = FxIndexMap<AllocId, (MemoryKind<const_eval::MemoryKind>, Allocation)>,
    > + HasStaticRootDefId;
42
/// Gives the interner access to the `DefId` of the `static` item whose initializer is
/// currently being evaluated, if any. When this is `Some`, nested allocations are
/// interned as synthetic nested statics instead of plain global memory.
pub trait HasStaticRootDefId {
    /// Returns the `DefId` of the static item that is currently being evaluated.
    /// Used for interning to be able to handle nested allocations.
    fn static_def_id(&self) -> Option<LocalDefId>;
}
48
49impl HasStaticRootDefId for const_eval::CompileTimeMachine<'_> {
50 fn static_def_id(&self) -> Option<LocalDefId> {
51 Some(self.static_root_ids?.1)
52 }
53}
54
/// Intern an allocation. Returns `Err` if the allocation does not exist in the local memory.
///
/// `mutability` can be used to force immutable interning: if it is `Mutability::Not`, the
/// allocation is interned immutably; if it is `Mutability::Mut`, then the allocation *must be*
/// already mutable (as a sanity check).
///
/// If the machine reports a static root (`static_def_id()` is `Some`), the allocation is
/// interned as a fresh *nested static* and `disambiguator` must be `Some`; otherwise it is
/// placed directly into the global `tcx` memory.
///
/// Returns an iterator over all relocations referred to by this allocation.
fn intern_shallow<'tcx, M: CompileTimeMachine<'tcx>>(
    ecx: &mut InterpCx<'tcx, M>,
    alloc_id: AllocId,
    mutability: Mutability,
    disambiguator: Option<&mut DisambiguatorState>,
) -> Result<impl Iterator<Item = CtfeProvenance> + 'tcx, InternError> {
    trace!("intern_shallow {:?}", alloc_id);
    // Remove the allocation from local memory; from here on it lives in the global `tcx`
    // memory (unless we bail out below and put it back).
    // FIXME(#120456) - is `swap_remove` correct?
    let Some((kind, mut alloc)) = ecx.memory.alloc_map.swap_remove(&alloc_id) else {
        return Err(InternError::DanglingPointer);
    };

    match kind {
        MemoryKind::Machine(const_eval::MemoryKind::Heap { was_made_global }) => {
            if !was_made_global {
                // Attempting to intern a `const_allocate`d pointer that was not made global via
                // `const_make_global`. We want to error here, but we have to first put the
                // allocation back into the `alloc_map` to keep things in a consistent state.
                ecx.memory.alloc_map.insert(alloc_id, (kind, alloc));
                return Err(InternError::ConstAllocNotGlobal);
            }
        }
        MemoryKind::Stack | MemoryKind::CallerLocation => {}
    }

    // Set allocation mutability as appropriate. This is used by LLVM to put things into
    // read-only memory, and also by Miri when evaluating other globals that
    // access this one.
    match mutability {
        Mutability::Not => {
            alloc.mutability = Mutability::Not;
        }
        Mutability::Mut => {
            // This must be already mutable, we won't "un-freeze" allocations ever.
            assert_eq!(alloc.mutability, Mutability::Mut);
        }
    }
    // link the alloc id to the actual allocation
    let alloc = ecx.tcx.mk_const_alloc(alloc);
    if let Some(static_id) = ecx.machine.static_def_id() {
        // Evaluating a `static`: every interned allocation becomes a nested static.
        intern_as_new_static(
            ecx.tcx,
            static_id,
            alloc_id,
            alloc,
            disambiguator.expect("disambiguator needed"),
        );
    } else {
        ecx.tcx.set_alloc_id_memory(alloc_id, alloc);
    }
    Ok(alloc.inner().provenance().ptrs().iter().map(|&(_, prov)| prov))
}
115
/// Creates a new `DefId` and feeds all the right queries to make this `DefId`
/// appear as if it were a user-written `static` (though it has no HIR).
///
/// The new def is registered as a *nested* static of `static_id`, with `alloc`
/// as its (already evaluated) initializer.
fn intern_as_new_static<'tcx>(
    tcx: TyCtxtAt<'tcx>,
    static_id: LocalDefId,
    alloc_id: AllocId,
    alloc: ConstAllocation<'tcx>,
    disambiguator: &mut DisambiguatorState,
) {
    // `intern_const_alloc_recursive` is called once per static and it contains the `DisambiguatorState`.
    // The `<static_id>::{{nested}}` path is thus unique to `intern_const_alloc_recursive` and the
    // `DisambiguatorState` ensures the generated path is unique for this call as we generate
    // `<static_id>::{{nested#n}}` where `n` is the `n`th `intern_as_new_static` call.
    let feed = tcx.create_def(
        static_id,
        None,
        DefKind::Static { safety: hir::Safety::Safe, mutability: alloc.0.mutability, nested: true },
        Some(DefPathData::NestedStatic),
        disambiguator,
    );
    tcx.set_nested_alloc_id_static(alloc_id, feed.def_id());

    // Nested statics inside a thread-local static are not supported; emit an error
    // rather than silently producing a non-thread-local nested static.
    if tcx.is_thread_local_static(static_id.into()) {
        tcx.dcx().emit_err(errors::NestedStaticInThreadLocal { span: tcx.def_span(static_id) });
    }

    // These do not inherit the codegen attrs of the parent static allocation, since
    // it doesn't make sense for them to inherit their `#[no_mangle]` and `#[link_name = ..]`
    // and the like.
    feed.codegen_fn_attrs(CodegenFnAttrs::new());

    feed.eval_static_initializer(Ok(alloc));
    // Forward the remaining queries from the parent static, so the nested static
    // answers them like a regular item would.
    feed.generics_of(tcx.generics_of(static_id).clone());
    feed.def_ident_span(tcx.def_ident_span(static_id));
    feed.explicit_predicates_of(tcx.explicit_predicates_of(static_id));
    feed.feed_hir();
}
153
/// How a constant value should be interned.
#[derive(Copy, Clone, Debug, PartialEq, Hash, Eq)]
pub enum InternKind {
    /// The `mutability` of the static, ignoring the type which may have interior mutability.
    Static(hir::Mutability),
    /// A `const` item.
    Constant,
    /// A promoted, i.e. a temporary lifted into static memory by the promotion analysis.
    Promoted,
}
163
/// Errors that interning can report back to the caller. The caller is expected to run
/// validation and prefer its (better) error messages, only surfacing these if
/// validation passes anyway.
#[derive(Debug)]
pub enum InternError {
    /// A mutable pointer was found in memory that was supposed to be interned immutably.
    BadMutablePointer,
    /// A pointer referenced an allocation that does not exist in the local memory.
    DanglingPointer,
    /// A `const_allocate`d allocation was never made global via `const_make_global`.
    ConstAllocNotGlobal,
}
170
/// Intern `ret` and everything it references.
///
/// This *cannot raise an interpreter error*. Doing so is left to validation, which
/// tracks where in the value we are and thus can show much better error messages.
///
/// Errors are returned (rather than emitted eagerly) so that the caller can run validation
/// first and only report interning errors if validation fails.
///
/// For `InternKind::Static` the root allocation will not be interned, but must be handled by the caller.
#[instrument(level = "debug", skip(ecx))]
pub fn intern_const_alloc_recursive<'tcx, M: CompileTimeMachine<'tcx>>(
    ecx: &mut InterpCx<'tcx, M>,
    intern_kind: InternKind,
    ret: &MPlaceTy<'tcx>,
) -> Result<(), InternError> {
    // One `DisambiguatorState` per call: nested statics created below get unique
    // `{{nested#n}}` paths relative to this interning run.
    let mut disambiguator = DisambiguatorState::new();

    // We are interning recursively, and for mutability we are distinguishing the "root" allocation
    // that we are starting in, and all other allocations that we are encountering recursively.
    let (base_mutability, inner_mutability, is_static) = match intern_kind {
        InternKind::Constant | InternKind::Promoted => {
            // Completely immutable. Interning anything mutably here can only lead to unsoundness,
            // since all consts are conceptually independent values but share the same underlying
            // memory.
            (Mutability::Not, Mutability::Not, false)
        }
        InternKind::Static(Mutability::Not) => {
            (
                // Outermost allocation is mutable if `!Freeze` i.e. contains interior mutable types.
                if ret.layout.ty.is_freeze(*ecx.tcx, ecx.typing_env) {
                    Mutability::Not
                } else {
                    Mutability::Mut
                },
                // Inner allocations are never mutable. They can only arise via the "tail
                // expression" / "outer scope" rule, and we treat them consistently with `const`.
                Mutability::Not,
                true,
            )
        }
        InternKind::Static(Mutability::Mut) => {
            // Just make everything mutable. We accept code like
            // `static mut X = &mut [42]`, so even inner allocations need to be mutable.
            (Mutability::Mut, Mutability::Mut, true)
        }
    };

    // Intern the base allocation, and initialize todo list for recursive interning.
    let base_alloc_id = ret.ptr().provenance.unwrap().alloc_id();
    trace!(?base_alloc_id, ?base_mutability);
    // First we intern the base allocation, as it requires a different mutability.
    // This gives us the initial set of nested allocations, which will then all be processed
    // recursively in the loop below.
    let mut todo: Vec<_> = if is_static {
        // Do not steal the root allocation, we need it later to create the return value of `eval_static_initializer`.
        // But still change its mutability to match the requested one.
        let alloc = ecx.memory.alloc_map.get_mut(&base_alloc_id).unwrap();
        alloc.1.mutability = base_mutability;
        alloc.1.provenance().ptrs().iter().map(|&(_, prov)| prov).collect()
    } else {
        intern_shallow(ecx, base_alloc_id, base_mutability, Some(&mut disambiguator))
            .unwrap()
            .collect()
    };
    // We need to distinguish "has just been interned" from "was already in `tcx`",
    // so we track this in a separate set.
    let mut just_interned: FxHashSet<_> = std::iter::once(base_alloc_id).collect();
    // Whether we encountered a bad mutable pointer.
    // We want to first report "dangling" and then "mutable", so we need to delay reporting these
    // errors.
    let mut result = Ok(());
    let mut found_bad_mutable_ptr = false;

    // Keep interning as long as there are things to intern.
    // We show errors if there are dangling pointers, or mutable pointers in immutable contexts
    // (i.e., everything except for `static mut`). We only return these errors as a `Result`
    // so that the caller can run validation, and subsequently only report interning errors
    // if validation fails. Validation has the better error messages so we prefer those, but
    // interning has better coverage since it "sees" *all* pointers, including raw pointers and
    // references stored in unions.
    while let Some(prov) = todo.pop() {
        trace!(?prov);
        let alloc_id = prov.alloc_id();

        if base_alloc_id == alloc_id && is_static {
            // This is a pointer to the static itself. It's ok for a static to refer to itself,
            // even mutably. Whether that mutable pointer is legal at all is checked in validation.
            // See tests/ui/statics/recursive_interior_mut.rs for how such a situation can occur.
            // We also already collected all the nested allocations, so there's no need to do that again.
            continue;
        }

        // Ensure that this is derived from a shared reference. Crucially, we check this *before*
        // checking whether the `alloc_id` has already been interned. The point of this check is to
        // ensure that when there are multiple pointers to the same allocation, they are *all*
        // derived from a shared reference. Therefore it would be bad if we only checked the first
        // pointer to any given allocation.
        // (It is likely not possible to actually have multiple pointers to the same allocation,
        // so alternatively we could also check that and ICE if there are multiple such pointers.)
        // See <https://github.com/rust-lang/rust/pull/128543> for why we are checking for "shared
        // reference" and not "immutable", i.e., for why we are allowing interior-mutable shared
        // references: they can actually be created in safe code while pointing to apparently
        // "immutable" values, via promotion or tail expression lifetime extension of
        // `&None::<Cell<T>>`.
        // We also exclude promoteds from this as `&mut []` can be promoted, which is a mutable
        // reference pointing to an immutable (zero-sized) allocation. We rely on the promotion
        // analysis not screwing up to ensure that it is sound to intern promoteds as immutable.
        if intern_kind != InternKind::Promoted
            && inner_mutability == Mutability::Not
            && !prov.shared_ref()
        {
            let is_already_global = ecx.tcx.try_get_global_alloc(alloc_id).is_some();
            if is_already_global && !just_interned.contains(&alloc_id) {
                // This is a pointer to some memory from another constant. We encounter mutable
                // pointers to such memory since we do not always track immutability through
                // these "global" pointers. Allowing them is harmless; the point of these checks
                // during interning is to justify why we intern the *new* allocations immutably,
                // so we can completely ignore existing allocations.
                // We can also skip the rest of this loop iteration, since after all it is already
                // interned.
                continue;
            }
            // If this is a dangling pointer, that's actually fine -- the problematic case is
            // when there is memory there that someone might expect to be mutable, but we make it immutable.
            let dangling = !is_already_global && !ecx.memory.alloc_map.contains_key(&alloc_id);
            if !dangling {
                found_bad_mutable_ptr = true;
            }
        }
        if ecx.tcx.try_get_global_alloc(alloc_id).is_some() {
            // Already interned.
            debug_assert!(!ecx.memory.alloc_map.contains_key(&alloc_id));
            continue;
        }
        // We always intern with `inner_mutability`, and furthermore we ensured above that if
        // that is "immutable", then there are *no* mutable pointers anywhere in the newly
        // interned memory -- justifying that we can indeed intern immutably. However this also
        // means we can *not* easily intern immutably here if `prov.immutable()` is true and
        // `inner_mutability` is `Mut`: there might be other pointers to that allocation, and
        // we'd have to somehow check that they are *all* immutable before deciding that this
        // allocation can be made immutable. In the future we could consider analyzing all
        // pointers before deciding which allocations can be made immutable; but for now we are
        // okay with losing some potential for immutability here. This can anyway only affect
        // `static mut`.
        just_interned.insert(alloc_id);
        match intern_shallow(ecx, alloc_id, inner_mutability, Some(&mut disambiguator)) {
            Ok(nested) => todo.extend(nested),
            Err(err) => {
                ecx.tcx.dcx().delayed_bug("error during const interning");
                result = Err(err);
            }
        }
    }
    if found_bad_mutable_ptr && result.is_ok() {
        // We found a mutable pointer inside a const where inner allocations should be immutable,
        // and there was no other error. This should usually never happen! However, this can happen
        // in unleash-miri mode, so report it as a normal error then.
        if ecx.tcx.sess.opts.unstable_opts.unleash_the_miri_inside_of_you {
            result = Err(InternError::BadMutablePointer);
        } else {
            span_bug!(
                ecx.tcx.span,
                "the static const safety checks accepted a mutable pointer they should not have accepted"
            );
        }
    }
    result
}
336
337/// Intern `ret`. This function assumes that `ret` references no other allocation.
338#[instrument(level = "debug", skip(ecx))]
339pub fn intern_const_alloc_for_constprop<'tcx, M: CompileTimeMachine<'tcx>>(
340 ecx: &mut InterpCx<'tcx, M>,
341 alloc_id: AllocId,
342) -> InterpResult<'tcx, ()> {
343 if ecx.tcx.try_get_global_alloc(alloc_id).is_some() {
344 // The constant is already in global memory. Do nothing.
345 return interp_ok(());
346 }
347 // Move allocation to `tcx`.
348 if let Some(_) = intern_shallow(ecx, alloc_id, Mutability::Not, None).unwrap().next() {
349 // We are not doing recursive interning, so we don't currently support provenance.
350 // (If this assertion ever triggers, we should just implement a
351 // proper recursive interning loop -- or just call `intern_const_alloc_recursive`.
352 panic!("`intern_const_alloc_for_constprop` called on allocation with nested provenance")
353 }
354 interp_ok(())
355}
356
impl<'tcx> InterpCx<'tcx, DummyMachine> {
    /// A helper function that allocates memory for the layout given and gives you access to mutate
    /// it. Once your own mutation code is done, the backing `Allocation` is removed from the
    /// current `Memory` and interned as read-only into the global memory.
    ///
    /// Returns the `AllocId` of the newly interned (immutable) allocation.
    pub fn intern_with_temp_alloc(
        &mut self,
        layout: TyAndLayout<'tcx>,
        f: impl FnOnce(
            &mut InterpCx<'tcx, DummyMachine>,
            &PlaceTy<'tcx, CtfeProvenance>,
        ) -> InterpResult<'tcx, ()>,
    ) -> InterpResult<'tcx, AllocId> {
        // `allocate` picks a fresh AllocId that we will associate with its data below.
        let dest = self.allocate(layout, MemoryKind::Stack)?;
        f(self, &dest.clone().into())?;
        let alloc_id = dest.ptr().provenance.unwrap().alloc_id(); // this was just allocated, it must have provenance
        for prov in intern_shallow(self, alloc_id, Mutability::Not, None).unwrap() {
            // We are not doing recursive interning, so we don't currently support provenance
            // that points at not-yet-interned memory.
            // (If this assertion ever triggers, we should just implement a
            // proper recursive interning loop -- or just call `intern_const_alloc_recursive`.)
            if self.tcx.try_get_global_alloc(prov.alloc_id()).is_none() {
                panic!("`intern_with_temp_alloc` with nested allocations");
            }
        }
        interp_ok(alloc_id)
    }
}
383}