rustc_const_eval/const_eval/valtrees.rs

1use rustc_abi::{BackendRepr, VariantIdx};
2use rustc_data_structures::stack::ensure_sufficient_stack;
3use rustc_middle::mir::interpret::{EvalToValTreeResult, GlobalId, ReportedErrorInfo};
4use rustc_middle::ty::layout::{LayoutCx, LayoutOf, TyAndLayout};
5use rustc_middle::ty::{self, Ty, TyCtxt};
6use rustc_middle::{bug, mir};
7use rustc_span::DUMMY_SP;
8use tracing::{debug, instrument, trace};
9
10use super::eval_queries::{mk_eval_cx_to_read_const_val, op_to_const};
11use super::machine::CompileTimeInterpCx;
12use super::{VALTREE_MAX_NODES, ValTreeCreationError, ValTreeCreationResult};
13use crate::const_eval::CanAccessMutGlobal;
14use crate::errors::MaxNumNodesInConstErr;
15use crate::interpret::{
16    ImmTy, Immediate, InternKind, MPlaceTy, MemPlaceMeta, MemoryKind, PlaceTy, Projectable, Scalar,
17    intern_const_alloc_recursive,
18};
19
20#[instrument(skip(ecx), level = "debug")]
21fn branches<'tcx>(
22    ecx: &CompileTimeInterpCx<'tcx>,
23    place: &MPlaceTy<'tcx>,
24    field_count: usize,
25    variant: Option<VariantIdx>,
26    num_nodes: &mut usize,
27) -> ValTreeCreationResult<'tcx> {
28    let place = match variant {
29        Some(variant) => ecx.project_downcast(place, variant).unwrap(),
30        None => place.clone(),
31    };
32    debug!(?place);
33
34    let mut branches = Vec::with_capacity(field_count + variant.is_some() as usize);
35
36    // For enums, we prepend their variant index before the variant's fields so we can figure out
37    // the variant again when just seeing a valtree.
38    if let Some(variant) = variant {
39        branches.push(ty::ValTree::from_scalar_int(*ecx.tcx, variant.as_u32().into()));
40    }
41
42    for i in 0..field_count {
43        let field = ecx.project_field(&place, i).unwrap();
44        let valtree = const_to_valtree_inner(ecx, &field, num_nodes)?;
45        branches.push(valtree);
46    }
47
48    // Have to account for ZSTs here
49    if branches.len() == 0 {
50        *num_nodes += 1;
51    }
52
53    Ok(ty::ValTree::from_branches(*ecx.tcx, branches))
54}
55
56#[instrument(skip(ecx), level = "debug")]
57fn slice_branches<'tcx>(
58    ecx: &CompileTimeInterpCx<'tcx>,
59    place: &MPlaceTy<'tcx>,
60    num_nodes: &mut usize,
61) -> ValTreeCreationResult<'tcx> {
62    let n = place.len(ecx).unwrap_or_else(|_| panic!("expected to use len of place {place:?}"));
63
64    let mut elems = Vec::with_capacity(n as usize);
65    for i in 0..n {
66        let place_elem = ecx.project_index(place, i).unwrap();
67        let valtree = const_to_valtree_inner(ecx, &place_elem, num_nodes)?;
68        elems.push(valtree);
69    }
70
71    Ok(ty::ValTree::from_branches(*ecx.tcx, elems))
72}
73
/// Recursively converts the value at `place` into a [`ty::ValTree`].
///
/// Returns `Err(NodesOverflow)` once `num_nodes` reaches `VALTREE_MAX_NODES`, and
/// `Err(NonSupportedType)` for types that have no valtree representation (unions,
/// trait objects, fn pointers, wide/non-integer raw pointers, closures, ...).
#[instrument(skip(ecx), level = "debug")]
fn const_to_valtree_inner<'tcx>(
    ecx: &CompileTimeInterpCx<'tcx>,
    place: &MPlaceTy<'tcx>,
    num_nodes: &mut usize,
) -> ValTreeCreationResult<'tcx> {
    let tcx = *ecx.tcx;
    let ty = place.layout.ty;
    debug!("ty kind: {:?}", ty.kind());

    // Enforce the global node budget before doing any work for this subtree.
    if *num_nodes >= VALTREE_MAX_NODES {
        return Err(ValTreeCreationError::NodesOverflow);
    }

    match ty.kind() {
        // `FnDef` is a ZST; it still counts as one node.
        ty::FnDef(..) => {
            *num_nodes += 1;
            Ok(ty::ValTree::zst(tcx))
        }
        // Primitive scalars become a single leaf node.
        ty::Bool | ty::Int(_) | ty::Uint(_) | ty::Float(_) | ty::Char => {
            let val = ecx.read_immediate(place).unwrap();
            let val = val.to_scalar_int().unwrap();
            *num_nodes += 1;

            Ok(ty::ValTree::from_scalar_int(tcx, val))
        }

        ty::Pat(base, ..) => {
            let mut place = place.clone();
            // The valtree of the base type is the same as the valtree of the pattern type.
            // Since the returned valtree does not contain the type or layout, we can just
            // switch to the base type.
            place.layout = ecx.layout_of(*base).unwrap();
            ensure_sufficient_stack(|| const_to_valtree_inner(ecx, &place, num_nodes))
        },


        ty::RawPtr(_, _) => {
            // Not all raw pointers are allowed, as we cannot properly test them for
            // equality at compile-time (see `ptr_guaranteed_cmp`).
            // However we allow those that are just integers in disguise.
            // First, get the pointer. Remember it might be wide!
            let val = ecx.read_immediate(place).unwrap();
            // We could allow wide raw pointers where both sides are integers in the future,
            // but for now we reject them.
            if matches!(val.layout.backend_repr, BackendRepr::ScalarPair(..)) {
                return Err(ValTreeCreationError::NonSupportedType(ty));
            }
            let val = val.to_scalar();
            // We are in the CTFE machine, so ptr-to-int casts will fail.
            // This can only be `Ok` if `val` already is an integer.
            let Ok(val) = val.try_to_scalar_int() else {
                return Err(ValTreeCreationError::NonSupportedType(ty));
            };
            // It's just a ScalarInt!
            Ok(ty::ValTree::from_scalar_int(tcx, val))
        }

        // Technically we could allow function pointers (represented as `ty::Instance`), but this is not guaranteed to
        // agree with runtime equality tests.
        ty::FnPtr(..) => Err(ValTreeCreationError::NonSupportedType(ty)),

        // References are transparent: the valtree is that of the pointee.
        ty::Ref(_, _, _)  => {
            let derefd_place = ecx.deref_pointer(place).unwrap();
            const_to_valtree_inner(ecx, &derefd_place, num_nodes)
        }

        ty::Str | ty::Slice(_) | ty::Array(_, _) => {
            slice_branches(ecx, place, num_nodes)
        }
        // Trait objects are not allowed in type level constants, as we have no concept for
        // resolving their backing type, even if we can do that at const eval time. We may
        // hypothetically be able to allow `dyn StructuralPartialEq` trait objects in the future,
        // but it is unclear if this is useful.
        ty::Dynamic(..) => Err(ValTreeCreationError::NonSupportedType(ty)),

        ty::Tuple(elem_tys) => {
            branches(ecx, place, elem_tys.len(), None, num_nodes)
        }

        ty::Adt(def, _) => {
            if def.is_union() {
                // Unions have no well-defined field to read at type level.
                return Err(ValTreeCreationError::NonSupportedType(ty));
            } else if def.variants().is_empty() {
                bug!("uninhabited types should have errored and never gotten converted to valtree")
            }

            // For structs the discriminant is trivially the single variant; only
            // enums get their variant index prepended (hence `is_enum().then_some`).
            let variant = ecx.read_discriminant(place).unwrap();
            branches(ecx, place, def.variant(variant).fields.len(), def.is_enum().then_some(variant), num_nodes)
        }

        ty::Never
        | ty::Error(_)
        | ty::Foreign(..)
        | ty::Infer(ty::FreshIntTy(_))
        | ty::Infer(ty::FreshFloatTy(_))
        // FIXME(oli-obk): we could look behind opaque types
        | ty::Alias(..)
        | ty::Param(_)
        | ty::Bound(..)
        | ty::Placeholder(..)
        | ty::Infer(_)
        // FIXME(oli-obk): we can probably encode closures just like structs
        | ty::Closure(..)
        | ty::CoroutineClosure(..)
        | ty::Coroutine(..)
        | ty::CoroutineWitness(..)
        | ty::UnsafeBinder(_) => Err(ValTreeCreationError::NonSupportedType(ty)),
    }
}
184
/// Valtrees don't store the `MemPlaceMeta` that all dynamically sized values have in the interpreter.
/// This function reconstructs it.
///
/// For sized layouts this is trivially `MemPlaceMeta::None`; for unsized ones the
/// metadata is the element count of the trailing slice/str, recovered by walking
/// the valtree in lockstep with the type's tail chain.
fn reconstruct_place_meta<'tcx>(
    layout: TyAndLayout<'tcx>,
    valtree: ty::ValTree<'tcx>,
    tcx: TyCtxt<'tcx>,
) -> MemPlaceMeta {
    if layout.is_sized() {
        return MemPlaceMeta::None;
    }

    let mut last_valtree = valtree;
    // Traverse the type, and update `last_valtree` as we go.
    // Each struct level the tail descends through corresponds to taking the last
    // branch of the current valtree (the unsized field is always the last field).
    let tail = tcx.struct_tail_raw(
        layout.ty,
        |ty| ty,
        || {
            let branches = last_valtree.unwrap_branch();
            last_valtree = *branches.last().unwrap();
            debug!(?branches, ?last_valtree);
        },
    );
    // Sanity-check that we got a tail we support.
    match tail.kind() {
        ty::Slice(..) | ty::Str => {}
        _ => bug!("unsized tail of a valtree must be Slice or Str"),
    };

    // Get the number of elements in the unsized field.
    let num_elems = last_valtree.unwrap_branch().len();
    MemPlaceMeta::Meta(Scalar::from_target_usize(num_elems as u64, &tcx))
}
217
218#[instrument(skip(ecx), level = "debug", ret)]
219fn create_valtree_place<'tcx>(
220    ecx: &mut CompileTimeInterpCx<'tcx>,
221    layout: TyAndLayout<'tcx>,
222    valtree: ty::ValTree<'tcx>,
223) -> MPlaceTy<'tcx> {
224    let meta = reconstruct_place_meta(layout, valtree, ecx.tcx.tcx);
225    ecx.allocate_dyn(layout, MemoryKind::Stack, meta).unwrap()
226}
227
228/// Evaluates a constant and turns it into a type-level constant value.
229pub(crate) fn eval_to_valtree<'tcx>(
230    tcx: TyCtxt<'tcx>,
231    typing_env: ty::TypingEnv<'tcx>,
232    cid: GlobalId<'tcx>,
233) -> EvalToValTreeResult<'tcx> {
234    // Const eval always happens in PostAnalysis mode . See the comment in
235    // `InterpCx::new` for more details.
236    debug_assert_eq!(typing_env.typing_mode, ty::TypingMode::PostAnalysis);
237    let const_alloc = tcx.eval_to_allocation_raw(typing_env.as_query_input(cid))?;
238
239    // FIXME Need to provide a span to `eval_to_valtree`
240    let ecx = mk_eval_cx_to_read_const_val(
241        tcx,
242        DUMMY_SP,
243        typing_env,
244        // It is absolutely crucial for soundness that
245        // we do not read from mutable memory.
246        CanAccessMutGlobal::No,
247    );
248    let place = ecx.raw_const_to_mplace(const_alloc).unwrap();
249    debug!(?place);
250
251    let mut num_nodes = 0;
252    let valtree_result = const_to_valtree_inner(&ecx, &place, &mut num_nodes);
253
254    match valtree_result {
255        Ok(valtree) => Ok(Ok(valtree)),
256        Err(err) => {
257            let did = cid.instance.def_id();
258            let global_const_id = cid.display(tcx);
259            let span = tcx.hir().span_if_local(did);
260            match err {
261                ValTreeCreationError::NodesOverflow => {
262                    let handled =
263                        tcx.dcx().emit_err(MaxNumNodesInConstErr { span, global_const_id });
264                    Err(ReportedErrorInfo::allowed_in_infallible(handled).into())
265                }
266                ValTreeCreationError::NonSupportedType(ty) => Ok(Err(ty)),
267            }
268        }
269    }
270}
271
/// Converts a `ValTree` to a `ConstValue`, which is needed after mir
/// construction has finished.
// FIXME(valtrees): Merge `valtree_to_const_value` and `valtree_into_mplace` into one function
#[instrument(skip(tcx), level = "debug", ret)]
pub fn valtree_to_const_value<'tcx>(
    tcx: TyCtxt<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
    cv: ty::Value<'tcx>,
) -> mir::ConstValue<'tcx> {
    // Basic idea: We directly construct `Scalar` values from trivial `ValTree`s
    // (those for constants with type bool, int, uint, float or char).
    // For all other types we create an `MPlace` and fill that by walking
    // the `ValTree` and using `place_projection` and `place_field` to
    // create inner `MPlace`s which are filled recursively.
    // FIXME: Does this need an example?
    match *cv.ty.kind() {
        ty::FnDef(..) => {
            assert!(cv.valtree.is_zst());
            mir::ConstValue::ZeroSized
        }
        // Trivial scalars: the leaf already is the value.
        ty::Bool | ty::Int(_) | ty::Uint(_) | ty::Float(_) | ty::Char | ty::RawPtr(_, _) => {
            mir::ConstValue::Scalar(Scalar::Int(cv.valtree.unwrap_leaf()))
        }
        // Pattern types share their base type's valtree; recurse with the base type.
        ty::Pat(ty, _) => {
            let cv = ty::Value { valtree: cv.valtree, ty };
            valtree_to_const_value(tcx, typing_env, cv)
        }
        ty::Ref(_, inner_ty, _) => {
            let mut ecx =
                mk_eval_cx_to_read_const_val(tcx, DUMMY_SP, typing_env, CanAccessMutGlobal::No);
            let imm = valtree_to_ref(&mut ecx, cv.valtree, inner_ty);
            let imm = ImmTy::from_immediate(
                imm,
                tcx.layout_of(typing_env.as_query_input(cv.ty)).unwrap(),
            );
            op_to_const(&ecx, &imm.into(), /* for diagnostics */ false)
        }
        ty::Tuple(_) | ty::Array(_, _) | ty::Adt(..) => {
            let layout = tcx.layout_of(typing_env.as_query_input(cv.ty)).unwrap();
            if layout.is_zst() {
                // Fast path to avoid some allocations.
                return mir::ConstValue::ZeroSized;
            }
            if layout.backend_repr.is_scalar()
                && (matches!(cv.ty.kind(), ty::Tuple(_))
                    || matches!(cv.ty.kind(), ty::Adt(def, _) if def.is_struct()))
            {
                // A Scalar tuple/struct; we can avoid creating an allocation.
                let branches = cv.valtree.unwrap_branch();
                // Find the non-ZST field. (There can be aligned ZST!)
                for (i, &inner_valtree) in branches.iter().enumerate() {
                    let field = layout.field(&LayoutCx::new(tcx, typing_env), i);
                    if !field.is_zst() {
                        // The scalar repr guarantees exactly one non-ZST field;
                        // its const value is the whole aggregate's value.
                        let cv = ty::Value { valtree: inner_valtree, ty: field.ty };
                        return valtree_to_const_value(tcx, typing_env, cv);
                    }
                }
                bug!("could not find non-ZST field during in {layout:#?}");
            }

            // General case: materialize the valtree in interpreter memory and intern it.
            let mut ecx =
                mk_eval_cx_to_read_const_val(tcx, DUMMY_SP, typing_env, CanAccessMutGlobal::No);

            // Need to create a place for this valtree.
            let place = create_valtree_place(&mut ecx, layout, cv.valtree);

            valtree_into_mplace(&mut ecx, &place, cv.valtree);
            dump_place(&ecx, &place);
            intern_const_alloc_recursive(&mut ecx, InternKind::Constant, &place).unwrap();

            op_to_const(&ecx, &place.into(), /* for diagnostics */ false)
        }
        // These types never get a valtree (see `const_to_valtree_inner`), so reaching
        // this arm indicates a bug upstream.
        ty::Never
        | ty::Error(_)
        | ty::Foreign(..)
        | ty::Infer(ty::FreshIntTy(_))
        | ty::Infer(ty::FreshFloatTy(_))
        | ty::Alias(..)
        | ty::Param(_)
        | ty::Bound(..)
        | ty::Placeholder(..)
        | ty::Infer(_)
        | ty::Closure(..)
        | ty::CoroutineClosure(..)
        | ty::Coroutine(..)
        | ty::CoroutineWitness(..)
        | ty::FnPtr(..)
        | ty::Str
        | ty::Slice(_)
        | ty::Dynamic(..)
        | ty::UnsafeBinder(_) => {
            bug!("no ValTree should have been created for type {:?}", cv.ty.kind())
        }
    }
}
367
368/// Put a valtree into memory and return a reference to that.
369fn valtree_to_ref<'tcx>(
370    ecx: &mut CompileTimeInterpCx<'tcx>,
371    valtree: ty::ValTree<'tcx>,
372    pointee_ty: Ty<'tcx>,
373) -> Immediate {
374    let pointee_place = create_valtree_place(ecx, ecx.layout_of(pointee_ty).unwrap(), valtree);
375    debug!(?pointee_place);
376
377    valtree_into_mplace(ecx, &pointee_place, valtree);
378    dump_place(ecx, &pointee_place);
379    intern_const_alloc_recursive(ecx, InternKind::Constant, &pointee_place).unwrap();
380
381    pointee_place.to_ref(&ecx.tcx)
382}
383
/// Recursively writes the value described by `valtree` into interpreter memory at `place`.
///
/// Inverse of `const_to_valtree_inner`: leaves become scalar writes, branches become
/// field/element writes (with an enum downcast and discriminant write where needed).
/// Panics (via `bug!`) on types for which no valtree should exist.
#[instrument(skip(ecx), level = "debug")]
fn valtree_into_mplace<'tcx>(
    ecx: &mut CompileTimeInterpCx<'tcx>,
    place: &MPlaceTy<'tcx>,
    valtree: ty::ValTree<'tcx>,
) {
    // This will match on valtree and write the value(s) corresponding to the ValTree
    // inside the place recursively.

    let ty = place.layout.ty;

    match ty.kind() {
        ty::FnDef(_, _) => {
            // Zero-sized type, nothing to do.
        }
        ty::Bool | ty::Int(_) | ty::Uint(_) | ty::Float(_) | ty::Char | ty::RawPtr(..) => {
            let scalar_int = valtree.unwrap_leaf();
            debug!("writing trivial valtree {:?} to place {:?}", scalar_int, place);
            ecx.write_immediate(Immediate::Scalar(scalar_int.into()), place).unwrap();
        }
        ty::Ref(_, inner_ty, _) => {
            // Materialize the pointee in a fresh allocation and write the reference.
            let imm = valtree_to_ref(ecx, valtree, *inner_ty);
            debug!(?imm);
            ecx.write_immediate(imm, place).unwrap();
        }
        ty::Adt(_, _) | ty::Tuple(_) | ty::Array(_, _) | ty::Str | ty::Slice(_) => {
            let branches = valtree.unwrap_branch();

            // Need to downcast place for enums
            let (place_adjusted, branches, variant_idx) = match ty.kind() {
                ty::Adt(def, _) if def.is_enum() => {
                    // First element of valtree corresponds to variant
                    let scalar_int = branches[0].unwrap_leaf();
                    let variant_idx = VariantIdx::from_u32(scalar_int.to_u32());
                    let variant = def.variant(variant_idx);
                    debug!(?variant);

                    (
                        ecx.project_downcast(place, variant_idx).unwrap(),
                        // Skip the variant-index leaf; the rest are the fields.
                        &branches[1..],
                        Some(variant_idx),
                    )
                }
                _ => (place.clone(), branches, None),
            };
            debug!(?place_adjusted, ?branches);

            // Create the places (by indexing into `place`) for the fields and fill
            // them recursively
            for (i, inner_valtree) in branches.iter().enumerate() {
                debug!(?i, ?inner_valtree);

                let place_inner = match ty.kind() {
                    // Sequences index into the original (unadjusted) place.
                    ty::Str | ty::Slice(_) | ty::Array(..) => {
                        ecx.project_index(place, i as u64).unwrap()
                    }
                    _ => ecx.project_field(&place_adjusted, i).unwrap(),
                };

                debug!(?place_inner);
                valtree_into_mplace(ecx, &place_inner, *inner_valtree);
                dump_place(ecx, &place_inner);
            }

            debug!("dump of place_adjusted:");
            dump_place(ecx, &place_adjusted);

            if let Some(variant_idx) = variant_idx {
                // don't forget filling the place with the discriminant of the enum
                ecx.write_discriminant(variant_idx, place).unwrap();
            }

            debug!("dump of place after writing discriminant:");
            dump_place(ecx, place);
        }
        _ => bug!("shouldn't have created a ValTree for {:?}", ty),
    }
}
462
463fn dump_place<'tcx>(ecx: &CompileTimeInterpCx<'tcx>, place: &MPlaceTy<'tcx>) {
464    trace!("{:?}", ecx.dump_place(&PlaceTy::from(place.clone())));
465}