rustc_const_eval/const_eval/
valtrees.rs

1use rustc_abi::{BackendRepr, FieldIdx, VariantIdx};
2use rustc_data_structures::stack::ensure_sufficient_stack;
3use rustc_middle::mir::interpret::{EvalToValTreeResult, GlobalId, ValTreeCreationError};
4use rustc_middle::traits::ObligationCause;
5use rustc_middle::ty::layout::{LayoutCx, TyAndLayout};
6use rustc_middle::ty::{self, Ty, TyCtxt};
7use rustc_middle::{bug, mir};
8use rustc_span::DUMMY_SP;
9use tracing::{debug, instrument, trace};
10
11use super::VALTREE_MAX_NODES;
12use super::eval_queries::{mk_eval_cx_to_read_const_val, op_to_const};
13use super::machine::CompileTimeInterpCx;
14use crate::const_eval::CanAccessMutGlobal;
15use crate::interpret::{
16    ImmTy, Immediate, InternKind, MPlaceTy, MemPlaceMeta, MemoryKind, PlaceTy, Projectable, Scalar,
17    intern_const_alloc_recursive,
18};
19
20#[instrument(skip(ecx), level = "debug")]
21fn branches<'tcx>(
22    ecx: &CompileTimeInterpCx<'tcx>,
23    place: &MPlaceTy<'tcx>,
24    field_count: usize,
25    variant: Option<VariantIdx>,
26    num_nodes: &mut usize,
27) -> EvalToValTreeResult<'tcx> {
28    let place = match variant {
29        Some(variant) => ecx.project_downcast(place, variant).unwrap(),
30        None => place.clone(),
31    };
32    debug!(?place);
33
34    let mut branches = Vec::with_capacity(field_count + variant.is_some() as usize);
35
36    // For enums, we prepend their variant index before the variant's fields so we can figure out
37    // the variant again when just seeing a valtree.
38    if let Some(variant) = variant {
39        branches.push(ty::Const::new_value(
40            *ecx.tcx,
41            ty::ValTree::from_scalar_int(*ecx.tcx, variant.as_u32().into()),
42            ecx.tcx.types.u32,
43        ));
44    }
45
46    for i in 0..field_count {
47        let field = ecx.project_field(&place, FieldIdx::from_usize(i)).unwrap();
48        let valtree = const_to_valtree_inner(ecx, &field, num_nodes)?;
49        branches.push(ty::Const::new_value(*ecx.tcx, valtree, field.layout.ty));
50    }
51
52    // Have to account for ZSTs here
53    if branches.len() == 0 {
54        *num_nodes += 1;
55    }
56
57    Ok(ty::ValTree::from_branches(*ecx.tcx, branches))
58}
59
60#[instrument(skip(ecx), level = "debug")]
61fn slice_branches<'tcx>(
62    ecx: &CompileTimeInterpCx<'tcx>,
63    place: &MPlaceTy<'tcx>,
64    num_nodes: &mut usize,
65) -> EvalToValTreeResult<'tcx> {
66    let n = place.len(ecx).unwrap_or_else(|_| panic!("expected to use len of place {place:?}"));
67
68    let mut elems = Vec::with_capacity(n as usize);
69    for i in 0..n {
70        let place_elem = ecx.project_index(place, i).unwrap();
71        let valtree = const_to_valtree_inner(ecx, &place_elem, num_nodes)?;
72        elems.push(ty::Const::new_value(*ecx.tcx, valtree, place_elem.layout.ty));
73    }
74
75    Ok(ty::ValTree::from_branches(*ecx.tcx, elems))
76}
77
/// Recursively converts the value at `place` into a valtree.
///
/// `num_nodes` counts the nodes created so far across the whole conversion;
/// once it reaches `VALTREE_MAX_NODES` the conversion aborts with
/// `NodesOverflow`. Types that valtrees cannot represent (unions, trait
/// objects, function pointers, wide raw pointers, closures, ...) yield
/// `NonSupportedType`.
#[instrument(skip(ecx), level = "debug")]
fn const_to_valtree_inner<'tcx>(
    ecx: &CompileTimeInterpCx<'tcx>,
    place: &MPlaceTy<'tcx>,
    num_nodes: &mut usize,
) -> EvalToValTreeResult<'tcx> {
    let tcx = *ecx.tcx;
    let ty = place.layout.ty;
    debug!("ty kind: {:?}", ty.kind());

    // Enforce the global node budget before descending any further.
    if *num_nodes >= VALTREE_MAX_NODES {
        return Err(ValTreeCreationError::NodesOverflow);
    }

    match ty.kind() {
        // `FnDef` is a ZST; it still counts as one node.
        ty::FnDef(..) => {
            *num_nodes += 1;
            Ok(ty::ValTree::zst(tcx))
        }
        ty::Bool | ty::Int(_) | ty::Uint(_) | ty::Float(_) | ty::Char => {
            let val = ecx.read_immediate(place).report_err()?;
            let val = val.to_scalar_int().unwrap();
            *num_nodes += 1;

            Ok(ty::ValTree::from_scalar_int(tcx, val))
        }

        ty::Pat(base, ..) => {
            let mut place = place.clone();
            // The valtree of the base type is the same as the valtree of the pattern type.
            // Since the returned valtree does not contain the type or layout, we can just
            // switch to the base type.
            place.layout = ecx.layout_of(*base).unwrap();
            ensure_sufficient_stack(|| const_to_valtree_inner(ecx, &place, num_nodes))
        },


        ty::RawPtr(_, _) => {
            // Not all raw pointers are allowed, as we cannot properly test them for
            // equality at compile-time (see `ptr_guaranteed_cmp`).
            // However we allow those that are just integers in disguise.
            // First, get the pointer. Remember it might be wide!
            let val = ecx.read_immediate(place).report_err()?;
            // We could allow wide raw pointers where both sides are integers in the future,
            // but for now we reject them.
            if matches!(val.layout.backend_repr, BackendRepr::ScalarPair(..)) {
                return Err(ValTreeCreationError::NonSupportedType(ty));
            }
            let val = val.to_scalar();
            // We are in the CTFE machine, so ptr-to-int casts will fail.
            // This can only be `Ok` if `val` already is an integer.
            let Ok(val) = val.try_to_scalar_int() else {
                return Err(ValTreeCreationError::NonSupportedType(ty));
            };
            // It's just a ScalarInt!
            Ok(ty::ValTree::from_scalar_int(tcx, val))
        }

        // Technically we could allow function pointers (represented as `ty::Instance`), but this is not guaranteed to
        // agree with runtime equality tests.
        ty::FnPtr(..) => Err(ValTreeCreationError::NonSupportedType(ty)),

        // References are converted by converting their pointee; the indirection
        // itself is not represented in the valtree.
        ty::Ref(_, _, _)  => {
            let derefd_place = ecx.deref_pointer(place).report_err()?;
            const_to_valtree_inner(ecx, &derefd_place, num_nodes)
        }

        ty::Str | ty::Slice(_) | ty::Array(_, _) => {
            slice_branches(ecx, place, num_nodes)
        }
        // Trait objects are not allowed in type level constants, as we have no concept for
        // resolving their backing type, even if we can do that at const eval time. We may
        // hypothetically be able to allow `dyn StructuralPartialEq` trait objects in the future,
        // but it is unclear if this is useful.
        ty::Dynamic(..) => Err(ValTreeCreationError::NonSupportedType(ty)),

        ty::Tuple(elem_tys) => {
            branches(ecx, place, elem_tys.len(), None, num_nodes)
        }

        ty::Adt(def, _) => {
            if def.is_union() {
                return Err(ValTreeCreationError::NonSupportedType(ty));
            } else if def.variants().is_empty() {
                bug!("uninhabited types should have errored and never gotten converted to valtree")
            }

            // For structs the discriminant is trivially variant 0; only enums
            // get their variant index prepended (hence `is_enum().then_some`).
            let variant = ecx.read_discriminant(place).report_err()?;
            branches(ecx, place, def.variant(variant).fields.len(), def.is_enum().then_some(variant), num_nodes)
        }

        ty::Never
        | ty::Error(_)
        | ty::Foreign(..)
        | ty::Infer(ty::FreshIntTy(_))
        | ty::Infer(ty::FreshFloatTy(_))
        // FIXME(oli-obk): we could look behind opaque types
        | ty::Alias(..)
        | ty::Param(_)
        | ty::Bound(..)
        | ty::Placeholder(..)
        | ty::Infer(_)
        // FIXME(oli-obk): we can probably encode closures just like structs
        | ty::Closure(..)
        | ty::CoroutineClosure(..)
        | ty::Coroutine(..)
        | ty::CoroutineWitness(..)
        | ty::UnsafeBinder(_) => Err(ValTreeCreationError::NonSupportedType(ty)),
    }
}
188
/// Valtrees don't store the `MemPlaceMeta` that all dynamically sized values have in the interpreter.
/// This function reconstructs it.
fn reconstruct_place_meta<'tcx>(
    layout: TyAndLayout<'tcx>,
    valtree: ty::ValTree<'tcx>,
    tcx: TyCtxt<'tcx>,
) -> MemPlaceMeta {
    // Sized values carry no metadata.
    if layout.is_sized() {
        return MemPlaceMeta::None;
    }

    let mut last_valtree = valtree;
    // Traverse the type, and update `last_valtree` as we go: the unsized tail of
    // an aggregate is always its last field, which corresponds to the last
    // branch of the current valtree node.
    let tail = tcx.struct_tail_raw(
        layout.ty,
        &ObligationCause::dummy(),
        |ty| ty,
        || {
            let branches = last_valtree.to_branch();
            last_valtree = branches.last().unwrap().to_value().valtree;
            debug!(?branches, ?last_valtree);
        },
    );
    // Sanity-check that we got a tail we support.
    match tail.kind() {
        ty::Slice(..) | ty::Str => {}
        _ => bug!("unsized tail of a valtree must be Slice or Str"),
    };

    // Get the number of elements in the unsized field: the slice/str metadata is
    // simply its element count.
    let num_elems = last_valtree.to_branch().len();
    MemPlaceMeta::Meta(Scalar::from_target_usize(num_elems as u64, &tcx))
}
222
223#[instrument(skip(ecx), level = "debug", ret)]
224fn create_valtree_place<'tcx>(
225    ecx: &mut CompileTimeInterpCx<'tcx>,
226    layout: TyAndLayout<'tcx>,
227    valtree: ty::ValTree<'tcx>,
228) -> MPlaceTy<'tcx> {
229    let meta = reconstruct_place_meta(layout, valtree, ecx.tcx.tcx);
230    ecx.allocate_dyn(layout, MemoryKind::Stack, meta).unwrap()
231}
232
233/// Evaluates a constant and turns it into a type-level constant value.
234pub(crate) fn eval_to_valtree<'tcx>(
235    tcx: TyCtxt<'tcx>,
236    typing_env: ty::TypingEnv<'tcx>,
237    cid: GlobalId<'tcx>,
238) -> EvalToValTreeResult<'tcx> {
239    // Const eval always happens in PostAnalysis mode . See the comment in
240    // `InterpCx::new` for more details.
241    debug_assert_eq!(typing_env.typing_mode, ty::TypingMode::PostAnalysis);
242    let const_alloc = tcx.eval_to_allocation_raw(typing_env.as_query_input(cid))?;
243
244    // FIXME Need to provide a span to `eval_to_valtree`
245    let ecx = mk_eval_cx_to_read_const_val(
246        tcx,
247        DUMMY_SP,
248        typing_env,
249        // It is absolutely crucial for soundness that
250        // we do not read from mutable memory.
251        CanAccessMutGlobal::No,
252    );
253    let place = ecx.raw_const_to_mplace(const_alloc).unwrap();
254    debug!(?place);
255
256    let mut num_nodes = 0;
257    const_to_valtree_inner(&ecx, &place, &mut num_nodes)
258}
259
260/// Converts a `ValTree` to a `ConstValue`, which is needed after mir
261/// construction has finished.
262// FIXME(valtrees): Merge `valtree_to_const_value` and `valtree_into_mplace` into one function
263#[instrument(skip(tcx), level = "debug", ret)]
264pub fn valtree_to_const_value<'tcx>(
265    tcx: TyCtxt<'tcx>,
266    typing_env: ty::TypingEnv<'tcx>,
267    cv: ty::Value<'tcx>,
268) -> mir::ConstValue {
269    // Basic idea: We directly construct `Scalar` values from trivial `ValTree`s
270    // (those for constants with type bool, int, uint, float or char).
271    // For all other types we create an `MPlace` and fill that by walking
272    // the `ValTree` and using `place_projection` and `place_field` to
273    // create inner `MPlace`s which are filled recursively.
274    // FIXME: Does this need an example?
275    match *cv.ty.kind() {
276        ty::FnDef(..) => {
277            assert!(cv.valtree.is_zst());
278            mir::ConstValue::ZeroSized
279        }
280        ty::Bool | ty::Int(_) | ty::Uint(_) | ty::Float(_) | ty::Char | ty::RawPtr(_, _) => {
281            mir::ConstValue::Scalar(Scalar::Int(cv.to_leaf()))
282        }
283        ty::Pat(ty, _) => {
284            let cv = ty::Value { valtree: cv.valtree, ty };
285            valtree_to_const_value(tcx, typing_env, cv)
286        }
287        ty::Ref(_, inner_ty, _) => {
288            let mut ecx =
289                mk_eval_cx_to_read_const_val(tcx, DUMMY_SP, typing_env, CanAccessMutGlobal::No);
290            let imm = valtree_to_ref(&mut ecx, cv.valtree, inner_ty);
291            let imm = ImmTy::from_immediate(
292                imm,
293                tcx.layout_of(typing_env.as_query_input(cv.ty)).unwrap(),
294            );
295            op_to_const(&ecx, &imm.into(), /* for diagnostics */ false)
296        }
297        ty::Tuple(_) | ty::Array(_, _) | ty::Adt(..) => {
298            let layout = tcx.layout_of(typing_env.as_query_input(cv.ty)).unwrap();
299            if layout.is_zst() {
300                // Fast path to avoid some allocations.
301                return mir::ConstValue::ZeroSized;
302            }
303            if layout.backend_repr.is_scalar()
304                && (matches!(cv.ty.kind(), ty::Tuple(_))
305                    || matches!(cv.ty.kind(), ty::Adt(def, _) if def.is_struct()))
306            {
307                // A Scalar tuple/struct; we can avoid creating an allocation.
308                let branches = cv.to_branch();
309                // Find the non-ZST field. (There can be aligned ZST!)
310                for (i, &inner_valtree) in branches.iter().enumerate() {
311                    let field = layout.field(&LayoutCx::new(tcx, typing_env), i);
312                    if !field.is_zst() {
313                        let cv =
314                            ty::Value { valtree: inner_valtree.to_value().valtree, ty: field.ty };
315                        return valtree_to_const_value(tcx, typing_env, cv);
316                    }
317                }
318                bug!("could not find non-ZST field during in {layout:#?}");
319            }
320
321            let mut ecx =
322                mk_eval_cx_to_read_const_val(tcx, DUMMY_SP, typing_env, CanAccessMutGlobal::No);
323
324            // Need to create a place for this valtree.
325            let place = create_valtree_place(&mut ecx, layout, cv.valtree);
326
327            valtree_into_mplace(&mut ecx, &place, cv.valtree);
328            dump_place(&ecx, &place);
329            intern_const_alloc_recursive(&mut ecx, InternKind::Constant, &place).unwrap();
330
331            op_to_const(&ecx, &place.into(), /* for diagnostics */ false)
332        }
333        ty::Never
334        | ty::Error(_)
335        | ty::Foreign(..)
336        | ty::Infer(ty::FreshIntTy(_))
337        | ty::Infer(ty::FreshFloatTy(_))
338        | ty::Alias(..)
339        | ty::Param(_)
340        | ty::Bound(..)
341        | ty::Placeholder(..)
342        | ty::Infer(_)
343        | ty::Closure(..)
344        | ty::CoroutineClosure(..)
345        | ty::Coroutine(..)
346        | ty::CoroutineWitness(..)
347        | ty::FnPtr(..)
348        | ty::Str
349        | ty::Slice(_)
350        | ty::Dynamic(..)
351        | ty::UnsafeBinder(_) => {
352            bug!("no ValTree should have been created for type {:?}", cv.ty.kind())
353        }
354    }
355}
356
357/// Put a valtree into memory and return a reference to that.
358fn valtree_to_ref<'tcx>(
359    ecx: &mut CompileTimeInterpCx<'tcx>,
360    valtree: ty::ValTree<'tcx>,
361    pointee_ty: Ty<'tcx>,
362) -> Immediate {
363    let pointee_place = create_valtree_place(ecx, ecx.layout_of(pointee_ty).unwrap(), valtree);
364    debug!(?pointee_place);
365
366    valtree_into_mplace(ecx, &pointee_place, valtree);
367    dump_place(ecx, &pointee_place);
368    intern_const_alloc_recursive(ecx, InternKind::Constant, &pointee_place).unwrap();
369
370    pointee_place.to_ref(&ecx.tcx)
371}
372
/// Writes the value(s) encoded in `valtree` into `place`, recursing through
/// aggregates and references. `place.layout.ty` must be a type for which a
/// valtree can exist; anything else is a bug.
#[instrument(skip(ecx), level = "debug")]
fn valtree_into_mplace<'tcx>(
    ecx: &mut CompileTimeInterpCx<'tcx>,
    place: &MPlaceTy<'tcx>,
    valtree: ty::ValTree<'tcx>,
) {
    // This will match on valtree and write the value(s) corresponding to the ValTree
    // inside the place recursively.

    let ty = place.layout.ty;

    match ty.kind() {
        ty::FnDef(_, _) => {
            // Zero-sized type, nothing to do.
        }
        ty::Bool | ty::Int(_) | ty::Uint(_) | ty::Float(_) | ty::Char | ty::RawPtr(..) => {
            // Leaf: write the scalar directly.
            let scalar_int = valtree.to_leaf();
            debug!("writing trivial valtree {:?} to place {:?}", scalar_int, place);
            ecx.write_immediate(Immediate::Scalar(scalar_int.into()), place).unwrap();
        }
        ty::Ref(_, inner_ty, _) => {
            // Materialize the pointee into fresh const memory and write a
            // reference to it.
            let imm = valtree_to_ref(ecx, valtree, *inner_ty);
            debug!(?imm);
            ecx.write_immediate(imm, place).unwrap();
        }
        ty::Adt(_, _) | ty::Tuple(_) | ty::Array(_, _) | ty::Str | ty::Slice(_) => {
            let branches = valtree.to_branch();

            // Need to downcast place for enums
            let (place_adjusted, branches, variant_idx) = match ty.kind() {
                ty::Adt(def, _) if def.is_enum() => {
                    // First element of valtree corresponds to variant
                    let scalar_int = branches[0].to_leaf();
                    let variant_idx = VariantIdx::from_u32(scalar_int.to_u32());
                    let variant = def.variant(variant_idx);
                    debug!(?variant);

                    (
                        ecx.project_downcast(place, variant_idx).unwrap(),
                        // Skip the variant-index branch; the rest are the fields.
                        &branches[1..],
                        Some(variant_idx),
                    )
                }
                _ => (place.clone(), branches, None),
            };
            debug!(?place_adjusted, ?branches);

            // Create the places (by indexing into `place`) for the fields and fill
            // them recursively
            for (i, inner_valtree) in branches.iter().enumerate() {
                debug!(?i, ?inner_valtree);

                let place_inner = match ty.kind() {
                    // Sequences index into the original place; aggregates
                    // project a field of the (possibly downcast) place.
                    ty::Str | ty::Slice(_) | ty::Array(..) => {
                        ecx.project_index(place, i as u64).unwrap()
                    }
                    _ => ecx.project_field(&place_adjusted, FieldIdx::from_usize(i)).unwrap(),
                };

                debug!(?place_inner);
                valtree_into_mplace(ecx, &place_inner, inner_valtree.to_value().valtree);
                dump_place(ecx, &place_inner);
            }

            debug!("dump of place_adjusted:");
            dump_place(ecx, &place_adjusted);

            if let Some(variant_idx) = variant_idx {
                // don't forget filling the place with the discriminant of the enum
                ecx.write_discriminant(variant_idx, place).unwrap();
            }

            debug!("dump of place after writing discriminant:");
            dump_place(ecx, place);
        }
        _ => bug!("shouldn't have created a ValTree for {:?}", ty),
    }
}
451
452fn dump_place<'tcx>(ecx: &CompileTimeInterpCx<'tcx>, place: &MPlaceTy<'tcx>) {
453    trace!("{:?}", ecx.dump_place(&PlaceTy::from(place.clone())));
454}