rustc_const_eval/check_consts/
check.rs

1//! The `Visitor` responsible for actually checking a `mir::Body` for invalid operations.
2
3use std::assert_matches::assert_matches;
4use std::borrow::Cow;
5use std::mem;
6use std::num::NonZero;
7use std::ops::Deref;
8
9use rustc_errors::{Diag, ErrorGuaranteed};
10use rustc_hir::def::DefKind;
11use rustc_hir::def_id::DefId;
12use rustc_hir::{self as hir, LangItem};
13use rustc_index::bit_set::DenseBitSet;
14use rustc_infer::infer::TyCtxtInferExt;
15use rustc_middle::mir::visit::Visitor;
16use rustc_middle::mir::*;
17use rustc_middle::span_bug;
18use rustc_middle::ty::adjustment::PointerCoercion;
19use rustc_middle::ty::{self, Ty, TypeVisitableExt};
20use rustc_mir_dataflow::Analysis;
21use rustc_mir_dataflow::impls::{MaybeStorageLive, always_storage_live_locals};
22use rustc_span::{Span, Symbol, sym};
23use rustc_trait_selection::traits::{
24    Obligation, ObligationCause, ObligationCauseCode, ObligationCtxt,
25};
26use tracing::{instrument, trace};
27
28use super::ops::{self, NonConstOp, Status};
29use super::qualifs::{self, HasMutInterior, NeedsDrop, NeedsNonConstDrop};
30use super::resolver::FlowSensitiveAnalysis;
31use super::{ConstCx, Qualif};
32use crate::check_consts::is_fn_or_trait_safe_to_expose_on_stable;
33use crate::errors;
34
/// A dataflow results cursor over a flow-sensitive qualif analysis `Q` for one MIR body.
type QualifResults<'mir, 'tcx, Q> =
    rustc_mir_dataflow::ResultsCursor<'mir, 'tcx, FlowSensitiveAnalysis<'mir, 'tcx, Q>>;
37
/// Whether the const-conditions of a conditionally-const callee were satisfied at a call site.
/// A dedicated enum (rather than a bare `bool`) keeps call sites self-documenting.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
enum ConstConditionsHold {
    /// All const-conditions were proven to hold for this call.
    Yes,
    /// At least one condition failed to hold; a corresponding error should already have been
    /// reported during HIR checking (see `revalidate_conditional_constness`).
    No,
}
43
/// Lazily-initialized dataflow cursors for the qualif analyses. Each cursor is `None` until the
/// corresponding query method on `Qualifs` is first called for this body.
#[derive(Default)]
pub(crate) struct Qualifs<'mir, 'tcx> {
    /// Cursor for the `HasMutInterior` analysis, if it has been run.
    has_mut_interior: Option<QualifResults<'mir, 'tcx, HasMutInterior>>,
    /// Cursor for the `NeedsDrop` analysis, if it has been run.
    needs_drop: Option<QualifResults<'mir, 'tcx, NeedsDrop>>,
    /// Cursor for the `NeedsNonConstDrop` analysis, if it has been run.
    needs_non_const_drop: Option<QualifResults<'mir, 'tcx, NeedsNonConstDrop>>,
}
50
51impl<'mir, 'tcx> Qualifs<'mir, 'tcx> {
52    /// Returns `true` if `local` is `NeedsDrop` at the given `Location`.
53    ///
54    /// Only updates the cursor if absolutely necessary
55    pub(crate) fn needs_drop(
56        &mut self,
57        ccx: &'mir ConstCx<'mir, 'tcx>,
58        local: Local,
59        location: Location,
60    ) -> bool {
61        let ty = ccx.body.local_decls[local].ty;
62        // Peeking into opaque types causes cycles if the current function declares said opaque
63        // type. Thus we avoid short circuiting on the type and instead run the more expensive
64        // analysis that looks at the actual usage within this function
65        if !ty.has_opaque_types() && !NeedsDrop::in_any_value_of_ty(ccx, ty) {
66            return false;
67        }
68
69        let needs_drop = self.needs_drop.get_or_insert_with(|| {
70            let ConstCx { tcx, body, .. } = *ccx;
71
72            FlowSensitiveAnalysis::new(NeedsDrop, ccx)
73                .iterate_to_fixpoint(tcx, body, None)
74                .into_results_cursor(body)
75        });
76
77        needs_drop.seek_before_primary_effect(location);
78        needs_drop.get().contains(local)
79    }
80
81    /// Returns `true` if `local` is `NeedsNonConstDrop` at the given `Location`.
82    ///
83    /// Only updates the cursor if absolutely necessary
84    pub(crate) fn needs_non_const_drop(
85        &mut self,
86        ccx: &'mir ConstCx<'mir, 'tcx>,
87        local: Local,
88        location: Location,
89    ) -> bool {
90        let ty = ccx.body.local_decls[local].ty;
91        // Peeking into opaque types causes cycles if the current function declares said opaque
92        // type. Thus we avoid short circuiting on the type and instead run the more expensive
93        // analysis that looks at the actual usage within this function
94        if !ty.has_opaque_types() && !NeedsNonConstDrop::in_any_value_of_ty(ccx, ty) {
95            return false;
96        }
97
98        let needs_non_const_drop = self.needs_non_const_drop.get_or_insert_with(|| {
99            let ConstCx { tcx, body, .. } = *ccx;
100
101            FlowSensitiveAnalysis::new(NeedsNonConstDrop, ccx)
102                .iterate_to_fixpoint(tcx, body, None)
103                .into_results_cursor(body)
104        });
105
106        needs_non_const_drop.seek_before_primary_effect(location);
107        needs_non_const_drop.get().contains(local)
108    }
109
110    /// Returns `true` if `local` is `HasMutInterior` at the given `Location`.
111    ///
112    /// Only updates the cursor if absolutely necessary.
113    fn has_mut_interior(
114        &mut self,
115        ccx: &'mir ConstCx<'mir, 'tcx>,
116        local: Local,
117        location: Location,
118    ) -> bool {
119        let ty = ccx.body.local_decls[local].ty;
120        // Peeking into opaque types causes cycles if the current function declares said opaque
121        // type. Thus we avoid short circuiting on the type and instead run the more expensive
122        // analysis that looks at the actual usage within this function
123        if !ty.has_opaque_types() && !HasMutInterior::in_any_value_of_ty(ccx, ty) {
124            return false;
125        }
126
127        let has_mut_interior = self.has_mut_interior.get_or_insert_with(|| {
128            let ConstCx { tcx, body, .. } = *ccx;
129
130            FlowSensitiveAnalysis::new(HasMutInterior, ccx)
131                .iterate_to_fixpoint(tcx, body, None)
132                .into_results_cursor(body)
133        });
134
135        has_mut_interior.seek_before_primary_effect(location);
136        has_mut_interior.get().contains(local)
137    }
138
139    fn in_return_place(
140        &mut self,
141        ccx: &'mir ConstCx<'mir, 'tcx>,
142        tainted_by_errors: Option<ErrorGuaranteed>,
143    ) -> ConstQualifs {
144        // FIXME(explicit_tail_calls): uhhhh I think we can return without return now, does it change anything
145
146        // Find the `Return` terminator if one exists.
147        //
148        // If no `Return` terminator exists, this MIR is divergent. Just return the conservative
149        // qualifs for the return type.
150        let return_block = ccx
151            .body
152            .basic_blocks
153            .iter_enumerated()
154            .find(|(_, block)| matches!(block.terminator().kind, TerminatorKind::Return))
155            .map(|(bb, _)| bb);
156
157        let Some(return_block) = return_block else {
158            return qualifs::in_any_value_of_ty(ccx, ccx.body.return_ty(), tainted_by_errors);
159        };
160
161        let return_loc = ccx.body.terminator_loc(return_block);
162
163        ConstQualifs {
164            needs_drop: self.needs_drop(ccx, RETURN_PLACE, return_loc),
165            needs_non_const_drop: self.needs_non_const_drop(ccx, RETURN_PLACE, return_loc),
166            has_mut_interior: self.has_mut_interior(ccx, RETURN_PLACE, return_loc),
167            tainted_by_errors,
168        }
169    }
170}
171
pub struct Checker<'mir, 'tcx> {
    /// The context (body, tcx, const kind) for the item being checked.
    ccx: &'mir ConstCx<'mir, 'tcx>,
    /// Lazily-initialized dataflow cursors used to answer qualif queries for this body.
    qualifs: Qualifs<'mir, 'tcx>,

    /// The span of the current statement.
    span: Span,

    /// A set that stores for each local whether it is "transient", i.e. guaranteed to be dead
    /// when this MIR body returns.
    transient_locals: Option<DenseBitSet<Local>>,

    /// The most recently emitted "primary" error, if any (see `check_op_spanned`).
    error_emitted: Option<ErrorGuaranteed>,
    /// Buffered "secondary" errors; `check_body` emits them only when no primary error fired,
    /// and cancels them otherwise.
    secondary_errors: Vec<Diag<'tcx>>,
}
186
impl<'mir, 'tcx> Deref for Checker<'mir, 'tcx> {
    type Target = ConstCx<'mir, 'tcx>;

    /// Deref to the underlying `ConstCx` so its methods (e.g. `const_kind`, `def_id`) can be
    /// called directly on the `Checker`.
    fn deref(&self) -> &Self::Target {
        self.ccx
    }
}
194
impl<'mir, 'tcx> Checker<'mir, 'tcx> {
    /// Creates a new checker; the "current span" starts out as the whole body's span.
    pub fn new(ccx: &'mir ConstCx<'mir, 'tcx>) -> Self {
        Checker {
            span: ccx.body.span,
            ccx,
            qualifs: Default::default(),
            transient_locals: None,
            error_emitted: None,
            secondary_errors: Vec::new(),
        }
    }

    /// Const-checks the entire body, then flushes or cancels the buffered secondary errors.
    pub fn check_body(&mut self) {
        let ConstCx { tcx, body, .. } = *self.ccx;
        let def_id = self.ccx.def_id();

        // `async` functions cannot be `const fn`. This is checked during AST lowering, so there's
        // no need to emit duplicate errors here.
        if self.ccx.is_async() || body.coroutine.is_some() {
            tcx.dcx().span_delayed_bug(body.span, "`async` functions cannot be `const fn`");
            return;
        }

        if !tcx.has_attr(def_id, sym::rustc_do_not_const_check) {
            self.visit_body(body);
        }

        // If we got through const-checking without emitting any "primary" errors, emit any
        // "secondary" errors if they occurred. Otherwise, cancel the "secondary" errors.
        let secondary_errors = mem::take(&mut self.secondary_errors);
        if self.error_emitted.is_none() {
            for error in secondary_errors {
                self.error_emitted = Some(error.emit());
            }
        } else {
            // A primary error was already emitted, so compilation is failing anyway; the
            // buffered secondary diagnostics would just be noise.
            assert!(self.tcx.dcx().has_errors().is_some());
            for error in secondary_errors {
                error.cancel();
            }
        }
    }

    /// Returns `true` if `local` is guaranteed to be `StorageDead` at every `Return` terminator.
    /// The transient-local set is computed once for all locals and cached.
    fn local_is_transient(&mut self, local: Local) -> bool {
        let ccx = self.ccx;
        self.transient_locals
            .get_or_insert_with(|| {
                // A local is "transient" if it is guaranteed dead at all `Return`.
                // So first compute the set of "maybe live" locals at each program point.
                let always_live_locals = &always_storage_live_locals(&ccx.body);
                let mut maybe_storage_live =
                    MaybeStorageLive::new(Cow::Borrowed(always_live_locals))
                        .iterate_to_fixpoint(ccx.tcx, &ccx.body, None)
                        .into_results_cursor(&ccx.body);

                // And then check all `Return` in the MIR, and if a local is "maybe live" at a
                // `Return` then it is definitely not transient.
                let mut transient = DenseBitSet::new_filled(ccx.body.local_decls.len());
                // Make sure to only visit reachable blocks, the dataflow engine can ICE otherwise.
                for (bb, data) in traversal::reachable(&ccx.body) {
                    if matches!(data.terminator().kind, TerminatorKind::Return) {
                        let location = ccx.body.terminator_loc(bb);
                        maybe_storage_live.seek_after_primary_effect(location);
                        // If a local may be live here, it is definitely not transient.
                        transient.subtract(maybe_storage_live.get());
                    }
                }

                transient
            })
            .contains(local)
    }

    /// Returns the qualifs of the return place, tainted by any primary error emitted so far.
    pub fn qualifs_in_return_place(&mut self) -> ConstQualifs {
        self.qualifs.in_return_place(self.ccx, self.error_emitted)
    }

    /// Emits an error if an expression cannot be evaluated in the current context.
    pub fn check_op(&mut self, op: impl NonConstOp<'tcx>) {
        self.check_op_spanned(op, self.span);
    }

    /// Emits an error at the given `span` if an expression cannot be evaluated in the current
    /// context.
    pub fn check_op_spanned<O: NonConstOp<'tcx>>(&mut self, op: O, span: Span) {
        let gate = match op.status_in_item(self.ccx) {
            // The operation is gated but the gate is enabled (or was already checked elsewhere):
            // usually allowed, but recursively-const-stable functions need extra scrutiny.
            Status::Unstable {
                gate,
                safe_to_expose_on_stable,
                is_function_call,
                gate_already_checked,
            } if gate_already_checked || self.tcx.features().enabled(gate) => {
                if gate_already_checked {
                    assert!(
                        !safe_to_expose_on_stable,
                        "setting `gate_already_checked` without `safe_to_expose_on_stable` makes no sense"
                    );
                }
                // Generally this is allowed since the feature gate is enabled -- except
                // if this function wants to be safe-to-expose-on-stable.
                if !safe_to_expose_on_stable
                    && self.enforce_recursive_const_stability()
                    && !super::rustc_allow_const_fn_unstable(self.tcx, self.def_id(), gate)
                {
                    emit_unstable_in_stable_exposed_error(self.ccx, span, gate, is_function_call);
                }

                return;
            }

            Status::Unstable { gate, .. } => Some(gate),
            Status::Forbidden => None,
        };

        // `-Zunleash-the-miri-inside-of-you` bypasses all const checks (with a warning).
        if self.tcx.sess.opts.unstable_opts.unleash_the_miri_inside_of_you {
            self.tcx.sess.miri_unleashed_feature(span, gate);
            return;
        }

        let err = op.build_error(self.ccx, span);
        assert!(err.is_error());

        match op.importance() {
            ops::DiagImportance::Primary => {
                let reported = err.emit();
                self.error_emitted = Some(reported);
            }

            ops::DiagImportance::Secondary => {
                // Buffer the error; `check_body` emits buffered secondary errors only when no
                // primary error was emitted, and cancels them otherwise.
                self.secondary_errors.push(err);
                self.tcx.dcx().span_delayed_bug(
                    span,
                    "compilation must fail when there is a secondary const checker error",
                );
            }
        }
    }

    /// Checks an access to the given static. TLS statics are rejected elsewhere; local statics
    /// additionally get a well-formedness check here.
    fn check_static(&mut self, def_id: DefId, span: Span) {
        if self.tcx.is_thread_local_static(def_id) {
            self.tcx.dcx().span_bug(span, "tls access is checked in `Rvalue::ThreadLocalRef`");
        }
        if let Some(def_id) = def_id.as_local()
            && let Err(guar) = self.tcx.ensure_ok().check_well_formed(hir::OwnerId { def_id })
        {
            self.error_emitted = Some(guar);
        }
    }

    /// Returns whether this place can possibly escape the evaluation of the current const/static
    /// initializer. The check assumes that all already existing pointers and references point to
    /// non-escaping places.
    fn place_may_escape(&mut self, place: &Place<'_>) -> bool {
        let is_transient = match self.const_kind() {
            // In a const fn all borrows are transient or point to the places given via
            // references in the arguments (so we already checked them with
            // TransientMutBorrow/MutBorrow as appropriate).
            // The borrow checker guarantees that no new non-transient borrows are created.
            // NOTE: Once we have heap allocations during CTFE we need to figure out
            // how to prevent `const fn` to create long-lived allocations that point
            // to mutable memory.
            hir::ConstContext::ConstFn => true,
            _ => {
                // For indirect places, we are not creating a new permanent borrow, it's just as
                // transient as the already existing one.
                // Locals with StorageDead do not live beyond the evaluation and can
                // thus safely be borrowed without being able to be leaked to the final
                // value of the constant.
                // Note: This is only sound if every local that has a `StorageDead` has a
                // `StorageDead` in every control flow path leading to a `return` terminator.
                // If anything slips through, there's no safety net -- safe code can create
                // references to variants of `!Freeze` enums as long as that variant is `Freeze`, so
                // interning can't protect us here. (There *is* a safety net for mutable references
                // though, interning will ICE if we miss something here.)
                place.is_indirect() || self.local_is_transient(place.local)
            }
        };
        // Transient places cannot possibly escape because the place doesn't exist any more at the
        // end of evaluation.
        !is_transient
    }

    /// Returns whether there are const-conditions, and if so, whether they were proven to hold
    /// at this call site (`None` means the callee has no const-conditions at all).
    fn revalidate_conditional_constness(
        &mut self,
        callee: DefId,
        callee_args: ty::GenericArgsRef<'tcx>,
        call_span: Span,
    ) -> Option<ConstConditionsHold> {
        let tcx = self.tcx;
        if !tcx.is_conditionally_const(callee) {
            return None;
        }

        let const_conditions = tcx.const_conditions(callee).instantiate(tcx, callee_args);
        if const_conditions.is_empty() {
            return None;
        }

        let (infcx, param_env) = tcx.infer_ctxt().build_with_typing_env(self.body.typing_env(tcx));
        let ocx = ObligationCtxt::new(&infcx);

        let body_id = self.body.source.def_id().expect_local();
        // The required constness of the conditions depends on the kind of const context we
        // are checking.
        let host_polarity = match self.const_kind() {
            hir::ConstContext::ConstFn => ty::BoundConstness::Maybe,
            hir::ConstContext::Static(_) | hir::ConstContext::Const { .. } => {
                ty::BoundConstness::Const
            }
        };
        let const_conditions =
            ocx.normalize(&ObligationCause::misc(call_span, body_id), param_env, const_conditions);
        ocx.register_obligations(const_conditions.into_iter().map(|(trait_ref, span)| {
            Obligation::new(
                tcx,
                ObligationCause::new(
                    call_span,
                    body_id,
                    ObligationCauseCode::WhereClause(callee, span),
                ),
                param_env,
                trait_ref.to_host_effect_clause(tcx, host_polarity),
            )
        }));

        let errors = ocx.select_all_or_error();
        if errors.is_empty() {
            Some(ConstConditionsHold::Yes)
        } else {
            // HIR-level checking is expected to have caught this already; only delay a bug.
            tcx.dcx()
                .span_delayed_bug(call_span, "this should have reported a [const] error in HIR");
            Some(ConstConditionsHold::No)
        }
    }

    /// Checks a drop of `dropped_place`: if the dropped value may need drop, reports an
    /// `ops::LiveDrop` (whose severity depends on whether the drop is known to be const).
    pub fn check_drop_terminator(
        &mut self,
        dropped_place: Place<'tcx>,
        location: Location,
        terminator_span: Span,
    ) {
        let ty_of_dropped_place = dropped_place.ty(self.body, self.tcx).ty;

        let needs_drop = if let Some(local) = dropped_place.as_local() {
            // For a whole local we can use the flow-sensitive analysis for better precision.
            self.qualifs.needs_drop(self.ccx, local, location)
        } else {
            qualifs::NeedsDrop::in_any_value_of_ty(self.ccx, ty_of_dropped_place)
        };
        // If this type doesn't need a drop at all, then there's nothing to enforce.
        if !needs_drop {
            return;
        }

        let mut err_span = self.span;
        let needs_non_const_drop = if let Some(local) = dropped_place.as_local() {
            // Use the span where the local was declared as the span of the drop error.
            err_span = self.body.local_decls[local].source_info.span;
            self.qualifs.needs_non_const_drop(self.ccx, local, location)
        } else {
            qualifs::NeedsNonConstDrop::in_any_value_of_ty(self.ccx, ty_of_dropped_place)
        };

        self.check_op_spanned(
            ops::LiveDrop {
                dropped_at: terminator_span,
                dropped_ty: ty_of_dropped_place,
                needs_non_const_drop,
            },
            err_span,
        );
    }

    /// Check the const stability of the given item (fn or trait).
    fn check_callee_stability(&mut self, def_id: DefId) {
        match self.tcx.lookup_const_stability(def_id) {
            Some(hir::ConstStability { level: hir::StabilityLevel::Stable { .. }, .. }) => {
                // All good.
            }
            None => {
                // This doesn't need a separate const-stability check -- const-stability equals
                // regular stability, and regular stability is checked separately.
                // However, we *do* have to worry about *recursive* const stability.
                if self.enforce_recursive_const_stability()
                    && !is_fn_or_trait_safe_to_expose_on_stable(self.tcx, def_id)
                {
                    self.dcx().emit_err(errors::UnmarkedConstItemExposed {
                        span: self.span,
                        def_path: self.tcx.def_path_str(def_id),
                    });
                }
            }
            Some(hir::ConstStability {
                level: hir::StabilityLevel::Unstable { implied_by: implied_feature, issue, .. },
                feature,
                ..
            }) => {
                // An unstable const fn/trait with a feature gate.
                let callee_safe_to_expose_on_stable =
                    is_fn_or_trait_safe_to_expose_on_stable(self.tcx, def_id);

                // We only honor `span.allows_unstable` aka `#[allow_internal_unstable]` if
                // the callee is safe to expose, to avoid bypassing recursive stability.
                // This is not ideal since it means the user sees an error, not the macro
                // author, but that's also the case if one forgets to set
                // `#[allow_internal_unstable]` in the first place. Note that this cannot be
                // integrated in the check below since we want to enforce
                // `callee_safe_to_expose_on_stable` even if
                // `!self.enforce_recursive_const_stability()`.
                if (self.span.allows_unstable(feature)
                    || implied_feature.is_some_and(|f| self.span.allows_unstable(f)))
                    && callee_safe_to_expose_on_stable
                {
                    return;
                }

                // We can't use `check_op` to check whether the feature is enabled because
                // the logic is a bit different than elsewhere: local functions don't need
                // the feature gate, and there might be an "implied" gate that also suffices
                // to allow this.
                let feature_enabled = def_id.is_local()
                    || self.tcx.features().enabled(feature)
                    || implied_feature.is_some_and(|f| self.tcx.features().enabled(f))
                    || {
                        // When we're compiling the compiler itself we may pull in
                        // crates from crates.io, but those crates may depend on other
                        // crates also pulled in from crates.io. We want to ideally be
                        // able to compile everything without requiring upstream
                        // modifications, so in the case that this looks like a
                        // `rustc_private` crate (e.g., a compiler crate) and we also have
                        // the `-Z force-unstable-if-unmarked` flag present (we're
                        // compiling a compiler crate), then let this missing feature
                        // annotation slide.
                        // This matches what we do in `eval_stability_allow_unstable` for
                        // regular stability.
                        feature == sym::rustc_private
                            && issue == NonZero::new(27812)
                            && self.tcx.sess.opts.unstable_opts.force_unstable_if_unmarked
                    };
                // Even if the feature is enabled, we still need check_op to double-check
                // this if the callee is not safe to expose on stable.
                if !feature_enabled || !callee_safe_to_expose_on_stable {
                    self.check_op(ops::CallUnstable {
                        def_id,
                        feature,
                        feature_enabled,
                        safe_to_expose_on_stable: callee_safe_to_expose_on_stable,
                        is_function_call: self.tcx.def_kind(def_id) != DefKind::Trait,
                    });
                }
            }
        }
    }
}
546
547impl<'tcx> Visitor<'tcx> for Checker<'_, 'tcx> {
548    fn visit_basic_block_data(&mut self, bb: BasicBlock, block: &BasicBlockData<'tcx>) {
549        trace!("visit_basic_block_data: bb={:?} is_cleanup={:?}", bb, block.is_cleanup);
550
551        // We don't const-check basic blocks on the cleanup path since we never unwind during
552        // const-eval: a panic causes an immediate compile error. In other words, cleanup blocks
553        // are unreachable during const-eval.
554        //
555        // We can't be more conservative (e.g., by const-checking cleanup blocks anyways) because
556        // locals that would never be dropped during normal execution are sometimes dropped during
557        // unwinding, which means backwards-incompatible live-drop errors.
558        if block.is_cleanup {
559            return;
560        }
561
562        self.super_basic_block_data(bb, block);
563    }
564
565    fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
566        trace!("visit_rvalue: rvalue={:?} location={:?}", rvalue, location);
567
568        self.super_rvalue(rvalue, location);
569
570        match rvalue {
571            Rvalue::ThreadLocalRef(_) => self.check_op(ops::ThreadLocalAccess),
572
573            Rvalue::Use(_)
574            | Rvalue::CopyForDeref(..)
575            | Rvalue::Repeat(..)
576            | Rvalue::Discriminant(..) => {}
577
578            Rvalue::Aggregate(kind, ..) => {
579                if let AggregateKind::Coroutine(def_id, ..) = kind.as_ref()
580                    && let Some(coroutine_kind) = self.tcx.coroutine_kind(def_id)
581                {
582                    self.check_op(ops::Coroutine(coroutine_kind));
583                }
584            }
585
586            Rvalue::Ref(_, BorrowKind::Mut { .. }, place)
587            | Rvalue::RawPtr(RawPtrKind::Mut, place) => {
588                // Inside mutable statics, we allow arbitrary mutable references.
589                // We've allowed `static mut FOO = &mut [elements];` for a long time (the exact
590                // reasons why are lost to history), and there is no reason to restrict that to
591                // arrays and slices.
592                let is_allowed =
593                    self.const_kind() == hir::ConstContext::Static(hir::Mutability::Mut);
594
595                if !is_allowed && self.place_may_escape(place) {
596                    self.check_op(ops::EscapingMutBorrow);
597                }
598            }
599
600            Rvalue::Ref(_, BorrowKind::Shared | BorrowKind::Fake(_), place)
601            | Rvalue::RawPtr(RawPtrKind::Const, place) => {
602                let borrowed_place_has_mut_interior = qualifs::in_place::<HasMutInterior, _>(
603                    self.ccx,
604                    &mut |local| self.qualifs.has_mut_interior(self.ccx, local, location),
605                    place.as_ref(),
606                );
607
608                if borrowed_place_has_mut_interior && self.place_may_escape(place) {
609                    self.check_op(ops::EscapingCellBorrow);
610                }
611            }
612
613            Rvalue::RawPtr(RawPtrKind::FakeForPtrMetadata, place) => {
614                // These are only inserted for slice length, so the place must already be indirect.
615                // This implies we do not have to worry about whether the borrow escapes.
616                if !place.is_indirect() {
617                    self.tcx.dcx().span_delayed_bug(
618                        self.body.source_info(location).span,
619                        "fake borrows are always indirect",
620                    );
621                }
622            }
623
624            Rvalue::Cast(
625                CastKind::PointerCoercion(
626                    PointerCoercion::MutToConstPointer
627                    | PointerCoercion::ArrayToPointer
628                    | PointerCoercion::UnsafeFnPointer
629                    | PointerCoercion::ClosureFnPointer(_)
630                    | PointerCoercion::ReifyFnPointer,
631                    _,
632                ),
633                _,
634                _,
635            ) => {
636                // These are all okay; they only change the type, not the data.
637            }
638
639            Rvalue::Cast(CastKind::PointerExposeProvenance, _, _) => {
640                self.check_op(ops::RawPtrToIntCast);
641            }
642            Rvalue::Cast(CastKind::PointerWithExposedProvenance, _, _) => {
643                // Since no pointer can ever get exposed (rejected above), this is easy to support.
644            }
645
646            Rvalue::Cast(_, _, _) => {}
647
648            Rvalue::NullaryOp(
649                NullOp::SizeOf
650                | NullOp::AlignOf
651                | NullOp::OffsetOf(_)
652                | NullOp::UbChecks
653                | NullOp::ContractChecks,
654                _,
655            ) => {}
656            Rvalue::ShallowInitBox(_, _) => {}
657
658            Rvalue::UnaryOp(op, operand) => {
659                let ty = operand.ty(self.body, self.tcx);
660                match op {
661                    UnOp::Not | UnOp::Neg => {
662                        if is_int_bool_float_or_char(ty) {
663                            // Int, bool, float, and char operations are fine.
664                        } else {
665                            span_bug!(
666                                self.span,
667                                "non-primitive type in `Rvalue::UnaryOp{op:?}`: {ty:?}",
668                            );
669                        }
670                    }
671                    UnOp::PtrMetadata => {
672                        // Getting the metadata from a pointer is always const.
673                        // We already validated the type is valid in the validator.
674                    }
675                }
676            }
677
678            Rvalue::BinaryOp(op, box (lhs, rhs)) => {
679                let lhs_ty = lhs.ty(self.body, self.tcx);
680                let rhs_ty = rhs.ty(self.body, self.tcx);
681
682                if is_int_bool_float_or_char(lhs_ty) && is_int_bool_float_or_char(rhs_ty) {
683                    // Int, bool, float, and char operations are fine.
684                } else if lhs_ty.is_fn_ptr() || lhs_ty.is_raw_ptr() {
685                    assert_matches!(
686                        op,
687                        BinOp::Eq
688                            | BinOp::Ne
689                            | BinOp::Le
690                            | BinOp::Lt
691                            | BinOp::Ge
692                            | BinOp::Gt
693                            | BinOp::Offset
694                    );
695
696                    self.check_op(ops::RawPtrComparison);
697                } else {
698                    span_bug!(
699                        self.span,
700                        "non-primitive type in `Rvalue::BinaryOp`: {:?} ⚬ {:?}",
701                        lhs_ty,
702                        rhs_ty
703                    );
704                }
705            }
706
707            Rvalue::WrapUnsafeBinder(..) => {
708                // Unsafe binders are always trivial to create.
709            }
710        }
711    }
712
713    fn visit_operand(&mut self, op: &Operand<'tcx>, location: Location) {
714        self.super_operand(op, location);
715        if let Operand::Constant(c) = op
716            && let Some(def_id) = c.check_static_ptr(self.tcx)
717        {
718            self.check_static(def_id, self.span);
719        }
720    }
721
722    fn visit_source_info(&mut self, source_info: &SourceInfo) {
723        trace!("visit_source_info: source_info={:?}", source_info);
724        self.span = source_info.span;
725    }
726
    fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
        trace!("visit_statement: statement={:?} location={:?}", statement, location);

        self.super_statement(statement, location);

        // Every statement kind is permitted in a const context; the actual
        // const checks happen on the rvalues/operands these statements contain,
        // which `super_statement` above already visited. The exhaustive match
        // (no `_` arm) is deliberate: adding a new `StatementKind` variant will
        // fail to compile here, forcing an explicit decision about whether the
        // new kind needs const checking.
        match statement.kind {
            StatementKind::Assign(..)
            | StatementKind::SetDiscriminant { .. }
            | StatementKind::Deinit(..)
            | StatementKind::FakeRead(..)
            | StatementKind::StorageLive(_)
            | StatementKind::StorageDead(_)
            | StatementKind::Retag { .. }
            | StatementKind::PlaceMention(..)
            | StatementKind::AscribeUserType(..)
            | StatementKind::Coverage(..)
            | StatementKind::Intrinsic(..)
            | StatementKind::ConstEvalCounter
            | StatementKind::BackwardIncompatibleDropHint { .. }
            | StatementKind::Nop => {}
        }
    }
749
    /// Const-checks a terminator. Function calls are the bulk of the work:
    /// const-fn-ness, trait-call handling, intrinsic and stability checks.
    /// Drops, inline asm, and yields are also restricted here.
    #[instrument(level = "debug", skip(self))]
    fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
        self.super_terminator(terminator, location);

        match &terminator.kind {
            TerminatorKind::Call { func, args, fn_span, .. }
            | TerminatorKind::TailCall { func, args, fn_span, .. } => {
                // Tail calls carry no call-sugar information of their own, so
                // they are diagnosed like a normal call.
                let call_source = match terminator.kind {
                    TerminatorKind::Call { call_source, .. } => call_source,
                    TerminatorKind::TailCall { .. } => CallSource::Normal,
                    _ => unreachable!(),
                };

                let ConstCx { tcx, body, .. } = *self.ccx;

                let fn_ty = func.ty(body, tcx);

                // Indirect calls (through a function pointer) are rejected outright;
                // everything else must be a `FnDef` whose `DefId` we inspect below.
                let (callee, fn_args) = match *fn_ty.kind() {
                    ty::FnDef(def_id, fn_args) => (def_id, fn_args),

                    ty::FnPtr(..) => {
                        self.check_op(ops::FnCallIndirect);
                        // We can get here without an error in miri-unleashed mode... might as well
                        // skip the rest of the checks as well then.
                        return;
                    }
                    _ => {
                        span_bug!(terminator.source_info.span, "invalid callee of type {:?}", fn_ty)
                    }
                };

                // `Some(..)` iff the callee has const conditions; the payload records
                // whether they could be shown to hold at this call site.
                let has_const_conditions =
                    self.revalidate_conditional_constness(callee, fn_args, *fn_span);

                // Attempting to call a trait method?
                if let Some(trait_did) = tcx.trait_of_assoc(callee) {
                    // We can't determine the actual callee here, so we have to do different checks
                    // than usual.

                    trace!("attempting to call a trait method");
                    let trait_is_const = tcx.is_const_trait(trait_did);

                    // Only consider a trait to be const if the const conditions hold.
                    // Otherwise, it's really misleading to call something "conditionally"
                    // const when it's very obviously not conditionally const.
                    if trait_is_const && has_const_conditions == Some(ConstConditionsHold::Yes) {
                        // Trait calls are always conditionally-const.
                        self.check_op(ops::ConditionallyConstCall {
                            callee,
                            args: fn_args,
                            span: *fn_span,
                            call_source,
                        });
                        self.check_callee_stability(trait_did);
                    } else {
                        // Not even a const trait.
                        self.check_op(ops::FnCallNonConst {
                            callee,
                            args: fn_args,
                            span: *fn_span,
                            call_source,
                        });
                    }
                    // That's all we can check here.
                    return;
                }

                // Even if we know the callee, ensure we can use conditionally-const calls.
                if has_const_conditions.is_some() {
                    self.check_op(ops::ConditionallyConstCall {
                        callee,
                        args: fn_args,
                        span: *fn_span,
                        call_source,
                    });
                }

                // At this point, we are calling a function, `callee`, whose `DefId` is known...

                // `begin_panic` and `panic_display` functions accept generic
                // types other than str. Check to enforce that only str can be used in
                // const-eval.

                // const-eval of the `begin_panic` fn assumes the argument is `&str`
                if tcx.is_lang_item(callee, LangItem::BeginPanic) {
                    match args[0].node.ty(&self.ccx.body.local_decls, tcx).kind() {
                        ty::Ref(_, ty, _) if ty.is_str() => {}
                        _ => self.check_op(ops::PanicNonStr),
                    }
                    // Allow this call, skip all the checks below.
                    return;
                }

                // const-eval of `panic_display` assumes the argument is `&&str`
                if tcx.is_lang_item(callee, LangItem::PanicDisplay) {
                    match args[0].node.ty(&self.ccx.body.local_decls, tcx).kind() {
                        ty::Ref(_, ty, _) if matches!(ty.kind(), ty::Ref(_, ty, _) if ty.is_str()) =>
                            {}
                        _ => {
                            self.check_op(ops::PanicNonStr);
                        }
                    }
                    // Allow this call, skip all the checks below.
                    return;
                }

                // This can be called on stable via the `vec!` macro.
                if tcx.is_lang_item(callee, LangItem::ExchangeMalloc) {
                    self.check_op(ops::HeapAllocation);
                    // Allow this call, skip all the checks below.
                    return;
                }

                // Intrinsics are language primitives, not regular calls, so treat them separately.
                if let Some(intrinsic) = tcx.intrinsic(callee) {
                    if !tcx.is_const_fn(callee) {
                        // Non-const intrinsic.
                        self.check_op(ops::IntrinsicNonConst { name: intrinsic.name });
                        // If we allowed this, we're in miri-unleashed mode, so we might
                        // as well skip the remaining checks.
                        return;
                    }
                    // We use `intrinsic.const_stable` to determine if this can be safely exposed to
                    // stable code, rather than `const_stable_indirect`. This is to make
                    // `#[rustc_const_stable_indirect]` an attribute that is always safe to add.
                    // We also check `is_fn_or_trait_safe_to_expose_on_stable`; this determines
                    // whether the intrinsic fallback body is safe to expose on stable.
                    let is_const_stable = intrinsic.const_stable
                        || (!intrinsic.must_be_overridden
                            && is_fn_or_trait_safe_to_expose_on_stable(tcx, callee));
                    match tcx.lookup_const_stability(callee) {
                        None => {
                            // This doesn't need a separate const-stability check -- const-stability equals
                            // regular stability, and regular stability is checked separately.
                            // However, we *do* have to worry about *recursive* const stability.
                            if !is_const_stable && self.enforce_recursive_const_stability() {
                                self.dcx().emit_err(errors::UnmarkedIntrinsicExposed {
                                    span: self.span,
                                    def_path: self.tcx.def_path_str(callee),
                                });
                            }
                        }
                        Some(hir::ConstStability {
                            level: hir::StabilityLevel::Unstable { .. },
                            feature,
                            ..
                        }) => {
                            self.check_op(ops::IntrinsicUnstable {
                                name: intrinsic.name,
                                feature,
                                const_stable_indirect: is_const_stable,
                            });
                        }
                        Some(hir::ConstStability {
                            level: hir::StabilityLevel::Stable { .. },
                            ..
                        }) => {
                            // All good. Note that a `#[rustc_const_stable]` intrinsic (meaning it
                            // can be *directly* invoked from stable const code) does not always
                            // have the `#[rustc_intrinsic_const_stable_indirect]` attribute (which controls
                            // exposing an intrinsic indirectly); we accept this call anyway.
                        }
                    }
                    // This completes the checks for intrinsics.
                    return;
                }

                if !tcx.is_const_fn(callee) {
                    self.check_op(ops::FnCallNonConst {
                        callee,
                        args: fn_args,
                        span: *fn_span,
                        call_source,
                    });
                    // If we allowed this, we're in miri-unleashed mode, so we might
                    // as well skip the remaining checks.
                    return;
                }

                // Finally, stability for regular function calls -- this is the big one.
                self.check_callee_stability(callee);
            }

            // Forbid all `Drop` terminators unless the place being dropped is a local with no
            // projections that cannot be `NeedsNonConstDrop`.
            TerminatorKind::Drop { place: dropped_place, .. } => {
                // If we are checking live drops after drop-elaboration, don't emit duplicate
                // errors here.
                if super::post_drop_elaboration::checking_enabled(self.ccx) {
                    return;
                }

                self.check_drop_terminator(*dropped_place, location, terminator.source_info.span);
            }

            TerminatorKind::InlineAsm { .. } => self.check_op(ops::InlineAsm),

            TerminatorKind::Yield { .. } => {
                self.check_op(ops::Coroutine(
                    self.tcx
                        .coroutine_kind(self.body.source.def_id())
                        .expect("Only expected to have a yield in a coroutine"),
                ));
            }

            TerminatorKind::CoroutineDrop => {
                span_bug!(
                    self.body.source_info(location).span,
                    "We should not encounter TerminatorKind::CoroutineDrop after coroutine transform"
                );
            }

            TerminatorKind::UnwindTerminate(_) => {
                // Cleanup blocks are skipped for const checking (see `visit_basic_block_data`).
                span_bug!(self.span, "`Terminate` terminator outside of cleanup block")
            }

            // These terminators carry no const-specific restrictions of their own;
            // any operands they contain were already checked by `super_terminator`.
            TerminatorKind::Assert { .. }
            | TerminatorKind::FalseEdge { .. }
            | TerminatorKind::FalseUnwind { .. }
            | TerminatorKind::Goto { .. }
            | TerminatorKind::UnwindResume
            | TerminatorKind::Return
            | TerminatorKind::SwitchInt { .. }
            | TerminatorKind::Unreachable => {}
        }
    }
977}
978
979fn is_int_bool_float_or_char(ty: Ty<'_>) -> bool {
980    ty.is_bool() || ty.is_integral() || ty.is_char() || ty.is_floating_point()
981}
982
983fn emit_unstable_in_stable_exposed_error(
984    ccx: &ConstCx<'_, '_>,
985    span: Span,
986    gate: Symbol,
987    is_function_call: bool,
988) -> ErrorGuaranteed {
989    let attr_span = ccx.tcx.def_span(ccx.def_id()).shrink_to_lo();
990
991    ccx.dcx().emit_err(errors::UnstableInStableExposed {
992        gate: gate.to_string(),
993        span,
994        attr_span,
995        is_function_call,
996        is_function_call2: is_function_call,
997    })
998}