rustc_const_eval/check_consts/check.rs

//! The `Visitor` responsible for actually checking a `mir::Body` for invalid operations.

use std::assert_matches::assert_matches;
use std::borrow::Cow;
use std::mem;
use std::num::NonZero;
use std::ops::Deref;

use rustc_attr_parsing::{ConstStability, StabilityLevel};
use rustc_errors::{Diag, ErrorGuaranteed};
use rustc_hir::def::DefKind;
use rustc_hir::def_id::DefId;
use rustc_hir::{self as hir, LangItem};
use rustc_index::bit_set::DenseBitSet;
use rustc_infer::infer::TyCtxtInferExt;
use rustc_middle::mir::visit::Visitor;
use rustc_middle::mir::*;
use rustc_middle::span_bug;
use rustc_middle::ty::adjustment::PointerCoercion;
use rustc_middle::ty::{self, Ty, TypeVisitableExt};
use rustc_mir_dataflow::Analysis;
use rustc_mir_dataflow::impls::{MaybeStorageLive, always_storage_live_locals};
use rustc_span::{Span, Symbol, sym};
use rustc_trait_selection::traits::{
    Obligation, ObligationCause, ObligationCauseCode, ObligationCtxt,
};
use tracing::{instrument, trace};

use super::ops::{self, NonConstOp, Status};
use super::qualifs::{self, HasMutInterior, NeedsDrop, NeedsNonConstDrop};
use super::resolver::FlowSensitiveAnalysis;
use super::{ConstCx, Qualif};
use crate::check_consts::is_fn_or_trait_safe_to_expose_on_stable;
use crate::errors;

type QualifResults<'mir, 'tcx, Q> =
    rustc_mir_dataflow::ResultsCursor<'mir, 'tcx, FlowSensitiveAnalysis<'mir, 'mir, 'tcx, Q>>;

#[derive(Copy, Clone, PartialEq, Eq, Debug)]
enum ConstConditionsHold {
    Yes,
    No,
}

#[derive(Default)]
pub(crate) struct Qualifs<'mir, 'tcx> {
    has_mut_interior: Option<QualifResults<'mir, 'tcx, HasMutInterior>>,
    needs_drop: Option<QualifResults<'mir, 'tcx, NeedsDrop>>,
    needs_non_const_drop: Option<QualifResults<'mir, 'tcx, NeedsNonConstDrop>>,
}

impl<'mir, 'tcx> Qualifs<'mir, 'tcx> {
    /// Returns `true` if `local` is `NeedsDrop` at the given `Location`.
    ///
    /// Only updates the cursor if absolutely necessary.
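    ///
    /// An illustrative sketch (not a doctest) of why the result is flow-sensitive:
    ///
    /// ```ignore (illustrative)
    /// const fn f() {
    ///     let s = String::new(); // `s` is `NeedsDrop` while it owns the string...
    ///     let t = s;             // ...but no longer after being moved out of.
    /// }
    /// ```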
    pub(crate) fn needs_drop(
        &mut self,
        ccx: &'mir ConstCx<'mir, 'tcx>,
        local: Local,
        location: Location,
    ) -> bool {
        let ty = ccx.body.local_decls[local].ty;
        // Peeking into opaque types causes cycles if the current function declares said opaque
        // type. Thus we avoid short circuiting on the type and instead run the more expensive
        // analysis that looks at the actual usage within this function.
        if !ty.has_opaque_types() && !NeedsDrop::in_any_value_of_ty(ccx, ty) {
            return false;
        }

        let needs_drop = self.needs_drop.get_or_insert_with(|| {
            let ConstCx { tcx, body, .. } = *ccx;

            FlowSensitiveAnalysis::new(NeedsDrop, ccx)
                .iterate_to_fixpoint(tcx, body, None)
                .into_results_cursor(body)
        });

        needs_drop.seek_before_primary_effect(location);
        needs_drop.get().contains(local)
    }

    /// Returns `true` if `local` is `NeedsNonConstDrop` at the given `Location`.
    ///
    /// Only updates the cursor if absolutely necessary.
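    ///
    /// This can differ from [`Self::needs_drop`] when a destructor is able to run in
    /// const context. A hedged sketch, using unstable `const_trait_impl`-style syntax:
    ///
    /// ```ignore (illustrative)
    /// struct Logged;
    /// impl const Drop for Logged { fn drop(&mut self) {} }
    /// // A live local of type `Logged` is `NeedsDrop`, but not `NeedsNonConstDrop`.
    /// ```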
    pub(crate) fn needs_non_const_drop(
        &mut self,
        ccx: &'mir ConstCx<'mir, 'tcx>,
        local: Local,
        location: Location,
    ) -> bool {
        let ty = ccx.body.local_decls[local].ty;
        // Peeking into opaque types causes cycles if the current function declares said opaque
        // type. Thus we avoid short circuiting on the type and instead run the more expensive
        // analysis that looks at the actual usage within this function.
        if !ty.has_opaque_types() && !NeedsNonConstDrop::in_any_value_of_ty(ccx, ty) {
            return false;
        }

        let needs_non_const_drop = self.needs_non_const_drop.get_or_insert_with(|| {
            let ConstCx { tcx, body, .. } = *ccx;

            FlowSensitiveAnalysis::new(NeedsNonConstDrop, ccx)
                .iterate_to_fixpoint(tcx, body, None)
                .into_results_cursor(body)
        });

        needs_non_const_drop.seek_before_primary_effect(location);
        needs_non_const_drop.get().contains(local)
    }

    /// Returns `true` if `local` is `HasMutInterior` at the given `Location`.
    ///
    /// Only updates the cursor if absolutely necessary.
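    ///
    /// For instance (illustrative), a local of type `Cell<u32>` is `HasMutInterior`
    /// since `Cell` is not `Freeze`, while locals of type `u32` or `&Cell<u32>` are not.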
    fn has_mut_interior(
        &mut self,
        ccx: &'mir ConstCx<'mir, 'tcx>,
        local: Local,
        location: Location,
    ) -> bool {
        let ty = ccx.body.local_decls[local].ty;
        // Peeking into opaque types causes cycles if the current function declares said opaque
        // type. Thus we avoid short circuiting on the type and instead run the more expensive
        // analysis that looks at the actual usage within this function.
        if !ty.has_opaque_types() && !HasMutInterior::in_any_value_of_ty(ccx, ty) {
            return false;
        }

        let has_mut_interior = self.has_mut_interior.get_or_insert_with(|| {
            let ConstCx { tcx, body, .. } = *ccx;

            FlowSensitiveAnalysis::new(HasMutInterior, ccx)
                .iterate_to_fixpoint(tcx, body, None)
                .into_results_cursor(body)
        });

        has_mut_interior.seek_before_primary_effect(location);
        has_mut_interior.get().contains(local)
    }

    fn in_return_place(
        &mut self,
        ccx: &'mir ConstCx<'mir, 'tcx>,
        tainted_by_errors: Option<ErrorGuaranteed>,
    ) -> ConstQualifs {
        // FIXME(explicit_tail_calls): with tail calls, a body can now complete without a
        // `Return` terminator; check whether that changes anything here.

        // Find the `Return` terminator if one exists.
        //
        // If no `Return` terminator exists, this MIR is divergent. Just return the conservative
        // qualifs for the return type.
        let return_block = ccx
            .body
            .basic_blocks
            .iter_enumerated()
            .find(|(_, block)| matches!(block.terminator().kind, TerminatorKind::Return))
            .map(|(bb, _)| bb);

        let Some(return_block) = return_block else {
            return qualifs::in_any_value_of_ty(ccx, ccx.body.return_ty(), tainted_by_errors);
        };

        let return_loc = ccx.body.terminator_loc(return_block);

        ConstQualifs {
            needs_drop: self.needs_drop(ccx, RETURN_PLACE, return_loc),
            needs_non_const_drop: self.needs_non_const_drop(ccx, RETURN_PLACE, return_loc),
            has_mut_interior: self.has_mut_interior(ccx, RETURN_PLACE, return_loc),
            tainted_by_errors,
        }
    }
}

pub struct Checker<'mir, 'tcx> {
    ccx: &'mir ConstCx<'mir, 'tcx>,
    qualifs: Qualifs<'mir, 'tcx>,

    /// The span of the current statement.
    span: Span,

    /// A set that stores for each local whether it is "transient", i.e. guaranteed to be dead
    /// when this MIR body returns.
    transient_locals: Option<DenseBitSet<Local>>,

    error_emitted: Option<ErrorGuaranteed>,
    secondary_errors: Vec<Diag<'tcx>>,
}

impl<'mir, 'tcx> Deref for Checker<'mir, 'tcx> {
    type Target = ConstCx<'mir, 'tcx>;

    fn deref(&self) -> &Self::Target {
        self.ccx
    }
}

impl<'mir, 'tcx> Checker<'mir, 'tcx> {
    pub fn new(ccx: &'mir ConstCx<'mir, 'tcx>) -> Self {
        Checker {
            span: ccx.body.span,
            ccx,
            qualifs: Default::default(),
            transient_locals: None,
            error_emitted: None,
            secondary_errors: Vec::new(),
        }
    }

    pub fn check_body(&mut self) {
        let ConstCx { tcx, body, .. } = *self.ccx;
        let def_id = self.ccx.def_id();

        // `async` functions cannot be `const fn`. This is checked during AST lowering, so there's
        // no need to emit duplicate errors here.
        if self.ccx.is_async() || body.coroutine.is_some() {
            tcx.dcx().span_delayed_bug(body.span, "`async` functions cannot be `const fn`");
            return;
        }

        if !tcx.has_attr(def_id, sym::rustc_do_not_const_check) {
            self.visit_body(body);
        }

        // If we got through const-checking without emitting any "primary" errors, emit any
        // "secondary" errors if they occurred. Otherwise, cancel the "secondary" errors.
        let secondary_errors = mem::take(&mut self.secondary_errors);
        if self.error_emitted.is_none() {
            for error in secondary_errors {
                self.error_emitted = Some(error.emit());
            }
        } else {
            assert!(self.tcx.dcx().has_errors().is_some());
            for error in secondary_errors {
                error.cancel();
            }
        }
    }

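    /// Returns whether `local` is "transient", i.e. guaranteed to be storage-dead at
    /// every `Return` terminator. Computed lazily: the first call runs a
    /// storage-liveness dataflow analysis and caches a per-local bitset.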
    fn local_is_transient(&mut self, local: Local) -> bool {
        let ccx = self.ccx;
        self.transient_locals
            .get_or_insert_with(|| {
                // A local is "transient" if it is guaranteed dead at every `Return` terminator.
                // So first compute the set of "maybe live" locals at each program point.
                let always_live_locals = &always_storage_live_locals(&ccx.body);
                let mut maybe_storage_live =
                    MaybeStorageLive::new(Cow::Borrowed(always_live_locals))
                        .iterate_to_fixpoint(ccx.tcx, &ccx.body, None)
                        .into_results_cursor(&ccx.body);

                // Then check every `Return` terminator in the MIR: if a local is "maybe live" at
                // a `Return`, it is definitely not transient.
                let mut transient = DenseBitSet::new_filled(ccx.body.local_decls.len());
                // Make sure to only visit reachable blocks, the dataflow engine can ICE otherwise.
                for (bb, data) in traversal::reachable(&ccx.body) {
                    if matches!(data.terminator().kind, TerminatorKind::Return) {
                        let location = ccx.body.terminator_loc(bb);
                        maybe_storage_live.seek_after_primary_effect(location);
                        // If a local may be live here, it is definitely not transient.
                        transient.subtract(maybe_storage_live.get());
                    }
                }

                transient
            })
            .contains(local)
    }

    pub fn qualifs_in_return_place(&mut self) -> ConstQualifs {
        self.qualifs.in_return_place(self.ccx, self.error_emitted)
    }

    /// Emits an error if an expression cannot be evaluated in the current context.
    pub fn check_op(&mut self, op: impl NonConstOp<'tcx>) {
        self.check_op_spanned(op, self.span);
    }

    /// Emits an error at the given `span` if an expression cannot be evaluated in the current
    /// context.
    pub fn check_op_spanned<O: NonConstOp<'tcx>>(&mut self, op: O, span: Span) {
        let gate = match op.status_in_item(self.ccx) {
            Status::Unstable {
                gate,
                safe_to_expose_on_stable,
                is_function_call,
                gate_already_checked,
            } if gate_already_checked || self.tcx.features().enabled(gate) => {
                if gate_already_checked {
                    assert!(
                        !safe_to_expose_on_stable,
                        "setting `gate_already_checked` with `safe_to_expose_on_stable` makes no sense"
                    );
                }
                // Generally this is allowed since the feature gate is enabled -- except
                // if this function wants to be safe-to-expose-on-stable.
                if !safe_to_expose_on_stable
                    && self.enforce_recursive_const_stability()
                    && !super::rustc_allow_const_fn_unstable(self.tcx, self.def_id(), gate)
                {
                    emit_unstable_in_stable_exposed_error(self.ccx, span, gate, is_function_call);
                }

                return;
            }

            Status::Unstable { gate, .. } => Some(gate),
            Status::Forbidden => None,
        };

        if self.tcx.sess.opts.unstable_opts.unleash_the_miri_inside_of_you {
            self.tcx.sess.miri_unleashed_feature(span, gate);
            return;
        }

        let err = op.build_error(self.ccx, span);
        assert!(err.is_error());

        match op.importance() {
            ops::DiagImportance::Primary => {
                let reported = err.emit();
                self.error_emitted = Some(reported);
            }

            ops::DiagImportance::Secondary => {
                self.secondary_errors.push(err);
                self.tcx.dcx().span_delayed_bug(
                    span,
                    "compilation must fail when there is a secondary const checker error",
                );
            }
        }
    }

    fn check_static(&mut self, def_id: DefId, span: Span) {
        if self.tcx.is_thread_local_static(def_id) {
            self.tcx.dcx().span_bug(span, "tls access is checked in `Rvalue::ThreadLocalRef`");
        }
        if let Some(def_id) = def_id.as_local()
            && let Err(guar) = self.tcx.at(span).check_well_formed(hir::OwnerId { def_id })
        {
            self.error_emitted = Some(guar);
        }
    }

    /// Returns whether this place can possibly escape the evaluation of the current const/static
    /// initializer. The check assumes that all already existing pointers and references point to
    /// non-escaping places.
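    ///
    /// For example (illustrative): a borrow created while evaluating a `static`
    /// initializer may flow into the final value and thus escape, whereas a borrow of
    /// a local that is storage-dead before every `Return` cannot.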
    fn place_may_escape(&mut self, place: &Place<'_>) -> bool {
        let is_transient = match self.const_kind() {
            // In a const fn all borrows are transient or point to the places given via
            // references in the arguments (so we already checked them with
            // TransientMutBorrow/MutBorrow as appropriate).
            // The borrow checker guarantees that no new non-transient borrows are created.
            // NOTE: Once we have heap allocations during CTFE we need to figure out
            // how to prevent `const fn` from creating long-lived allocations that point
            // to mutable memory.
            hir::ConstContext::ConstFn => true,
            _ => {
                // For indirect places, we are not creating a new permanent borrow, it's just as
                // transient as the already existing one. For reborrowing references this is handled
                // at the top of `visit_rvalue`, but for raw pointers we handle it here.
                // Pointers/references to `static mut` and cases where the `*` is not the first
                // projection also end up here.
                // Locals with StorageDead do not live beyond the evaluation and can
                // thus safely be borrowed without being able to be leaked to the final
                // value of the constant.
                // Note: This is only sound if every local that has a `StorageDead` has a
                // `StorageDead` in every control flow path leading to a `return` terminator.
                // If anything slips through, there's no safety net -- safe code can create
                // references to variants of `!Freeze` enums as long as that variant is `Freeze`, so
                // interning can't protect us here. (There *is* a safety net for mutable references
                // though, interning will ICE if we miss something here.)
                place.is_indirect() || self.local_is_transient(place.local)
            }
        };
        // Transient places cannot possibly escape because the place doesn't exist any more at the
        // end of evaluation.
        !is_transient
    }

    /// Returns `None` if the callee has no const conditions; otherwise revalidates them
    /// and reports whether they hold at this call site.
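    ///
    /// A hedged sketch of the kind of call this revalidates (the `~const` syntax is
    /// unstable and subject to change):
    ///
    /// ```ignore (illustrative)
    /// const fn twice<T: ~const std::ops::Add<Output = T> + Copy>(x: T) -> T {
    ///     x + x // `Add::add` is only callable here if `T: ~const Add` holds
    /// }
    /// ```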
    fn revalidate_conditional_constness(
        &mut self,
        callee: DefId,
        callee_args: ty::GenericArgsRef<'tcx>,
        call_span: Span,
    ) -> Option<ConstConditionsHold> {
        let tcx = self.tcx;
        if !tcx.is_conditionally_const(callee) {
            return None;
        }

        let const_conditions = tcx.const_conditions(callee).instantiate(tcx, callee_args);
        if const_conditions.is_empty() {
            return None;
        }

        let (infcx, param_env) = tcx.infer_ctxt().build_with_typing_env(self.body.typing_env(tcx));
        let ocx = ObligationCtxt::new_with_diagnostics(&infcx);

        let body_id = self.body.source.def_id().expect_local();
        let host_polarity = match self.const_kind() {
            hir::ConstContext::ConstFn => ty::BoundConstness::Maybe,
            hir::ConstContext::Static(_) | hir::ConstContext::Const { .. } => {
                ty::BoundConstness::Const
            }
        };
        let const_conditions =
            ocx.normalize(&ObligationCause::misc(call_span, body_id), param_env, const_conditions);
        ocx.register_obligations(const_conditions.into_iter().map(|(trait_ref, span)| {
            Obligation::new(
                tcx,
                ObligationCause::new(
                    call_span,
                    body_id,
                    ObligationCauseCode::WhereClause(callee, span),
                ),
                param_env,
                trait_ref.to_host_effect_clause(tcx, host_polarity),
            )
        }));

        let errors = ocx.select_all_or_error();
        if errors.is_empty() {
            Some(ConstConditionsHold::Yes)
        } else {
            tcx.dcx()
                .span_delayed_bug(call_span, "this should have reported a ~const error in HIR");
            Some(ConstConditionsHold::No)
        }
    }

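    /// Checks a `Drop` terminator. A hedged sketch of the kind of code this rejects
    /// (illustrative, exact diagnostics aside):
    ///
    /// ```ignore (illustrative)
    /// const fn f(v: Vec<u8>) {
    ///     // error: dropping `v` at the end of the body requires `Vec`'s
    ///     // non-const destructor
    /// }
    /// ```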
    pub fn check_drop_terminator(
        &mut self,
        dropped_place: Place<'tcx>,
        location: Location,
        terminator_span: Span,
    ) {
        let ty_of_dropped_place = dropped_place.ty(self.body, self.tcx).ty;

        let needs_drop = if let Some(local) = dropped_place.as_local() {
            self.qualifs.needs_drop(self.ccx, local, location)
        } else {
            qualifs::NeedsDrop::in_any_value_of_ty(self.ccx, ty_of_dropped_place)
        };
        // If this type doesn't need a drop at all, then there's nothing to enforce.
        if !needs_drop {
            return;
        }

        let mut err_span = self.span;
        let needs_non_const_drop = if let Some(local) = dropped_place.as_local() {
            // Use the span where the local was declared as the span of the drop error.
            err_span = self.body.local_decls[local].source_info.span;
            self.qualifs.needs_non_const_drop(self.ccx, local, location)
        } else {
            qualifs::NeedsNonConstDrop::in_any_value_of_ty(self.ccx, ty_of_dropped_place)
        };

        self.check_op_spanned(
            ops::LiveDrop {
                dropped_at: terminator_span,
                dropped_ty: ty_of_dropped_place,
                needs_non_const_drop,
            },
            err_span,
        );
    }

    fn crate_inject_span(&self) -> Option<Span> {
        self.tcx.hir_crate_items(()).definitions().next().and_then(|id| {
            self.tcx.crate_level_attribute_injection_span(self.tcx.local_def_id_to_hir_id(id))
        })
    }

    /// Check the const stability of the given item (fn or trait).
    fn check_callee_stability(&mut self, def_id: DefId) {
        match self.tcx.lookup_const_stability(def_id) {
            Some(ConstStability { level: StabilityLevel::Stable { .. }, .. }) => {
                // All good.
            }
            None => {
                // This doesn't need a separate const-stability check -- const-stability equals
                // regular stability, and regular stability is checked separately.
                // However, we *do* have to worry about *recursive* const stability.
                if self.enforce_recursive_const_stability()
                    && !is_fn_or_trait_safe_to_expose_on_stable(self.tcx, def_id)
                {
                    self.dcx().emit_err(errors::UnmarkedConstItemExposed {
                        span: self.span,
                        def_path: self.tcx.def_path_str(def_id),
                    });
                }
            }
            Some(ConstStability {
                level: StabilityLevel::Unstable { implied_by: implied_feature, issue, .. },
                feature,
                ..
            }) => {
                // An unstable const fn/trait with a feature gate.
                let callee_safe_to_expose_on_stable =
                    is_fn_or_trait_safe_to_expose_on_stable(self.tcx, def_id);

                // We only honor `span.allows_unstable` aka `#[allow_internal_unstable]` if
                // the callee is safe to expose, to avoid bypassing recursive stability.
                // This is not ideal since it means the user sees an error, not the macro
                // author, but that's also the case if one forgets to set
                // `#[allow_internal_unstable]` in the first place. Note that this cannot be
                // integrated in the check below since we want to enforce
                // `callee_safe_to_expose_on_stable` even if
                // `!self.enforce_recursive_const_stability()`.
                if (self.span.allows_unstable(feature)
                    || implied_feature.is_some_and(|f| self.span.allows_unstable(f)))
                    && callee_safe_to_expose_on_stable
                {
                    return;
                }

                // We can't use `check_op` to check whether the feature is enabled because
                // the logic is a bit different than elsewhere: local functions don't need
                // the feature gate, and there might be an "implied" gate that also suffices
                // to allow this.
                let feature_enabled = def_id.is_local()
                    || self.tcx.features().enabled(feature)
                    || implied_feature.is_some_and(|f| self.tcx.features().enabled(f))
                    || {
                        // When we're compiling the compiler itself we may pull in
                        // crates from crates.io, but those crates may depend on other
                        // crates also pulled in from crates.io. We want to ideally be
                        // able to compile everything without requiring upstream
                        // modifications, so in the case that this looks like a
                        // `rustc_private` crate (e.g., a compiler crate) and we also have
                        // the `-Z force-unstable-if-unmarked` flag present (we're
                        // compiling a compiler crate), then let this missing feature
                        // annotation slide.
                        // This matches what we do in `eval_stability_allow_unstable` for
                        // regular stability.
                        feature == sym::rustc_private
                            && issue == NonZero::new(27812)
                            && self.tcx.sess.opts.unstable_opts.force_unstable_if_unmarked
                    };
                // Even if the feature is enabled, we still need `check_op` to double-check
                // this if the callee is not safe to expose on stable.
                if !feature_enabled || !callee_safe_to_expose_on_stable {
                    self.check_op(ops::CallUnstable {
                        def_id,
                        feature,
                        feature_enabled,
                        safe_to_expose_on_stable: callee_safe_to_expose_on_stable,
                        suggestion_span: self.crate_inject_span(),
                        is_function_call: self.tcx.def_kind(def_id) != DefKind::Trait,
                    });
                }
            }
        }
    }
}

impl<'tcx> Visitor<'tcx> for Checker<'_, 'tcx> {
    fn visit_basic_block_data(&mut self, bb: BasicBlock, block: &BasicBlockData<'tcx>) {
        trace!("visit_basic_block_data: bb={:?} is_cleanup={:?}", bb, block.is_cleanup);

        // We don't const-check basic blocks on the cleanup path since we never unwind during
        // const-eval: a panic causes an immediate compile error. In other words, cleanup blocks
        // are unreachable during const-eval.
        //
        // We can't be more conservative (e.g., by const-checking cleanup blocks anyways) because
        // locals that would never be dropped during normal execution are sometimes dropped during
        // unwinding, which means backwards-incompatible live-drop errors.
        if block.is_cleanup {
            return;
        }

        self.super_basic_block_data(bb, block);
    }

    fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
        trace!("visit_rvalue: rvalue={:?} location={:?}", rvalue, location);

        self.super_rvalue(rvalue, location);

        match rvalue {
            Rvalue::ThreadLocalRef(_) => self.check_op(ops::ThreadLocalAccess),

            Rvalue::Use(_)
            | Rvalue::CopyForDeref(..)
            | Rvalue::Repeat(..)
            | Rvalue::Discriminant(..)
            | Rvalue::Len(_) => {}

            Rvalue::Aggregate(kind, ..) => {
                if let AggregateKind::Coroutine(def_id, ..) = kind.as_ref()
                    && let Some(
                        coroutine_kind @ hir::CoroutineKind::Desugared(
                            hir::CoroutineDesugaring::Async,
                            _,
                        ),
                    ) = self.tcx.coroutine_kind(def_id)
                {
                    self.check_op(ops::Coroutine(coroutine_kind));
                }
            }

            Rvalue::Ref(_, BorrowKind::Mut { .. }, place)
            | Rvalue::RawPtr(RawPtrKind::Mut, place) => {
                // Inside mutable statics, we allow arbitrary mutable references.
                // We've allowed `static mut FOO = &mut [elements];` for a long time (the exact
                // reasons why are lost to history), and there is no reason to restrict that to
                // arrays and slices.
                let is_allowed =
                    self.const_kind() == hir::ConstContext::Static(hir::Mutability::Mut);

                if !is_allowed && self.place_may_escape(place) {
                    self.check_op(ops::EscapingMutBorrow(if matches!(rvalue, Rvalue::Ref(..)) {
                        hir::BorrowKind::Ref
                    } else {
                        hir::BorrowKind::Raw
                    }));
                }
            }

            Rvalue::Ref(_, BorrowKind::Shared | BorrowKind::Fake(_), place)
            | Rvalue::RawPtr(RawPtrKind::Const, place) => {
                let borrowed_place_has_mut_interior = qualifs::in_place::<HasMutInterior, _>(
                    self.ccx,
                    &mut |local| self.qualifs.has_mut_interior(self.ccx, local, location),
                    place.as_ref(),
                );

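                // E.g. (illustrative) `const C: &Cell<u32> = &Cell::new(0);`: the
                // borrow of interior-mutable data would escape into the final value.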
                if borrowed_place_has_mut_interior && self.place_may_escape(place) {
                    self.check_op(ops::EscapingCellBorrow);
                }
            }

            Rvalue::RawPtr(RawPtrKind::FakeForPtrMetadata, place) => {
                // These are only inserted for slice length, so the place must already be indirect.
                // This implies we do not have to worry about whether the borrow escapes.
                if !place.is_indirect() {
                    self.tcx.dcx().span_delayed_bug(
                        self.body.source_info(location).span,
                        "fake borrows are always indirect",
                    );
                }
            }

            Rvalue::Cast(
                CastKind::PointerCoercion(
                    PointerCoercion::MutToConstPointer
                    | PointerCoercion::ArrayToPointer
                    | PointerCoercion::UnsafeFnPointer
                    | PointerCoercion::ClosureFnPointer(_)
                    | PointerCoercion::ReifyFnPointer,
                    _,
                ),
                _,
                _,
            ) => {
                // These are all okay; they only change the type, not the data.
            }

            Rvalue::Cast(
                CastKind::PointerCoercion(PointerCoercion::Unsize | PointerCoercion::DynStar, _),
                _,
                _,
            ) => {
                // Unsizing and `dyn*` coercions are implemented for CTFE.
            }

            Rvalue::Cast(CastKind::PointerExposeProvenance, _, _) => {
                self.check_op(ops::RawPtrToIntCast);
            }
            Rvalue::Cast(CastKind::PointerWithExposedProvenance, _, _) => {
                // Since no pointer can ever get exposed (rejected above), this is easy to support.
            }

            Rvalue::Cast(_, _, _) => {}

            Rvalue::NullaryOp(
                NullOp::SizeOf
                | NullOp::AlignOf
                | NullOp::OffsetOf(_)
                | NullOp::UbChecks
                | NullOp::ContractChecks,
                _,
            ) => {}
            Rvalue::ShallowInitBox(_, _) => {}

            Rvalue::UnaryOp(op, operand) => {
                let ty = operand.ty(self.body, self.tcx);
                match op {
                    UnOp::Not | UnOp::Neg => {
                        if is_int_bool_float_or_char(ty) {
                            // Int, bool, float, and char operations are fine.
                        } else {
                            span_bug!(
                                self.span,
                                "non-primitive type in `Rvalue::UnaryOp{op:?}`: {ty:?}",
                            );
                        }
                    }
                    UnOp::PtrMetadata => {
                        // Getting the metadata from a pointer is always const.
                        // We already validated the type is valid in the validator.
                    }
                }
            }

            Rvalue::BinaryOp(op, box (lhs, rhs)) => {
                let lhs_ty = lhs.ty(self.body, self.tcx);
                let rhs_ty = rhs.ty(self.body, self.tcx);

                if is_int_bool_float_or_char(lhs_ty) && is_int_bool_float_or_char(rhs_ty) {
                    // Int, bool, float, and char operations are fine.
                } else if lhs_ty.is_fn_ptr() || lhs_ty.is_raw_ptr() {
                    assert_matches!(
                        op,
                        BinOp::Eq
                            | BinOp::Ne
                            | BinOp::Le
                            | BinOp::Lt
                            | BinOp::Ge
                            | BinOp::Gt
                            | BinOp::Offset
                    );

                    self.check_op(ops::RawPtrComparison);
                } else {
                    span_bug!(
                        self.span,
                        "non-primitive type in `Rvalue::BinaryOp`: {:?} ⚬ {:?}",
                        lhs_ty,
                        rhs_ty
                    );
                }
            }

            Rvalue::WrapUnsafeBinder(..) => {
                // Unsafe binders are always trivial to create.
            }
        }
    }

    fn visit_operand(&mut self, op: &Operand<'tcx>, location: Location) {
        self.super_operand(op, location);
        if let Operand::Constant(c) = op {
            if let Some(def_id) = c.check_static_ptr(self.tcx) {
                self.check_static(def_id, self.span);
            }
        }
    }

    fn visit_source_info(&mut self, source_info: &SourceInfo) {
        trace!("visit_source_info: source_info={:?}", source_info);
        self.span = source_info.span;
    }

    fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
        trace!("visit_statement: statement={:?} location={:?}", statement, location);

        self.super_statement(statement, location);

        match statement.kind {
            StatementKind::Assign(..)
            | StatementKind::SetDiscriminant { .. }
            | StatementKind::Deinit(..)
            | StatementKind::FakeRead(..)
            | StatementKind::StorageLive(_)
            | StatementKind::StorageDead(_)
            | StatementKind::Retag { .. }
            | StatementKind::PlaceMention(..)
            | StatementKind::AscribeUserType(..)
            | StatementKind::Coverage(..)
            | StatementKind::Intrinsic(..)
            | StatementKind::ConstEvalCounter
            | StatementKind::BackwardIncompatibleDropHint { .. }
            | StatementKind::Nop => {}
        }
    }

    #[instrument(level = "debug", skip(self))]
    fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
        self.super_terminator(terminator, location);

        match &terminator.kind {
            TerminatorKind::Call { func, args, fn_span, .. }
            | TerminatorKind::TailCall { func, args, fn_span, .. } => {
                let call_source = match terminator.kind {
                    TerminatorKind::Call { call_source, .. } => call_source,
                    TerminatorKind::TailCall { .. } => CallSource::Normal,
                    _ => unreachable!(),
                };

                let ConstCx { tcx, body, .. } = *self.ccx;

                let fn_ty = func.ty(body, tcx);

                let (callee, fn_args) = match *fn_ty.kind() {
                    ty::FnDef(def_id, fn_args) => (def_id, fn_args),

                    ty::FnPtr(..) => {
                        self.check_op(ops::FnCallIndirect);
                        // We can get here without an error in miri-unleashed mode... might as
                        // well skip the rest of the checks then.
                        return;
                    }
                    _ => {
                        span_bug!(terminator.source_info.span, "invalid callee of type {:?}", fn_ty)
                    }
                };

                let has_const_conditions =
                    self.revalidate_conditional_constness(callee, fn_args, *fn_span);

                // Attempting to call a trait method?
                if let Some(trait_did) = tcx.trait_of_item(callee) {
                    // We can't determine the actual callee here, so we have to do different checks
                    // than usual.

                    trace!("attempting to call a trait method");
                    let trait_is_const = tcx.is_const_trait(trait_did);

                    // Only consider a trait to be const if the const conditions hold.
                    // Otherwise, it's really misleading to call something "conditionally"
                    // const when it's very obviously not conditionally const.
                    if trait_is_const && has_const_conditions == Some(ConstConditionsHold::Yes) {
                        // Trait calls are always conditionally-const.
                        self.check_op(ops::ConditionallyConstCall {
                            callee,
                            args: fn_args,
                            span: *fn_span,
                            call_source,
                        });
                        self.check_callee_stability(trait_did);
                    } else {
                        // Not even a const trait.
                        self.check_op(ops::FnCallNonConst {
                            callee,
                            args: fn_args,
                            span: *fn_span,
                            call_source,
                        });
                    }
                    // That's all we can check here.
                    return;
                }

                // Even if we know the callee, ensure we can use conditionally-const calls.
                if has_const_conditions.is_some() {
                    self.check_op(ops::ConditionallyConstCall {
                        callee,
                        args: fn_args,
                        span: *fn_span,
                        call_source,
                    });
                }

                // At this point, we are calling a function, `callee`, whose `DefId` is known...

                // `begin_panic` and `#[rustc_const_panic_str]` functions accept generic
                // types other than str. Check to enforce that only str can be used in
                // const-eval.

                // const-eval of the `begin_panic` fn assumes the argument is `&str`
                if tcx.is_lang_item(callee, LangItem::BeginPanic) {
                    match args[0].node.ty(&self.ccx.body.local_decls, tcx).kind() {
                        ty::Ref(_, ty, _) if ty.is_str() => {}
                        _ => self.check_op(ops::PanicNonStr),
                    }
                    // Allow this call, skip all the checks below.
                    return;
                }

                // const-eval of `#[rustc_const_panic_str]` functions assumes the argument is `&&str`
                if tcx.has_attr(callee, sym::rustc_const_panic_str) {
                    match args[0].node.ty(&self.ccx.body.local_decls, tcx).kind() {
                        ty::Ref(_, ty, _) if matches!(ty.kind(), ty::Ref(_, ty, _) if ty.is_str()) =>
                            {}
                        _ => {
                            self.check_op(ops::PanicNonStr);
                        }
                    }
                    // Allow this call, skip all the checks below.
                    return;
                }

                // This can be called on stable via the `vec!` macro.
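                // (Illustrative: `const _: () = { let _v = vec![1]; };` would reach
                // this heap-allocation check.)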
                if tcx.is_lang_item(callee, LangItem::ExchangeMalloc) {
                    self.check_op(ops::HeapAllocation);
                    // Allow this call, skip all the checks below.
                    return;
                }

                // Intrinsics are language primitives, not regular calls, so treat them separately.
                if let Some(intrinsic) = tcx.intrinsic(callee) {
                    if !tcx.is_const_fn(callee) {
                        // Non-const intrinsic.
                        self.check_op(ops::IntrinsicNonConst { name: intrinsic.name });
                        // If we allowed this, we're in miri-unleashed mode, so we might
                        // as well skip the remaining checks.
                        return;
                    }
                    // We use `intrinsic.const_stable` to determine if this can be safely exposed to
                    // stable code, rather than `const_stable_indirect`. This is to make
                    // `#[rustc_const_stable_indirect]` an attribute that is always safe to add.
                    // We also ask `is_fn_or_trait_safe_to_expose_on_stable`; this determines
                    // whether the intrinsic fallback body is safe to expose on stable.
                    let is_const_stable = intrinsic.const_stable
                        || (!intrinsic.must_be_overridden
                            && is_fn_or_trait_safe_to_expose_on_stable(tcx, callee));
                    match tcx.lookup_const_stability(callee) {
                        None => {
                            // This doesn't need a separate const-stability check -- const-stability equals
                            // regular stability, and regular stability is checked separately.
                            // However, we *do* have to worry about *recursive* const stability.
                            if !is_const_stable && self.enforce_recursive_const_stability() {
                                self.dcx().emit_err(errors::UnmarkedIntrinsicExposed {
                                    span: self.span,
                                    def_path: self.tcx.def_path_str(callee),
                                });
                            }
                        }
                        Some(ConstStability {
                            level: StabilityLevel::Unstable { .. },
                            feature,
                            ..
                        }) => {
                            self.check_op(ops::IntrinsicUnstable {
                                name: intrinsic.name,
                                feature,
                                const_stable_indirect: is_const_stable,
                                suggestion: self.crate_inject_span(),
                            });
                        }
                        Some(ConstStability { level: StabilityLevel::Stable { .. }, .. }) => {
                            // All good. Note that a `#[rustc_const_stable]` intrinsic (meaning it
                            // can be *directly* invoked from stable const code) does not always
                            // have the `#[rustc_intrinsic_const_stable_indirect]` attribute (which controls
                            // exposing an intrinsic indirectly); we accept this call anyway.
                        }
                    }
                    // This completes the checks for intrinsics.
                    return;
                }

                if !tcx.is_const_fn(callee) {
                    self.check_op(ops::FnCallNonConst {
                        callee,
                        args: fn_args,
                        span: *fn_span,
                        call_source,
                    });
                    // If we allowed this, we're in miri-unleashed mode, so we might
                    // as well skip the remaining checks.
                    return;
                }

                // Finally, stability for regular function calls -- this is the big one.
                self.check_callee_stability(callee);
            }

            // Forbid all `Drop` terminators unless the place being dropped is a projection-free
            // local that is known not to be `NeedsNonConstDrop`.
            TerminatorKind::Drop { place: dropped_place, .. } => {
                // If we are checking live drops after drop-elaboration, don't emit duplicate
                // errors here.
                if super::post_drop_elaboration::checking_enabled(self.ccx) {
                    return;
                }

                self.check_drop_terminator(*dropped_place, location, terminator.source_info.span);
            }

            TerminatorKind::InlineAsm { .. } => self.check_op(ops::InlineAsm),

            TerminatorKind::Yield { .. } => {
                self.check_op(ops::Coroutine(
                    self.tcx
                        .coroutine_kind(self.body.source.def_id())
                        .expect("Only expected to have a yield in a coroutine"),
                ));
            }

            TerminatorKind::CoroutineDrop => {
                span_bug!(
                    self.body.source_info(location).span,
                    "We should not encounter TerminatorKind::CoroutineDrop after coroutine transform"
                );
            }

            TerminatorKind::UnwindTerminate(_) => {
                // Cleanup blocks are skipped for const checking (see `visit_basic_block_data`).
                span_bug!(self.span, "`Terminate` terminator outside of cleanup block")
            }

            TerminatorKind::Assert { .. }
            | TerminatorKind::FalseEdge { .. }
            | TerminatorKind::FalseUnwind { .. }
            | TerminatorKind::Goto { .. }
            | TerminatorKind::UnwindResume
            | TerminatorKind::Return
            | TerminatorKind::SwitchInt { .. }
            | TerminatorKind::Unreachable => {}
        }
    }
}

fn is_int_bool_float_or_char(ty: Ty<'_>) -> bool {
    ty.is_bool() || ty.is_integral() || ty.is_char() || ty.is_floating_point()
}

fn emit_unstable_in_stable_exposed_error(
    ccx: &ConstCx<'_, '_>,
    span: Span,
    gate: Symbol,
    is_function_call: bool,
) -> ErrorGuaranteed {
    let attr_span = ccx.tcx.def_span(ccx.def_id()).shrink_to_lo();

    ccx.dcx().emit_err(errors::UnstableInStableExposed {
        gate: gate.to_string(),
        span,
        attr_span,
        is_function_call,
        is_function_call2: is_function_call,
    })
}