//! The `Visitor` responsible for actually checking a `mir::Body` for invalid operations.

3use std::borrow::Cow;
4use std::num::NonZero;
5use std::ops::Deref;
6use std::{assert_matches, mem};
7
8use rustc_errors::{Diag, ErrorGuaranteed};
9use rustc_hir::def::DefKind;
10use rustc_hir::def_id::DefId;
11use rustc_hir::{self as hir, LangItem, find_attr};
12use rustc_index::bit_set::DenseBitSet;
13use rustc_infer::infer::TyCtxtInferExt;
14use rustc_middle::mir::visit::Visitor;
15use rustc_middle::mir::*;
16use rustc_middle::span_bug;
17use rustc_middle::ty::adjustment::PointerCoercion;
18use rustc_middle::ty::{self, Ty, TypeVisitableExt};
19use rustc_mir_dataflow::Analysis;
20use rustc_mir_dataflow::impls::{MaybeStorageLive, always_storage_live_locals};
21use rustc_span::{Span, Symbol, sym};
22use rustc_trait_selection::traits::{
23    Obligation, ObligationCause, ObligationCauseCode, ObligationCtxt,
24};
25use tracing::{instrument, trace};
26
27use super::ops::{self, NonConstOp, Status};
28use super::qualifs::{self, HasMutInterior, NeedsDrop, NeedsNonConstDrop};
29use super::resolver::FlowSensitiveAnalysis;
30use super::{ConstCx, Qualif};
31use crate::check_consts::is_fn_or_trait_safe_to_expose_on_stable;
32use crate::errors;
33
34type QualifResults<'mir, 'tcx, Q> =
35    rustc_mir_dataflow::ResultsCursor<'mir, 'tcx, FlowSensitiveAnalysis<'mir, 'tcx, Q>>;
36
/// Whether a callee's const-conditions were shown to hold at a call site.
/// (Reconstructed from the expanded derive output found in the extracted source.)
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
enum ConstConditionsHold {
    Yes,
    No,
}
42
43#[derive(#[automatically_derived]
impl<'mir, 'tcx> ::core::default::Default for Qualifs<'mir, 'tcx> {
    #[inline]
    fn default() -> Qualifs<'mir, 'tcx> {
        Qualifs {
            has_mut_interior: ::core::default::Default::default(),
            needs_drop: ::core::default::Default::default(),
            needs_non_const_drop: ::core::default::Default::default(),
        }
    }
}Default)]
44pub(crate) struct Qualifs<'mir, 'tcx> {
45    has_mut_interior: Option<QualifResults<'mir, 'tcx, HasMutInterior>>,
46    needs_drop: Option<QualifResults<'mir, 'tcx, NeedsDrop>>,
47    needs_non_const_drop: Option<QualifResults<'mir, 'tcx, NeedsNonConstDrop>>,
48}
49
50impl<'mir, 'tcx> Qualifs<'mir, 'tcx> {
51    /// Returns `true` if `local` is `NeedsDrop` at the given `Location`.
52    ///
53    /// Only updates the cursor if absolutely necessary
54    pub(crate) fn needs_drop(
55        &mut self,
56        ccx: &'mir ConstCx<'mir, 'tcx>,
57        local: Local,
58        location: Location,
59    ) -> bool {
60        let ty = ccx.body.local_decls[local].ty;
61        // Peeking into opaque types causes cycles if the current function declares said opaque
62        // type. Thus we avoid short circuiting on the type and instead run the more expensive
63        // analysis that looks at the actual usage within this function
64        if !ty.has_opaque_types() && !NeedsDrop::in_any_value_of_ty(ccx, ty) {
65            return false;
66        }
67
68        let needs_drop = self.needs_drop.get_or_insert_with(|| {
69            let ConstCx { tcx, body, .. } = *ccx;
70
71            FlowSensitiveAnalysis::new(NeedsDrop, ccx)
72                .iterate_to_fixpoint(tcx, body, None)
73                .into_results_cursor(body)
74        });
75
76        needs_drop.seek_before_primary_effect(location);
77        needs_drop.get().contains(local)
78    }
79
80    /// Returns `true` if `local` is `NeedsNonConstDrop` at the given `Location`.
81    ///
82    /// Only updates the cursor if absolutely necessary
83    pub(crate) fn needs_non_const_drop(
84        &mut self,
85        ccx: &'mir ConstCx<'mir, 'tcx>,
86        local: Local,
87        location: Location,
88    ) -> bool {
89        let ty = ccx.body.local_decls[local].ty;
90        // Peeking into opaque types causes cycles if the current function declares said opaque
91        // type. Thus we avoid short circuiting on the type and instead run the more expensive
92        // analysis that looks at the actual usage within this function
93        if !ty.has_opaque_types() && !NeedsNonConstDrop::in_any_value_of_ty(ccx, ty) {
94            return false;
95        }
96
97        let needs_non_const_drop = self.needs_non_const_drop.get_or_insert_with(|| {
98            let ConstCx { tcx, body, .. } = *ccx;
99
100            FlowSensitiveAnalysis::new(NeedsNonConstDrop, ccx)
101                .iterate_to_fixpoint(tcx, body, None)
102                .into_results_cursor(body)
103        });
104
105        needs_non_const_drop.seek_before_primary_effect(location);
106        needs_non_const_drop.get().contains(local)
107    }
108
109    /// Returns `true` if `local` is `HasMutInterior` at the given `Location`.
110    ///
111    /// Only updates the cursor if absolutely necessary.
112    fn has_mut_interior(
113        &mut self,
114        ccx: &'mir ConstCx<'mir, 'tcx>,
115        local: Local,
116        location: Location,
117    ) -> bool {
118        let ty = ccx.body.local_decls[local].ty;
119        // Peeking into opaque types causes cycles if the current function declares said opaque
120        // type. Thus we avoid short circuiting on the type and instead run the more expensive
121        // analysis that looks at the actual usage within this function
122        if !ty.has_opaque_types() && !HasMutInterior::in_any_value_of_ty(ccx, ty) {
123            return false;
124        }
125
126        let has_mut_interior = self.has_mut_interior.get_or_insert_with(|| {
127            let ConstCx { tcx, body, .. } = *ccx;
128
129            FlowSensitiveAnalysis::new(HasMutInterior, ccx)
130                .iterate_to_fixpoint(tcx, body, None)
131                .into_results_cursor(body)
132        });
133
134        has_mut_interior.seek_before_primary_effect(location);
135        has_mut_interior.get().contains(local)
136    }
137
138    fn in_return_place(
139        &mut self,
140        ccx: &'mir ConstCx<'mir, 'tcx>,
141        tainted_by_errors: Option<ErrorGuaranteed>,
142    ) -> ConstQualifs {
143        // FIXME(explicit_tail_calls): uhhhh I think we can return without return now, does it change anything
144
145        // Find the `Return` terminator if one exists.
146        //
147        // If no `Return` terminator exists, this MIR is divergent. Just return the conservative
148        // qualifs for the return type.
149        let return_block = ccx
150            .body
151            .basic_blocks
152            .iter_enumerated()
153            .find(|(_, block)| #[allow(non_exhaustive_omitted_patterns)] match block.terminator().kind {
    TerminatorKind::Return => true,
    _ => false,
}matches!(block.terminator().kind, TerminatorKind::Return))
154            .map(|(bb, _)| bb);
155
156        let Some(return_block) = return_block else {
157            return qualifs::in_any_value_of_ty(ccx, ccx.body.return_ty(), tainted_by_errors);
158        };
159
160        let return_loc = ccx.body.terminator_loc(return_block);
161
162        ConstQualifs {
163            needs_drop: self.needs_drop(ccx, RETURN_PLACE, return_loc),
164            needs_non_const_drop: self.needs_non_const_drop(ccx, RETURN_PLACE, return_loc),
165            has_mut_interior: self.has_mut_interior(ccx, RETURN_PLACE, return_loc),
166            tainted_by_errors,
167        }
168    }
169}
170
171pub struct Checker<'mir, 'tcx> {
172    ccx: &'mir ConstCx<'mir, 'tcx>,
173    qualifs: Qualifs<'mir, 'tcx>,
174
175    /// The span of the current statement.
176    span: Span,
177
178    /// A set that stores for each local whether it is "transient", i.e. guaranteed to be dead
179    /// when this MIR body returns.
180    transient_locals: Option<DenseBitSet<Local>>,
181
182    error_emitted: Option<ErrorGuaranteed>,
183    secondary_errors: Vec<Diag<'tcx>>,
184}
185
186impl<'mir, 'tcx> Deref for Checker<'mir, 'tcx> {
187    type Target = ConstCx<'mir, 'tcx>;
188
189    fn deref(&self) -> &Self::Target {
190        self.ccx
191    }
192}
193
194impl<'mir, 'tcx> Checker<'mir, 'tcx> {
195    pub fn new(ccx: &'mir ConstCx<'mir, 'tcx>) -> Self {
196        Checker {
197            span: ccx.body.span,
198            ccx,
199            qualifs: Default::default(),
200            transient_locals: None,
201            error_emitted: None,
202            secondary_errors: Vec::new(),
203        }
204    }
205
206    pub fn check_body(&mut self) {
207        let ConstCx { tcx, body, .. } = *self.ccx;
208        let def_id = self.ccx.def_id();
209
210        // `async` functions cannot be `const fn`. This is checked during AST lowering, so there's
211        // no need to emit duplicate errors here.
212        if self.ccx.is_async() || body.coroutine.is_some() {
213            tcx.dcx().span_delayed_bug(body.span, "`async` functions cannot be `const fn`");
214            return;
215        }
216
217        if !{

        #[allow(deprecated)]
        {
            {
                'done:
                    {
                    for i in tcx.get_all_attrs(def_id) {
                        #[allow(unused_imports)]
                        use rustc_hir::attrs::AttributeKind::*;
                        let i: &rustc_hir::Attribute = i;
                        match i {
                            rustc_hir::Attribute::Parsed(RustcDoNotConstCheck) => {
                                break 'done Some(());
                            }
                            rustc_hir::Attribute::Unparsed(..) =>
                                {}
                                #[deny(unreachable_patterns)]
                                _ => {}
                        }
                    }
                    None
                }
            }
        }
    }.is_some()find_attr!(tcx, def_id, RustcDoNotConstCheck) {
218            self.visit_body(body);
219        }
220
221        // If we got through const-checking without emitting any "primary" errors, emit any
222        // "secondary" errors if they occurred. Otherwise, cancel the "secondary" errors.
223        let secondary_errors = mem::take(&mut self.secondary_errors);
224        if self.error_emitted.is_none() {
225            for error in secondary_errors {
226                self.error_emitted = Some(error.emit());
227            }
228        } else {
229            if !self.tcx.dcx().has_errors().is_some() {
    ::core::panicking::panic("assertion failed: self.tcx.dcx().has_errors().is_some()")
};assert!(self.tcx.dcx().has_errors().is_some());
230            for error in secondary_errors {
231                error.cancel();
232            }
233        }
234    }
235
236    fn local_is_transient(&mut self, local: Local) -> bool {
237        let ccx = self.ccx;
238        self.transient_locals
239            .get_or_insert_with(|| {
240                // A local is "transient" if it is guaranteed dead at all `Return`.
241                // So first compute the say of "maybe live" locals at each program point.
242                let always_live_locals = &always_storage_live_locals(&ccx.body);
243                let mut maybe_storage_live =
244                    MaybeStorageLive::new(Cow::Borrowed(always_live_locals))
245                        .iterate_to_fixpoint(ccx.tcx, &ccx.body, None)
246                        .into_results_cursor(&ccx.body);
247
248                // And then check all `Return` in the MIR, and if a local is "maybe live" at a
249                // `Return` then it is definitely not transient.
250                let mut transient = DenseBitSet::new_filled(ccx.body.local_decls.len());
251                // Make sure to only visit reachable blocks, the dataflow engine can ICE otherwise.
252                for (bb, data) in traversal::reachable(&ccx.body) {
253                    if data.terminator().kind == TerminatorKind::Return {
254                        let location = ccx.body.terminator_loc(bb);
255                        maybe_storage_live.seek_after_primary_effect(location);
256                        // If a local may be live here, it is definitely not transient.
257                        transient.subtract(maybe_storage_live.get());
258                    }
259                }
260
261                transient
262            })
263            .contains(local)
264    }
265
266    pub fn qualifs_in_return_place(&mut self) -> ConstQualifs {
267        self.qualifs.in_return_place(self.ccx, self.error_emitted)
268    }
269
270    /// Emits an error if an expression cannot be evaluated in the current context.
271    pub fn check_op(&mut self, op: impl NonConstOp<'tcx>) {
272        self.check_op_spanned(op, self.span);
273    }
274
275    /// Emits an error at the given `span` if an expression cannot be evaluated in the current
276    /// context.
277    pub fn check_op_spanned<O: NonConstOp<'tcx>>(&mut self, op: O, span: Span) {
278        let gate = match op.status_in_item(self.ccx) {
279            Status::Unstable {
280                gate,
281                safe_to_expose_on_stable,
282                is_function_call,
283                gate_already_checked,
284            } if gate_already_checked || self.tcx.features().enabled(gate) => {
285                if gate_already_checked {
286                    if !!safe_to_expose_on_stable {
    {
        ::core::panicking::panic_fmt(format_args!("setting `gate_already_checked` without `safe_to_expose_on_stable` makes no sense"));
    }
};assert!(
287                        !safe_to_expose_on_stable,
288                        "setting `gate_already_checked` without `safe_to_expose_on_stable` makes no sense"
289                    );
290                }
291                // Generally this is allowed since the feature gate is enabled -- except
292                // if this function wants to be safe-to-expose-on-stable.
293                if !safe_to_expose_on_stable
294                    && self.enforce_recursive_const_stability()
295                    && !super::rustc_allow_const_fn_unstable(self.tcx, self.def_id(), gate)
296                {
297                    emit_unstable_in_stable_exposed_error(self.ccx, span, gate, is_function_call);
298                }
299
300                return;
301            }
302
303            Status::Unstable { gate, .. } => Some(gate),
304            Status::Forbidden => None,
305        };
306
307        if self.tcx.sess.opts.unstable_opts.unleash_the_miri_inside_of_you {
308            self.tcx.sess.miri_unleashed_feature(span, gate);
309            return;
310        }
311
312        let err = op.build_error(self.ccx, span);
313        if !err.is_error() {
    ::core::panicking::panic("assertion failed: err.is_error()")
};assert!(err.is_error());
314
315        match op.importance() {
316            ops::DiagImportance::Primary => {
317                let reported = err.emit();
318                self.error_emitted = Some(reported);
319            }
320
321            ops::DiagImportance::Secondary => {
322                self.secondary_errors.push(err);
323                self.tcx.dcx().span_delayed_bug(
324                    span,
325                    "compilation must fail when there is a secondary const checker error",
326                );
327            }
328        }
329    }
330
331    fn check_static(&mut self, def_id: DefId, span: Span) {
332        if self.tcx.is_thread_local_static(def_id) {
333            self.tcx.dcx().span_bug(span, "tls access is checked in `Rvalue::ThreadLocalRef`");
334        }
335        if let Some(def_id) = def_id.as_local()
336            && let Err(guar) = self.tcx.ensure_result().check_well_formed(hir::OwnerId { def_id })
337        {
338            self.error_emitted = Some(guar);
339        }
340    }
341
342    /// Returns whether this place can possibly escape the evaluation of the current const/static
343    /// initializer. The check assumes that all already existing pointers and references point to
344    /// non-escaping places.
345    fn place_may_escape(&mut self, place: &Place<'_>) -> bool {
346        let is_transient = match self.const_kind() {
347            // In a const fn all borrows are transient or point to the places given via
348            // references in the arguments (so we already checked them with
349            // TransientMutBorrow/MutBorrow as appropriate).
350            // The borrow checker guarantees that no new non-transient borrows are created.
351            // NOTE: Once we have heap allocations during CTFE we need to figure out
352            // how to prevent `const fn` to create long-lived allocations that point
353            // to mutable memory.
354            hir::ConstContext::ConstFn => true,
355            _ => {
356                // For indirect places, we are not creating a new permanent borrow, it's just as
357                // transient as the already existing one.
358                // Locals with StorageDead do not live beyond the evaluation and can
359                // thus safely be borrowed without being able to be leaked to the final
360                // value of the constant.
361                // Note: This is only sound if every local that has a `StorageDead` has a
362                // `StorageDead` in every control flow path leading to a `return` terminator.
363                // If anything slips through, there's no safety net -- safe code can create
364                // references to variants of `!Freeze` enums as long as that variant is `Freeze`, so
365                // interning can't protect us here. (There *is* a safety net for mutable references
366                // though, interning will ICE if we miss something here.)
367                place.is_indirect() || self.local_is_transient(place.local)
368            }
369        };
370        // Transient places cannot possibly escape because the place doesn't exist any more at the
371        // end of evaluation.
372        !is_transient
373    }
374
375    /// Returns whether there are const-conditions.
376    fn revalidate_conditional_constness(
377        &mut self,
378        callee: DefId,
379        callee_args: ty::GenericArgsRef<'tcx>,
380        call_span: Span,
381    ) -> Option<ConstConditionsHold> {
382        let tcx = self.tcx;
383        if !tcx.is_conditionally_const(callee) {
384            return None;
385        }
386
387        let const_conditions = tcx.const_conditions(callee).instantiate(tcx, callee_args);
388        if const_conditions.is_empty() {
389            return None;
390        }
391
392        let (infcx, param_env) = tcx.infer_ctxt().build_with_typing_env(self.body.typing_env(tcx));
393        let ocx = ObligationCtxt::new(&infcx);
394
395        let body_id = self.body.source.def_id().expect_local();
396        let host_polarity = match self.const_kind() {
397            hir::ConstContext::ConstFn => ty::BoundConstness::Maybe,
398            hir::ConstContext::Static(_) | hir::ConstContext::Const { .. } => {
399                ty::BoundConstness::Const
400            }
401        };
402        let const_conditions =
403            ocx.normalize(&ObligationCause::misc(call_span, body_id), param_env, const_conditions);
404        ocx.register_obligations(const_conditions.into_iter().map(|(trait_ref, span)| {
405            Obligation::new(
406                tcx,
407                ObligationCause::new(
408                    call_span,
409                    body_id,
410                    ObligationCauseCode::WhereClause(callee, span),
411                ),
412                param_env,
413                trait_ref.to_host_effect_clause(tcx, host_polarity),
414            )
415        }));
416
417        let errors = ocx.evaluate_obligations_error_on_ambiguity();
418        if errors.is_empty() {
419            Some(ConstConditionsHold::Yes)
420        } else {
421            tcx.dcx()
422                .span_delayed_bug(call_span, "this should have reported a [const] error in HIR");
423            Some(ConstConditionsHold::No)
424        }
425    }
426
427    pub fn check_drop_terminator(
428        &mut self,
429        dropped_place: Place<'tcx>,
430        location: Location,
431        terminator_span: Span,
432    ) {
433        let ty_of_dropped_place = dropped_place.ty(self.body, self.tcx).ty;
434
435        let needs_drop = if let Some(local) = dropped_place.as_local() {
436            self.qualifs.needs_drop(self.ccx, local, location)
437        } else {
438            qualifs::NeedsDrop::in_any_value_of_ty(self.ccx, ty_of_dropped_place)
439        };
440        // If this type doesn't need a drop at all, then there's nothing to enforce.
441        if !needs_drop {
442            return;
443        }
444
445        let mut err_span = self.span;
446        let needs_non_const_drop = if let Some(local) = dropped_place.as_local() {
447            // Use the span where the local was declared as the span of the drop error.
448            err_span = self.body.local_decls[local].source_info.span;
449            self.qualifs.needs_non_const_drop(self.ccx, local, location)
450        } else {
451            qualifs::NeedsNonConstDrop::in_any_value_of_ty(self.ccx, ty_of_dropped_place)
452        };
453
454        self.check_op_spanned(
455            ops::LiveDrop {
456                dropped_at: terminator_span,
457                dropped_ty: ty_of_dropped_place,
458                needs_non_const_drop,
459            },
460            err_span,
461        );
462    }
463
464    /// Check the const stability of the given item (fn or trait).
465    fn check_callee_stability(&mut self, def_id: DefId) {
466        match self.tcx.lookup_const_stability(def_id) {
467            Some(hir::ConstStability { level: hir::StabilityLevel::Stable { .. }, .. }) => {
468                // All good.
469            }
470            None => {
471                // This doesn't need a separate const-stability check -- const-stability equals
472                // regular stability, and regular stability is checked separately.
473                // However, we *do* have to worry about *recursive* const stability.
474                if self.enforce_recursive_const_stability()
475                    && !is_fn_or_trait_safe_to_expose_on_stable(self.tcx, def_id)
476                {
477                    self.dcx().emit_err(errors::UnmarkedConstItemExposed {
478                        span: self.span,
479                        def_path: self.tcx.def_path_str(def_id),
480                    });
481                }
482            }
483            Some(hir::ConstStability {
484                level: hir::StabilityLevel::Unstable { implied_by: implied_feature, issue, .. },
485                feature,
486                ..
487            }) => {
488                // An unstable const fn/trait with a feature gate.
489                let callee_safe_to_expose_on_stable =
490                    is_fn_or_trait_safe_to_expose_on_stable(self.tcx, def_id);
491
492                // We only honor `span.allows_unstable` aka `#[allow_internal_unstable]` if
493                // the callee is safe to expose, to avoid bypassing recursive stability.
494                // This is not ideal since it means the user sees an error, not the macro
495                // author, but that's also the case if one forgets to set
496                // `#[allow_internal_unstable]` in the first place. Note that this cannot be
497                // integrated in the check below since we want to enforce
498                // `callee_safe_to_expose_on_stable` even if
499                // `!self.enforce_recursive_const_stability()`.
500                if (self.span.allows_unstable(feature)
501                    || implied_feature.is_some_and(|f| self.span.allows_unstable(f)))
502                    && callee_safe_to_expose_on_stable
503                {
504                    return;
505                }
506
507                // We can't use `check_op` to check whether the feature is enabled because
508                // the logic is a bit different than elsewhere: local functions don't need
509                // the feature gate, and there might be an "implied" gate that also suffices
510                // to allow this.
511                let feature_enabled = def_id.is_local()
512                    || self.tcx.features().enabled(feature)
513                    || implied_feature.is_some_and(|f| self.tcx.features().enabled(f))
514                    || {
515                        // When we're compiling the compiler itself we may pull in
516                        // crates from crates.io, but those crates may depend on other
517                        // crates also pulled in from crates.io. We want to ideally be
518                        // able to compile everything without requiring upstream
519                        // modifications, so in the case that this looks like a
520                        // `rustc_private` crate (e.g., a compiler crate) and we also have
521                        // the `-Z force-unstable-if-unmarked` flag present (we're
522                        // compiling a compiler crate), then let this missing feature
523                        // annotation slide.
524                        // This matches what we do in `eval_stability_allow_unstable` for
525                        // regular stability.
526                        feature == sym::rustc_private
527                            && issue == NonZero::new(27812)
528                            && self.tcx.sess.opts.unstable_opts.force_unstable_if_unmarked
529                    };
530                // Even if the feature is enabled, we still need check_op to double-check
531                // this if the callee is not safe to expose on stable.
532                if !feature_enabled || !callee_safe_to_expose_on_stable {
533                    self.check_op(ops::CallUnstable {
534                        def_id,
535                        feature,
536                        feature_enabled,
537                        safe_to_expose_on_stable: callee_safe_to_expose_on_stable,
538                        is_function_call: self.tcx.def_kind(def_id) != DefKind::Trait,
539                    });
540                }
541            }
542        }
543    }
544}
545
546impl<'tcx> Visitor<'tcx> for Checker<'_, 'tcx> {
547    fn visit_basic_block_data(&mut self, bb: BasicBlock, block: &BasicBlockData<'tcx>) {
548        {
    use ::tracing::__macro_support::Callsite as _;
    static __CALLSITE: ::tracing::callsite::DefaultCallsite =
        {
            static META: ::tracing::Metadata<'static> =
                {
                    ::tracing_core::metadata::Metadata::new("event compiler/rustc_const_eval/src/check_consts/check.rs:548",
                        "rustc_const_eval::check_consts::check",
                        ::tracing::Level::TRACE,
                        ::tracing_core::__macro_support::Option::Some("compiler/rustc_const_eval/src/check_consts/check.rs"),
                        ::tracing_core::__macro_support::Option::Some(548u32),
                        ::tracing_core::__macro_support::Option::Some("rustc_const_eval::check_consts::check"),
                        ::tracing_core::field::FieldSet::new(&["message"],
                            ::tracing_core::callsite::Identifier(&__CALLSITE)),
                        ::tracing::metadata::Kind::EVENT)
                };
            ::tracing::callsite::DefaultCallsite::new(&META)
        };
    let enabled =
        ::tracing::Level::TRACE <= ::tracing::level_filters::STATIC_MAX_LEVEL
                &&
                ::tracing::Level::TRACE <=
                    ::tracing::level_filters::LevelFilter::current() &&
            {
                let interest = __CALLSITE.interest();
                !interest.is_never() &&
                    ::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
                        interest)
            };
    if enabled {
        (|value_set: ::tracing::field::ValueSet|
                    {
                        let meta = __CALLSITE.metadata();
                        ::tracing::Event::dispatch(meta, &value_set);
                        ;
                    })({
                #[allow(unused_imports)]
                use ::tracing::field::{debug, display, Value};
                let mut iter = __CALLSITE.metadata().fields().iter();
                __CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
                                    ::tracing::__macro_support::Option::Some(&format_args!("visit_basic_block_data: bb={0:?} is_cleanup={1:?}",
                                                    bb, block.is_cleanup) as &dyn Value))])
            });
    } else { ; }
};trace!("visit_basic_block_data: bb={:?} is_cleanup={:?}", bb, block.is_cleanup);
549
550        // We don't const-check basic blocks on the cleanup path since we never unwind during
551        // const-eval: a panic causes an immediate compile error. In other words, cleanup blocks
552        // are unreachable during const-eval.
553        //
554        // We can't be more conservative (e.g., by const-checking cleanup blocks anyways) because
555        // locals that would never be dropped during normal execution are sometimes dropped during
556        // unwinding, which means backwards-incompatible live-drop errors.
557        if block.is_cleanup {
558            return;
559        }
560
561        self.super_basic_block_data(bb, block);
562    }
563
564    fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
565        {
    use ::tracing::__macro_support::Callsite as _;
    static __CALLSITE: ::tracing::callsite::DefaultCallsite =
        {
            static META: ::tracing::Metadata<'static> =
                {
                    ::tracing_core::metadata::Metadata::new("event compiler/rustc_const_eval/src/check_consts/check.rs:565",
                        "rustc_const_eval::check_consts::check",
                        ::tracing::Level::TRACE,
                        ::tracing_core::__macro_support::Option::Some("compiler/rustc_const_eval/src/check_consts/check.rs"),
                        ::tracing_core::__macro_support::Option::Some(565u32),
                        ::tracing_core::__macro_support::Option::Some("rustc_const_eval::check_consts::check"),
                        ::tracing_core::field::FieldSet::new(&["message"],
                            ::tracing_core::callsite::Identifier(&__CALLSITE)),
                        ::tracing::metadata::Kind::EVENT)
                };
            ::tracing::callsite::DefaultCallsite::new(&META)
        };
    let enabled =
        ::tracing::Level::TRACE <= ::tracing::level_filters::STATIC_MAX_LEVEL
                &&
                ::tracing::Level::TRACE <=
                    ::tracing::level_filters::LevelFilter::current() &&
            {
                let interest = __CALLSITE.interest();
                !interest.is_never() &&
                    ::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
                        interest)
            };
    if enabled {
        (|value_set: ::tracing::field::ValueSet|
                    {
                        let meta = __CALLSITE.metadata();
                        ::tracing::Event::dispatch(meta, &value_set);
                        ;
                    })({
                #[allow(unused_imports)]
                use ::tracing::field::{debug, display, Value};
                let mut iter = __CALLSITE.metadata().fields().iter();
                __CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
                                    ::tracing::__macro_support::Option::Some(&format_args!("visit_rvalue: rvalue={0:?} location={1:?}",
                                                    rvalue, location) as &dyn Value))])
            });
    } else { ; }
};trace!("visit_rvalue: rvalue={:?} location={:?}", rvalue, location);
566
567        self.super_rvalue(rvalue, location);
568
569        match rvalue {
570            Rvalue::ThreadLocalRef(_) => self.check_op(ops::ThreadLocalAccess),
571
572            Rvalue::Use(_)
573            | Rvalue::CopyForDeref(..)
574            | Rvalue::Repeat(..)
575            | Rvalue::Discriminant(..) => {}
576
577            Rvalue::Aggregate(kind, ..) => {
578                if let AggregateKind::Coroutine(def_id, ..) = kind.as_ref()
579                    && let Some(coroutine_kind) = self.tcx.coroutine_kind(*def_id)
580                {
581                    self.check_op(ops::Coroutine(coroutine_kind));
582                }
583            }
584
585            Rvalue::Ref(_, BorrowKind::Mut { .. }, place)
586            | Rvalue::RawPtr(RawPtrKind::Mut, place) => {
587                // Inside mutable statics, we allow arbitrary mutable references.
588                // We've allowed `static mut FOO = &mut [elements];` for a long time (the exact
589                // reasons why are lost to history), and there is no reason to restrict that to
590                // arrays and slices.
591                let is_allowed =
592                    self.const_kind() == hir::ConstContext::Static(hir::Mutability::Mut);
593
594                if !is_allowed && self.place_may_escape(place) {
595                    self.check_op(ops::EscapingMutBorrow);
596                }
597            }
598
599            Rvalue::Ref(_, BorrowKind::Shared | BorrowKind::Fake(_), place)
600            | Rvalue::RawPtr(RawPtrKind::Const, place) => {
601                let borrowed_place_has_mut_interior = qualifs::in_place::<HasMutInterior, _>(
602                    self.ccx,
603                    &mut |local| self.qualifs.has_mut_interior(self.ccx, local, location),
604                    place.as_ref(),
605                );
606
607                if borrowed_place_has_mut_interior && self.place_may_escape(place) {
608                    self.check_op(ops::EscapingCellBorrow);
609                }
610            }
611
612            Rvalue::RawPtr(RawPtrKind::FakeForPtrMetadata, place) => {
613                // These are only inserted for slice length, so the place must already be indirect.
614                // This implies we do not have to worry about whether the borrow escapes.
615                if !place.is_indirect() {
616                    self.tcx.dcx().span_delayed_bug(
617                        self.body.source_info(location).span,
618                        "fake borrows are always indirect",
619                    );
620                }
621            }
622
623            Rvalue::Cast(
624                CastKind::PointerCoercion(
625                    PointerCoercion::MutToConstPointer
626                    | PointerCoercion::ArrayToPointer
627                    | PointerCoercion::UnsafeFnPointer
628                    | PointerCoercion::ClosureFnPointer(_)
629                    | PointerCoercion::ReifyFnPointer(_),
630                    _,
631                ),
632                _,
633                _,
634            ) => {
635                // These are all okay; they only change the type, not the data.
636            }
637
638            Rvalue::Cast(CastKind::PointerExposeProvenance, _, _) => {
639                self.check_op(ops::RawPtrToIntCast);
640            }
641            Rvalue::Cast(CastKind::PointerWithExposedProvenance, _, _) => {
642                // Since no pointer can ever get exposed (rejected above), this is easy to support.
643            }
644
645            Rvalue::Cast(_, _, _) => {}
646
647            Rvalue::UnaryOp(op, operand) => {
648                let ty = operand.ty(self.body, self.tcx);
649                match op {
650                    UnOp::Not | UnOp::Neg => {
651                        if is_int_bool_float_or_char(ty) {
652                            // Int, bool, float, and char operations are fine.
653                        } else {
654                            ::rustc_middle::util::bug::span_bug_fmt(self.span,
    format_args!("non-primitive type in `Rvalue::UnaryOp{0:?}`: {1:?}", op,
        ty));span_bug!(
655                                self.span,
656                                "non-primitive type in `Rvalue::UnaryOp{op:?}`: {ty:?}",
657                            );
658                        }
659                    }
660                    UnOp::PtrMetadata => {
661                        // Getting the metadata from a pointer is always const.
662                        // We already validated the type is valid in the validator.
663                    }
664                }
665            }
666
667            Rvalue::BinaryOp(op, box (lhs, rhs)) => {
668                let lhs_ty = lhs.ty(self.body, self.tcx);
669                let rhs_ty = rhs.ty(self.body, self.tcx);
670
671                if is_int_bool_float_or_char(lhs_ty) && is_int_bool_float_or_char(rhs_ty) {
672                    // Int, bool, float, and char operations are fine.
673                } else if lhs_ty.is_fn_ptr() || lhs_ty.is_raw_ptr() {
674                    match op {
    BinOp::Eq | BinOp::Ne | BinOp::Le | BinOp::Lt | BinOp::Ge | BinOp::Gt |
        BinOp::Offset => {}
    ref left_val => {
        ::core::panicking::assert_matches_failed(left_val,
            "BinOp::Eq | BinOp::Ne | BinOp::Le | BinOp::Lt | BinOp::Ge | BinOp::Gt |\nBinOp::Offset",
            ::core::option::Option::None);
    }
};assert_matches!(
675                        op,
676                        BinOp::Eq
677                            | BinOp::Ne
678                            | BinOp::Le
679                            | BinOp::Lt
680                            | BinOp::Ge
681                            | BinOp::Gt
682                            | BinOp::Offset
683                    );
684
685                    self.check_op(ops::RawPtrComparison);
686                } else {
687                    ::rustc_middle::util::bug::span_bug_fmt(self.span,
    format_args!("non-primitive type in `Rvalue::BinaryOp`: {0:?} ⚬ {1:?}",
        lhs_ty, rhs_ty));span_bug!(
688                        self.span,
689                        "non-primitive type in `Rvalue::BinaryOp`: {:?} ⚬ {:?}",
690                        lhs_ty,
691                        rhs_ty
692                    );
693                }
694            }
695
696            Rvalue::WrapUnsafeBinder(..) => {
697                // Unsafe binders are always trivial to create.
698            }
699        }
700    }
701
702    fn visit_operand(&mut self, op: &Operand<'tcx>, location: Location) {
703        self.super_operand(op, location);
704        if let Operand::Constant(c) = op
705            && let Some(def_id) = c.check_static_ptr(self.tcx)
706        {
707            self.check_static(def_id, self.span);
708        }
709    }
710
711    fn visit_source_info(&mut self, source_info: &SourceInfo) {
712        {
    use ::tracing::__macro_support::Callsite as _;
    static __CALLSITE: ::tracing::callsite::DefaultCallsite =
        {
            static META: ::tracing::Metadata<'static> =
                {
                    ::tracing_core::metadata::Metadata::new("event compiler/rustc_const_eval/src/check_consts/check.rs:712",
                        "rustc_const_eval::check_consts::check",
                        ::tracing::Level::TRACE,
                        ::tracing_core::__macro_support::Option::Some("compiler/rustc_const_eval/src/check_consts/check.rs"),
                        ::tracing_core::__macro_support::Option::Some(712u32),
                        ::tracing_core::__macro_support::Option::Some("rustc_const_eval::check_consts::check"),
                        ::tracing_core::field::FieldSet::new(&["message"],
                            ::tracing_core::callsite::Identifier(&__CALLSITE)),
                        ::tracing::metadata::Kind::EVENT)
                };
            ::tracing::callsite::DefaultCallsite::new(&META)
        };
    let enabled =
        ::tracing::Level::TRACE <= ::tracing::level_filters::STATIC_MAX_LEVEL
                &&
                ::tracing::Level::TRACE <=
                    ::tracing::level_filters::LevelFilter::current() &&
            {
                let interest = __CALLSITE.interest();
                !interest.is_never() &&
                    ::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
                        interest)
            };
    if enabled {
        (|value_set: ::tracing::field::ValueSet|
                    {
                        let meta = __CALLSITE.metadata();
                        ::tracing::Event::dispatch(meta, &value_set);
                        ;
                    })({
                #[allow(unused_imports)]
                use ::tracing::field::{debug, display, Value};
                let mut iter = __CALLSITE.metadata().fields().iter();
                __CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
                                    ::tracing::__macro_support::Option::Some(&format_args!("visit_source_info: source_info={0:?}",
                                                    source_info) as &dyn Value))])
            });
    } else { ; }
};trace!("visit_source_info: source_info={:?}", source_info);
713        self.span = source_info.span;
714    }
715
716    fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
717        {
    use ::tracing::__macro_support::Callsite as _;
    static __CALLSITE: ::tracing::callsite::DefaultCallsite =
        {
            static META: ::tracing::Metadata<'static> =
                {
                    ::tracing_core::metadata::Metadata::new("event compiler/rustc_const_eval/src/check_consts/check.rs:717",
                        "rustc_const_eval::check_consts::check",
                        ::tracing::Level::TRACE,
                        ::tracing_core::__macro_support::Option::Some("compiler/rustc_const_eval/src/check_consts/check.rs"),
                        ::tracing_core::__macro_support::Option::Some(717u32),
                        ::tracing_core::__macro_support::Option::Some("rustc_const_eval::check_consts::check"),
                        ::tracing_core::field::FieldSet::new(&["message"],
                            ::tracing_core::callsite::Identifier(&__CALLSITE)),
                        ::tracing::metadata::Kind::EVENT)
                };
            ::tracing::callsite::DefaultCallsite::new(&META)
        };
    let enabled =
        ::tracing::Level::TRACE <= ::tracing::level_filters::STATIC_MAX_LEVEL
                &&
                ::tracing::Level::TRACE <=
                    ::tracing::level_filters::LevelFilter::current() &&
            {
                let interest = __CALLSITE.interest();
                !interest.is_never() &&
                    ::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
                        interest)
            };
    if enabled {
        (|value_set: ::tracing::field::ValueSet|
                    {
                        let meta = __CALLSITE.metadata();
                        ::tracing::Event::dispatch(meta, &value_set);
                        ;
                    })({
                #[allow(unused_imports)]
                use ::tracing::field::{debug, display, Value};
                let mut iter = __CALLSITE.metadata().fields().iter();
                __CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
                                    ::tracing::__macro_support::Option::Some(&format_args!("visit_statement: statement={0:?} location={1:?}",
                                                    statement, location) as &dyn Value))])
            });
    } else { ; }
};trace!("visit_statement: statement={:?} location={:?}", statement, location);
718
719        self.super_statement(statement, location);
720
721        match statement.kind {
722            StatementKind::Assign(..)
723            | StatementKind::SetDiscriminant { .. }
724            | StatementKind::FakeRead(..)
725            | StatementKind::StorageLive(_)
726            | StatementKind::StorageDead(_)
727            | StatementKind::Retag { .. }
728            | StatementKind::PlaceMention(..)
729            | StatementKind::AscribeUserType(..)
730            | StatementKind::Coverage(..)
731            | StatementKind::Intrinsic(..)
732            | StatementKind::ConstEvalCounter
733            | StatementKind::BackwardIncompatibleDropHint { .. }
734            | StatementKind::Nop => {}
735        }
736    }
737
738    #[allow(clippy :: suspicious_else_formatting)]
{
    let __tracing_attr_span;
    let __tracing_attr_guard;
    if ::tracing::Level::DEBUG <= ::tracing::level_filters::STATIC_MAX_LEVEL
                &&
                ::tracing::Level::DEBUG <=
                    ::tracing::level_filters::LevelFilter::current() ||
            { false } {
        __tracing_attr_span =
            {
                use ::tracing::__macro_support::Callsite as _;
                static __CALLSITE: ::tracing::callsite::DefaultCallsite =
                    {
                        static META: ::tracing::Metadata<'static> =
                            {
                                ::tracing_core::metadata::Metadata::new("visit_terminator",
                                    "rustc_const_eval::check_consts::check",
                                    ::tracing::Level::DEBUG,
                                    ::tracing_core::__macro_support::Option::Some("compiler/rustc_const_eval/src/check_consts/check.rs"),
                                    ::tracing_core::__macro_support::Option::Some(738u32),
                                    ::tracing_core::__macro_support::Option::Some("rustc_const_eval::check_consts::check"),
                                    ::tracing_core::field::FieldSet::new(&["terminator",
                                                    "location"],
                                        ::tracing_core::callsite::Identifier(&__CALLSITE)),
                                    ::tracing::metadata::Kind::SPAN)
                            };
                        ::tracing::callsite::DefaultCallsite::new(&META)
                    };
                let mut interest = ::tracing::subscriber::Interest::never();
                if ::tracing::Level::DEBUG <=
                                    ::tracing::level_filters::STATIC_MAX_LEVEL &&
                                ::tracing::Level::DEBUG <=
                                    ::tracing::level_filters::LevelFilter::current() &&
                            { interest = __CALLSITE.interest(); !interest.is_never() }
                        &&
                        ::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
                            interest) {
                    let meta = __CALLSITE.metadata();
                    ::tracing::Span::new(meta,
                        &{
                                #[allow(unused_imports)]
                                use ::tracing::field::{debug, display, Value};
                                let mut iter = meta.fields().iter();
                                meta.fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
                                                    ::tracing::__macro_support::Option::Some(&::tracing::field::debug(&terminator)
                                                            as &dyn Value)),
                                                (&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
                                                    ::tracing::__macro_support::Option::Some(&::tracing::field::debug(&location)
                                                            as &dyn Value))])
                            })
                } else {
                    let span =
                        ::tracing::__macro_support::__disabled_span(__CALLSITE.metadata());
                    {};
                    span
                }
            };
        __tracing_attr_guard = __tracing_attr_span.enter();
    }

    #[warn(clippy :: suspicious_else_formatting)]
    {

        #[allow(unknown_lints, unreachable_code, clippy ::
        diverging_sub_expression, clippy :: empty_loop, clippy ::
        let_unit_value, clippy :: let_with_type_underscore, clippy ::
        needless_return, clippy :: unreachable)]
        if false {
            let __tracing_attr_fake_return: () = loop {};
            return __tracing_attr_fake_return;
        }
        {
            self.super_terminator(terminator, location);
            match &terminator.kind {
                TerminatorKind::Call { func, args, fn_span, .. } |
                    TerminatorKind::TailCall { func, args, fn_span, .. } => {
                    let call_source =
                        match terminator.kind {
                            TerminatorKind::Call { call_source, .. } => call_source,
                            TerminatorKind::TailCall { .. } => CallSource::Normal,
                            _ =>
                                ::core::panicking::panic("internal error: entered unreachable code"),
                        };
                    let ConstCx { tcx, body, .. } = *self.ccx;
                    let fn_ty = func.ty(body, tcx);
                    let (callee, fn_args) =
                        match *fn_ty.kind() {
                            ty::FnDef(def_id, fn_args) => (def_id, fn_args),
                            ty::FnPtr(..) => {
                                self.check_op(ops::FnCallIndirect);
                                return;
                            }
                            _ => {
                                ::rustc_middle::util::bug::span_bug_fmt(terminator.source_info.span,
                                    format_args!("invalid callee of type {0:?}", fn_ty))
                            }
                        };
                    let has_const_conditions =
                        self.revalidate_conditional_constness(callee, fn_args,
                            *fn_span);
                    if let Some(trait_did) = tcx.trait_of_assoc(callee) {
                        {
                            use ::tracing::__macro_support::Callsite as _;
                            static __CALLSITE: ::tracing::callsite::DefaultCallsite =
                                {
                                    static META: ::tracing::Metadata<'static> =
                                        {
                                            ::tracing_core::metadata::Metadata::new("event compiler/rustc_const_eval/src/check_consts/check.rs:777",
                                                "rustc_const_eval::check_consts::check",
                                                ::tracing::Level::TRACE,
                                                ::tracing_core::__macro_support::Option::Some("compiler/rustc_const_eval/src/check_consts/check.rs"),
                                                ::tracing_core::__macro_support::Option::Some(777u32),
                                                ::tracing_core::__macro_support::Option::Some("rustc_const_eval::check_consts::check"),
                                                ::tracing_core::field::FieldSet::new(&["message"],
                                                    ::tracing_core::callsite::Identifier(&__CALLSITE)),
                                                ::tracing::metadata::Kind::EVENT)
                                        };
                                    ::tracing::callsite::DefaultCallsite::new(&META)
                                };
                            let enabled =
                                ::tracing::Level::TRACE <=
                                            ::tracing::level_filters::STATIC_MAX_LEVEL &&
                                        ::tracing::Level::TRACE <=
                                            ::tracing::level_filters::LevelFilter::current() &&
                                    {
                                        let interest = __CALLSITE.interest();
                                        !interest.is_never() &&
                                            ::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
                                                interest)
                                    };
                            if enabled {
                                (|value_set: ::tracing::field::ValueSet|
                                            {
                                                let meta = __CALLSITE.metadata();
                                                ::tracing::Event::dispatch(meta, &value_set);
                                                ;
                                            })({
                                        #[allow(unused_imports)]
                                        use ::tracing::field::{debug, display, Value};
                                        let mut iter = __CALLSITE.metadata().fields().iter();
                                        __CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
                                                            ::tracing::__macro_support::Option::Some(&format_args!("attempting to call a trait method")
                                                                    as &dyn Value))])
                                    });
                            } else { ; }
                        };
                        let is_const =
                            tcx.constness(callee) == hir::Constness::Const;
                        if is_const &&
                                has_const_conditions == Some(ConstConditionsHold::Yes) {
                            self.check_op(ops::ConditionallyConstCall {
                                    callee,
                                    args: fn_args,
                                    span: *fn_span,
                                    call_source,
                                });
                            self.check_callee_stability(trait_did);
                        } else {
                            self.check_op(ops::FnCallNonConst {
                                    callee,
                                    args: fn_args,
                                    span: *fn_span,
                                    call_source,
                                });
                        }
                        return;
                    }
                    if has_const_conditions.is_some() {
                        self.check_op(ops::ConditionallyConstCall {
                                callee,
                                args: fn_args,
                                span: *fn_span,
                                call_source,
                            });
                    }
                    if self.tcx.fn_sig(callee).skip_binder().c_variadic() {
                        self.check_op(ops::FnCallCVariadic)
                    }
                    if tcx.is_lang_item(callee, LangItem::BeginPanic) {
                        match args[0].node.ty(&self.ccx.body.local_decls,
                                    tcx).kind() {
                            ty::Ref(_, ty, _) if ty.is_str() => {}
                            _ => self.check_op(ops::PanicNonStr),
                        }
                        return;
                    }
                    if tcx.is_lang_item(callee, LangItem::PanicDisplay) {
                        if let ty::Ref(_, ty, _) =
                                        args[0].node.ty(&self.ccx.body.local_decls, tcx).kind() &&
                                    let ty::Ref(_, ty, _) = ty.kind() && ty.is_str()
                            {} else { self.check_op(ops::PanicNonStr); }
                        return;
                    }
                    if let Some(intrinsic) = tcx.intrinsic(callee) {
                        if !tcx.is_const_fn(callee) {
                            self.check_op(ops::IntrinsicNonConst {
                                    name: intrinsic.name,
                                });
                            return;
                        }
                        let is_const_stable =
                            intrinsic.const_stable ||
                                (!intrinsic.must_be_overridden &&
                                        is_fn_or_trait_safe_to_expose_on_stable(tcx, callee));
                        match tcx.lookup_const_stability(callee) {
                            None => {
                                if !is_const_stable &&
                                        self.enforce_recursive_const_stability() {
                                    self.dcx().emit_err(errors::UnmarkedIntrinsicExposed {
                                            span: self.span,
                                            def_path: self.tcx.def_path_str(callee),
                                        });
                                }
                            }
                            Some(hir::ConstStability {
                                level: hir::StabilityLevel::Unstable { .. }, feature, .. })
                                => {
                                if self.span.allows_unstable(feature) && is_const_stable {
                                    return;
                                }
                                self.check_op(ops::IntrinsicUnstable {
                                        name: intrinsic.name,
                                        feature,
                                        const_stable_indirect: is_const_stable,
                                    });
                            }
                            Some(hir::ConstStability {
                                level: hir::StabilityLevel::Stable { .. }, .. }) => {}
                        }
                        return;
                    }
                    if !tcx.is_const_fn(callee) {
                        self.check_op(ops::FnCallNonConst {
                                callee,
                                args: fn_args,
                                span: *fn_span,
                                call_source,
                            });
                        return;
                    }
                    self.check_callee_stability(callee);
                }
                TerminatorKind::Drop { place: dropped_place, .. } => {
                    if super::post_drop_elaboration::checking_enabled(self.ccx)
                        {
                        return;
                    }
                    self.check_drop_terminator(*dropped_place, location,
                        terminator.source_info.span);
                }
                TerminatorKind::InlineAsm { .. } =>
                    self.check_op(ops::InlineAsm),
                TerminatorKind::Yield { .. } => {
                    self.check_op(ops::Coroutine(self.tcx.coroutine_kind(self.body.source.def_id()).expect("Only expected to have a yield in a coroutine")));
                }
                TerminatorKind::CoroutineDrop => {
                    ::rustc_middle::util::bug::span_bug_fmt(self.body.source_info(location).span,
                        format_args!("We should not encounter TerminatorKind::CoroutineDrop after coroutine transform"));
                }
                TerminatorKind::UnwindTerminate(_) => {
                    ::rustc_middle::util::bug::span_bug_fmt(self.span,
                        format_args!("`Terminate` terminator outside of cleanup block"))
                }
                TerminatorKind::Assert { .. } | TerminatorKind::FalseEdge { ..
                    } | TerminatorKind::FalseUnwind { .. } |
                    TerminatorKind::Goto { .. } | TerminatorKind::UnwindResume |
                    TerminatorKind::Return | TerminatorKind::SwitchInt { .. } |
                    TerminatorKind::Unreachable => {}
            }
        }
    }
}#[instrument(level = "debug", skip(self))]
739    fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
740        self.super_terminator(terminator, location);
741
742        match &terminator.kind {
743            TerminatorKind::Call { func, args, fn_span, .. }
744            | TerminatorKind::TailCall { func, args, fn_span, .. } => {
745                let call_source = match terminator.kind {
746                    TerminatorKind::Call { call_source, .. } => call_source,
747                    TerminatorKind::TailCall { .. } => CallSource::Normal,
748                    _ => unreachable!(),
749                };
750
751                let ConstCx { tcx, body, .. } = *self.ccx;
752
753                let fn_ty = func.ty(body, tcx);
754
755                let (callee, fn_args) = match *fn_ty.kind() {
756                    ty::FnDef(def_id, fn_args) => (def_id, fn_args),
757
758                    ty::FnPtr(..) => {
759                        self.check_op(ops::FnCallIndirect);
760                        // We can get here without an error in miri-unleashed mode... might as well
761                        // skip the rest of the checks as well then.
762                        return;
763                    }
764                    _ => {
765                        span_bug!(terminator.source_info.span, "invalid callee of type {:?}", fn_ty)
766                    }
767                };
768
769                let has_const_conditions =
770                    self.revalidate_conditional_constness(callee, fn_args, *fn_span);
771
772                // Attempting to call a trait method?
773                if let Some(trait_did) = tcx.trait_of_assoc(callee) {
774                    // We can't determine the actual callee (the underlying impl of the trait) here, so we have
775                    // to do different checks than usual.
776
777                    trace!("attempting to call a trait method");
778                    let is_const = tcx.constness(callee) == hir::Constness::Const;
779
780                    // Only consider a trait to be const if the const conditions hold.
781                    // Otherwise, it's really misleading to call something "conditionally"
782                    // const when it's very obviously not conditionally const.
783                    if is_const && has_const_conditions == Some(ConstConditionsHold::Yes) {
784                        // Trait calls are always conditionally-const.
785                        self.check_op(ops::ConditionallyConstCall {
786                            callee,
787                            args: fn_args,
788                            span: *fn_span,
789                            call_source,
790                        });
791                        self.check_callee_stability(trait_did);
792                    } else {
793                        // Not even a const trait.
794                        self.check_op(ops::FnCallNonConst {
795                            callee,
796                            args: fn_args,
797                            span: *fn_span,
798                            call_source,
799                        });
800                    }
801                    // That's all we can check here.
802                    return;
803                }
804
805                // Even if we know the callee, ensure we can use conditionally-const calls.
806                if has_const_conditions.is_some() {
807                    self.check_op(ops::ConditionallyConstCall {
808                        callee,
809                        args: fn_args,
810                        span: *fn_span,
811                        call_source,
812                    });
813                }
814
815                if self.tcx.fn_sig(callee).skip_binder().c_variadic() {
816                    self.check_op(ops::FnCallCVariadic)
817                }
818
819                // At this point, we are calling a function, `callee`, whose `DefId` is known...
820
821                // `begin_panic` and `panic_display` functions accept generic
822                // types other than str. Check to enforce that only str can be used in
823                // const-eval.
824
825                // const-eval of the `begin_panic` fn assumes the argument is `&str`
826                if tcx.is_lang_item(callee, LangItem::BeginPanic) {
827                    match args[0].node.ty(&self.ccx.body.local_decls, tcx).kind() {
828                        ty::Ref(_, ty, _) if ty.is_str() => {}
829                        _ => self.check_op(ops::PanicNonStr),
830                    }
831                    // Allow this call, skip all the checks below.
832                    return;
833                }
834
835                // const-eval of `panic_display` assumes the argument is `&&str`
836                if tcx.is_lang_item(callee, LangItem::PanicDisplay) {
837                    if let ty::Ref(_, ty, _) =
838                        args[0].node.ty(&self.ccx.body.local_decls, tcx).kind()
839                        && let ty::Ref(_, ty, _) = ty.kind()
840                        && ty.is_str()
841                    {
842                    } else {
843                        self.check_op(ops::PanicNonStr);
844                    }
845                    // Allow this call, skip all the checks below.
846                    return;
847                }
848
849                // Intrinsics are language primitives, not regular calls, so treat them separately.
850                if let Some(intrinsic) = tcx.intrinsic(callee) {
851                    if !tcx.is_const_fn(callee) {
852                        // Non-const intrinsic.
853                        self.check_op(ops::IntrinsicNonConst { name: intrinsic.name });
854                        // If we allowed this, we're in miri-unleashed mode, so we might
855                        // as well skip the remaining checks.
856                        return;
857                    }
858                    // We use `intrinsic.const_stable` to determine if this can be safely exposed to
859                    // stable code, rather than `const_stable_indirect`. This is to make
860                    // `#[rustc_const_stable_indirect]` an attribute that is always safe to add.
861                    // We also ask is_safe_to_expose_on_stable_const_fn; this determines whether the intrinsic
862                    // fallback body is safe to expose on stable.
863                    let is_const_stable = intrinsic.const_stable
864                        || (!intrinsic.must_be_overridden
865                            && is_fn_or_trait_safe_to_expose_on_stable(tcx, callee));
866                    match tcx.lookup_const_stability(callee) {
867                        None => {
868                            // This doesn't need a separate const-stability check -- const-stability equals
869                            // regular stability, and regular stability is checked separately.
870                            // However, we *do* have to worry about *recursive* const stability.
871                            if !is_const_stable && self.enforce_recursive_const_stability() {
872                                self.dcx().emit_err(errors::UnmarkedIntrinsicExposed {
873                                    span: self.span,
874                                    def_path: self.tcx.def_path_str(callee),
875                                });
876                            }
877                        }
878                        Some(hir::ConstStability {
879                            level: hir::StabilityLevel::Unstable { .. },
880                            feature,
881                            ..
882                        }) => {
883                            // We only honor `span.allows_unstable` aka `#[allow_internal_unstable]`
884                            // if the callee is safe to expose, to avoid bypassing recursive stability.
885                            // This is not ideal since it means the user sees an error, not the macro
886                            // author, but that's also the case if one forgets to set
887                            // `#[allow_internal_unstable]` in the first place.
888                            if self.span.allows_unstable(feature) && is_const_stable {
889                                return;
890                            }
891
892                            self.check_op(ops::IntrinsicUnstable {
893                                name: intrinsic.name,
894                                feature,
895                                const_stable_indirect: is_const_stable,
896                            });
897                        }
898                        Some(hir::ConstStability {
899                            level: hir::StabilityLevel::Stable { .. },
900                            ..
901                        }) => {
902                            // All good. Note that a `#[rustc_const_stable]` intrinsic (meaning it
903                            // can be *directly* invoked from stable const code) does not always
904                            // have the `#[rustc_intrinsic_const_stable_indirect]` attribute (which controls
905                            // exposing an intrinsic indirectly); we accept this call anyway.
906                        }
907                    }
908                    // This completes the checks for intrinsics.
909                    return;
910                }
911
912                if !tcx.is_const_fn(callee) {
913                    self.check_op(ops::FnCallNonConst {
914                        callee,
915                        args: fn_args,
916                        span: *fn_span,
917                        call_source,
918                    });
919                    // If we allowed this, we're in miri-unleashed mode, so we might
920                    // as well skip the remaining checks.
921                    return;
922                }
923
924                // Finally, stability for regular function calls -- this is the big one.
925                self.check_callee_stability(callee);
926            }
927
928            // Forbid all `Drop` terminators unless the place being dropped is a local with no
929            // projections that cannot be `NeedsNonConstDrop`.
930            TerminatorKind::Drop { place: dropped_place, .. } => {
931                // If we are checking live drops after drop-elaboration, don't emit duplicate
932                // errors here.
933                if super::post_drop_elaboration::checking_enabled(self.ccx) {
934                    return;
935                }
936
937                self.check_drop_terminator(*dropped_place, location, terminator.source_info.span);
938            }
939
940            TerminatorKind::InlineAsm { .. } => self.check_op(ops::InlineAsm),
941
942            TerminatorKind::Yield { .. } => {
943                self.check_op(ops::Coroutine(
944                    self.tcx
945                        .coroutine_kind(self.body.source.def_id())
946                        .expect("Only expected to have a yield in a coroutine"),
947                ));
948            }
949
950            TerminatorKind::CoroutineDrop => {
951                span_bug!(
952                    self.body.source_info(location).span,
953                    "We should not encounter TerminatorKind::CoroutineDrop after coroutine transform"
954                );
955            }
956
957            TerminatorKind::UnwindTerminate(_) => {
958                // Cleanup blocks are skipped for const checking (see `visit_basic_block_data`).
959                span_bug!(self.span, "`Terminate` terminator outside of cleanup block")
960            }
961
962            TerminatorKind::Assert { .. }
963            | TerminatorKind::FalseEdge { .. }
964            | TerminatorKind::FalseUnwind { .. }
965            | TerminatorKind::Goto { .. }
966            | TerminatorKind::UnwindResume
967            | TerminatorKind::Return
968            | TerminatorKind::SwitchInt { .. }
969            | TerminatorKind::Unreachable => {}
970        }
971    }
972}
973
974fn is_int_bool_float_or_char(ty: Ty<'_>) -> bool {
975    ty.is_bool() || ty.is_integral() || ty.is_char() || ty.is_floating_point()
976}
977
978fn emit_unstable_in_stable_exposed_error(
979    ccx: &ConstCx<'_, '_>,
980    span: Span,
981    gate: Symbol,
982    is_function_call: bool,
983) -> ErrorGuaranteed {
984    let attr_span = ccx.tcx.def_span(ccx.def_id()).shrink_to_lo();
985
986    ccx.dcx().emit_err(errors::UnstableInStableExposed {
987        gate: gate.to_string(),
988        span,
989        attr_span,
990        is_function_call,
991        is_function_call2: is_function_call,
992    })
993}