rustc_const_eval/check_consts/check.rs

use std::assert_matches::assert_matches;
use std::borrow::Cow;
use std::mem;
use std::num::NonZero;
use std::ops::Deref;

use rustc_errors::{Diag, ErrorGuaranteed};
use rustc_hir::def::DefKind;
use rustc_hir::def_id::DefId;
use rustc_hir::{self as hir, LangItem};
use rustc_index::bit_set::DenseBitSet;
use rustc_infer::infer::TyCtxtInferExt;
use rustc_middle::mir::visit::Visitor;
use rustc_middle::mir::*;
use rustc_middle::span_bug;
use rustc_middle::ty::adjustment::PointerCoercion;
use rustc_middle::ty::{self, Ty, TypeVisitableExt};
use rustc_mir_dataflow::Analysis;
use rustc_mir_dataflow::impls::{MaybeStorageLive, always_storage_live_locals};
use rustc_span::{Span, Symbol, sym};
use rustc_trait_selection::traits::{
    Obligation, ObligationCause, ObligationCauseCode, ObligationCtxt,
};
use tracing::{instrument, trace};

use super::ops::{self, NonConstOp, Status};
use super::qualifs::{self, HasMutInterior, NeedsDrop, NeedsNonConstDrop};
use super::resolver::FlowSensitiveAnalysis;
use super::{ConstCx, Qualif};
use crate::check_consts::is_fn_or_trait_safe_to_expose_on_stable;
use crate::errors;

type QualifResults<'mir, 'tcx, Q> =
    rustc_mir_dataflow::ResultsCursor<'mir, 'tcx, FlowSensitiveAnalysis<'mir, 'tcx, Q>>;

#[derive(Copy, Clone, PartialEq, Eq, Debug)]
enum ConstConditionsHold {
    Yes,
    No,
}

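/// Lazily-initialized dataflow cursors for the value qualifs (`HasMutInterior`, `NeedsDrop`,
/// `NeedsNonConstDrop`) tracked during const checking.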
#[derive(Default)]
pub(crate) struct Qualifs<'mir, 'tcx> {
    has_mut_interior: Option<QualifResults<'mir, 'tcx, HasMutInterior>>,
    needs_drop: Option<QualifResults<'mir, 'tcx, NeedsDrop>>,
    needs_non_const_drop: Option<QualifResults<'mir, 'tcx, NeedsNonConstDrop>>,
}

impl<'mir, 'tcx> Qualifs<'mir, 'tcx> {
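    /// Returns `true` if `local` is `NeedsDrop` at the given `Location`, creating the
    /// underlying dataflow cursor lazily on first use.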
    pub(crate) fn needs_drop(
        &mut self,
        ccx: &'mir ConstCx<'mir, 'tcx>,
        local: Local,
        location: Location,
    ) -> bool {
        let ty = ccx.body.local_decls[local].ty;
        if !ty.has_opaque_types() && !NeedsDrop::in_any_value_of_ty(ccx, ty) {
            return false;
        }

        let needs_drop = self.needs_drop.get_or_insert_with(|| {
            let ConstCx { tcx, body, .. } = *ccx;

            FlowSensitiveAnalysis::new(NeedsDrop, ccx)
                .iterate_to_fixpoint(tcx, body, None)
                .into_results_cursor(body)
        });

        needs_drop.seek_before_primary_effect(location);
        needs_drop.get().contains(local)
    }

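    /// Returns `true` if `local` is `NeedsNonConstDrop` at the given `Location`, creating the
    /// underlying dataflow cursor lazily on first use.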
    pub(crate) fn needs_non_const_drop(
        &mut self,
        ccx: &'mir ConstCx<'mir, 'tcx>,
        local: Local,
        location: Location,
    ) -> bool {
        let ty = ccx.body.local_decls[local].ty;
        if !ty.has_opaque_types() && !NeedsNonConstDrop::in_any_value_of_ty(ccx, ty) {
            return false;
        }

        let needs_non_const_drop = self.needs_non_const_drop.get_or_insert_with(|| {
            let ConstCx { tcx, body, .. } = *ccx;

            FlowSensitiveAnalysis::new(NeedsNonConstDrop, ccx)
                .iterate_to_fixpoint(tcx, body, None)
                .into_results_cursor(body)
        });

        needs_non_const_drop.seek_before_primary_effect(location);
        needs_non_const_drop.get().contains(local)
    }

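    /// Returns `true` if `local` is `HasMutInterior` at the given `Location`, creating the
    /// underlying dataflow cursor lazily on first use.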
    fn has_mut_interior(
        &mut self,
        ccx: &'mir ConstCx<'mir, 'tcx>,
        local: Local,
        location: Location,
    ) -> bool {
        let ty = ccx.body.local_decls[local].ty;
        if !ty.has_opaque_types() && !HasMutInterior::in_any_value_of_ty(ccx, ty) {
            return false;
        }

        let has_mut_interior = self.has_mut_interior.get_or_insert_with(|| {
            let ConstCx { tcx, body, .. } = *ccx;

            FlowSensitiveAnalysis::new(HasMutInterior, ccx)
                .iterate_to_fixpoint(tcx, body, None)
                .into_results_cursor(body)
        });

        has_mut_interior.seek_before_primary_effect(location);
        has_mut_interior.get().contains(local)
    }

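    /// Returns the qualifs of the value in the return place at the `Return` terminator, falling
    /// back to the conservative type-based qualifs if the body contains no `Return` terminator.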
    fn in_return_place(
        &mut self,
        ccx: &'mir ConstCx<'mir, 'tcx>,
        tainted_by_errors: Option<ErrorGuaranteed>,
    ) -> ConstQualifs {
        let return_block = ccx
            .body
            .basic_blocks
            .iter_enumerated()
            .find(|(_, block)| matches!(block.terminator().kind, TerminatorKind::Return))
            .map(|(bb, _)| bb);

        let Some(return_block) = return_block else {
            return qualifs::in_any_value_of_ty(ccx, ccx.body.return_ty(), tainted_by_errors);
        };

        let return_loc = ccx.body.terminator_loc(return_block);

        ConstQualifs {
            needs_drop: self.needs_drop(ccx, RETURN_PLACE, return_loc),
            needs_non_const_drop: self.needs_non_const_drop(ccx, RETURN_PLACE, return_loc),
            has_mut_interior: self.has_mut_interior(ccx, RETURN_PLACE, return_loc),
            tainted_by_errors,
        }
    }
}

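/// The visitor that walks a MIR body and reports operations that are not allowed in the
/// current const context.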
pub struct Checker<'mir, 'tcx> {
    ccx: &'mir ConstCx<'mir, 'tcx>,
    qualifs: Qualifs<'mir, 'tcx>,

    /// The span of the statement currently being visited.
    span: Span,

    /// Lazily-computed set of locals whose storage is dead by the time the body returns.
    transient_locals: Option<DenseBitSet<Local>>,

    error_emitted: Option<ErrorGuaranteed>,
    secondary_errors: Vec<Diag<'tcx>>,
}

impl<'mir, 'tcx> Deref for Checker<'mir, 'tcx> {
    type Target = ConstCx<'mir, 'tcx>;

    fn deref(&self) -> &Self::Target {
        self.ccx
    }
}

impl<'mir, 'tcx> Checker<'mir, 'tcx> {
    pub fn new(ccx: &'mir ConstCx<'mir, 'tcx>) -> Self {
        Checker {
            span: ccx.body.span,
            ccx,
            qualifs: Default::default(),
            transient_locals: None,
            error_emitted: None,
            secondary_errors: Vec::new(),
        }
    }

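    /// Runs the const checker on the body, emitting errors for any operation that is not
    /// permitted in this const context.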
    pub fn check_body(&mut self) {
        let ConstCx { tcx, body, .. } = *self.ccx;
        let def_id = self.ccx.def_id();

        if self.ccx.is_async() || body.coroutine.is_some() {
            tcx.dcx().span_delayed_bug(body.span, "`async` functions cannot be `const fn`");
            return;
        }

        if !tcx.has_attr(def_id, sym::rustc_do_not_const_check) {
            self.visit_body(body);
        }

        // If no primary error was emitted, flush the secondary errors; otherwise cancel them,
        // since compilation is already guaranteed to fail.
        let secondary_errors = mem::take(&mut self.secondary_errors);
        if self.error_emitted.is_none() {
            for error in secondary_errors {
                self.error_emitted = Some(error.emit());
            }
        } else {
            assert!(self.tcx.dcx().has_errors().is_some());
            for error in secondary_errors {
                error.cancel();
            }
        }
    }

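    /// Returns whether `local` is "transient", i.e. guaranteed to have its storage dead by the
    /// time the body returns. The set is computed lazily from a storage-liveness analysis.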
    fn local_is_transient(&mut self, local: Local) -> bool {
        let ccx = self.ccx;
        self.transient_locals
            .get_or_insert_with(|| {
                let always_live_locals = &always_storage_live_locals(&ccx.body);
                let mut maybe_storage_live =
                    MaybeStorageLive::new(Cow::Borrowed(always_live_locals))
                        .iterate_to_fixpoint(ccx.tcx, &ccx.body, None)
                        .into_results_cursor(&ccx.body);

                // Start with all locals, then remove those that may still be storage-live at
                // any `Return` terminator; whatever remains is transient.
                let mut transient = DenseBitSet::new_filled(ccx.body.local_decls.len());
                for (bb, data) in traversal::reachable(&ccx.body) {
                    if matches!(data.terminator().kind, TerminatorKind::Return) {
                        let location = ccx.body.terminator_loc(bb);
                        maybe_storage_live.seek_after_primary_effect(location);
                        transient.subtract(maybe_storage_live.get());
                    }
                }

                transient
            })
            .contains(local)
    }


    pub fn qualifs_in_return_place(&mut self) -> ConstQualifs {
        self.qualifs.in_return_place(self.ccx, self.error_emitted)
    }

    /// Emits an error if an expression cannot be evaluated in the current context, using the
    /// span of the statement currently being visited.
    pub fn check_op(&mut self, op: impl NonConstOp<'tcx>) {
        self.check_op_spanned(op, self.span);
    }

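    /// Emits an error at `span` if an expression cannot be evaluated in the current context,
    /// taking feature gates and recursive const stability into account.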
    pub fn check_op_spanned<O: NonConstOp<'tcx>>(&mut self, op: O, span: Span) {
        let gate = match op.status_in_item(self.ccx) {
            Status::Unstable {
                gate,
                safe_to_expose_on_stable,
                is_function_call,
                gate_already_checked,
            } if gate_already_checked || self.tcx.features().enabled(gate) => {
                if gate_already_checked {
                    assert!(
                        !safe_to_expose_on_stable,
                        "setting `gate_already_checked` without `safe_to_expose_on_stable` makes no sense"
                    );
                }
                // The gate is enabled; however, if this item's constness is exposed on stable,
                // recursive const stability may still forbid the operation.
                if !safe_to_expose_on_stable
                    && self.enforce_recursive_const_stability()
                    && !super::rustc_allow_const_fn_unstable(self.tcx, self.def_id(), gate)
                {
                    emit_unstable_in_stable_exposed_error(self.ccx, span, gate, is_function_call);
                }

                return;
            }

            Status::Unstable { gate, .. } => Some(gate),
            Status::Forbidden => None,
        };

        if self.tcx.sess.opts.unstable_opts.unleash_the_miri_inside_of_you {
            self.tcx.sess.miri_unleashed_feature(span, gate);
            return;
        }

        let err = op.build_error(self.ccx, span);
        assert!(err.is_error());

        match op.importance() {
            ops::DiagImportance::Primary => {
                let reported = err.emit();
                self.error_emitted = Some(reported);
            }

            ops::DiagImportance::Secondary => {
                self.secondary_errors.push(err);
                self.tcx.dcx().span_delayed_bug(
                    span,
                    "compilation must fail when there is a secondary const checker error",
                );
            }
        }
    }

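    /// Ensures a referenced static is well-formed; access to thread-local statics is rejected
    /// separately when checking `Rvalue::ThreadLocalRef`.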
    fn check_static(&mut self, def_id: DefId, span: Span) {
        if self.tcx.is_thread_local_static(def_id) {
            self.tcx.dcx().span_bug(span, "tls access is checked in `Rvalue::ThreadLocalRef`");
        }
        if let Some(def_id) = def_id.as_local()
            && let Err(guar) = self.tcx.ensure_ok().check_well_formed(hir::OwnerId { def_id })
        {
            self.error_emitted = Some(guar);
        }
    }

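    /// Returns whether borrowing or taking a raw pointer to `place` could let the resulting
    /// reference escape into the final value of this constant.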
    fn place_may_escape(&mut self, place: &Place<'_>) -> bool {
        let is_transient = match self.const_kind() {
            // In a `const fn`, borrows cannot end up in the final value of a constant.
            hir::ConstContext::ConstFn => true,
            // In a `const`/`static` initializer, a borrow is transient if the place is behind a
            // pointer or its base local's storage is dead by the time the body returns.
            _ => {
                place.is_indirect() || self.local_is_transient(place.local)
            }
        };
        !is_transient
    }

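    /// Re-checks the `[const]` conditions of a conditionally-const callee against the caller's
    /// environment. Returns `None` if the callee is not conditionally const (or has no
    /// conditions), and otherwise whether the conditions hold.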
    fn revalidate_conditional_constness(
        &mut self,
        callee: DefId,
        callee_args: ty::GenericArgsRef<'tcx>,
        call_span: Span,
    ) -> Option<ConstConditionsHold> {
        let tcx = self.tcx;
        if !tcx.is_conditionally_const(callee) {
            return None;
        }

        let const_conditions = tcx.const_conditions(callee).instantiate(tcx, callee_args);
        if const_conditions.is_empty() {
            return None;
        }

        let (infcx, param_env) = tcx.infer_ctxt().build_with_typing_env(self.body.typing_env(tcx));
        let ocx = ObligationCtxt::new(&infcx);

        let body_id = self.body.source.def_id().expect_local();
        let host_polarity = match self.const_kind() {
            hir::ConstContext::ConstFn => ty::BoundConstness::Maybe,
            hir::ConstContext::Static(_) | hir::ConstContext::Const { .. } => {
                ty::BoundConstness::Const
            }
        };
        let const_conditions =
            ocx.normalize(&ObligationCause::misc(call_span, body_id), param_env, const_conditions);
        ocx.register_obligations(const_conditions.into_iter().map(|(trait_ref, span)| {
            Obligation::new(
                tcx,
                ObligationCause::new(
                    call_span,
                    body_id,
                    ObligationCauseCode::WhereClause(callee, span),
                ),
                param_env,
                trait_ref.to_host_effect_clause(tcx, host_polarity),
            )
        }));

        let errors = ocx.select_all_or_error();
        if errors.is_empty() {
            Some(ConstConditionsHold::Yes)
        } else {
            tcx.dcx()
                .span_delayed_bug(call_span, "this should have reported a [const] error in HIR");
            Some(ConstConditionsHold::No)
        }
    }

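    /// Checks a `Drop` terminator, reporting a live-drop error if the dropped value may actually
    /// need dropping here.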
    pub fn check_drop_terminator(
        &mut self,
        dropped_place: Place<'tcx>,
        location: Location,
        terminator_span: Span,
    ) {
        let ty_of_dropped_place = dropped_place.ty(self.body, self.tcx).ty;

        let needs_drop = if let Some(local) = dropped_place.as_local() {
            self.qualifs.needs_drop(self.ccx, local, location)
        } else {
            qualifs::NeedsDrop::in_any_value_of_ty(self.ccx, ty_of_dropped_place)
        };
        // If the dropped value does not need dropping at all, there is nothing to check.
        if !needs_drop {
            return;
        }

        let mut err_span = self.span;
        let needs_non_const_drop = if let Some(local) = dropped_place.as_local() {
            // Point the error at the local's declaration for a more precise location.
            err_span = self.body.local_decls[local].source_info.span;
            self.qualifs.needs_non_const_drop(self.ccx, local, location)
        } else {
            qualifs::NeedsNonConstDrop::in_any_value_of_ty(self.ccx, ty_of_dropped_place)
        };

        self.check_op_spanned(
            ops::LiveDrop {
                dropped_at: terminator_span,
                dropped_ty: ty_of_dropped_place,
                needs_non_const_drop,
            },
            err_span,
        );
    }

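    /// Checks whether the const stability attributes (or their absence) on the callee allow this
    /// call, emitting errors or feature-gate checks as needed.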
    fn check_callee_stability(&mut self, def_id: DefId) {
        match self.tcx.lookup_const_stability(def_id) {
            Some(hir::ConstStability { level: hir::StabilityLevel::Stable { .. }, .. }) => {
                // Const-stable callees are always fine.
            }
            None => {
                // Unmarked callees are only a problem when we enforce recursive const stability
                // and the callee is not safe to expose on stable.
                if self.enforce_recursive_const_stability()
                    && !is_fn_or_trait_safe_to_expose_on_stable(self.tcx, def_id)
                {
                    self.dcx().emit_err(errors::UnmarkedConstItemExposed {
                        span: self.span,
                        def_path: self.tcx.def_path_str(def_id),
                    });
                }
            }
            Some(hir::ConstStability {
                level: hir::StabilityLevel::Unstable { implied_by: implied_feature, issue, .. },
                feature,
                ..
            }) => {
                let callee_safe_to_expose_on_stable =
                    is_fn_or_trait_safe_to_expose_on_stable(self.tcx, def_id);

                // A span that allows this unstable feature permits the call, but only if the
                // callee is also safe to expose on stable.
                if (self.span.allows_unstable(feature)
                    || implied_feature.is_some_and(|f| self.span.allows_unstable(f)))
                    && callee_safe_to_expose_on_stable
                {
                    return;
                }

                // The feature counts as enabled for local items, when the gate (or a gate that
                // implies it) is enabled, or in the special `rustc_private` bootstrap case.
                let feature_enabled = def_id.is_local()
                    || self.tcx.features().enabled(feature)
                    || implied_feature.is_some_and(|f| self.tcx.features().enabled(f))
                    || {
                        feature == sym::rustc_private
                            && issue == NonZero::new(27812)
                            && self.tcx.sess.opts.unstable_opts.force_unstable_if_unmarked
                    };
                if !feature_enabled || !callee_safe_to_expose_on_stable {
                    self.check_op(ops::CallUnstable {
                        def_id,
                        feature,
                        feature_enabled,
                        safe_to_expose_on_stable: callee_safe_to_expose_on_stable,
                        is_function_call: self.tcx.def_kind(def_id) != DefKind::Trait,
                    });
                }
            }
        }
    }
}

impl<'tcx> Visitor<'tcx> for Checker<'_, 'tcx> {
    fn visit_basic_block_data(&mut self, bb: BasicBlock, block: &BasicBlockData<'tcx>) {
        trace!("visit_basic_block_data: bb={:?} is_cleanup={:?}", bb, block.is_cleanup);

        // Skip const-checking of cleanup (unwind) blocks.
        if block.is_cleanup {
            return;
        }

        self.super_basic_block_data(bb, block);
    }

    fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
        trace!("visit_rvalue: rvalue={:?} location={:?}", rvalue, location);

        self.super_rvalue(rvalue, location);

        match rvalue {
            Rvalue::ThreadLocalRef(_) => self.check_op(ops::ThreadLocalAccess),

            Rvalue::Use(_)
            | Rvalue::CopyForDeref(..)
            | Rvalue::Repeat(..)
            | Rvalue::Discriminant(..) => {}

            Rvalue::Aggregate(kind, ..) => {
                if let AggregateKind::Coroutine(def_id, ..) = kind.as_ref()
                    && let Some(coroutine_kind) = self.tcx.coroutine_kind(def_id)
                {
                    self.check_op(ops::Coroutine(coroutine_kind));
                }
            }

            Rvalue::Ref(_, BorrowKind::Mut { .. }, place)
            | Rvalue::RawPtr(RawPtrKind::Mut, place) => {
                // Escaping mutable borrows and raw pointers are only allowed when initializing
                // a `static mut`.
                let is_allowed =
                    self.const_kind() == hir::ConstContext::Static(hir::Mutability::Mut);

                if !is_allowed && self.place_may_escape(place) {
                    self.check_op(ops::EscapingMutBorrow);
                }
            }

            Rvalue::Ref(_, BorrowKind::Shared | BorrowKind::Fake(_), place)
            | Rvalue::RawPtr(RawPtrKind::Const, place) => {
                let borrowed_place_has_mut_interior = qualifs::in_place::<HasMutInterior, _>(
                    self.ccx,
                    &mut |local| self.qualifs.has_mut_interior(self.ccx, local, location),
                    place.as_ref(),
                );

                if borrowed_place_has_mut_interior && self.place_may_escape(place) {
                    self.check_op(ops::EscapingCellBorrow);
                }
            }

            Rvalue::RawPtr(RawPtrKind::FakeForPtrMetadata, place) => {
                // Fake borrows for pointer metadata are expected to always be indirect.
                if !place.is_indirect() {
                    self.tcx.dcx().span_delayed_bug(
                        self.body.source_info(location).span,
                        "fake borrows are always indirect",
                    );
                }
            }

            Rvalue::Cast(
                CastKind::PointerCoercion(
                    PointerCoercion::MutToConstPointer
                    | PointerCoercion::ArrayToPointer
                    | PointerCoercion::UnsafeFnPointer
                    | PointerCoercion::ClosureFnPointer(_)
                    | PointerCoercion::ReifyFnPointer,
                    _,
                ),
                _,
                _,
            ) => {
                // These pointer coercions are always allowed in const contexts.
            }

            Rvalue::Cast(CastKind::PointerExposeProvenance, _, _) => {
                self.check_op(ops::RawPtrToIntCast);
            }
            Rvalue::Cast(CastKind::PointerWithExposedProvenance, _, _) => {
                // Integer-to-pointer casts are allowed here; the reverse direction
                // (`PointerExposeProvenance`) is handled above.
            }

            Rvalue::Cast(_, _, _) => {}

            Rvalue::NullaryOp(
                NullOp::SizeOf
                | NullOp::AlignOf
                | NullOp::OffsetOf(_)
                | NullOp::UbChecks
                | NullOp::ContractChecks,
                _,
            ) => {}
            Rvalue::ShallowInitBox(_, _) => {}

            Rvalue::UnaryOp(op, operand) => {
                let ty = operand.ty(self.body, self.tcx);
                match op {
                    UnOp::Not | UnOp::Neg => {
                        if is_int_bool_float_or_char(ty) {
                            // Int, bool, float, and char operations are fine.
                        } else {
                            span_bug!(
                                self.span,
                                "non-primitive type in `Rvalue::UnaryOp{op:?}`: {ty:?}",
                            );
                        }
                    }
                    UnOp::PtrMetadata => {
                        // Retrieving pointer metadata is always allowed.
                    }
                }
            }

            Rvalue::BinaryOp(op, box (lhs, rhs)) => {
                let lhs_ty = lhs.ty(self.body, self.tcx);
                let rhs_ty = rhs.ty(self.body, self.tcx);

                if is_int_bool_float_or_char(lhs_ty) && is_int_bool_float_or_char(rhs_ty) {
                    // Int, bool, float, and char operations are fine.
                } else if lhs_ty.is_fn_ptr() || lhs_ty.is_raw_ptr() {
                    assert_matches!(
                        op,
                        BinOp::Eq
                            | BinOp::Ne
                            | BinOp::Le
                            | BinOp::Lt
                            | BinOp::Ge
                            | BinOp::Gt
                            | BinOp::Offset
                    );

                    self.check_op(ops::RawPtrComparison);
                } else {
                    span_bug!(
                        self.span,
                        "non-primitive type in `Rvalue::BinaryOp`: {:?} ⚬ {:?}",
                        lhs_ty,
                        rhs_ty
                    );
                }
            }

            Rvalue::WrapUnsafeBinder(..) => {
                // Wrapping a value in an unsafe binder is always allowed here.
            }
        }
    }


    fn visit_operand(&mut self, op: &Operand<'tcx>, location: Location) {
        self.super_operand(op, location);
        if let Operand::Constant(c) = op
            && let Some(def_id) = c.check_static_ptr(self.tcx)
        {
            self.check_static(def_id, self.span);
        }
    }

    fn visit_source_info(&mut self, source_info: &SourceInfo) {
        trace!("visit_source_info: source_info={:?}", source_info);
        self.span = source_info.span;
    }

    fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
        trace!("visit_statement: statement={:?} location={:?}", statement, location);

        self.super_statement(statement, location);

        match statement.kind {
            // None of these statement kinds require extra checking beyond what
            // `super_statement` already visits.
            StatementKind::Assign(..)
            | StatementKind::SetDiscriminant { .. }
            | StatementKind::Deinit(..)
            | StatementKind::FakeRead(..)
            | StatementKind::StorageLive(_)
            | StatementKind::StorageDead(_)
            | StatementKind::Retag { .. }
            | StatementKind::PlaceMention(..)
            | StatementKind::AscribeUserType(..)
            | StatementKind::Coverage(..)
            | StatementKind::Intrinsic(..)
            | StatementKind::ConstEvalCounter
            | StatementKind::BackwardIncompatibleDropHint { .. }
            | StatementKind::Nop => {}
        }
    }

    #[instrument(level = "debug", skip(self))]
    fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
        self.super_terminator(terminator, location);

        match &terminator.kind {
            TerminatorKind::Call { func, args, fn_span, .. }
            | TerminatorKind::TailCall { func, args, fn_span, .. } => {
                let call_source = match terminator.kind {
                    TerminatorKind::Call { call_source, .. } => call_source,
                    TerminatorKind::TailCall { .. } => CallSource::Normal,
                    _ => unreachable!(),
                };

                let ConstCx { tcx, body, .. } = *self.ccx;

                let fn_ty = func.ty(body, tcx);

                let (callee, fn_args) = match *fn_ty.kind() {
                    ty::FnDef(def_id, fn_args) => (def_id, fn_args),

                    ty::FnPtr(..) => {
                        self.check_op(ops::FnCallIndirect);
                        return;
                    }
                    _ => {
                        span_bug!(terminator.source_info.span, "invalid callee of type {:?}", fn_ty)
                    }
                };

                let has_const_conditions =
                    self.revalidate_conditional_constness(callee, fn_args, *fn_span);


                // Is this a call to a trait method?
                if let Some(trait_did) = tcx.trait_of_assoc(callee) {
                    trace!("attempting to call a trait method");
                    let trait_is_const = tcx.is_const_trait(trait_did);

                    if trait_is_const && has_const_conditions == Some(ConstConditionsHold::Yes) {
                        // The trait is const and the const conditions hold, so the call is
                        // (conditionally) const; still check the trait's const stability.
                        self.check_op(ops::ConditionallyConstCall {
                            callee,
                            args: fn_args,
                            span: *fn_span,
                            call_source,
                        });
                        self.check_callee_stability(trait_did);
                    } else {
                        // Not a const trait, or the conditions failed: this call is not const.
                        self.check_op(ops::FnCallNonConst {
                            callee,
                            args: fn_args,
                            span: *fn_span,
                            call_source,
                        });
                    }
                    return;
                }

                // The callee has const conditions, so this is a conditionally-const call.
                if has_const_conditions.is_some() {
                    self.check_op(ops::ConditionallyConstCall {
                        callee,
                        args: fn_args,
                        span: *fn_span,
                        call_source,
                    });
                }

                if tcx.is_lang_item(callee, LangItem::BeginPanic) {
                    // `begin_panic` takes a generic payload; const-eval only supports `&str`.
                    match args[0].node.ty(&self.ccx.body.local_decls, tcx).kind() {
                        ty::Ref(_, ty, _) if ty.is_str() => {}
                        _ => self.check_op(ops::PanicNonStr),
                    }
                    return;
                }

                if tcx.is_lang_item(callee, LangItem::PanicDisplay) {
                    // In const-eval, `panic_display` is only supported with a `&&str` argument.
                    match args[0].node.ty(&self.ccx.body.local_decls, tcx).kind() {
                        ty::Ref(_, ty, _) if matches!(ty.kind(), ty::Ref(_, ty, _) if ty.is_str()) =>
                            {}
                        _ => {
                            self.check_op(ops::PanicNonStr);
                        }
                    }
                    return;
                }

                if tcx.is_lang_item(callee, LangItem::ExchangeMalloc) {
                    self.check_op(ops::HeapAllocation);
                    return;
                }


                // Intrinsics have their own const-stability handling.
                if let Some(intrinsic) = tcx.intrinsic(callee) {
                    if !tcx.is_const_fn(callee) {
                        self.check_op(ops::IntrinsicNonConst { name: intrinsic.name });
                        return;
                    }
                    // The intrinsic is const-stable if marked as such, or if it is not
                    // `must_be_overridden` and is itself safe to expose on stable.
                    let is_const_stable = intrinsic.const_stable
                        || (!intrinsic.must_be_overridden
                            && is_fn_or_trait_safe_to_expose_on_stable(tcx, callee));
                    match tcx.lookup_const_stability(callee) {
                        None => {
                            if !is_const_stable && self.enforce_recursive_const_stability() {
                                self.dcx().emit_err(errors::UnmarkedIntrinsicExposed {
                                    span: self.span,
                                    def_path: self.tcx.def_path_str(callee),
                                });
                            }
                        }
                        Some(hir::ConstStability {
                            level: hir::StabilityLevel::Unstable { .. },
                            feature,
                            ..
                        }) => {
                            self.check_op(ops::IntrinsicUnstable {
                                name: intrinsic.name,
                                feature,
                                const_stable_indirect: is_const_stable,
                            });
                        }
                        Some(hir::ConstStability {
                            level: hir::StabilityLevel::Stable { .. },
                            ..
                        }) => {
                            // Const-stable intrinsics are always fine to call.
                        }
                    }
                    return;
                }

                // Reject any remaining callee that is not a `const fn`.
                if !tcx.is_const_fn(callee) {
                    self.check_op(ops::FnCallNonConst {
                        callee,
                        args: fn_args,
                        span: *fn_span,
                        call_source,
                    });
                    return;
                }

                // The callee is a `const fn`; check its const stability.
                self.check_callee_stability(callee);
            }

            TerminatorKind::Drop { place: dropped_place, .. } => {
                // If the dedicated post-drop-elaboration check runs for this item, leave drop
                // checking to it.
                if super::post_drop_elaboration::checking_enabled(self.ccx) {
                    return;
                }

                self.check_drop_terminator(*dropped_place, location, terminator.source_info.span);
            }

            TerminatorKind::InlineAsm { .. } => self.check_op(ops::InlineAsm),

            TerminatorKind::Yield { .. } => {
                self.check_op(ops::Coroutine(
                    self.tcx
                        .coroutine_kind(self.body.source.def_id())
                        .expect("Only expected to have a yield in a coroutine"),
                ));
            }

            TerminatorKind::CoroutineDrop => {
                span_bug!(
                    self.body.source_info(location).span,
                    "We should not encounter TerminatorKind::CoroutineDrop after coroutine transform"
                );
            }

            TerminatorKind::UnwindTerminate(_) => {
                span_bug!(self.span, "`Terminate` terminator outside of cleanup block")
            }

            TerminatorKind::Assert { .. }
            | TerminatorKind::FalseEdge { .. }
            | TerminatorKind::FalseUnwind { .. }
            | TerminatorKind::Goto { .. }
            | TerminatorKind::UnwindResume
            | TerminatorKind::Return
            | TerminatorKind::SwitchInt { .. }
            | TerminatorKind::Unreachable => {}
        }
    }
}

fn is_int_bool_float_or_char(ty: Ty<'_>) -> bool {
    ty.is_bool() || ty.is_integral() || ty.is_char() || ty.is_floating_point()
}

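/// Emits the error for an unstable const operation used inside an item whose constness is
/// exposed on stable.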
fn emit_unstable_in_stable_exposed_error(
    ccx: &ConstCx<'_, '_>,
    span: Span,
    gate: Symbol,
    is_function_call: bool,
) -> ErrorGuaranteed {
    let attr_span = ccx.tcx.def_span(ccx.def_id()).shrink_to_lo();

    ccx.dcx().emit_err(errors::UnstableInStableExposed {
        gate: gate.to_string(),
        span,
        attr_span,
        is_function_call,
        is_function_call2: is_function_call,
    })
}