use std::fmt::Debug;

use rustc_abi::{BackendRepr, FieldIdx, HasDataLayout, Size, TargetDataLayout, VariantIdx};
use rustc_const_eval::const_eval::DummyMachine;
use rustc_const_eval::interpret::{
    ImmTy, InterpCx, InterpResult, Projectable, Scalar, format_interp_error, interp_ok,
};
use rustc_data_structures::fx::FxHashSet;
use rustc_hir::HirId;
use rustc_hir::def::DefKind;
use rustc_index::IndexVec;
use rustc_index::bit_set::DenseBitSet;
use rustc_middle::bug;
use rustc_middle::mir::visit::{MutatingUseContext, NonMutatingUseContext, PlaceContext, Visitor};
use rustc_middle::mir::*;
use rustc_middle::ty::layout::{LayoutError, LayoutOf, LayoutOfHelpers, TyAndLayout};
use rustc_middle::ty::{self, ConstInt, ScalarInt, Ty, TyCtxt, TypeVisitableExt};
use rustc_span::Span;
use tracing::{debug, instrument, trace};

use crate::errors::{AssertLint, AssertLintKind};

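/// Lint pass that uses const propagation to find operations which are statically
/// guaranteed to panic (arithmetic overflow, division/remainder by zero,
/// out-of-bounds indexing, and other failing assertions) and reports them.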
pub(super) struct KnownPanicsLint;

impl<'tcx> crate::MirLint<'tcx> for KnownPanicsLint {
    fn run_lint(&self, tcx: TyCtxt<'tcx>, body: &Body<'tcx>) {
        if body.tainted_by_errors.is_some() {
            return;
        }

        let def_id = body.source.def_id().expect_local();
        let def_kind = tcx.def_kind(def_id);
        let is_fn_like = def_kind.is_fn_like();
        let is_assoc_const = def_kind == DefKind::AssocConst;

        if !is_fn_like && !is_assoc_const {
            trace!("KnownPanicsLint skipped for {:?}", def_id);
            return;
        }

        if tcx.is_coroutine(def_id.to_def_id()) {
            trace!("KnownPanicsLint skipped for coroutine {:?}", def_id);
            return;
        }

        trace!("KnownPanicsLint starting for {:?}", def_id);

        let mut linter = ConstPropagator::new(body, tcx);
        linter.visit_body(body);

        trace!("KnownPanicsLint done for {:?}", def_id);
    }
}

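/// Walks the MIR body, const-propagating values through `locals` and emitting
/// lints for operations that are guaranteed to panic.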
struct ConstPropagator<'mir, 'tcx> {
    ecx: InterpCx<'tcx, DummyMachine>,
    tcx: TyCtxt<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
    worklist: Vec<BasicBlock>,
    visited_blocks: DenseBitSet<BasicBlock>,
    locals: IndexVec<Local, Value<'tcx>>,
    body: &'mir Body<'tcx>,
    written_only_inside_own_block_locals: FxHashSet<Local>,
    can_const_prop: IndexVec<Local, ConstPropMode>,
}

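/// What is known about a local (or a field of one): a fully evaluated immediate,
/// an aggregate tracked field by field, or nothing at all.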
#[derive(Debug, Clone)]
enum Value<'tcx> {
    Immediate(ImmTy<'tcx>),
    Aggregate { variant: VariantIdx, fields: IndexVec<FieldIdx, Value<'tcx>> },
    Uninit,
}

impl<'tcx> From<ImmTy<'tcx>> for Value<'tcx> {
    fn from(v: ImmTy<'tcx>) -> Self {
        Self::Immediate(v)
    }
}

impl<'tcx> Value<'tcx> {
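    /// Resolves `proj` against this value, returning the sub-value it denotes,
    /// or `None` if that cannot be determined from the tracked information.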
    fn project(
        &self,
        proj: &[PlaceElem<'tcx>],
        prop: &ConstPropagator<'_, 'tcx>,
    ) -> Option<&Value<'tcx>> {
        let mut this = self;
        for proj in proj {
            this = match (*proj, this) {
                (PlaceElem::Field(idx, _), Value::Aggregate { fields, .. }) => {
                    fields.get(idx).unwrap_or(&Value::Uninit)
                }
                (PlaceElem::Index(idx), Value::Aggregate { fields, .. }) => {
                    let idx = prop.get_const(idx.into())?.immediate()?;
                    let idx = prop.ecx.read_target_usize(idx).discard_err()?.try_into().ok()?;
                    if idx <= FieldIdx::MAX_AS_U32 {
                        fields.get(FieldIdx::from_u32(idx)).unwrap_or(&Value::Uninit)
                    } else {
                        return None;
                    }
                }
                (
                    PlaceElem::ConstantIndex { offset, min_length: _, from_end: false },
                    Value::Aggregate { fields, .. },
                ) => fields
                    .get(FieldIdx::from_u32(offset.try_into().ok()?))
                    .unwrap_or(&Value::Uninit),
                _ => return None,
            };
        }
        Some(this)
    }

    fn project_mut(&mut self, proj: &[PlaceElem<'_>]) -> Option<&mut Value<'tcx>> {
        let mut this = self;
        for proj in proj {
            this = match (proj, this) {
                (PlaceElem::Field(idx, _), Value::Aggregate { fields, .. }) => {
                    fields.ensure_contains_elem(*idx, || Value::Uninit)
                }
                (PlaceElem::Field(..), val @ Value::Uninit) => {
                    *val =
                        Value::Aggregate { variant: VariantIdx::ZERO, fields: Default::default() };
                    val.project_mut(&[*proj])?
                }
                _ => return None,
            };
        }
        Some(this)
    }

    fn immediate(&self) -> Option<&ImmTy<'tcx>> {
        match self {
            Value::Immediate(op) => Some(op),
            _ => None,
        }
    }
}

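// Trait plumbing so that `ConstPropagator` can use `layout_of` and the other
// layout helpers directly.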
impl<'tcx> LayoutOfHelpers<'tcx> for ConstPropagator<'_, 'tcx> {
    type LayoutOfResult = Result<TyAndLayout<'tcx>, LayoutError<'tcx>>;

    #[inline]
    fn handle_layout_err(&self, err: LayoutError<'tcx>, _: Span, _: Ty<'tcx>) -> LayoutError<'tcx> {
        err
    }
}

impl HasDataLayout for ConstPropagator<'_, '_> {
    #[inline]
    fn data_layout(&self) -> &TargetDataLayout {
        &self.tcx.data_layout
    }
}

impl<'tcx> ty::layout::HasTyCtxt<'tcx> for ConstPropagator<'_, 'tcx> {
    #[inline]
    fn tcx(&self) -> TyCtxt<'tcx> {
        self.tcx
    }
}

impl<'tcx> ty::layout::HasTypingEnv<'tcx> for ConstPropagator<'_, 'tcx> {
    #[inline]
    fn typing_env(&self) -> ty::TypingEnv<'tcx> {
        self.typing_env
    }
}

impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
    fn new(body: &'mir Body<'tcx>, tcx: TyCtxt<'tcx>) -> ConstPropagator<'mir, 'tcx> {
        let def_id = body.source.def_id();
        let typing_env = ty::TypingEnv::post_analysis(tcx, body.source.def_id());
        let can_const_prop = CanConstProp::check(tcx, typing_env, body);
        let ecx = InterpCx::new(tcx, tcx.def_span(def_id), typing_env, DummyMachine);

        ConstPropagator {
            ecx,
            tcx,
            typing_env,
            worklist: vec![START_BLOCK],
            visited_blocks: DenseBitSet::new_empty(body.basic_blocks.len()),
            locals: IndexVec::from_elem_n(Value::Uninit, body.local_decls.len()),
            body,
            can_const_prop,
            written_only_inside_own_block_locals: Default::default(),
        }
    }

    fn local_decls(&self) -> &'mir LocalDecls<'tcx> {
        &self.body.local_decls
    }

    fn get_const(&self, place: Place<'tcx>) -> Option<&Value<'tcx>> {
        self.locals[place.local].project(&place.projection, self)
    }

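    /// Forgets any value tracked for `local` and clears its block-local marker.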
    fn remove_const(&mut self, local: Local) {
        self.locals[local] = Value::Uninit;
        self.written_only_inside_own_block_locals.remove(&local);
    }

    fn access_mut(&mut self, place: &Place<'_>) -> Option<&mut Value<'tcx>> {
        match self.can_const_prop[place.local] {
            ConstPropMode::NoPropagation => return None,
            ConstPropMode::OnlyInsideOwnBlock => {
                self.written_only_inside_own_block_locals.insert(place.local);
            }
            ConstPropMode::FullConstProp => {}
        }
        self.locals[place.local].project_mut(place.projection)
    }

    fn lint_root(&self, source_info: SourceInfo) -> Option<HirId> {
        source_info.scope.lint_root(&self.body.source_scopes)
    }

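    /// Runs an interpreter operation, turning any interpreter error into `None`.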
    fn use_ecx<F, T>(&mut self, f: F) -> Option<T>
    where
        F: FnOnce(&mut Self) -> InterpResult<'tcx, T>,
    {
        f(self)
            .map_err_info(|err| {
                trace!("InterpCx operation failed: {:?}", err);
                assert!(
                    !err.kind().formatted_string(),
                    "known panics lint encountered formatting error: {}",
                    format_interp_error(self.ecx.tcx.dcx(), err),
                );
                err
            })
            .discard_err()
    }

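    /// Returns the value, if any, of evaluating the constant operand `c`.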
    fn eval_constant(&mut self, c: &ConstOperand<'tcx>) -> Option<ImmTy<'tcx>> {
        if c.has_param() {
            return None;
        }

        let val = self.tcx.try_normalize_erasing_regions(self.typing_env, c.const_).ok()?;

        self.use_ecx(|this| this.ecx.eval_mir_constant(&val, c.span, None))?
            .as_mplace_or_imm()
            .right()
    }

    #[instrument(level = "trace", skip(self), ret)]
    fn eval_place(&mut self, place: Place<'tcx>) -> Option<ImmTy<'tcx>> {
        match self.get_const(place)? {
            Value::Immediate(imm) => Some(imm.clone()),
            Value::Aggregate { .. } => None,
            Value::Uninit => None,
        }
    }

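    /// Returns the value, if any, of evaluating `op`: constants go through
    /// `eval_constant`, places through `eval_place`.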
    fn eval_operand(&mut self, op: &Operand<'tcx>) -> Option<ImmTy<'tcx>> {
        match *op {
            Operand::RuntimeChecks(_) => None,
            Operand::Constant(ref c) => self.eval_constant(c),
            Operand::Move(place) | Operand::Copy(place) => self.eval_place(place),
        }
    }

    fn report_assert_as_lint(
        &self,
        location: Location,
        lint_kind: AssertLintKind,
        assert_kind: AssertKind<impl Debug>,
    ) {
        let source_info = self.body.source_info(location);
        if let Some(lint_root) = self.lint_root(*source_info) {
            let span = source_info.span;
            self.tcx.emit_node_span_lint(
                lint_kind.lint(),
                lint_root,
                span,
                AssertLint { span, assert_kind, lint_kind },
            );
        }
    }

    fn check_unary_op(&mut self, op: UnOp, arg: &Operand<'tcx>, location: Location) -> Option<()> {
        let arg = self.eval_operand(arg)?;
        if op == UnOp::Neg && arg.layout.ty.is_integral() {
            let (arg, overflow) = self.use_ecx(|this| {
                let arg = this.ecx.read_immediate(&arg)?;
                let (_res, overflow) = this
                    .ecx
                    .binary_op(BinOp::SubWithOverflow, &ImmTy::from_int(0, arg.layout), &arg)?
                    .to_scalar_pair();
                interp_ok((arg, overflow.to_bool()?))
            })?;
            if overflow {
                self.report_assert_as_lint(
                    location,
                    AssertLintKind::ArithmeticOverflow,
                    AssertKind::OverflowNeg(arg.to_const_int()),
                );
                return None;
            }
        }

        Some(())
    }

    fn check_binary_op(
        &mut self,
        op: BinOp,
        left: &Operand<'tcx>,
        right: &Operand<'tcx>,
        location: Location,
    ) -> Option<()> {
        let r =
            self.eval_operand(right).and_then(|r| self.use_ecx(|this| this.ecx.read_immediate(&r)));
        let l =
            self.eval_operand(left).and_then(|l| self.use_ecx(|this| this.ecx.read_immediate(&l)));
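        // A shift by at least the bit width of the LHS type always panics, and this can
        // be detected even when the LHS value itself is unknown.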
        if matches!(op, BinOp::Shr | BinOp::Shl) {
            let r = r.clone()?;
            let left_ty = left.ty(self.local_decls(), self.tcx);
            let left_size = self.ecx.layout_of(left_ty).ok()?.size;
            let right_size = r.layout.size;
            let r_bits = r.to_scalar().to_bits(right_size).discard_err();
            if r_bits.is_some_and(|b| b >= left_size.bits() as u128) {
                debug!("check_binary_op: reporting assert for {:?}", location);
                let panic = AssertKind::Overflow(
                    op,
                    ConstInt::new(
                        // The LHS value may be unknown here; a placeholder is enough for
                        // the diagnostic.
                        ScalarInt::try_from_uint(1_u8, left_size).unwrap(),
                        left_ty.is_signed(),
                        left_ty.is_ptr_sized_integral(),
                    ),
                    r.to_const_int(),
                );
                self.report_assert_as_lint(location, AssertLintKind::ArithmeticOverflow, panic);
                return None;
            }
        }

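        // Division and remainder panics are reported via the assertions they emit; for
        // the remaining arithmetic ops, switch to the overflowing form and inspect the
        // overflow flag.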
        let op = op.wrapping_to_overflowing().unwrap_or(op);
        if let (Some(l), Some(r)) = (l, r)
            && l.layout.ty.is_integral()
            && op.is_overflowing()
            && self.use_ecx(|this| {
                let (_res, overflow) = this.ecx.binary_op(op, &l, &r)?.to_scalar_pair();
                overflow.to_bool()
            })?
        {
            self.report_assert_as_lint(
                location,
                AssertLintKind::ArithmeticOverflow,
                AssertKind::Overflow(op, l.to_const_int(), r.to_const_int()),
            );
            return None;
        }

        Some(())
    }

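    /// Runs the panic checks for `rvalue`; returns `None` if nothing should be
    /// propagated for this assignment.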
    fn check_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) -> Option<()> {
        match rvalue {
            Rvalue::UnaryOp(op, arg) => {
                trace!("checking UnaryOp(op = {:?}, arg = {:?})", op, arg);
                self.check_unary_op(*op, arg, location)?;
            }
            Rvalue::BinaryOp(op, box (left, right)) => {
                trace!("checking BinaryOp(op = {:?}, left = {:?}, right = {:?})", op, left, right);
                self.check_binary_op(*op, left, right, location)?;
            }

            Rvalue::RawPtr(_, place) | Rvalue::Ref(_, _, place) => {
                trace!("skipping RawPtr | Ref for {:?}", place);

                // Once a reference or raw pointer to the local exists, its value can change
                // behind our back; stop tracking it.
                self.remove_const(place.local);

                return None;
            }
            Rvalue::ThreadLocalRef(def_id) => {
                trace!("skipping ThreadLocalRef({:?})", def_id);

                return None;
            }

            Rvalue::Aggregate(..)
            | Rvalue::Use(..)
            | Rvalue::CopyForDeref(..)
            | Rvalue::Repeat(..)
            | Rvalue::Cast(..)
            | Rvalue::ShallowInitBox(..)
            | Rvalue::Discriminant(..)
            | Rvalue::WrapUnsafeBinder(..) => {}
        }

        if rvalue.has_param() {
            return None;
        }
        if !rvalue.ty(self.local_decls(), self.tcx).is_sized(self.tcx, self.typing_env) {
            return None;
        }

        Some(())
    }

    fn check_assertion(
        &mut self,
        expected: bool,
        msg: &AssertKind<Operand<'tcx>>,
        cond: &Operand<'tcx>,
        location: Location,
    ) {
        let Some(value) = &self.eval_operand(cond) else { return };
        trace!("assertion on {:?} should be {:?}", value, expected);

        let expected = Scalar::from_bool(expected);
        let Some(value_const) = self.use_ecx(|this| this.ecx.read_scalar(value)) else { return };

        if expected != value_const {
            // The condition is known to fail; forget the value it was computed from so
            // it is not propagated further.
            if let Some(place) = cond.place() {
                self.remove_const(place.local);
            }

            // Prints either the evaluated constant or `_` when an operand could not be
            // evaluated.
            enum DbgVal<T> {
                Val(T),
                Underscore,
            }
            impl<T: std::fmt::Debug> std::fmt::Debug for DbgVal<T> {
                fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                    match self {
                        Self::Val(val) => val.fmt(fmt),
                        Self::Underscore => fmt.write_str("_"),
                    }
                }
            }
            let mut eval_to_int = |op| {
                self.eval_operand(op)
                    .and_then(|op| self.ecx.read_immediate(&op).discard_err())
                    .map_or(DbgVal::Underscore, |op| DbgVal::Val(op.to_const_int()))
            };
            let msg = match msg {
                AssertKind::DivisionByZero(op) => AssertKind::DivisionByZero(eval_to_int(op)),
                AssertKind::RemainderByZero(op) => AssertKind::RemainderByZero(eval_to_int(op)),
                AssertKind::Overflow(bin_op @ (BinOp::Div | BinOp::Rem), op1, op2) => {
                    AssertKind::Overflow(*bin_op, eval_to_int(op1), eval_to_int(op2))
                }
                AssertKind::BoundsCheck { len, index } => {
                    let len = eval_to_int(len);
                    let index = eval_to_int(index);
                    AssertKind::BoundsCheck { len, index }
                }
                // Plain overflow assertions are already reported by the operator checks.
                AssertKind::Overflow(..) | AssertKind::OverflowNeg(_) => return,
                // Other assertion kinds are not evaluated here.
                _ => return,
            };
            self.report_assert_as_lint(location, AssertLintKind::UnconditionalPanic, msg);
        }
    }

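    /// Debug-asserts that no value is still tracked for `local`; zero-sized locals
    /// are exempt, since their value is trivially known.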
    fn ensure_not_propagated(&self, local: Local) {
        if cfg!(debug_assertions) {
            let val = self.get_const(local.into());
            assert!(
                matches!(val, Some(Value::Uninit))
                    || self
                        .layout_of(self.local_decls()[local].ty)
                        .map_or(true, |layout| layout.is_zst()),
                "failed to remove values for `{local:?}`, value={val:?}",
            )
        }
    }

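    /// Evaluates `rvalue` and, on success, stores the result into `dest`.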
    #[instrument(level = "trace", skip(self), ret)]
    fn eval_rvalue(&mut self, rvalue: &Rvalue<'tcx>, dest: &Place<'tcx>) -> Option<()> {
        if !dest.projection.is_empty() {
            return None;
        }
        use rustc_middle::mir::Rvalue::*;
        let layout = self.ecx.layout_of(dest.ty(self.body, self.tcx).ty).ok()?;
        trace!(?layout);

        let val: Value<'_> = match *rvalue {
            ThreadLocalRef(_) => return None,

            Use(ref operand) | WrapUnsafeBinder(ref operand, _) => {
                self.eval_operand(operand)?.into()
            }

            CopyForDeref(place) => self.eval_place(place)?.into(),

            BinaryOp(bin_op, box (ref left, ref right)) => {
                let left = self.eval_operand(left)?;
                let left = self.use_ecx(|this| this.ecx.read_immediate(&left))?;

                let right = self.eval_operand(right)?;
                let right = self.use_ecx(|this| this.ecx.read_immediate(&right))?;

                let val = self.use_ecx(|this| this.ecx.binary_op(bin_op, &left, &right))?;
                if matches!(val.layout.backend_repr, BackendRepr::ScalarPair(..)) {
                    // Overflowing ops yield a (result, overflow flag) pair; store it as a
                    // two-field aggregate.
                    let (val, overflow) = val.to_pair(&self.ecx);
                    Value::Aggregate {
                        variant: VariantIdx::ZERO,
                        fields: [val.into(), overflow.into()].into_iter().collect(),
                    }
                } else {
                    val.into()
                }
            }

            UnaryOp(un_op, ref operand) => {
                let operand = self.eval_operand(operand)?;
                let val = self.use_ecx(|this| this.ecx.read_immediate(&operand))?;

                let val = self.use_ecx(|this| this.ecx.unary_op(un_op, &val))?;
                val.into()
            }

            Aggregate(ref kind, ref fields) => Value::Aggregate {
                fields: fields
                    .iter()
                    .map(|field| self.eval_operand(field).map_or(Value::Uninit, Value::Immediate))
                    .collect(),
                variant: match **kind {
                    AggregateKind::Adt(_, variant, _, _, _) => variant,
                    AggregateKind::Array(_)
                    | AggregateKind::Tuple
                    | AggregateKind::RawPtr(_, _)
                    | AggregateKind::Closure(_, _)
                    | AggregateKind::Coroutine(_, _)
                    | AggregateKind::CoroutineClosure(_, _) => VariantIdx::ZERO,
                },
            },

            Repeat(ref op, n) => {
                trace!(?op, ?n);
                return None;
            }

            Ref(..) | RawPtr(..) => return None,

            ShallowInitBox(..) => return None,

            Cast(ref kind, ref value, to) => match kind {
                CastKind::IntToInt | CastKind::IntToFloat => {
                    let value = self.eval_operand(value)?;
                    let value = self.ecx.read_immediate(&value).discard_err()?;
                    let to = self.ecx.layout_of(to).ok()?;
                    let res = self.ecx.int_to_int_or_float(&value, to).discard_err()?;
                    res.into()
                }
                CastKind::FloatToFloat | CastKind::FloatToInt => {
                    let value = self.eval_operand(value)?;
                    let value = self.ecx.read_immediate(&value).discard_err()?;
                    let to = self.ecx.layout_of(to).ok()?;
                    let res = self.ecx.float_to_float_or_int(&value, to).discard_err()?;
                    res.into()
                }
                CastKind::Transmute | CastKind::Subtype => {
                    let value = self.eval_operand(value)?;
                    let to = self.ecx.layout_of(to).ok()?;
                    // Only transmutes that keep the same scalar or scalar-pair
                    // representation can be re-read as an immediate here.
                    match (value.layout.backend_repr, to.backend_repr) {
                        (BackendRepr::Scalar(..), BackendRepr::Scalar(..)) => {}
                        (BackendRepr::ScalarPair(..), BackendRepr::ScalarPair(..)) => {}
                        _ => return None,
                    }

                    value.offset(Size::ZERO, to, &self.ecx).discard_err()?.into()
                }
                _ => return None,
            },

            Discriminant(place) => {
                let variant = match self.get_const(place)? {
                    Value::Immediate(op) => {
                        let op = op.clone();
                        self.use_ecx(|this| this.ecx.read_discriminant(&op))?
                    }
                    Value::Aggregate { variant, .. } => *variant,
                    Value::Uninit => return None,
                };
                let imm = self.use_ecx(|this| {
                    this.ecx.discriminant_for_variant(
                        place.ty(this.local_decls(), this.tcx).ty,
                        variant,
                    )
                })?;
                imm.into()
            }
        };
        trace!(?val);

        *self.access_mut(dest)? = val;

        Some(())
    }
}

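// The main driver: visit reachable blocks in worklist order, propagate values
// through assignments, and check statements and terminators for guaranteed panics.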
impl<'tcx> Visitor<'tcx> for ConstPropagator<'_, 'tcx> {
    fn visit_body(&mut self, body: &Body<'tcx>) {
        while let Some(bb) = self.worklist.pop() {
            if !self.visited_blocks.insert(bb) {
                continue;
            }

            let data = &body.basic_blocks[bb];
            self.visit_basic_block_data(bb, data);
        }
    }

    fn visit_operand(&mut self, operand: &Operand<'tcx>, location: Location) {
        self.super_operand(operand, location);
    }

    fn visit_const_operand(&mut self, constant: &ConstOperand<'tcx>, location: Location) {
        trace!("visit_const_operand: {:?}", constant);
        self.super_const_operand(constant, location);
        self.eval_constant(constant);
    }

    fn visit_assign(&mut self, place: &Place<'tcx>, rvalue: &Rvalue<'tcx>, location: Location) {
        self.super_assign(place, rvalue, location);

        let Some(()) = self.check_rvalue(rvalue, location) else { return };

        match self.can_const_prop[place.local] {
            // Do nothing if the place is indirect.
            _ if place.is_indirect() => {}
            ConstPropMode::NoPropagation => self.ensure_not_propagated(place.local),
            ConstPropMode::OnlyInsideOwnBlock | ConstPropMode::FullConstProp => {
                if self.eval_rvalue(rvalue, place).is_none() {
                    // Const prop failed: forget everything known about the destination so
                    // later statements cannot observe a stale value.
                    trace!(
                        "propagation into {:?} failed.
                        Nuking the entire site from orbit, it's the only way to be sure",
                        place,
                    );
                    self.remove_const(place.local);
                }
            }
        }
    }

    fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
        trace!("visit_statement: {:?}", statement);

        self.super_statement(statement, location);

        match statement.kind {
            StatementKind::SetDiscriminant { ref place, variant_index } => {
                match self.can_const_prop[place.local] {
                    // Do nothing if the place is indirect.
                    _ if place.is_indirect() => {}
                    ConstPropMode::NoPropagation => self.ensure_not_propagated(place.local),
                    ConstPropMode::FullConstProp | ConstPropMode::OnlyInsideOwnBlock => {
                        match self.access_mut(place) {
                            Some(Value::Aggregate { variant, .. }) => *variant = variant_index,
                            _ => self.remove_const(place.local),
                        }
                    }
                }
            }
            StatementKind::StorageLive(local) => {
                self.remove_const(local);
            }
            StatementKind::StorageDead(local) => {
                self.remove_const(local);
            }
            _ => {}
        }
    }

    fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
        self.super_terminator(terminator, location);
        match &terminator.kind {
            TerminatorKind::Assert { expected, msg, cond, .. } => {
                self.check_assertion(*expected, msg, cond, location);
            }
            TerminatorKind::SwitchInt { discr, targets } => {
                if let Some(ref value) = self.eval_operand(discr)
                    && let Some(value_const) = self.use_ecx(|this| this.ecx.read_scalar(value))
                    && let Some(constant) = value_const.to_bits(value_const.size()).discard_err()
                {
                    // The discriminant is known, so only the matching target is reachable
                    // from here.
                    let target = targets.target_for_value(constant);
                    self.worklist.push(target);
                    return;
                }
            }
            TerminatorKind::Goto { .. }
            | TerminatorKind::UnwindResume
            | TerminatorKind::UnwindTerminate(_)
            | TerminatorKind::Return
            | TerminatorKind::TailCall { .. }
            | TerminatorKind::Unreachable
            | TerminatorKind::Drop { .. }
            | TerminatorKind::Yield { .. }
            | TerminatorKind::CoroutineDrop
            | TerminatorKind::FalseEdge { .. }
            | TerminatorKind::FalseUnwind { .. }
            | TerminatorKind::Call { .. }
            | TerminatorKind::InlineAsm { .. } => {}
        }

        self.worklist.extend(terminator.successors());
    }

    fn visit_basic_block_data(&mut self, block: BasicBlock, data: &BasicBlockData<'tcx>) {
        self.super_basic_block_data(block, data);

        // Values written in `OnlyInsideOwnBlock` mode must not leak into successor
        // blocks; forget them now that the block is done.
        let mut written_only_inside_own_block_locals =
            std::mem::take(&mut self.written_only_inside_own_block_locals);

        #[allow(rustc::potential_query_instability)]
        for local in written_only_inside_own_block_locals.drain() {
            debug_assert_eq!(self.can_const_prop[local], ConstPropMode::OnlyInsideOwnBlock);
            self.remove_const(local);
        }
        self.written_only_inside_own_block_locals = written_only_inside_own_block_locals;

        if cfg!(debug_assertions) {
            for (local, &mode) in self.can_const_prop.iter_enumerated() {
                match mode {
                    ConstPropMode::FullConstProp => {}
                    ConstPropMode::NoPropagation | ConstPropMode::OnlyInsideOwnBlock => {
                        self.ensure_not_propagated(local);
                    }
                }
            }
        }
    }
}

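/// Locals whose type is larger than this number of bytes are never tracked
/// (see `CanConstProp::check`).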
const MAX_ALLOC_LIMIT: u64 = 1024;

#[derive(Clone, Copy, Debug, PartialEq)]
enum ConstPropMode {
    /// The local can be propagated into, and reads of it can be propagated out.
    FullConstProp,
    /// The local can only be propagated into and out of within its own block.
    OnlyInsideOwnBlock,
    /// The local cannot take part in propagation at all.
    NoPropagation,
}

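/// Pre-pass that decides, for every local, which `ConstPropMode` applies.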
struct CanConstProp {
    can_const_prop: IndexVec<Local, ConstPropMode>,
    found_assignment: DenseBitSet<Local>,
}

impl CanConstProp {
    fn check<'tcx>(
        tcx: TyCtxt<'tcx>,
        typing_env: ty::TypingEnv<'tcx>,
        body: &Body<'tcx>,
    ) -> IndexVec<Local, ConstPropMode> {
        let mut cpv = CanConstProp {
            can_const_prop: IndexVec::from_elem(ConstPropMode::FullConstProp, &body.local_decls),
            found_assignment: DenseBitSet::new_empty(body.local_decls.len()),
        };
        for (local, val) in cpv.can_const_prop.iter_enumerated_mut() {
            let ty = body.local_decls[local].ty;
            if ty.is_async_drop_in_place_coroutine(tcx) {
                *val = ConstPropMode::NoPropagation;
                continue;
            } else if ty.is_union() {
                *val = ConstPropMode::NoPropagation;
            } else {
                // Only track locals whose layout is computable and smaller than
                // `MAX_ALLOC_LIMIT` bytes.
                match tcx.layout_of(typing_env.as_query_input(ty)) {
                    Ok(layout) if layout.size < Size::from_bytes(MAX_ALLOC_LIMIT) => {}
                    _ => {
                        *val = ConstPropMode::NoPropagation;
                        continue;
                    }
                }
            }
        }
        // Arguments are written by the caller, so they already count as one assignment.
        for arg in body.args_iter() {
            cpv.found_assignment.insert(arg);
        }
        cpv.visit_body(body);
        cpv.can_const_prop
    }
}

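// The use-based part of the analysis: downgrade a local's mode according to how
// the local is used in the body.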
impl<'tcx> Visitor<'tcx> for CanConstProp {
    fn visit_place(&mut self, place: &Place<'tcx>, mut context: PlaceContext, loc: Location) {
        use rustc_middle::mir::visit::PlaceContext::*;

        // A use through `Deref` only reads the pointer stored in the local, even if the
        // place as a whole is mutated, so treat it as a plain read of the local.
        if place.projection.first() == Some(&PlaceElem::Deref) {
            context = NonMutatingUse(NonMutatingUseContext::Copy);
        }

        self.visit_local(place.local, context, loc);
        self.visit_projection(place.as_ref(), context, loc);
    }

    fn visit_local(&mut self, local: Local, context: PlaceContext, _: Location) {
        use rustc_middle::mir::visit::PlaceContext::*;
        match context {
            // Assignments: the first write to a local is fine, but any further write
            // demotes it to block-local propagation.
            | MutatingUse(MutatingUseContext::Call)
            | MutatingUse(MutatingUseContext::AsmOutput)
            | MutatingUse(MutatingUseContext::Store)
            | MutatingUse(MutatingUseContext::SetDiscriminant) => {
                if !self.found_assignment.insert(local) {
                    match &mut self.can_const_prop[local] {
                        ConstPropMode::OnlyInsideOwnBlock => {}
                        ConstPropMode::NoPropagation => {}
                        other @ ConstPropMode::FullConstProp => {
                            trace!(
                                "local {:?} can't be propagated because of multiple assignments. Previous state: {:?}",
                                local, other,
                            );
                            *other = ConstPropMode::OnlyInsideOwnBlock;
                        }
                    }
                }
            }
            // Plain reads do not affect the mode.
            NonMutatingUse(NonMutatingUseContext::Copy)
            | NonMutatingUse(NonMutatingUseContext::Move)
            | NonMutatingUse(NonMutatingUseContext::Inspect)
            | NonMutatingUse(NonMutatingUseContext::PlaceMention)
            | NonUse(_) => {}

            // Uses that this analysis does not model precisely (borrows of any kind,
            // yields, drops, retags) disable propagation entirely.
            MutatingUse(MutatingUseContext::Yield)
            | MutatingUse(MutatingUseContext::Drop)
            | MutatingUse(MutatingUseContext::Retag)
            | NonMutatingUse(NonMutatingUseContext::SharedBorrow)
            | NonMutatingUse(NonMutatingUseContext::FakeBorrow)
            | NonMutatingUse(NonMutatingUseContext::RawBorrow)
            | MutatingUse(MutatingUseContext::Borrow)
            | MutatingUse(MutatingUseContext::RawBorrow) => {
                trace!("local {:?} can't be propagated because it's used: {:?}", local, context);
                self.can_const_prop[local] = ConstPropMode::NoPropagation;
            }
            MutatingUse(MutatingUseContext::Projection)
            | NonMutatingUse(NonMutatingUseContext::Projection) => {
                bug!("visit_place should not pass {context:?} for {local:?}")
            }
        }
    }
}