rustc_mir_transform/known_panics_lint.rs

//! A lint that checks for known panics like overflows, division by zero,
//! out-of-bounds access etc. Uses const propagation to determine the values of
//! operands during checks.
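//!
//! For instance (an illustrative example, not from this file), const
//! propagation lets the lint fire on code like:
//!
//! ```ignore (illustrative)
//! fn oops() -> u8 {
//!     let x: u8 = 255;
//!     x + 1 // lint: this arithmetic operation will overflow
//! }
//! ```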

use std::fmt::Debug;

use rustc_abi::{BackendRepr, FieldIdx, HasDataLayout, Size, TargetDataLayout, VariantIdx};
use rustc_const_eval::const_eval::DummyMachine;
use rustc_const_eval::interpret::{
    ImmTy, InterpCx, InterpResult, Projectable, Scalar, format_interp_error, interp_ok,
};
use rustc_data_structures::fx::FxHashSet;
use rustc_hir::HirId;
use rustc_hir::def::DefKind;
use rustc_index::IndexVec;
use rustc_index::bit_set::DenseBitSet;
use rustc_middle::bug;
use rustc_middle::mir::visit::{MutatingUseContext, NonMutatingUseContext, PlaceContext, Visitor};
use rustc_middle::mir::*;
use rustc_middle::ty::layout::{LayoutError, LayoutOf, LayoutOfHelpers, TyAndLayout};
use rustc_middle::ty::{self, ConstInt, ScalarInt, Ty, TyCtxt, TypeVisitableExt};
use rustc_span::Span;
use tracing::{debug, instrument, trace};

use crate::errors::{AssertLint, AssertLintKind};

pub(super) struct KnownPanicsLint;

impl<'tcx> crate::MirLint<'tcx> for KnownPanicsLint {
    fn run_lint(&self, tcx: TyCtxt<'tcx>, body: &Body<'tcx>) {
        if body.tainted_by_errors.is_some() {
            return;
        }

        let def_id = body.source.def_id().expect_local();
        let def_kind = tcx.def_kind(def_id);
        let is_fn_like = def_kind.is_fn_like();
        let is_assoc_const = def_kind == DefKind::AssocConst;

        // Only run const prop on functions, methods, closures and associated constants
        if !is_fn_like && !is_assoc_const {
            // skip anon_const/statics/consts because they'll be evaluated by miri anyway
            trace!("KnownPanicsLint skipped for {:?}", def_id);
            return;
        }

        // FIXME(wesleywiser) const prop doesn't work on coroutines because of query cycles
        // computing their layout.
        if tcx.is_coroutine(def_id.to_def_id()) {
            trace!("KnownPanicsLint skipped for coroutine {:?}", def_id);
            return;
        }

        trace!("KnownPanicsLint starting for {:?}", def_id);

        let mut linter = ConstPropagator::new(body, tcx);
        linter.visit_body(body);

        trace!("KnownPanicsLint done for {:?}", def_id);
    }
}

/// Visits MIR nodes, performs const propagation
/// and runs lint checks as it goes
struct ConstPropagator<'mir, 'tcx> {
    ecx: InterpCx<'tcx, DummyMachine>,
    tcx: TyCtxt<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
    worklist: Vec<BasicBlock>,
    visited_blocks: DenseBitSet<BasicBlock>,
    locals: IndexVec<Local, Value<'tcx>>,
    body: &'mir Body<'tcx>,
    written_only_inside_own_block_locals: FxHashSet<Local>,
    can_const_prop: IndexVec<Local, ConstPropMode>,
}

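/// A value tracked for a local during propagation: a fully known immediate, a
/// (possibly partially known) aggregate, or nothing known at all.
///
/// Rough illustration: after `let x = (1_u8, 2_u8);`, the propagator tracks
/// `x` as a `Value::Aggregate` with variant `VariantIdx::ZERO` and two
/// `Value::Immediate` fields holding `1_u8` and `2_u8`.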
#[derive(Debug, Clone)]
enum Value<'tcx> {
    Immediate(ImmTy<'tcx>),
    Aggregate { variant: VariantIdx, fields: IndexVec<FieldIdx, Value<'tcx>> },
    Uninit,
}

impl<'tcx> From<ImmTy<'tcx>> for Value<'tcx> {
    fn from(v: ImmTy<'tcx>) -> Self {
        Self::Immediate(v)
    }
}

impl<'tcx> Value<'tcx> {
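    /// Walks the projection chain `proj` and returns the tracked sub-value it
    /// lands on, if any. Only field accesses, constant indices counted from
    /// the start, and indices that are themselves propagated constants can be
    /// followed: illustratively, `x.0` resolves to the first field of a
    /// tracked aggregate, while `x[i]` with an unknown `i` yields `None`.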
    fn project(
        &self,
        proj: &[PlaceElem<'tcx>],
        prop: &ConstPropagator<'_, 'tcx>,
    ) -> Option<&Value<'tcx>> {
        let mut this = self;
        for proj in proj {
            this = match (*proj, this) {
                (PlaceElem::Field(idx, _), Value::Aggregate { fields, .. }) => {
                    fields.get(idx).unwrap_or(&Value::Uninit)
                }
                (PlaceElem::Index(idx), Value::Aggregate { fields, .. }) => {
                    let idx = prop.get_const(idx.into())?.immediate()?;
                    let idx = prop.ecx.read_target_usize(idx).discard_err()?.try_into().ok()?;
                    if idx <= FieldIdx::MAX_AS_U32 {
                        fields.get(FieldIdx::from_u32(idx)).unwrap_or(&Value::Uninit)
                    } else {
                        return None;
                    }
                }
                (
                    PlaceElem::ConstantIndex { offset, min_length: _, from_end: false },
                    Value::Aggregate { fields, .. },
                ) => fields
                    .get(FieldIdx::from_u32(offset.try_into().ok()?))
                    .unwrap_or(&Value::Uninit),
                _ => return None,
            };
        }
        Some(this)
    }

    fn project_mut(&mut self, proj: &[PlaceElem<'_>]) -> Option<&mut Value<'tcx>> {
        let mut this = self;
        for proj in proj {
            this = match (proj, this) {
                (PlaceElem::Field(idx, _), Value::Aggregate { fields, .. }) => {
                    fields.ensure_contains_elem(*idx, || Value::Uninit)
                }
                (PlaceElem::Field(..), val @ Value::Uninit) => {
                    *val =
                        Value::Aggregate { variant: VariantIdx::ZERO, fields: Default::default() };
                    val.project_mut(&[*proj])?
                }
                _ => return None,
            };
        }
        Some(this)
    }

    fn immediate(&self) -> Option<&ImmTy<'tcx>> {
        match self {
            Value::Immediate(op) => Some(op),
            _ => None,
        }
    }
}

impl<'tcx> LayoutOfHelpers<'tcx> for ConstPropagator<'_, 'tcx> {
    type LayoutOfResult = Result<TyAndLayout<'tcx>, LayoutError<'tcx>>;

    #[inline]
    fn handle_layout_err(&self, err: LayoutError<'tcx>, _: Span, _: Ty<'tcx>) -> LayoutError<'tcx> {
        err
    }
}

impl HasDataLayout for ConstPropagator<'_, '_> {
    #[inline]
    fn data_layout(&self) -> &TargetDataLayout {
        &self.tcx.data_layout
    }
}

impl<'tcx> ty::layout::HasTyCtxt<'tcx> for ConstPropagator<'_, 'tcx> {
    #[inline]
    fn tcx(&self) -> TyCtxt<'tcx> {
        self.tcx
    }
}

impl<'tcx> ty::layout::HasTypingEnv<'tcx> for ConstPropagator<'_, 'tcx> {
    #[inline]
    fn typing_env(&self) -> ty::TypingEnv<'tcx> {
        self.typing_env
    }
}

impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
    fn new(body: &'mir Body<'tcx>, tcx: TyCtxt<'tcx>) -> ConstPropagator<'mir, 'tcx> {
        let def_id = body.source.def_id();
        // FIXME(#132279): This is used during the phase transition from analysis
        // to runtime, so we have to manually specify the correct typing mode.
        let typing_env = ty::TypingEnv::post_analysis(tcx, body.source.def_id());
        let can_const_prop = CanConstProp::check(tcx, typing_env, body);
        let ecx = InterpCx::new(tcx, tcx.def_span(def_id), typing_env, DummyMachine);

        ConstPropagator {
            ecx,
            tcx,
            typing_env,
            worklist: vec![START_BLOCK],
            visited_blocks: DenseBitSet::new_empty(body.basic_blocks.len()),
            locals: IndexVec::from_elem_n(Value::Uninit, body.local_decls.len()),
            body,
            can_const_prop,
            written_only_inside_own_block_locals: Default::default(),
        }
    }

    fn local_decls(&self) -> &'mir LocalDecls<'tcx> {
        &self.body.local_decls
    }

    fn get_const(&self, place: Place<'tcx>) -> Option<&Value<'tcx>> {
        self.locals[place.local].project(&place.projection, self)
    }

    /// Removes `local` from the pool of propagated `Locals`: it may still be
    /// written to, but can no longer be read from.
    fn remove_const(&mut self, local: Local) {
        self.locals[local] = Value::Uninit;
        self.written_only_inside_own_block_locals.remove(&local);
    }

    fn access_mut(&mut self, place: &Place<'_>) -> Option<&mut Value<'tcx>> {
        match self.can_const_prop[place.local] {
            ConstPropMode::NoPropagation => return None,
            ConstPropMode::OnlyInsideOwnBlock => {
                self.written_only_inside_own_block_locals.insert(place.local);
            }
            ConstPropMode::FullConstProp => {}
        }
        self.locals[place.local].project_mut(place.projection)
    }

    fn lint_root(&self, source_info: SourceInfo) -> Option<HirId> {
        source_info.scope.lint_root(&self.body.source_scopes)
    }

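    /// Runs `f` in the interpreter context, trading the rich `InterpResult`
    /// for an `Option`: interpreter errors are traced and discarded, since for
    /// this lint a failed evaluation just means "nothing is known".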
    fn use_ecx<F, T>(&mut self, f: F) -> Option<T>
    where
        F: FnOnce(&mut Self) -> InterpResult<'tcx, T>,
    {
        f(self)
            .map_err_info(|err| {
                trace!("InterpCx operation failed: {:?}", err);
                // Some errors shouldn't come up because creating them causes
                // an allocation, which we should avoid. When that happens,
                // dedicated error variants should be introduced instead.
                assert!(
                    !err.kind().formatted_string(),
                    "known panics lint encountered formatting error: {}",
                    format_interp_error(self.ecx.tcx.dcx(), err),
                );
                err
            })
            .discard_err()
    }

    /// Returns the value, if any, of evaluating `c`.
    fn eval_constant(&mut self, c: &ConstOperand<'tcx>) -> Option<ImmTy<'tcx>> {
        // FIXME we need to revisit this for #67176
        if c.has_param() {
            return None;
        }

        // Normalization is needed because the known panics lint runs in
        // `mir_drops_elaborated_and_const_checked`, which happens before
        // optimized MIR. Only after optimizing the MIR can we guarantee
        // that the `PostAnalysisNormalize` pass has happened and that the body's consts
        // are normalized, so any call to resolve before that needs to be
        // manually normalized.
        let val = self.tcx.try_normalize_erasing_regions(self.typing_env, c.const_).ok()?;

        self.use_ecx(|this| this.ecx.eval_mir_constant(&val, c.span, None))?
            .as_mplace_or_imm()
            .right()
    }

    /// Returns the value, if any, of evaluating `place`.
    #[instrument(level = "trace", skip(self), ret)]
    fn eval_place(&mut self, place: Place<'tcx>) -> Option<ImmTy<'tcx>> {
        match self.get_const(place)? {
            Value::Immediate(imm) => Some(imm.clone()),
            Value::Aggregate { .. } => None,
            Value::Uninit => None,
        }
    }

    /// Returns the value, if any, of evaluating `op`. Calls upon `eval_constant`
    /// or `eval_place`, depending on the variant of `Operand` used.
    fn eval_operand(&mut self, op: &Operand<'tcx>) -> Option<ImmTy<'tcx>> {
        match *op {
            Operand::Constant(ref c) => self.eval_constant(c),
            Operand::Move(place) | Operand::Copy(place) => self.eval_place(place),
        }
    }

    fn report_assert_as_lint(
        &self,
        location: Location,
        lint_kind: AssertLintKind,
        assert_kind: AssertKind<impl Debug>,
    ) {
        let source_info = self.body.source_info(location);
        if let Some(lint_root) = self.lint_root(*source_info) {
            let span = source_info.span;
            self.tcx.emit_node_span_lint(
                lint_kind.lint(),
                lint_root,
                span,
                AssertLint { span, assert_kind, lint_kind },
            );
        }
    }

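    /// Lints unary operations that are known to panic at runtime. The only
    /// candidate is integer `Neg`: illustratively, `-x` with `x == i32::MIN`
    /// overflows, since `i32::MIN` has no positive counterpart.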
    fn check_unary_op(&mut self, op: UnOp, arg: &Operand<'tcx>, location: Location) -> Option<()> {
        let arg = self.eval_operand(arg)?;
        // The only operator that can overflow is `Neg`.
        if op == UnOp::Neg && arg.layout.ty.is_integral() {
            // Compute this as `0 - arg` so we can use `SubWithOverflow` to check for overflow.
            let (arg, overflow) = self.use_ecx(|this| {
                let arg = this.ecx.read_immediate(&arg)?;
                let (_res, overflow) = this
                    .ecx
                    .binary_op(BinOp::SubWithOverflow, &ImmTy::from_int(0, arg.layout), &arg)?
                    .to_scalar_pair();
                interp_ok((arg, overflow.to_bool()?))
            })?;
            if overflow {
                self.report_assert_as_lint(
                    location,
                    AssertLintKind::ArithmeticOverflow,
                    AssertKind::OverflowNeg(arg.to_const_int()),
                );
                return None;
            }
        }

        Some(())
    }

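    /// Lints binary operations that are known to panic at runtime: oversized
    /// shifts, which are checked even when only the right-hand side is known
    /// (illustratively, `x << 9` on a `u8` always overflows), and arithmetic
    /// overflow of the overflowing operators when both operands are known.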
    fn check_binary_op(
        &mut self,
        op: BinOp,
        left: &Operand<'tcx>,
        right: &Operand<'tcx>,
        location: Location,
    ) -> Option<()> {
        let r =
            self.eval_operand(right).and_then(|r| self.use_ecx(|this| this.ecx.read_immediate(&r)));
        let l =
            self.eval_operand(left).and_then(|l| self.use_ecx(|this| this.ecx.read_immediate(&l)));
        // Check for exceeding shifts *even if* we cannot evaluate the LHS.
        if matches!(op, BinOp::Shr | BinOp::Shl) {
            let r = r.clone()?;
            // We need the type of the LHS. We cannot use `place_layout` as that is the type
            // of the result, which for checked binops is not the same!
            let left_ty = left.ty(self.local_decls(), self.tcx);
            let left_size = self.ecx.layout_of(left_ty).ok()?.size;
            let right_size = r.layout.size;
            let r_bits = r.to_scalar().to_bits(right_size).discard_err();
            if r_bits.is_some_and(|b| b >= left_size.bits() as u128) {
                debug!("check_binary_op: reporting assert for {:?}", location);
                let panic = AssertKind::Overflow(
                    op,
                    // Invent a dummy value, the diagnostic ignores it anyway
                    ConstInt::new(
                        ScalarInt::try_from_uint(1_u8, left_size).unwrap(),
                        left_ty.is_signed(),
                        left_ty.is_ptr_sized_integral(),
                    ),
                    r.to_const_int(),
                );
                self.report_assert_as_lint(location, AssertLintKind::ArithmeticOverflow, panic);
                return None;
            }
        }

        // Div/Rem are handled via the assertions they trigger.
        // But for Add/Sub/Mul, those assertions only exist in debug builds, and we want to
        // lint in release builds as well, so we check on the operation instead.
        // So normalize to the "overflowing" operator, and then ensure that it
        // actually is an overflowing operator.
        let op = op.wrapping_to_overflowing().unwrap_or(op);
        // The remaining operators are handled through `wrapping_to_overflowing`.
        if let (Some(l), Some(r)) = (l, r)
            && l.layout.ty.is_integral()
            && op.is_overflowing()
            && self.use_ecx(|this| {
                let (_res, overflow) = this.ecx.binary_op(op, &l, &r)?.to_scalar_pair();
                overflow.to_bool()
            })?
        {
            self.report_assert_as_lint(
                location,
                AssertLintKind::ArithmeticOverflow,
                AssertKind::Overflow(op, l.to_const_int(), r.to_const_int()),
            );
            return None;
        }

        Some(())
    }

    fn check_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) -> Option<()> {
        // Perform any special handling for specific Rvalue types.
        // Generally, checks here fall into one of two categories:
        //   1. Additional checking to provide useful lints to the user
        //        - In this case, we will do some validation and then fall through to the
        //          end of the function which evals the assignment.
        //   2. Working around bugs in other parts of the compiler
        //        - In this case, we'll return `None` from this function to stop evaluation.
        match rvalue {
            // Additional checking: give lints to the user if an overflow would occur.
            // We do this here and not in the `Assert` terminator as that terminator is
            // only sometimes emitted (overflow checks can be disabled), but we want to always
            // lint.
            Rvalue::UnaryOp(op, arg) => {
                trace!("checking UnaryOp(op = {:?}, arg = {:?})", op, arg);
                self.check_unary_op(*op, arg, location)?;
            }
            Rvalue::BinaryOp(op, box (left, right)) => {
                trace!("checking BinaryOp(op = {:?}, left = {:?}, right = {:?})", op, left, right);
                self.check_binary_op(*op, left, right, location)?;
            }

            // Do not try creating references (#67862)
            Rvalue::RawPtr(_, place) | Rvalue::Ref(_, _, place) => {
                trace!("skipping RawPtr | Ref for {:?}", place);

                // This may be creating mutable references or immutable references to cells.
                // If that happens, the pointed to value could be mutated via that reference.
                // Since we aren't tracking references, the const propagator loses track of what
                // value the local has right now.
                // Thus, all locals that have their reference taken
                // must not take part in propagation.
                self.remove_const(place.local);

                return None;
            }
            Rvalue::ThreadLocalRef(def_id) => {
                trace!("skipping ThreadLocalRef({:?})", def_id);

                return None;
            }

            // There's no other checking to do at this time.
            Rvalue::Aggregate(..)
            | Rvalue::Use(..)
            | Rvalue::CopyForDeref(..)
            | Rvalue::Repeat(..)
            | Rvalue::Len(..)
            | Rvalue::Cast(..)
            | Rvalue::ShallowInitBox(..)
            | Rvalue::Discriminant(..)
            | Rvalue::NullaryOp(..)
            | Rvalue::WrapUnsafeBinder(..) => {}
        }

        // FIXME we need to revisit this for #67176
        if rvalue.has_param() {
            return None;
        }
        if !rvalue.ty(self.local_decls(), self.tcx).is_sized(self.tcx, self.typing_env) {
            // the interpreter doesn't support unsized locals (only unsized arguments),
            // but rustc does (in a kinda broken way), so we have to skip them here
            return None;
        }

        Some(())
    }

    fn check_assertion(
        &mut self,
        expected: bool,
        msg: &AssertKind<Operand<'tcx>>,
        cond: &Operand<'tcx>,
        location: Location,
    ) {
        let Some(value) = &self.eval_operand(cond) else { return };
        trace!("assertion on {:?} should be {:?}", value, expected);

        let expected = Scalar::from_bool(expected);
        let Some(value_const) = self.use_ecx(|this| this.ecx.read_scalar(value)) else { return };

        if expected != value_const {
            // Poison all places this operand references so that further code
            // doesn't use the invalid value
            if let Some(place) = cond.place() {
                self.remove_const(place.local);
            }

            enum DbgVal<T> {
                Val(T),
                Underscore,
            }
            impl<T: std::fmt::Debug> std::fmt::Debug for DbgVal<T> {
                fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                    match self {
                        Self::Val(val) => val.fmt(fmt),
                        Self::Underscore => fmt.write_str("_"),
                    }
                }
            }
            let mut eval_to_int = |op| {
                // This can be `None` if the lhs wasn't const propagated and we just
                // triggered the assert on the value of the rhs.
                self.eval_operand(op)
                    .and_then(|op| self.ecx.read_immediate(&op).discard_err())
                    .map_or(DbgVal::Underscore, |op| DbgVal::Val(op.to_const_int()))
            };
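            // Illustratively, a division `x / 0` whose `x` wasn't propagated
            // would thus be rendered with an underscore placeholder for the
            // dividend, along the lines of: attempt to divide `_` by zero.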
            let msg = match msg {
                AssertKind::DivisionByZero(op) => AssertKind::DivisionByZero(eval_to_int(op)),
                AssertKind::RemainderByZero(op) => AssertKind::RemainderByZero(eval_to_int(op)),
                AssertKind::Overflow(bin_op @ (BinOp::Div | BinOp::Rem), op1, op2) => {
                    // Division overflow is *UB* in the MIR, and different from the
                    // other overflow checks.
                    AssertKind::Overflow(*bin_op, eval_to_int(op1), eval_to_int(op2))
                }
                AssertKind::BoundsCheck { ref len, ref index } => {
                    let len = eval_to_int(len);
                    let index = eval_to_int(index);
                    AssertKind::BoundsCheck { len, index }
                }
                // Remaining overflow errors are already covered by checks on the binary operators.
                AssertKind::Overflow(..) | AssertKind::OverflowNeg(_) => return,
                // Need proper const propagator for these.
                _ => return,
            };
            self.report_assert_as_lint(location, AssertLintKind::UnconditionalPanic, msg);
        }
    }

    fn ensure_not_propagated(&self, local: Local) {
        if cfg!(debug_assertions) {
            let val = self.get_const(local.into());
            assert!(
                matches!(val, Some(Value::Uninit))
                    || self
                        .layout_of(self.local_decls()[local].ty)
                        .map_or(true, |layout| layout.is_zst()),
                "failed to remove values for `{local:?}`, value={val:?}",
            )
        }
    }

    #[instrument(level = "trace", skip(self), ret)]
    fn eval_rvalue(&mut self, rvalue: &Rvalue<'tcx>, dest: &Place<'tcx>) -> Option<()> {
        if !dest.projection.is_empty() {
            return None;
        }
        use rustc_middle::mir::Rvalue::*;
        let layout = self.ecx.layout_of(dest.ty(self.body, self.tcx).ty).ok()?;
        trace!(?layout);

        let val: Value<'_> = match *rvalue {
            ThreadLocalRef(_) => return None,

            Use(ref operand) | WrapUnsafeBinder(ref operand, _) => {
                self.eval_operand(operand)?.into()
            }

            CopyForDeref(place) => self.eval_place(place)?.into(),

            BinaryOp(bin_op, box (ref left, ref right)) => {
                let left = self.eval_operand(left)?;
                let left = self.use_ecx(|this| this.ecx.read_immediate(&left))?;

                let right = self.eval_operand(right)?;
                let right = self.use_ecx(|this| this.ecx.read_immediate(&right))?;

                let val = self.use_ecx(|this| this.ecx.binary_op(bin_op, &left, &right))?;
                if matches!(val.layout.backend_repr, BackendRepr::ScalarPair(..)) {
                    // FIXME `Value` should properly support pairs in `Immediate`... but currently
                    // it does not.
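                    // Illustratively: a checked `u8` addition of 200 + 100
                    // yields the pair (44, true), i.e. the wrapped result plus
                    // an overflow flag, stored here as a two-field aggregate.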
                    let (val, overflow) = val.to_pair(&self.ecx);
                    Value::Aggregate {
                        variant: VariantIdx::ZERO,
                        fields: [val.into(), overflow.into()].into_iter().collect(),
                    }
                } else {
                    val.into()
                }
            }

            UnaryOp(un_op, ref operand) => {
                let operand = self.eval_operand(operand)?;
                let val = self.use_ecx(|this| this.ecx.read_immediate(&operand))?;

                let val = self.use_ecx(|this| this.ecx.unary_op(un_op, &val))?;
                val.into()
            }

            Aggregate(ref kind, ref fields) => Value::Aggregate {
                fields: fields
                    .iter()
                    .map(|field| self.eval_operand(field).map_or(Value::Uninit, Value::Immediate))
                    .collect(),
                variant: match **kind {
                    AggregateKind::Adt(_, variant, _, _, _) => variant,
                    AggregateKind::Array(_)
                    | AggregateKind::Tuple
                    | AggregateKind::RawPtr(_, _)
                    | AggregateKind::Closure(_, _)
                    | AggregateKind::Coroutine(_, _)
                    | AggregateKind::CoroutineClosure(_, _) => VariantIdx::ZERO,
                },
            },

            Repeat(ref op, n) => {
                trace!(?op, ?n);
                return None;
            }

            Len(place) => {
                let len = if let ty::Array(_, n) = place.ty(self.local_decls(), self.tcx).ty.kind()
                {
                    n.try_to_target_usize(self.tcx)?
                } else {
                    match self.get_const(place)? {
                        Value::Immediate(src) => src.len(&self.ecx).discard_err()?,
                        Value::Aggregate { fields, .. } => fields.len() as u64,
                        Value::Uninit => return None,
                    }
                };
                ImmTy::from_scalar(Scalar::from_target_usize(len, self), layout).into()
            }

            Ref(..) | RawPtr(..) => return None,

            NullaryOp(ref null_op, ty) => {
                let op_layout = self.ecx.layout_of(ty).ok()?;
                let val = match null_op {
                    NullOp::SizeOf => op_layout.size.bytes(),
                    NullOp::AlignOf => op_layout.align.abi.bytes(),
                    NullOp::OffsetOf(fields) => self
                        .tcx
                        .offset_of_subfield(self.typing_env, op_layout, fields.iter())
                        .bytes(),
                    NullOp::UbChecks => return None,
                    NullOp::ContractChecks => return None,
                };
                ImmTy::from_scalar(Scalar::from_target_usize(val, self), layout).into()
            }

            ShallowInitBox(..) => return None,

            Cast(ref kind, ref value, to) => match kind {
                CastKind::IntToInt | CastKind::IntToFloat => {
                    let value = self.eval_operand(value)?;
                    let value = self.ecx.read_immediate(&value).discard_err()?;
                    let to = self.ecx.layout_of(to).ok()?;
                    let res = self.ecx.int_to_int_or_float(&value, to).discard_err()?;
                    res.into()
                }
                CastKind::FloatToFloat | CastKind::FloatToInt => {
                    let value = self.eval_operand(value)?;
                    let value = self.ecx.read_immediate(&value).discard_err()?;
                    let to = self.ecx.layout_of(to).ok()?;
                    let res = self.ecx.float_to_float_or_int(&value, to).discard_err()?;
                    res.into()
                }
                CastKind::Transmute => {
                    let value = self.eval_operand(value)?;
                    let to = self.ecx.layout_of(to).ok()?;
                    // `offset` for immediates only supports scalar/scalar-pair ABIs,
                    // so bail out if the target is not one.
                    match (value.layout.backend_repr, to.backend_repr) {
                        (BackendRepr::Scalar(..), BackendRepr::Scalar(..)) => {}
                        (BackendRepr::ScalarPair(..), BackendRepr::ScalarPair(..)) => {}
                        _ => return None,
                    }

                    value.offset(Size::ZERO, to, &self.ecx).discard_err()?.into()
                }
                _ => return None,
            },

            Discriminant(place) => {
                let variant = match self.get_const(place)? {
                    Value::Immediate(op) => {
                        let op = op.clone();
                        self.use_ecx(|this| this.ecx.read_discriminant(&op))?
                    }
                    Value::Aggregate { variant, .. } => *variant,
                    Value::Uninit => return None,
                };
                let imm = self.use_ecx(|this| {
                    this.ecx.discriminant_for_variant(
                        place.ty(this.local_decls(), this.tcx).ty,
                        variant,
                    )
                })?;
                imm.into()
            }
        };
        trace!(?val);

        *self.access_mut(dest)? = val;

        Some(())
    }
}

impl<'tcx> Visitor<'tcx> for ConstPropagator<'_, 'tcx> {
    fn visit_body(&mut self, body: &Body<'tcx>) {
        while let Some(bb) = self.worklist.pop() {
            if !self.visited_blocks.insert(bb) {
                continue;
            }

            let data = &body.basic_blocks[bb];
            self.visit_basic_block_data(bb, data);
        }
    }

    fn visit_operand(&mut self, operand: &Operand<'tcx>, location: Location) {
        self.super_operand(operand, location);
    }

    fn visit_const_operand(&mut self, constant: &ConstOperand<'tcx>, location: Location) {
        trace!("visit_const_operand: {:?}", constant);
        self.super_const_operand(constant, location);
        self.eval_constant(constant);
    }

    fn visit_assign(&mut self, place: &Place<'tcx>, rvalue: &Rvalue<'tcx>, location: Location) {
        self.super_assign(place, rvalue, location);

        let Some(()) = self.check_rvalue(rvalue, location) else { return };

        match self.can_const_prop[place.local] {
            // Do nothing if the place is indirect.
            _ if place.is_indirect() => {}
            ConstPropMode::NoPropagation => self.ensure_not_propagated(place.local),
            ConstPropMode::OnlyInsideOwnBlock | ConstPropMode::FullConstProp => {
                if self.eval_rvalue(rvalue, place).is_none() {
                    // Const prop failed, so erase the destination, ensuring that whatever happens
                    // from here on, does not know about the previous value.
                    // This is important in case we have
                    // ```rust
                    // let mut x = 42;
                    // x = SOME_MUTABLE_STATIC;
                    // // x must now be uninit
                    // ```
                    // FIXME: we overzealously erase the entire local, because that's easier to
                    // implement.
                    trace!(
                        "propagation into {:?} failed.
                        Nuking the entire site from orbit, it's the only way to be sure",
                        place,
                    );
                    self.remove_const(place.local);
                }
            }
        }
    }


    fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
        trace!("visit_statement: {:?}", statement);

        // We want to evaluate operands before any change to the assigned-to value,
        // so we recurse first.
        self.super_statement(statement, location);

        match statement.kind {
            StatementKind::SetDiscriminant { ref place, variant_index } => {
                match self.can_const_prop[place.local] {
                    // Do nothing if the place is indirect.
                    _ if place.is_indirect() => {}
                    ConstPropMode::NoPropagation => self.ensure_not_propagated(place.local),
                    ConstPropMode::FullConstProp | ConstPropMode::OnlyInsideOwnBlock => {
                        match self.access_mut(place) {
                            Some(Value::Aggregate { variant, .. }) => *variant = variant_index,
                            _ => self.remove_const(place.local),
                        }
                    }
                }
            }
            StatementKind::StorageLive(local) => {
                self.remove_const(local);
            }
            StatementKind::StorageDead(local) => {
                self.remove_const(local);
            }
            _ => {}
        }
    }

    fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
        self.super_terminator(terminator, location);
        match &terminator.kind {
            TerminatorKind::Assert { expected, ref msg, ref cond, .. } => {
                self.check_assertion(*expected, msg, cond, location);
            }
            TerminatorKind::SwitchInt { ref discr, ref targets } => {
                if let Some(ref value) = self.eval_operand(discr)
                    && let Some(value_const) = self.use_ecx(|this| this.ecx.read_scalar(value))
                    && let Some(constant) = value_const.to_bits(value_const.size()).discard_err()
                {
                    // We managed to evaluate the discriminant, so we know we only need to visit
                    // one target.
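                    // Illustratively: if the discriminant is known to be 1,
                    // only the edge for 1 (or the `otherwise` edge) is queued,
                    // so blocks that are now known to be unreachable are never
                    // visited and thus never linted.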
                    let target = targets.target_for_value(constant);
                    self.worklist.push(target);
                    return;
                }
                // We failed to evaluate the discriminant; fall back to visiting all successors.
            }
            // None of these have Operands to const-propagate.
            TerminatorKind::Goto { .. }
            | TerminatorKind::UnwindResume
            | TerminatorKind::UnwindTerminate(_)
            | TerminatorKind::Return
            | TerminatorKind::TailCall { .. }
            | TerminatorKind::Unreachable
            | TerminatorKind::Drop { .. }
            | TerminatorKind::Yield { .. }
            | TerminatorKind::CoroutineDrop
            | TerminatorKind::FalseEdge { .. }
            | TerminatorKind::FalseUnwind { .. }
            | TerminatorKind::Call { .. }
            | TerminatorKind::InlineAsm { .. } => {}
        }

        self.worklist.extend(terminator.successors());
    }

    fn visit_basic_block_data(&mut self, block: BasicBlock, data: &BasicBlockData<'tcx>) {
        self.super_basic_block_data(block, data);

        // We remove all Locals which are restricted in propagation to their containing blocks and
        // which were modified in the current block.
        // Take the set out of `self` so that we can mutably borrow `self` for `remove_const`.
        let mut written_only_inside_own_block_locals =
            std::mem::take(&mut self.written_only_inside_own_block_locals);

        // This loop can get very hot for some bodies: it checks each local in each bb.
        // To avoid this quadratic behaviour, we only clear the locals that were modified inside
        // the current block.
        // The order in which we remove consts does not matter.
        #[allow(rustc::potential_query_instability)]
        for local in written_only_inside_own_block_locals.drain() {
            debug_assert_eq!(self.can_const_prop[local], ConstPropMode::OnlyInsideOwnBlock);
            self.remove_const(local);
        }
        self.written_only_inside_own_block_locals = written_only_inside_own_block_locals;

        if cfg!(debug_assertions) {
            for (local, &mode) in self.can_const_prop.iter_enumerated() {
                match mode {
                    ConstPropMode::FullConstProp => {}
                    ConstPropMode::NoPropagation | ConstPropMode::OnlyInsideOwnBlock => {
                        self.ensure_not_propagated(local);
                    }
                }
            }
        }
    }
}

/// The maximum number of bytes of space we'll allocate for a local or the
/// return value. Needed for #66397, because otherwise we eval into large
/// places and that can cause OOM or just severely regress performance.
const MAX_ALLOC_LIMIT: u64 = 1024;

/// The mode that `ConstProp` is allowed to run in for a given `Local`.
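///
/// Illustratively: a local that is assigned more than once is demoted to
/// `OnlyInsideOwnBlock`, and a local whose address is taken is demoted to
/// `NoPropagation`.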
#[derive(Clone, Copy, Debug, PartialEq)]
enum ConstPropMode {
    /// The `Local` can be propagated into and reads of this `Local` can also be propagated.
    FullConstProp,
    /// The `Local` can only be propagated into and from its own block.
    OnlyInsideOwnBlock,
    /// The `Local` cannot be part of propagation at all. Any statement
    /// referencing it either for reading or writing will not get propagated.
    NoPropagation,
}

/// A visitor that determines which locals in a MIR body
/// can be const propagated.
struct CanConstProp {
    can_const_prop: IndexVec<Local, ConstPropMode>,
    // False at the beginning. Once set, no more assignments are allowed to that local.
    found_assignment: DenseBitSet<Local>,
}

impl CanConstProp {
    /// Computes the propagation mode for every local in `body`.
    fn check<'tcx>(
        tcx: TyCtxt<'tcx>,
        typing_env: ty::TypingEnv<'tcx>,
        body: &Body<'tcx>,
    ) -> IndexVec<Local, ConstPropMode> {
        let mut cpv = CanConstProp {
            can_const_prop: IndexVec::from_elem(ConstPropMode::FullConstProp, &body.local_decls),
            found_assignment: DenseBitSet::new_empty(body.local_decls.len()),
        };
        for (local, val) in cpv.can_const_prop.iter_enumerated_mut() {
            let ty = body.local_decls[local].ty;
            if ty.is_union() {
                // Unions are incompatible with the current implementation of
                // const prop because Rust has no concept of an active
                // variant of a union
                *val = ConstPropMode::NoPropagation;
            } else {
                match tcx.layout_of(typing_env.as_query_input(ty)) {
                    Ok(layout) if layout.size < Size::from_bytes(MAX_ALLOC_LIMIT) => {}
                    // Either the layout fails to compute, in which case we can't use the
                    // local anyway, or the local is too large, in which case we don't want to.
                    _ => {
                        *val = ConstPropMode::NoPropagation;
                        continue;
                    }
                }
            }
        }
        // Consider that arguments are assigned on entry.
        for arg in body.args_iter() {
            cpv.found_assignment.insert(arg);
        }
        cpv.visit_body(body);
        cpv.can_const_prop
    }
}


impl<'tcx> Visitor<'tcx> for CanConstProp {
    fn visit_place(&mut self, place: &Place<'tcx>, mut context: PlaceContext, loc: Location) {
        use rustc_middle::mir::visit::PlaceContext::*;

        // Dereferencing just reads the address of `place.local`.
        if place.projection.first() == Some(&PlaceElem::Deref) {
            context = NonMutatingUse(NonMutatingUseContext::Copy);
        }

        self.visit_local(place.local, context, loc);
        self.visit_projection(place.as_ref(), context, loc);
    }

    fn visit_local(&mut self, local: Local, context: PlaceContext, _: Location) {
        use rustc_middle::mir::visit::PlaceContext::*;
        match context {
            // These are just stores, where the storing is not propagatable, but there may be later
            // mutations of the same local via `Store`
            | MutatingUse(MutatingUseContext::Call)
            | MutatingUse(MutatingUseContext::AsmOutput)
            | MutatingUse(MutatingUseContext::Deinit)
            // Actual store that can possibly even propagate a value
            | MutatingUse(MutatingUseContext::Store)
            | MutatingUse(MutatingUseContext::SetDiscriminant) => {
                if !self.found_assignment.insert(local) {
                    match &mut self.can_const_prop[local] {
                        // If the local can only get propagated in its own block, then we don't have
                        // to worry about multiple assignments, as we'll nuke the const state at the
                        // end of the block anyway, and inside the block we overwrite previous
                        // states as applicable.
                        ConstPropMode::OnlyInsideOwnBlock => {}
                        ConstPropMode::NoPropagation => {}
                        other @ ConstPropMode::FullConstProp => {
                            trace!(
                                "local {:?} can't be propagated because of multiple assignments. Previous state: {:?}",
                                local, other,
                            );
                            *other = ConstPropMode::OnlyInsideOwnBlock;
                        }
                    }
                }
            }
            // Reading constants is allowed an arbitrary number of times
            NonMutatingUse(NonMutatingUseContext::Copy)
            | NonMutatingUse(NonMutatingUseContext::Move)
            | NonMutatingUse(NonMutatingUseContext::Inspect)
            | NonMutatingUse(NonMutatingUseContext::PlaceMention)
            | NonUse(_) => {}

            // These could be propagated with a smarter analysis or just some careful thinking about
            // whether they'd be fine right now.
            MutatingUse(MutatingUseContext::Yield)
            | MutatingUse(MutatingUseContext::Drop)
            | MutatingUse(MutatingUseContext::Retag)
            // These can't ever be propagated under any scheme, as we can't reason about indirect
            // mutation.
            | NonMutatingUse(NonMutatingUseContext::SharedBorrow)
            | NonMutatingUse(NonMutatingUseContext::FakeBorrow)
            | NonMutatingUse(NonMutatingUseContext::RawBorrow)
            | MutatingUse(MutatingUseContext::Borrow)
            | MutatingUse(MutatingUseContext::RawBorrow) => {
                trace!("local {:?} can't be propagated because it's used: {:?}", local, context);
                self.can_const_prop[local] = ConstPropMode::NoPropagation;
            }
            MutatingUse(MutatingUseContext::Projection)
            | NonMutatingUse(NonMutatingUseContext::Projection) => {
                bug!("visit_place should not pass {context:?} for {local:?}")
            }
        }
    }
}