use std::assert_matches::assert_matches;
use std::cell::RefCell;
use std::fmt::Formatter;

use rustc_abi::{BackendRepr, FIRST_VARIANT, FieldIdx, Size, VariantIdx};
use rustc_const_eval::const_eval::{DummyMachine, throw_machine_stop_str};
use rustc_const_eval::interpret::{
    ImmTy, Immediate, InterpCx, OpTy, PlaceTy, Projectable, interp_ok,
};
use rustc_data_structures::fx::FxHashMap;
use rustc_hir::def::DefKind;
use rustc_middle::bug;
use rustc_middle::mir::interpret::{InterpResult, Scalar};
use rustc_middle::mir::visit::{MutVisitor, PlaceContext, Visitor};
use rustc_middle::mir::*;
use rustc_middle::ty::{self, Ty, TyCtxt};
use rustc_mir_dataflow::fmt::DebugWithContext;
use rustc_mir_dataflow::lattice::{FlatSet, HasBottom};
use rustc_mir_dataflow::value_analysis::{
    Map, PlaceIndex, State, TrackElem, ValueOrPlace, debug_with_context,
};
use rustc_mir_dataflow::{Analysis, ResultsVisitor, visit_reachable_results};
use rustc_span::DUMMY_SP;
use tracing::{debug, debug_span, instrument};

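// Rough limits to keep the pass cheap. Bodies with more than `BLOCK_LIMIT` basic blocks are
// skipped entirely, and `PLACE_LIMIT` is handed to `Map::new` to bound the number of tracked
// places. Both limits are ignored at `-Zmir-opt-level=4` and above.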
const BLOCK_LIMIT: usize = 100;
const PLACE_LIMIT: usize = 100;

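/// A constant propagation pass based on a forward dataflow analysis over scalar values.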
pub(super) struct DataflowConstProp;

impl<'tcx> crate::MirPass<'tcx> for DataflowConstProp {
    fn is_enabled(&self, sess: &rustc_session::Session) -> bool {
        sess.mir_opt_level() >= 3
    }

    #[instrument(skip_all, level = "debug")]
    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
        debug!(def_id = ?body.source.def_id());
        if tcx.sess.mir_opt_level() < 4 && body.basic_blocks.len() > BLOCK_LIMIT {
            debug!("aborted dataflow const prop due to too many basic blocks");
            return;
        }

        let place_limit = if tcx.sess.mir_opt_level() < 4 { Some(PLACE_LIMIT) } else { None };

        // Decide which places to track during the analysis.
        let map = Map::new(tcx, body, place_limit);

        // Perform the actual dataflow analysis.
        let const_ = debug_span!("analyze")
            .in_scope(|| ConstAnalysis::new(tcx, body, map).iterate_to_fixpoint(tcx, body, None));

        // Collect results and patch the body afterwards.
        let mut visitor = Collector::new(tcx, &body.local_decls);
        debug_span!("collect").in_scope(|| visit_reachable_results(body, &const_, &mut visitor));
        let mut patch = visitor.patch;
        debug_span!("patch").in_scope(|| patch.visit_body_preserves_cfg(body));
    }

    fn is_required(&self) -> bool {
        false
    }
}

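/// The dataflow analysis proper: for every tracked place, the state stores a `FlatSet<Scalar>`:
/// `Bottom` (no value observed), `Elem` (a single known scalar) or `Top` (unknown). A
/// `DummyMachine` interpreter is used to evaluate casts, unary/binary operations and
/// discriminants on known values.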
struct ConstAnalysis<'a, 'tcx> {
    map: Map<'tcx>,
    tcx: TyCtxt<'tcx>,
    local_decls: &'a LocalDecls<'tcx>,
    ecx: RefCell<InterpCx<'tcx, DummyMachine>>,
    typing_env: ty::TypingEnv<'tcx>,
}

impl<'tcx> Analysis<'tcx> for ConstAnalysis<'_, 'tcx> {
    type Domain = State<FlatSet<Scalar>>;

    const NAME: &'static str = "ConstAnalysis";

    fn bottom_value(&self, _body: &Body<'tcx>) -> Self::Domain {
        // The lattice bottom denotes code that has not been reached yet.
        State::Unreachable
    }

    fn initialize_start_block(&self, body: &Body<'tcx>, state: &mut Self::Domain) {
        // The start block is reachable, but nothing is known about the arguments yet.
        assert_matches!(state, State::Unreachable);
        *state = State::new_reachable();
        for arg in body.args_iter() {
            state.flood(PlaceRef { local: arg, projection: &[] }, &self.map);
        }
    }

    fn apply_primary_statement_effect(
        &self,
        state: &mut Self::Domain,
        statement: &Statement<'tcx>,
        _location: Location,
    ) {
        if state.is_reachable() {
            self.handle_statement(statement, state);
        }
    }

    fn apply_primary_terminator_effect<'mir>(
        &self,
        state: &mut Self::Domain,
        terminator: &'mir Terminator<'tcx>,
        _location: Location,
    ) -> TerminatorEdges<'mir, 'tcx> {
        if state.is_reachable() {
            self.handle_terminator(terminator, state)
        } else {
            TerminatorEdges::None
        }
    }

    fn apply_call_return_effect(
        &self,
        state: &mut Self::Domain,
        _block: BasicBlock,
        return_places: CallReturnPlaces<'_, 'tcx>,
    ) {
        if state.is_reachable() {
            self.handle_call_return(return_places, state)
        }
    }
}

impl<'a, 'tcx> ConstAnalysis<'a, 'tcx> {
    fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, map: Map<'tcx>) -> Self {
        let typing_env = body.typing_env(tcx);
        Self {
            map,
            tcx,
            local_decls: &body.local_decls,
            ecx: RefCell::new(InterpCx::new(tcx, DUMMY_SP, typing_env, DummyMachine)),
            typing_env,
        }
    }

    fn handle_statement(&self, statement: &Statement<'tcx>, state: &mut State<FlatSet<Scalar>>) {
        match &statement.kind {
            StatementKind::Assign(box (place, rvalue)) => {
                self.handle_assign(*place, rvalue, state);
            }
            StatementKind::SetDiscriminant { box place, variant_index } => {
                self.handle_set_discriminant(*place, *variant_index, state);
            }
            StatementKind::Intrinsic(box intrinsic) => {
                self.handle_intrinsic(intrinsic);
            }
            StatementKind::StorageLive(local) | StatementKind::StorageDead(local) => {
                // The local is (re)allocated or deallocated, so any previously known value
                // no longer applies.
                state.flood_with(
                    Place::from(*local).as_ref(),
                    &self.map,
                    FlatSet::<Scalar>::BOTTOM,
                );
            }
            StatementKind::Retag(..) => {
                // We don't track references.
            }
            StatementKind::ConstEvalCounter
            | StatementKind::Nop
            | StatementKind::FakeRead(..)
            | StatementKind::PlaceMention(..)
            | StatementKind::Coverage(..)
            | StatementKind::BackwardIncompatibleDropHint { .. }
            | StatementKind::AscribeUserType(..) => {}
        }
    }

    fn handle_intrinsic(&self, intrinsic: &NonDivergingIntrinsic<'tcx>) {
        match intrinsic {
            NonDivergingIntrinsic::Assume(..) => {
                // Could be used to refine the state, but ignoring it is also correct.
            }
            NonDivergingIntrinsic::CopyNonOverlapping(CopyNonOverlapping {
                dst: _,
                src: _,
                count: _,
            }) => {
                // This statement represents `*dst = *src`, `count` times.
            }
        }
    }

    fn handle_operand(
        &self,
        operand: &Operand<'tcx>,
        state: &mut State<FlatSet<Scalar>>,
    ) -> ValueOrPlace<FlatSet<Scalar>> {
        match operand {
            Operand::RuntimeChecks(_) => ValueOrPlace::TOP,
            Operand::Constant(box constant) => {
                ValueOrPlace::Value(self.handle_constant(constant, state))
            }
            Operand::Copy(place) | Operand::Move(place) => {
                self.map.find(place.as_ref()).map(ValueOrPlace::Place).unwrap_or(ValueOrPlace::TOP)
            }
        }
    }

    fn handle_terminator<'mir>(
        &self,
        terminator: &'mir Terminator<'tcx>,
        state: &mut State<FlatSet<Scalar>>,
    ) -> TerminatorEdges<'mir, 'tcx> {
        match &terminator.kind {
            TerminatorKind::Call { .. } | TerminatorKind::InlineAsm { .. } => {
                // The effect on the return places is applied in `apply_call_return_effect`.
            }
            TerminatorKind::Drop { place, .. } => {
                // Dropping the value leaves the place deinitialized.
                state.flood_with(place.as_ref(), &self.map, FlatSet::<Scalar>::BOTTOM);
            }
            TerminatorKind::Yield { .. } => {
                bug!("encountered disallowed terminator");
            }
            TerminatorKind::SwitchInt { discr, targets } => {
                return self.handle_switch_int(discr, targets, state);
            }
            TerminatorKind::TailCall { .. } => {
                // Tail calls do not return to this body, so there is nothing to apply here.
            }
            TerminatorKind::Goto { .. }
            | TerminatorKind::UnwindResume
            | TerminatorKind::UnwindTerminate(_)
            | TerminatorKind::Return
            | TerminatorKind::Unreachable
            | TerminatorKind::Assert { .. }
            | TerminatorKind::CoroutineDrop
            | TerminatorKind::FalseEdge { .. }
            | TerminatorKind::FalseUnwind { .. } => {
                // These terminators have no effect on the tracked values.
            }
        }
        terminator.edges()
    }

    fn handle_call_return(
        &self,
        return_places: CallReturnPlaces<'_, 'tcx>,
        state: &mut State<FlatSet<Scalar>>,
    ) {
        return_places.for_each(|place| {
            state.flood(place.as_ref(), &self.map);
        })
    }

    fn handle_set_discriminant(
        &self,
        place: Place<'tcx>,
        variant_index: VariantIdx,
        state: &mut State<FlatSet<Scalar>>,
    ) {
        state.flood_discr(place.as_ref(), &self.map);
        if self.map.find_discr(place.as_ref()).is_some() {
            let enum_ty = place.ty(self.local_decls, self.tcx).ty;
            if let Some(discr) = self.eval_discriminant(enum_ty, variant_index) {
                state.assign_discr(
                    place.as_ref(),
                    ValueOrPlace::Value(FlatSet::Elem(discr)),
                    &self.map,
                );
            }
        }
    }

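    /// Models an `Assign` statement: the overwritten place is flooded first, and then, depending
    /// on the rvalue, the tracked fields, discriminant, overflow flag or slice length are
    /// repopulated.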
    fn handle_assign(
        &self,
        target: Place<'tcx>,
        rvalue: &Rvalue<'tcx>,
        state: &mut State<FlatSet<Scalar>>,
    ) {
        match rvalue {
            Rvalue::Use(operand) => {
                state.flood(target.as_ref(), &self.map);
                if let Some(target) = self.map.find(target.as_ref()) {
                    self.assign_operand(state, target, operand);
                }
            }
            Rvalue::CopyForDeref(_) => bug!("`CopyForDeref` in runtime MIR"),
            Rvalue::Aggregate(kind, operands) => {
                state.flood(target.as_ref(), &self.map);

                let Some(target_idx) = self.map.find(target.as_ref()) else { return };

                let (variant_target, variant_index) = match **kind {
                    AggregateKind::Tuple | AggregateKind::Closure(..) => (Some(target_idx), None),
                    AggregateKind::Adt(def_id, variant_index, ..) => {
                        match self.tcx.def_kind(def_id) {
                            DefKind::Struct => (Some(target_idx), None),
                            DefKind::Enum => (
                                self.map.apply(target_idx, TrackElem::Variant(variant_index)),
                                Some(variant_index),
                            ),
                            _ => return,
                        }
                    }
                    _ => return,
                };
                if let Some(variant_target_idx) = variant_target {
                    for (field_index, operand) in operands.iter_enumerated() {
                        if let Some(field) =
                            self.map.apply(variant_target_idx, TrackElem::Field(field_index))
                        {
                            self.assign_operand(state, field, operand);
                        }
                    }
                }
                if let Some(variant_index) = variant_index
                    && let Some(discr_idx) = self.map.apply(target_idx, TrackElem::Discriminant)
                {
                    // Building an enum aggregate also sets its discriminant.
                    let enum_ty = target.ty(self.local_decls, self.tcx).ty;
                    if let Some(discr_val) = self.eval_discriminant(enum_ty, variant_index) {
                        state.insert_value_idx(discr_idx, FlatSet::Elem(discr_val), &self.map);
                    }
                }
            }
            Rvalue::BinaryOp(op, box (left, right)) if op.is_overflowing() => {
                // Overflowing operations yield a `(value, overflow_flag)` pair.
                state.flood(target.as_ref(), &self.map);

                let Some(target) = self.map.find(target.as_ref()) else { return };

                let value_target = self.map.apply(target, TrackElem::Field(0_u32.into()));
                let overflow_target = self.map.apply(target, TrackElem::Field(1_u32.into()));

                if value_target.is_some() || overflow_target.is_some() {
                    let (val, overflow) = self.binary_op(state, *op, left, right);

                    if let Some(value_target) = value_target {
                        state.insert_value_idx(value_target, val, &self.map);
                    }
                    if let Some(overflow_target) = overflow_target {
                        state.insert_value_idx(overflow_target, overflow, &self.map);
                    }
                }
            }
            Rvalue::Cast(
                CastKind::PointerCoercion(ty::adjustment::PointerCoercion::Unsize, _),
                operand,
                _,
            ) => {
                // For unsizing coercions, propagate the pointer value. If an array is unsized
                // to a slice, additionally record its statically known length.
                let pointer = self.handle_operand(operand, state);
                state.assign(target.as_ref(), pointer, &self.map);

                if let Some(target_len) = self.map.find_len(target.as_ref())
                    && let operand_ty = operand.ty(self.local_decls, self.tcx)
                    && let Some(operand_ty) = operand_ty.builtin_deref(true)
                    && let ty::Array(_, len) = operand_ty.kind()
                    && let Some(len) = Const::Ty(self.tcx.types.usize, *len)
                        .try_eval_scalar_int(self.tcx, self.typing_env)
                {
                    state.insert_value_idx(target_len, FlatSet::Elem(len.into()), &self.map);
                }
            }
            _ => {
                let result = self.handle_rvalue(rvalue, state);
                state.assign(target.as_ref(), result, &self.map);
            }
        }
    }

    fn handle_rvalue(
        &self,
        rvalue: &Rvalue<'tcx>,
        state: &mut State<FlatSet<Scalar>>,
    ) -> ValueOrPlace<FlatSet<Scalar>> {
        let val = match rvalue {
            Rvalue::Cast(CastKind::IntToInt | CastKind::IntToFloat, operand, ty) => {
                let Ok(layout) = self.tcx.layout_of(self.typing_env.as_query_input(*ty)) else {
                    return ValueOrPlace::Value(FlatSet::Top);
                };
                match self.eval_operand(operand, state) {
                    FlatSet::Elem(op) => self
                        .ecx
                        .borrow()
                        .int_to_int_or_float(&op, layout)
                        .discard_err()
                        .map_or(FlatSet::Top, |result| self.wrap_immediate(*result)),
                    FlatSet::Bottom => FlatSet::Bottom,
                    FlatSet::Top => FlatSet::Top,
                }
            }
            Rvalue::Cast(CastKind::FloatToInt | CastKind::FloatToFloat, operand, ty) => {
                let Ok(layout) = self.tcx.layout_of(self.typing_env.as_query_input(*ty)) else {
                    return ValueOrPlace::Value(FlatSet::Top);
                };
                match self.eval_operand(operand, state) {
                    FlatSet::Elem(op) => self
                        .ecx
                        .borrow()
                        .float_to_float_or_int(&op, layout)
                        .discard_err()
                        .map_or(FlatSet::Top, |result| self.wrap_immediate(*result)),
                    FlatSet::Bottom => FlatSet::Bottom,
                    FlatSet::Top => FlatSet::Top,
                }
            }
            Rvalue::Cast(CastKind::Transmute | CastKind::Subtype, operand, _) => {
                match self.eval_operand(operand, state) {
                    FlatSet::Elem(op) => self.wrap_immediate(*op),
                    FlatSet::Bottom => FlatSet::Bottom,
                    FlatSet::Top => FlatSet::Top,
                }
            }
            Rvalue::BinaryOp(op, box (left, right)) if !op.is_overflowing() => {
                // Overflowing operations are handled separately in `handle_assign`.
                let (val, _overflow) = self.binary_op(state, *op, left, right);
                val
            }
            Rvalue::UnaryOp(op, operand) => {
                if let UnOp::PtrMetadata = op
                    && let Some(place) = operand.place()
                    && let Some(len) = self.map.find_len(place.as_ref())
                {
                    return ValueOrPlace::Place(len);
                }
                match self.eval_operand(operand, state) {
                    FlatSet::Elem(value) => self
                        .ecx
                        .borrow()
                        .unary_op(*op, &value)
                        .discard_err()
                        .map_or(FlatSet::Top, |val| self.wrap_immediate(*val)),
                    FlatSet::Bottom => FlatSet::Bottom,
                    FlatSet::Top => FlatSet::Top,
                }
            }
            Rvalue::Discriminant(place) => state.get_discr(place.as_ref(), &self.map),
            Rvalue::Use(operand) => return self.handle_operand(operand, state),
            Rvalue::CopyForDeref(_) => bug!("`CopyForDeref` in runtime MIR"),
            Rvalue::ShallowInitBox(..) => bug!("`ShallowInitBox` in runtime MIR"),
            Rvalue::Ref(..) | Rvalue::RawPtr(..) => {
                // We don't track references.
                return ValueOrPlace::TOP;
            }
            Rvalue::Repeat(..)
            | Rvalue::ThreadLocalRef(..)
            | Rvalue::Cast(..)
            | Rvalue::BinaryOp(..)
            | Rvalue::Aggregate(..)
            | Rvalue::WrapUnsafeBinder(..) => {
                // Not handled by this analysis; conservatively unknown.
                return ValueOrPlace::TOP;
            }
        };
        ValueOrPlace::Value(val)
    }

    fn handle_constant(
        &self,
        constant: &ConstOperand<'tcx>,
        _state: &mut State<FlatSet<Scalar>>,
    ) -> FlatSet<Scalar> {
        constant
            .const_
            .try_eval_scalar(self.tcx, self.typing_env)
            .map_or(FlatSet::Top, FlatSet::Elem)
    }

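    /// Determines which outgoing edges of a `SwitchInt` are reachable: a known discriminant
    /// selects a single target, `Top` keeps all targets, and `Bottom` means no successor is
    /// reachable.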
    fn handle_switch_int<'mir>(
        &self,
        discr: &'mir Operand<'tcx>,
        targets: &'mir SwitchTargets,
        state: &mut State<FlatSet<Scalar>>,
    ) -> TerminatorEdges<'mir, 'tcx> {
        let value = match self.handle_operand(discr, state) {
            ValueOrPlace::Value(value) => value,
            ValueOrPlace::Place(place) => state.get_idx(place, &self.map),
        };
        match value {
            FlatSet::Bottom => TerminatorEdges::None,
            FlatSet::Elem(scalar) => {
                if let Ok(scalar_int) = scalar.try_to_scalar_int() {
                    TerminatorEdges::Single(
                        targets.target_for_value(scalar_int.to_bits_unchecked()),
                    )
                } else {
                    TerminatorEdges::SwitchInt { discr, targets }
                }
            }
            FlatSet::Top => TerminatorEdges::SwitchInt { discr, targets },
        }
    }

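    /// Assigns the value of `operand` to the tracked place `place`: tracked places are copied
    /// wholesale, while constants and dereferences of known constant pointers are evaluated with
    /// the interpreter and written field by field.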
    fn assign_operand(
        &self,
        state: &mut State<FlatSet<Scalar>>,
        place: PlaceIndex,
        operand: &Operand<'tcx>,
    ) {
        match operand {
            Operand::RuntimeChecks(_) => {}
            Operand::Copy(rhs) | Operand::Move(rhs) => {
                if let Some(rhs) = self.map.find(rhs.as_ref()) {
                    state.insert_place_idx(place, rhs, &self.map);
                } else if rhs.projection.first() == Some(&PlaceElem::Deref)
                    && let FlatSet::Elem(pointer) = state.get(rhs.local.into(), &self.map)
                    && let rhs_ty = self.local_decls[rhs.local].ty
                    && let Ok(rhs_layout) =
                        self.tcx.layout_of(self.typing_env.as_query_input(rhs_ty))
                {
                    let op = ImmTy::from_scalar(pointer, rhs_layout).into();
                    self.assign_constant(state, place, op, rhs.projection);
                }
            }
            Operand::Constant(box constant) => {
                if let Some(constant) = self
                    .ecx
                    .borrow()
                    .eval_mir_constant(&constant.const_, constant.span, None)
                    .discard_err()
                {
                    self.assign_constant(state, place, constant, &[]);
                }
            }
        }
    }

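    /// Applies `projection` to the interpreted value `operand` and copies the resulting scalars
    /// into every tracked sub-place of `place` (fields, variants, discriminants and slice
    /// lengths).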
    #[instrument(level = "trace", skip(self, state))]
    fn assign_constant(
        &self,
        state: &mut State<FlatSet<Scalar>>,
        place: PlaceIndex,
        mut operand: OpTy<'tcx>,
        projection: &[PlaceElem<'tcx>],
    ) {
        for &(mut proj_elem) in projection {
            if let PlaceElem::Index(index) = proj_elem {
                if let FlatSet::Elem(index) = state.get(index.into(), &self.map)
                    && let Some(offset) = index.to_target_usize(&self.tcx).discard_err()
                    && let Some(min_length) = offset.checked_add(1)
                {
                    proj_elem = PlaceElem::ConstantIndex { offset, min_length, from_end: false };
                } else {
                    return;
                }
            }
            operand = if let Some(operand) =
                self.ecx.borrow().project(&operand, proj_elem).discard_err()
            {
                operand
            } else {
                return;
            }
        }

        self.map.for_each_projection_value(
            place,
            operand,
            &mut |elem, op| match elem {
                TrackElem::Field(idx) => self.ecx.borrow().project_field(op, idx).discard_err(),
                TrackElem::Variant(idx) => {
                    self.ecx.borrow().project_downcast(op, idx).discard_err()
                }
                TrackElem::Discriminant => {
                    let variant = self.ecx.borrow().read_discriminant(op).discard_err()?;
                    let discr_value = self
                        .ecx
                        .borrow()
                        .discriminant_for_variant(op.layout.ty, variant)
                        .discard_err()?;
                    Some(discr_value.into())
                }
                TrackElem::DerefLen => {
                    let op: OpTy<'_> = self.ecx.borrow().deref_pointer(op).discard_err()?.into();
                    let len_usize = op.len(&self.ecx.borrow()).discard_err()?;
                    let layout = self
                        .tcx
                        .layout_of(self.typing_env.as_query_input(self.tcx.types.usize))
                        .unwrap();
                    Some(ImmTy::from_uint(len_usize, layout).into())
                }
            },
            &mut |place, op| {
                if let Some(imm) = self.ecx.borrow().read_immediate_raw(op).discard_err()
                    && let Some(imm) = imm.right()
                {
                    let elem = self.wrap_immediate(*imm);
                    state.insert_value_idx(place, elem, &self.map);
                }
            },
        );
    }

    fn binary_op(
        &self,
        state: &mut State<FlatSet<Scalar>>,
        op: BinOp,
        left: &Operand<'tcx>,
        right: &Operand<'tcx>,
    ) -> (FlatSet<Scalar>, FlatSet<Scalar>) {
        let left = self.eval_operand(left, state);
        let right = self.eval_operand(right, state);

        match (left, right) {
            (FlatSet::Bottom, _) | (_, FlatSet::Bottom) => (FlatSet::Bottom, FlatSet::Bottom),
            (FlatSet::Elem(left), FlatSet::Elem(right)) => {
                // Both operands are known: let the interpreter do the arithmetic.
                match self.ecx.borrow().binary_op(op, &left, &right).discard_err() {
                    Some(val) => {
                        if matches!(val.layout.backend_repr, BackendRepr::ScalarPair(..)) {
                            // Overflowing operations return a `(value, overflow_flag)` pair.
                            let (val, overflow) = val.to_scalar_pair();
                            (FlatSet::Elem(val), FlatSet::Elem(overflow))
                        } else {
                            (FlatSet::Elem(val.to_scalar()), FlatSet::Bottom)
                        }
                    }
                    _ => (FlatSet::Top, FlatSet::Top),
                }
            }
            (FlatSet::Elem(const_arg), _) | (_, FlatSet::Elem(const_arg)) => {
                // Only one operand is known: exploit absorbing elements such as `x & 0`,
                // `x | !0` and `x * 0`, whose result does not depend on the other operand.
                let layout = const_arg.layout;
                if !matches!(layout.backend_repr, rustc_abi::BackendRepr::Scalar(..)) {
                    return (FlatSet::Top, FlatSet::Top);
                }

                let arg_scalar = const_arg.to_scalar();
                let Some(arg_value) = arg_scalar.to_bits(layout.size).discard_err() else {
                    return (FlatSet::Top, FlatSet::Top);
                };

                match op {
                    BinOp::BitAnd if arg_value == 0 => (FlatSet::Elem(arg_scalar), FlatSet::Bottom),
                    BinOp::BitOr
                        if arg_value == layout.size.truncate(u128::MAX)
                            || (layout.ty.is_bool() && arg_value == 1) =>
                    {
                        (FlatSet::Elem(arg_scalar), FlatSet::Bottom)
                    }
                    BinOp::Mul if layout.ty.is_integral() && arg_value == 0 => {
                        (FlatSet::Elem(arg_scalar), FlatSet::Elem(Scalar::from_bool(false)))
                    }
                    _ => (FlatSet::Top, FlatSet::Top),
                }
            }
            (FlatSet::Top, FlatSet::Top) => (FlatSet::Top, FlatSet::Top),
        }
    }

    fn eval_operand(
        &self,
        op: &Operand<'tcx>,
        state: &mut State<FlatSet<Scalar>>,
    ) -> FlatSet<ImmTy<'tcx>> {
        let value = match self.handle_operand(op, state) {
            ValueOrPlace::Value(value) => value,
            ValueOrPlace::Place(place) => state.get_idx(place, &self.map),
        };
        match value {
            FlatSet::Top => FlatSet::Top,
            FlatSet::Elem(scalar) => {
                let ty = op.ty(self.local_decls, self.tcx);
                self.tcx
                    .layout_of(self.typing_env.as_query_input(ty))
                    .map_or(FlatSet::Top, |layout| {
                        FlatSet::Elem(ImmTy::from_scalar(scalar, layout))
                    })
            }
            FlatSet::Bottom => FlatSet::Bottom,
        }
    }

    fn eval_discriminant(&self, enum_ty: Ty<'tcx>, variant_index: VariantIdx) -> Option<Scalar> {
        if !enum_ty.is_enum() {
            return None;
        }
        let enum_ty_layout = self.tcx.layout_of(self.typing_env.as_query_input(enum_ty)).ok()?;
        let discr_value = self
            .ecx
            .borrow()
            .discriminant_for_variant(enum_ty_layout.ty, variant_index)
            .discard_err()?;
        Some(discr_value.to_scalar())
    }

    fn wrap_immediate(&self, imm: Immediate) -> FlatSet<Scalar> {
        match imm {
            Immediate::Scalar(scalar) => FlatSet::Elem(scalar),
            Immediate::Uninit => FlatSet::Bottom,
            _ => FlatSet::Top,
        }
    }
}

impl<'tcx> DebugWithContext<ConstAnalysis<'_, 'tcx>> for State<FlatSet<Scalar>> {
    fn fmt_with(&self, ctxt: &ConstAnalysis<'_, 'tcx>, f: &mut Formatter<'_>) -> std::fmt::Result {
        match self {
            State::Reachable(values) => debug_with_context(values, None, &ctxt.map, f),
            State::Unreachable => write!(f, "unreachable"),
        }
    }

    fn fmt_diff_with(
        &self,
        old: &Self,
        ctxt: &ConstAnalysis<'_, 'tcx>,
        f: &mut Formatter<'_>,
    ) -> std::fmt::Result {
        match (self, old) {
            (State::Reachable(this), State::Reachable(old)) => {
                debug_with_context(this, Some(old), &ctxt.map, f)
            }
            _ => Ok(()),
        }
    }
}

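/// Replacements recorded by the [`Collector`] and applied to the body by the `MutVisitor`
/// implementation below.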
struct Patch<'tcx> {
    tcx: TyCtxt<'tcx>,

    /// Places that can be replaced by a constant when they are *read* at this location,
    /// i.e. before the statement or terminator effect is applied.
    before_effect: FxHashMap<(Location, Place<'tcx>), Const<'tcx>>,

    /// Assignments whose right-hand side can be replaced by a constant.
    assignments: FxHashMap<Location, Const<'tcx>>,
}

impl<'tcx> Patch<'tcx> {
    pub(crate) fn new(tcx: TyCtxt<'tcx>) -> Self {
        Self { tcx, before_effect: FxHashMap::default(), assignments: FxHashMap::default() }
    }

    fn make_operand(&self, const_: Const<'tcx>) -> Operand<'tcx> {
        Operand::Constant(Box::new(ConstOperand { span: DUMMY_SP, user_ty: None, const_ }))
    }
}

struct Collector<'a, 'tcx> {
    patch: Patch<'tcx>,
    local_decls: &'a LocalDecls<'tcx>,
}

impl<'a, 'tcx> Collector<'a, 'tcx> {
    pub(crate) fn new(tcx: TyCtxt<'tcx>, local_decls: &'a LocalDecls<'tcx>) -> Self {
        Self { patch: Patch::new(tcx), local_decls }
    }

    #[instrument(level = "trace", skip(self, ecx, map), ret)]
    fn try_make_constant(
        &self,
        ecx: &mut InterpCx<'tcx, DummyMachine>,
        place: Place<'tcx>,
        state: &State<FlatSet<Scalar>>,
        map: &Map<'tcx>,
    ) -> Option<Const<'tcx>> {
        let ty = place.ty(self.local_decls, self.patch.tcx).ty;
        let layout = ecx.layout_of(ty).ok()?;

        if layout.is_zst() {
            return Some(Const::zero_sized(ty));
        }

        if layout.is_unsized() {
            return None;
        }

        let place = map.find(place.as_ref())?;
        if layout.backend_repr.is_scalar()
            && let Some(value) = propagatable_scalar(place, state, map)
        {
            return Some(Const::Val(ConstValue::Scalar(value), ty));
        }

        if matches!(layout.backend_repr, BackendRepr::Scalar(..) | BackendRepr::ScalarPair(..)) {
            let alloc_id = ecx
                .intern_with_temp_alloc(layout, |ecx, dest| {
                    try_write_constant(ecx, dest, place, ty, state, map)
                })
                .discard_err()?;
            return Some(Const::Val(ConstValue::Indirect { alloc_id, offset: Size::ZERO }, ty));
        }

        None
    }
}

#[instrument(level = "trace", skip(map), ret)]
fn propagatable_scalar(
    place: PlaceIndex,
    state: &State<FlatSet<Scalar>>,
    map: &Map<'_>,
) -> Option<Scalar> {
    if let FlatSet::Elem(value) = state.get_idx(place, map)
        && value.try_to_scalar_int().is_ok()
    {
        // Do not attempt to propagate pointers, as we may fail to preserve provenance.
        Some(value)
    } else {
        None
    }
}

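/// Writes the value known for `place` into the interpreter destination `dest`, recursing through
/// tuples and ADTs. Interpretation is aborted (via `throw_machine_stop_str!`) as soon as some
/// component is not a known, provenance-free scalar.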
#[instrument(level = "trace", skip(ecx, state, map), ret)]
fn try_write_constant<'tcx>(
    ecx: &mut InterpCx<'tcx, DummyMachine>,
    dest: &PlaceTy<'tcx>,
    place: PlaceIndex,
    ty: Ty<'tcx>,
    state: &State<FlatSet<Scalar>>,
    map: &Map<'tcx>,
) -> InterpResult<'tcx> {
    let layout = ecx.layout_of(ty)?;

    // Fast path for ZSTs.
    if layout.is_zst() {
        return interp_ok(());
    }

    // Fast path for scalars.
    if layout.backend_repr.is_scalar()
        && let Some(value) = propagatable_scalar(place, state, map)
    {
        return ecx.write_immediate(Immediate::Scalar(value), dest);
    }

    match ty.kind() {
        // `FnDef` is a ZST; nothing to write.
        ty::FnDef(..) => {}

        // These are scalars and must have been handled above.
        ty::Bool | ty::Int(_) | ty::Uint(_) | ty::Float(_) | ty::Char =>
            throw_machine_stop_str!("primitive type with provenance"),

        ty::Tuple(elem_tys) => {
            for (i, elem) in elem_tys.iter().enumerate() {
                let i = FieldIdx::from_usize(i);
                let Some(field) = map.apply(place, TrackElem::Field(i)) else {
                    throw_machine_stop_str!("missing field in tuple")
                };
                let field_dest = ecx.project_field(dest, i)?;
                try_write_constant(ecx, &field_dest, field, elem, state, map)?;
            }
        }

        ty::Adt(def, args) => {
            if def.is_union() {
                throw_machine_stop_str!("cannot propagate unions")
            }

            let (variant_idx, variant_def, variant_place, variant_dest) = if def.is_enum() {
                let Some(discr) = map.apply(place, TrackElem::Discriminant) else {
                    throw_machine_stop_str!("missing discriminant for enum")
                };
                let FlatSet::Elem(Scalar::Int(discr)) = state.get_idx(discr, map) else {
                    throw_machine_stop_str!("discriminant with provenance")
                };
                let discr_bits = discr.to_bits(discr.size());
                let Some((variant, _)) =
                    def.discriminants(*ecx.tcx).find(|(_, var)| discr_bits == var.val)
                else {
                    throw_machine_stop_str!("illegal discriminant for enum")
                };
                let Some(variant_place) = map.apply(place, TrackElem::Variant(variant)) else {
                    throw_machine_stop_str!("missing variant for enum")
                };
                let variant_dest = ecx.project_downcast(dest, variant)?;
                (variant, def.variant(variant), variant_place, variant_dest)
            } else {
                (FIRST_VARIANT, def.non_enum_variant(), place, dest.clone())
            };

            for (i, field) in variant_def.fields.iter_enumerated() {
                let ty = field.ty(*ecx.tcx, args);
                let Some(field) = map.apply(variant_place, TrackElem::Field(i)) else {
                    throw_machine_stop_str!("missing field in ADT")
                };
                let field_dest = ecx.project_field(&variant_dest, i)?;
                try_write_constant(ecx, &field_dest, field, ty, state, map)?;
            }
            ecx.write_discriminant(variant_idx, dest)?;
        }

        // Unsupported for now.
        ty::Array(_, _)
        | ty::Pat(_, _)
        | ty::Ref(..)
        | ty::RawPtr(..)
        | ty::FnPtr(..)
        | ty::Str
        | ty::Slice(_)
        | ty::Never
        | ty::Foreign(..)
        | ty::Alias(..)
        | ty::Param(_)
        | ty::Bound(..)
        | ty::Placeholder(..)
        | ty::Closure(..)
        | ty::CoroutineClosure(..)
        | ty::Coroutine(..)
        | ty::Dynamic(..)
        | ty::UnsafeBinder(_) => throw_machine_stop_str!("unsupported type"),

        ty::Error(_) | ty::Infer(..) | ty::CoroutineWitness(..) => bug!(),
    }

    interp_ok(())
}

impl<'tcx> ResultsVisitor<'tcx, ConstAnalysis<'_, 'tcx>> for Collector<'_, 'tcx> {
    #[instrument(level = "trace", skip(self, analysis, statement))]
    fn visit_after_early_statement_effect(
        &mut self,
        analysis: &ConstAnalysis<'_, 'tcx>,
        state: &State<FlatSet<Scalar>>,
        statement: &Statement<'tcx>,
        location: Location,
    ) {
        match &statement.kind {
            StatementKind::Assign(box (_, rvalue)) => {
                OperandCollector {
                    state,
                    visitor: self,
                    ecx: &mut analysis.ecx.borrow_mut(),
                    map: &analysis.map,
                }
                .visit_rvalue(rvalue, location);
            }
            _ => (),
        }
    }

    #[instrument(level = "trace", skip(self, analysis, statement))]
    fn visit_after_primary_statement_effect(
        &mut self,
        analysis: &ConstAnalysis<'_, 'tcx>,
        state: &State<FlatSet<Scalar>>,
        statement: &Statement<'tcx>,
        location: Location,
    ) {
        match statement.kind {
            StatementKind::Assign(box (_, Rvalue::Use(Operand::Constant(_)))) => {
                // The assignment is already a constant; nothing to do.
            }
            StatementKind::Assign(box (place, _)) => {
                if let Some(value) = self.try_make_constant(
                    &mut analysis.ecx.borrow_mut(),
                    place,
                    state,
                    &analysis.map,
                ) {
                    self.patch.assignments.insert(location, value);
                }
            }
            _ => (),
        }
    }

    fn visit_after_early_terminator_effect(
        &mut self,
        analysis: &ConstAnalysis<'_, 'tcx>,
        state: &State<FlatSet<Scalar>>,
        terminator: &Terminator<'tcx>,
        location: Location,
    ) {
        OperandCollector {
            state,
            visitor: self,
            ecx: &mut analysis.ecx.borrow_mut(),
            map: &analysis.map,
        }
        .visit_terminator(terminator, location);
    }
}

impl<'tcx> MutVisitor<'tcx> for Patch<'tcx> {
    fn tcx(&self) -> TyCtxt<'tcx> {
        self.tcx
    }

    fn visit_statement(&mut self, statement: &mut Statement<'tcx>, location: Location) {
        if let Some(value) = self.assignments.get(&location) {
            match &mut statement.kind {
                StatementKind::Assign(box (_, rvalue)) => {
                    *rvalue = Rvalue::Use(self.make_operand(*value));
                }
                _ => bug!("found assignment info for non-assign statement"),
            }
        } else {
            self.super_statement(statement, location);
        }
    }

    fn visit_operand(&mut self, operand: &mut Operand<'tcx>, location: Location) {
        match operand {
            Operand::Copy(place) | Operand::Move(place) => {
                if let Some(value) = self.before_effect.get(&(location, *place)) {
                    *operand = self.make_operand(*value);
                } else if !place.projection.is_empty() {
                    self.super_operand(operand, location)
                }
            }
            Operand::Constant(_) | Operand::RuntimeChecks(_) => {}
        }
    }

    fn process_projection_elem(
        &mut self,
        elem: PlaceElem<'tcx>,
        location: Location,
    ) -> Option<PlaceElem<'tcx>> {
        if let PlaceElem::Index(local) = elem {
            let offset = self.before_effect.get(&(location, local.into()))?;
            let offset = offset.try_to_scalar()?;
            let offset = offset.to_target_usize(&self.tcx).discard_err()?;
            let min_length = offset.checked_add(1)?;
            Some(PlaceElem::ConstantIndex { offset, min_length, from_end: false })
        } else {
            None
        }
    }
}

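/// Visits operands and index projections before a statement or terminator takes effect,
/// recording the places that can already be replaced by a constant at that point.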
struct OperandCollector<'a, 'b, 'tcx> {
    state: &'a State<FlatSet<Scalar>>,
    visitor: &'a mut Collector<'b, 'tcx>,
    ecx: &'a mut InterpCx<'tcx, DummyMachine>,
    map: &'a Map<'tcx>,
}

impl<'tcx> Visitor<'tcx> for OperandCollector<'_, '_, 'tcx> {
    fn visit_projection_elem(
        &mut self,
        _: PlaceRef<'tcx>,
        elem: PlaceElem<'tcx>,
        _: PlaceContext,
        location: Location,
    ) {
        if let PlaceElem::Index(local) = elem
            && let Some(value) =
                self.visitor.try_make_constant(self.ecx, local.into(), self.state, self.map)
        {
            self.visitor.patch.before_effect.insert((location, local.into()), value);
        }
    }

    fn visit_operand(&mut self, operand: &Operand<'tcx>, location: Location) {
        if let Some(place) = operand.place() {
            if let Some(value) =
                self.visitor.try_make_constant(self.ecx, place, self.state, self.map)
            {
                self.visitor.patch.before_effect.insert((location, place), value);
            } else if !place.projection.is_empty() {
                self.super_operand(operand, location)
            }
        }
    }
}