// rustc_mir_transform/promote_consts.rs
//! A pass that promotes borrows of constant rvalues.
//!
//! The rvalues considered constant are trees of temps, each with exactly one
//! initialization, and holding a constant value with no interior mutability.
//! They are placed into a new MIR constant body in `promoted` and the borrow
//! rvalue is replaced with a `Literal::Promoted` using the index into
//! `promoted` of that constant MIR.
//!
//! This pass assumes that every use is dominated by an initialization and can
//! otherwise silence errors, if move analysis runs after promotion on broken
//! MIR.

13use std::cell::Cell;
14use std::{assert_matches, cmp, iter, mem};
15
16use either::{Left, Right};
17use rustc_const_eval::check_consts::{ConstCx, qualifs};
18use rustc_data_structures::fx::FxHashSet;
19use rustc_hir as hir;
20use rustc_hir::def::DefKind;
21use rustc_index::{IndexSlice, IndexVec};
22use rustc_middle::mir::visit::{MutVisitor, MutatingUseContext, PlaceContext, Visitor};
23use rustc_middle::mir::*;
24use rustc_middle::ty::{self, GenericArgs, List, Ty, TyCtxt, TypeVisitableExt};
25use rustc_middle::{bug, mir, span_bug};
26use rustc_span::{Span, Spanned};
27use tracing::{debug, instrument};
28
/// A `MirPass` for promotion.
///
/// Promotion is the extraction of promotable temps into separate MIR bodies so they can have
/// `'static` lifetime.
///
/// After this pass is run, `promoted_fragments` will hold the MIR body corresponding to each
/// newly created `Constant`.
#[derive(Default)]
pub(super) struct PromoteTemps<'tcx> {
    /// The promoted MIR fragments produced by `run_pass`, one body per `Promoted` index.
    // Must use `Cell` because `run_pass` takes `&self`, not `&mut self`.
    pub promoted_fragments: Cell<IndexVec<Promoted, Body<'tcx>>>,
}
41
42impl<'tcx> crate::MirPass<'tcx> for PromoteTemps<'tcx> {
43    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
44        // There's not really any point in promoting errorful MIR.
45        //
46        // This does not include MIR that failed const-checking, which we still try to promote.
47        if let Err(_) = body.return_ty().error_reported() {
48            debug!("PromoteTemps: MIR had errors");
49            return;
50        }
51        if body.source.promoted.is_some() {
52            return;
53        }
54
55        let ccx = ConstCx::new(tcx, body);
56        let (mut temps, all_candidates) = collect_temps_and_candidates(&ccx);
57
58        let promotable_candidates = validate_candidates(&ccx, &mut temps, all_candidates);
59
60        let promoted = promote_candidates(body, tcx, temps, promotable_candidates);
61        self.promoted_fragments.set(promoted);
62    }
63
64    fn is_required(&self) -> bool {
65        true
66    }
67}
68
/// State of a temporary during collection and promotion.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
enum TempState {
    /// No references to this temp.
    Undefined,
    /// One direct assignment and any number of direct uses.
    /// A borrow of this temp is promotable if the assigned
    /// value is qualified as constant.
    /// `valid` caches the validator's verdict: `Ok(())` once the temp has been
    /// checked and found promotable, `Err(())` before validation.
    Defined { location: Location, uses: usize, valid: Result<(), ()> },
    /// Any other combination of assignments/uses.
    Unpromotable,
    /// This temp was part of an rvalue which got extracted
    /// during promotion and needs cleanup.
    PromotedOut,
}
84
/// A "root candidate" for promotion, which will become the
/// returned value in a promoted MIR, unless it's a subset
/// of a larger candidate.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
struct Candidate {
    /// Location of the assignment statement whose rvalue is the `Rvalue::Ref`
    /// under consideration (see `Collector::visit_rvalue`).
    location: Location,
}
92
/// Visitor that, in one pass over the body, records the `TempState` of every
/// local and every `Rvalue::Ref` assignment as a promotion candidate.
struct Collector<'a, 'tcx> {
    ccx: &'a ConstCx<'a, 'tcx>,
    /// Per-local state; starts as `Undefined` for every local.
    temps: IndexVec<Local, TempState>,
    /// Locations of `Rvalue::Ref` assignments, in visit order.
    candidates: Vec<Candidate>,
}
98
impl<'tcx> Visitor<'tcx> for Collector<'_, 'tcx> {
    /// Advances the `TempState` machine for `index`: the first direct
    /// `Store`/`Call` assignment makes a temp `Defined`; further allowed uses
    /// are counted; anything else demotes it to `Unpromotable`.
    #[instrument(level = "debug", skip(self))]
    fn visit_local(&mut self, index: Local, context: PlaceContext, location: Location) {
        // We're only interested in temporaries and the return place
        match self.ccx.body.local_kind(index) {
            LocalKind::Arg => return,
            LocalKind::Temp if self.ccx.body.local_decls[index].is_user_variable() => return,
            LocalKind::ReturnPointer | LocalKind::Temp => {}
        }

        // Ignore drops, if the temp gets promoted,
        // then it's constant and thus drop is noop.
        // Non-uses are also irrelevant.
        if context.is_drop() || !context.is_use() {
            debug!(is_drop = context.is_drop(), is_use = context.is_use());
            return;
        }

        let temp = &mut self.temps[index];
        debug!(?temp);
        *temp = match *temp {
            // First sighting: only a direct `Store` or `Call` destination counts
            // as the single initialization; any other context disqualifies.
            TempState::Undefined => match context {
                PlaceContext::MutatingUse(MutatingUseContext::Store | MutatingUseContext::Call) => {
                    TempState::Defined { location, uses: 0, valid: Err(()) }
                }
                _ => TempState::Unpromotable,
            },
            TempState::Defined { ref mut uses, .. } => {
                // We always allow borrows, even mutable ones, as we need
                // to promote mutable borrows of some ZSTs e.g., `&mut []`.
                let allowed_use = match context {
                    PlaceContext::MutatingUse(MutatingUseContext::Borrow)
                    | PlaceContext::NonMutatingUse(_) => true,
                    PlaceContext::MutatingUse(_) | PlaceContext::NonUse(_) => false,
                };
                debug!(?allowed_use);
                if allowed_use {
                    *uses += 1;
                    return;
                }
                // A second assignment or other mutating use disqualifies the temp.
                TempState::Unpromotable
            }
            TempState::Unpromotable | TempState::PromotedOut => TempState::Unpromotable,
        };
        debug!(?temp);
    }

    /// Records every `Rvalue::Ref` as a promotion candidate; whether it is
    /// actually promotable is decided later by the `Validator`.
    fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
        self.super_rvalue(rvalue, location);

        if let Rvalue::Ref(..) = *rvalue {
            self.candidates.push(Candidate { location });
        }
    }
}
154
155fn collect_temps_and_candidates<'tcx>(
156    ccx: &ConstCx<'_, 'tcx>,
157) -> (IndexVec<Local, TempState>, Vec<Candidate>) {
158    let mut collector = Collector {
159        temps: IndexVec::from_elem(TempState::Undefined, &ccx.body.local_decls),
160        candidates: vec![],
161        ccx,
162    };
163    for (bb, data) in traversal::reverse_postorder(ccx.body) {
164        collector.visit_basic_block_data(bb, data);
165    }
166    (collector.temps, collector.candidates)
167}
168
/// Checks whether locals that appear in a promotion context (`Candidate`) are actually promotable.
///
/// This wraps an `Item`, and has access to all fields of that `Item` via `Deref` coercion.
struct Validator<'a, 'tcx> {
    ccx: &'a ConstCx<'a, 'tcx>,
    /// Temp states computed by the `Collector`; validation verdicts are cached
    /// back into this slice (see `validate_local`).
    temps: &'a mut IndexSlice<Local, TempState>,
    /// For backwards compatibility, we are promoting function calls in `const`/`static`
    /// initializers. But we want to avoid evaluating code that might panic and that otherwise would
    /// not have been evaluated, so we only promote such calls in basic blocks that are guaranteed
    /// to execute. In other words, we only promote such calls in basic blocks that are definitely
    /// not dead code. Here we cache the result of computing that set of basic blocks.
    promotion_safe_blocks: Option<FxHashSet<BasicBlock>>,
}
182
/// `Deref` to the wrapped `ConstCx` so validator methods can access its fields
/// (`self.body`, `self.tcx`, `self.const_kind`, ...) directly.
impl<'a, 'tcx> std::ops::Deref for Validator<'a, 'tcx> {
    type Target = ConstCx<'a, 'tcx>;

    fn deref(&self) -> &Self::Target {
        self.ccx
    }
}
190
/// Zero-sized error type: the inspected candidate cannot be promoted.
struct Unpromotable;
192
impl<'tcx> Validator<'_, 'tcx> {
    /// Entry point: checks whether the borrow at `candidate` may be promoted.
    /// By construction (see `Collector::visit_rvalue`), the candidate location
    /// is an assignment statement whose rvalue is a `Rvalue::Ref`.
    fn validate_candidate(&mut self, candidate: Candidate) -> Result<(), Unpromotable> {
        let Left(statement) = self.body.stmt_at(candidate.location) else { bug!() };
        let Some((_, Rvalue::Ref(_, kind, place))) = statement.kind.as_assign() else { bug!() };

        // We can only promote interior borrows of promotable temps (non-temps
        // don't get promoted anyway).
        self.validate_local(place.local)?;

        // The reference operation itself must be promotable.
        // (Needs to come after `validate_local` to avoid ICEs.)
        self.validate_ref(*kind, place)?;

        // We do not check all the projections (they do not get promoted anyway),
        // but we do stay away from promoting anything involving a dereference.
        if place.projection.contains(&ProjectionElem::Deref) {
            return Err(Unpromotable);
        }

        Ok(())
    }

    /// Whether qualif `Q` (e.g. `NeedsDrop`, `HasMutInterior`) holds for the
    /// value of `local`, judged from its single defining statement or `Call`
    /// terminator. Locals not in the `Defined` state report `false`.
    // FIXME(eddyb) maybe cache this?
    fn qualif_local<Q: qualifs::Qualif>(&mut self, local: Local) -> bool {
        let TempState::Defined { location: loc, .. } = self.temps[local] else {
            return false;
        };

        let stmt_or_term = self.body.stmt_at(loc);
        match stmt_or_term {
            Left(statement) => {
                let Some((_, rhs)) = statement.kind.as_assign() else {
                    span_bug!(statement.source_info.span, "{:?} is not an assignment", statement)
                };
                // Recurse into the operands of the defining rvalue.
                qualifs::in_rvalue::<Q, _>(self.ccx, &mut |l| self.qualif_local::<Q>(l), rhs)
            }
            Right(terminator) => {
                assert_matches!(terminator.kind, TerminatorKind::Call { .. });
                // For a call we cannot inspect the value, so judge by the return type.
                let return_ty = self.body.local_decls[local].ty;
                Q::in_any_value_of_ty(self.ccx, return_ty)
            }
        }
    }

    /// Checks whether `local` (which must be a `Defined` temp) holds a
    /// promotable value, caching the verdict in `self.temps` so each temp is
    /// fully validated at most once.
    fn validate_local(&mut self, local: Local) -> Result<(), Unpromotable> {
        let TempState::Defined { location: loc, uses, valid } = self.temps[local] else {
            return Err(Unpromotable);
        };

        // We cannot promote things that need dropping, since the promoted value would not get
        // dropped.
        if self.qualif_local::<qualifs::NeedsDrop>(local) {
            return Err(Unpromotable);
        }

        // Already validated successfully on a previous visit.
        if valid.is_ok() {
            return Ok(());
        }

        let ok = {
            let stmt_or_term = self.body.stmt_at(loc);
            match stmt_or_term {
                Left(statement) => {
                    let Some((_, rhs)) = statement.kind.as_assign() else {
                        span_bug!(
                            statement.source_info.span,
                            "{:?} is not an assignment",
                            statement
                        )
                    };
                    self.validate_rvalue(rhs)
                }
                Right(terminator) => match &terminator.kind {
                    TerminatorKind::Call { func, args, .. } => {
                        self.validate_call(func, args, loc.block)
                    }
                    TerminatorKind::Yield { .. } => Err(Unpromotable),
                    kind => {
                        span_bug!(terminator.source_info.span, "{:?} not promotable", kind);
                    }
                },
            }
        };

        // Cache the result: success marks the temp `valid`; failure demotes it
        // to `Unpromotable` so it is never re-checked.
        self.temps[local] = match ok {
            Ok(()) => TempState::Defined { location: loc, uses, valid: Ok(()) },
            Err(_) => TempState::Unpromotable,
        };

        ok
    }

    /// Recursively validates a place: checks the topmost projection element,
    /// then recurses into the base, bottoming out at `validate_local`.
    fn validate_place(&mut self, place: PlaceRef<'tcx>) -> Result<(), Unpromotable> {
        let Some((place_base, elem)) = place.last_projection() else {
            return self.validate_local(place.local);
        };

        // Validate topmost projection, then recurse.
        match elem {
            // Recurse directly.
            ProjectionElem::ConstantIndex { .. }
            | ProjectionElem::Subslice { .. }
            | ProjectionElem::UnwrapUnsafeBinder(_) => {}

            // Never recurse.
            ProjectionElem::OpaqueCast(..) | ProjectionElem::Downcast(..) => {
                return Err(Unpromotable);
            }

            ProjectionElem::Deref => {
                // When a static is used by-value, that gets desugared to `*STATIC_ADDR`,
                // and we need to be able to promote this. So check if this deref matches
                // that specific pattern.

                // We need to make sure this is a `Deref` of a local with no further projections.
                // Discussion can be found at
                // https://github.com/rust-lang/rust/pull/74945#discussion_r463063247
                if let Some(local) = place_base.as_local()
                    && let TempState::Defined { location, .. } = self.temps[local]
                    && let Left(def_stmt) = self.body.stmt_at(location)
                    && let Some((_, Rvalue::Use(Operand::Constant(c), _))) = def_stmt.kind.as_assign()
                    && let Some(did) = c.check_static_ptr(self.tcx)
                    // Evaluating a promoted may not read statics except if it got
                    // promoted from a static (this is a CTFE check). So we
                    // can only promote static accesses inside statics.
                    && let Some(hir::ConstContext::Static(..)) = self.const_kind
                    && !self.tcx.is_thread_local_static(did)
                {
                    // Recurse.
                } else {
                    return Err(Unpromotable);
                }
            }
            ProjectionElem::Index(local) => {
                // Only accept if we can predict the index and are indexing an array.
                if let TempState::Defined { location: loc, .. } = self.temps[local]
                    && let Left(statement) = self.body.stmt_at(loc)
                    && let Some((_, Rvalue::Use(Operand::Constant(c), _))) = statement.kind.as_assign()
                    && self.should_evaluate_for_promotion_checks(c.const_)
                    && let Some(idx) = c.const_.try_eval_target_usize(self.tcx, self.typing_env)
                    // Determine the type of the thing we are indexing.
                    && let ty::Array(_, len) = place_base.ty(self.body, self.tcx).ty.kind()
                    // It's an array; determine its length.
                    && let Some(len) = len.try_to_target_usize(self.tcx)
                    // If the index is in-bounds, go ahead.
                    && idx < len
                {
                    // The index temp itself must also be promotable.
                    self.validate_local(local)?;
                    // Recurse.
                } else {
                    return Err(Unpromotable);
                }
            }

            ProjectionElem::Field(..) => {
                let base_ty = place_base.ty(self.body, self.tcx).ty;
                if base_ty.is_union() {
                    // No promotion of union field accesses.
                    return Err(Unpromotable);
                }
            }
        }

        self.validate_place(place_base)
    }

    /// Validates an operand: copies/moves defer to `validate_place`; constants
    /// are promotable except for `RuntimeChecks` and (outside statics) pointers
    /// to statics.
    fn validate_operand(&mut self, operand: &Operand<'tcx>) -> Result<(), Unpromotable> {
        match operand {
            Operand::Copy(place) | Operand::Move(place) => self.validate_place(place.as_ref()),

            // `RuntimeChecks` behaves different in const-eval and runtime MIR,
            // so we do not promote it.
            Operand::RuntimeChecks(_) => Err(Unpromotable),

            // The qualifs for a constant (e.g. `HasMutInterior`) are checked in
            // `validate_rvalue` upon access.
            Operand::Constant(c) => {
                if let Some(def_id) = c.check_static_ptr(self.tcx) {
                    // Only allow statics (not consts) to refer to other statics.
                    // FIXME(eddyb) does this matter at all for promotion?
                    // FIXME(RalfJung) it makes little sense to not promote this in `fn`/`const fn`,
                    // and in `const` this cannot occur anyway. The only concern is that we might
                    // promote even `let x = &STATIC` which would be useless, but this applies to
                    // promotion inside statics as well.
                    let is_static = matches!(self.const_kind, Some(hir::ConstContext::Static(_)));
                    if !is_static {
                        return Err(Unpromotable);
                    }

                    let is_thread_local = self.tcx.is_thread_local_static(def_id);
                    if is_thread_local {
                        return Err(Unpromotable);
                    }
                }

                Ok(())
            }
        }
    }

    /// Validates the borrow kind of a candidate reference: shared borrows must
    /// not alias interior mutability, and mutable borrows are only allowed for
    /// the empty-array special case `&mut []`.
    fn validate_ref(&mut self, kind: BorrowKind, place: &Place<'tcx>) -> Result<(), Unpromotable> {
        match kind {
            // Reject these borrow types just to be safe.
            // FIXME(RalfJung): could we allow them? Should we? No point in it until we have a
            // usecase.
            BorrowKind::Fake(_) | BorrowKind::Mut { kind: MutBorrowKind::ClosureCapture } => {
                return Err(Unpromotable);
            }

            BorrowKind::Shared => {
                let has_mut_interior = self.qualif_local::<qualifs::HasMutInterior>(place.local);
                if has_mut_interior {
                    return Err(Unpromotable);
                }
            }

            // FIXME: consider changing this to only promote &mut [] for default borrows,
            // also forbidding two phase borrows
            BorrowKind::Mut { kind: MutBorrowKind::Default | MutBorrowKind::TwoPhaseBorrow } => {
                let ty = place.ty(self.body, self.tcx).ty;

                // In theory, any zero-sized value could be borrowed
                // mutably without consequences. However, only &mut []
                // is allowed right now.
                let ty::Array(_, len) = ty.kind() else { return Err(Unpromotable) };
                let Some(0) = len.try_to_target_usize(self.tcx) else { return Err(Unpromotable) };
            }
        }

        Ok(())
    }

    /// Validates an rvalue: rejects operations that could fail or that behave
    /// differently at compile time (ptr-to-int casts, raw-pointer comparisons,
    /// possibly-trapping division), recursing into operands and places.
    fn validate_rvalue(&mut self, rvalue: &Rvalue<'tcx>) -> Result<(), Unpromotable> {
        match rvalue {
            Rvalue::Use(_operand, WithRetag::No) => {
                // This shouldn't actually happen, but just to be safe: we'll later add the promoted
                // with retagging, so don't promote anything that didn't already have retagging.
                return Err(Unpromotable);
            }
            Rvalue::Use(operand, _)
            | Rvalue::Repeat(operand, _)
            | Rvalue::WrapUnsafeBinder(operand, _) => {
                self.validate_operand(operand)?;
            }
            Rvalue::CopyForDeref(place) => {
                // Treat exactly like a `Copy` operand of the same place.
                let op = &Operand::Copy(*place);
                self.validate_operand(op)?
            }

            Rvalue::Discriminant(place) => self.validate_place(place.as_ref())?,

            Rvalue::ThreadLocalRef(_) => return Err(Unpromotable),

            // ptr-to-int casts are not possible in consts and thus not promotable
            Rvalue::Cast(CastKind::PointerExposeProvenance, _, _) => return Err(Unpromotable),

            // all other casts including int-to-ptr casts are fine, they just use the integer value
            // at pointer type.
            Rvalue::Cast(_, operand, _) => {
                self.validate_operand(operand)?;
            }

            Rvalue::UnaryOp(op, operand) => {
                match op {
                    // These operations can never fail.
                    UnOp::Neg | UnOp::Not | UnOp::PtrMetadata => {}
                }

                self.validate_operand(operand)?;
            }

            Rvalue::BinaryOp(op, box (lhs, rhs)) => {
                let op = *op;
                let lhs_ty = lhs.ty(self.body, self.tcx);

                if let ty::RawPtr(_, _) | ty::FnPtr(..) = lhs_ty.kind() {
                    // Raw and fn pointer operations are not allowed inside consts and thus not
                    // promotable.
                    assert_matches!(
                        op,
                        BinOp::Eq
                            | BinOp::Ne
                            | BinOp::Le
                            | BinOp::Lt
                            | BinOp::Ge
                            | BinOp::Gt
                            | BinOp::Offset
                    );
                    return Err(Unpromotable);
                }

                match op {
                    BinOp::Div | BinOp::Rem => {
                        if lhs_ty.is_integral() {
                            let sz = lhs_ty.primitive_size(self.tcx);
                            // Integer division: the RHS must be a non-zero const.
                            let rhs_val = if let Operand::Constant(rhs_c) = rhs
                                && self.should_evaluate_for_promotion_checks(rhs_c.const_)
                                && let Some(rhs_val) =
                                    rhs_c.const_.try_eval_scalar_int(self.tcx, self.typing_env)
                                // for the zero test, int vs uint does not matter
                                && rhs_val.to_uint(sz) != 0
                            {
                                rhs_val
                            } else {
                                // value not known or 0 -- not okay
                                return Err(Unpromotable);
                            };
                            // Furthermore, for signed division, we also have to exclude `int::MIN /
                            // -1`.
                            if lhs_ty.is_signed() && rhs_val.to_int(sz) == -1 {
                                // The RHS is -1, so we have to be careful. But is the LHS int::MIN?
                                if let Operand::Constant(lhs_c) = lhs
                                    && self.should_evaluate_for_promotion_checks(lhs_c.const_)
                                    && let Some(lhs_val) =
                                        lhs_c.const_.try_eval_scalar_int(self.tcx, self.typing_env)
                                    && let lhs_min = sz.signed_int_min()
                                    && lhs_val.to_int(sz) != lhs_min
                                {
                                    // okay
                                } else {
                                    // value not known or int::MIN -- not okay
                                    return Err(Unpromotable);
                                }
                            }
                        }
                    }
                    // The remaining operations can never fail.
                    BinOp::Eq
                    | BinOp::Ne
                    | BinOp::Le
                    | BinOp::Lt
                    | BinOp::Ge
                    | BinOp::Gt
                    | BinOp::Cmp
                    | BinOp::Offset
                    | BinOp::Add
                    | BinOp::AddUnchecked
                    | BinOp::AddWithOverflow
                    | BinOp::Sub
                    | BinOp::SubUnchecked
                    | BinOp::SubWithOverflow
                    | BinOp::Mul
                    | BinOp::MulUnchecked
                    | BinOp::MulWithOverflow
                    | BinOp::BitXor
                    | BinOp::BitAnd
                    | BinOp::BitOr
                    | BinOp::Shl
                    | BinOp::ShlUnchecked
                    | BinOp::Shr
                    | BinOp::ShrUnchecked => {}
                }

                self.validate_operand(lhs)?;
                self.validate_operand(rhs)?;
            }

            Rvalue::RawPtr(_, place) => {
                // We accept `&raw *`, i.e., raw reborrows -- creating a raw pointer is
                // no problem, only using it is.
                if let Some((place_base, ProjectionElem::Deref)) = place.as_ref().last_projection()
                {
                    let base_ty = place_base.ty(self.body, self.tcx).ty;
                    if let ty::Ref(..) = base_ty.kind() {
                        return self.validate_place(place_base);
                    }
                }
                return Err(Unpromotable);
            }

            Rvalue::Ref(_, kind, place) => {
                // Special-case reborrows to be more like a copy of the reference.
                let mut place_simplified = place.as_ref();
                if let Some((place_base, ProjectionElem::Deref)) =
                    place_simplified.last_projection()
                {
                    let base_ty = place_base.ty(self.body, self.tcx).ty;
                    if let ty::Ref(..) = base_ty.kind() {
                        place_simplified = place_base;
                    }
                }

                self.validate_place(place_simplified)?;

                // Check that the reference is fine (using the original place!).
                // (Needs to come after `validate_place` to avoid ICEs.)
                self.validate_ref(*kind, place)?;
            }

            Rvalue::Reborrow(_, _, place) => {
                // FIXME(reborrow): should probably have a place_simplified like above.
                let op = &Operand::Copy(*place);
                self.validate_operand(op)?
            }

            Rvalue::Aggregate(_, operands) => {
                for o in operands {
                    self.validate_operand(o)?;
                }
            }
        }

        Ok(())
    }

    /// Computes the sets of blocks of this MIR that are definitely going to be executed
    /// if the function returns successfully. That makes it safe to promote calls in them
    /// that might fail.
    fn promotion_safe_blocks(body: &mir::Body<'tcx>) -> FxHashSet<BasicBlock> {
        let mut safe_blocks = FxHashSet::default();
        // Follow the single chain of must-execute successors from the start block.
        let mut safe_block = START_BLOCK;
        loop {
            safe_blocks.insert(safe_block);
            // Let's see if we can find another safe block.
            safe_block = match body.basic_blocks[safe_block].terminator().kind {
                TerminatorKind::Goto { target } => target,
                TerminatorKind::Call { target: Some(target), .. }
                | TerminatorKind::Drop { target, .. } => {
                    // This calls a function or the destructor. `target` does not get executed if
                    // the callee loops or panics. But in both cases the const already fails to
                    // evaluate, so we are fine considering `target` a safe block for promotion.
                    target
                }
                TerminatorKind::Assert { target, .. } => {
                    // Similar to above, we only consider successful execution.
                    target
                }
                _ => {
                    // No next safe block.
                    break;
                }
            };
        }
        safe_blocks
    }

    /// Returns whether the block is "safe" for promotion, which means it cannot be dead code.
    /// We use this to avoid promoting operations that can fail in dead code.
    fn is_promotion_safe_block(&mut self, block: BasicBlock) -> bool {
        // Compute the safe-block set lazily on first query and cache it.
        let body = self.body;
        let safe_blocks =
            self.promotion_safe_blocks.get_or_insert_with(|| Self::promotion_safe_blocks(body));
        safe_blocks.contains(&block)
    }

    /// Validates a `Call` terminator as a promotion candidate: operands must be
    /// promotable, and the callee must either be `#[rustc_promotable]` or (for
    /// backward compatibility, in `const`/`static` initializers only) a
    /// `const fn` called from a block that is definitely not dead code.
    fn validate_call(
        &mut self,
        callee: &Operand<'tcx>,
        args: &[Spanned<Operand<'tcx>>],
        block: BasicBlock,
    ) -> Result<(), Unpromotable> {
        // Validate the operands. If they fail, there's no question -- we cannot promote.
        self.validate_operand(callee)?;
        for arg in args {
            self.validate_operand(&arg.node)?;
        }

        // Functions marked `#[rustc_promotable]` are explicitly allowed to be promoted, so we can
        // accept them at this point.
        let fn_ty = callee.ty(self.body, self.tcx);
        if let ty::FnDef(def_id, _) = *fn_ty.kind() {
            if self.tcx.is_promotable_const_fn(def_id) {
                return Ok(());
            }
        }

        // Ideally, we'd stop here and reject the rest.
        // But for backward compatibility, we have to accept some promotion in const/static
        // initializers. Inline consts are explicitly excluded, they are more recent so we have no
        // backwards compatibility reason to allow more promotion inside of them.
        let promote_all_fn = matches!(
            self.const_kind,
            Some(hir::ConstContext::Static(_) | hir::ConstContext::Const { inline: false })
        );
        if !promote_all_fn {
            return Err(Unpromotable);
        }
        // Make sure the callee is a `const fn`.
        let is_const_fn = match *fn_ty.kind() {
            ty::FnDef(def_id, _) => self.tcx.is_const_fn(def_id),
            _ => false,
        };
        if !is_const_fn {
            return Err(Unpromotable);
        }
        // The problem is, this may promote calls to functions that panic.
        // We don't want to introduce compilation errors if there's a panic in a call in dead code.
        // So we ensure that this is not dead code.
        if !self.is_promotion_safe_block(block) {
            return Err(Unpromotable);
        }
        // This passed all checks, so let's accept.
        Ok(())
    }

    /// Can we try to evaluate a given constant at this point in compilation? Attempting to evaluate
    /// a const block before borrow-checking will result in a query cycle (#150464).
    fn should_evaluate_for_promotion_checks(&self, constant: Const<'tcx>) -> bool {
        match constant {
            // `Const::Ty` is always a `ConstKind::Param` right now and that can never be turned
            // into a mir value for promotion
            // FIXME(mgca): do we want uses of type_const to be normalized during promotion?
            Const::Ty(..) => false,
            Const::Val(..) => true,
            // Evaluating a MIR constant requires borrow-checking it. For inline consts, as of
            // #138499, this means borrow-checking its typeck root. Since borrow-checking the
            // typeck root requires promoting its constants, trying to evaluate an inline const here
            // will result in a query cycle. To avoid the cycle, we can't evaluate const blocks yet.
            // Other kinds of unevaluated's can cause query cycles too when they arise from
            // self-reference in user code; e.g. evaluating a constant can require evaluating a
            // const function that uses that constant, again requiring evaluation of the constant.
            // However, this form of cycle renders both the constant and function unusable in
            // general, so we don't need to special-case it here.
            Const::Unevaluated(uc, _) => self.tcx.def_kind(uc.def) != DefKind::InlineConst,
        }
    }
}
711
712fn validate_candidates(
713    ccx: &ConstCx<'_, '_>,
714    temps: &mut IndexSlice<Local, TempState>,
715    mut candidates: Vec<Candidate>,
716) -> Vec<Candidate> {
717    let mut validator = Validator { ccx, temps, promotion_safe_blocks: None };
718
719    candidates.retain(|&candidate| validator.validate_candidate(candidate).is_ok());
720    candidates
721}
722
/// State used while extracting one promotion candidate out of `source` and
/// into a fresh constant MIR body (`promoted`).
struct Promoter<'a, 'tcx> {
    tcx: TyCtxt<'tcx>,
    /// The MIR body the candidate is being extracted from; statements and
    /// terminators are moved (or copied) out of it during promotion.
    source: &'a mut Body<'tcx>,
    /// The new constant MIR body being assembled.
    promoted: Body<'tcx>,
    /// Promotion state of each temp in `source`; temps moved into the promoted
    /// body are marked `TempState::PromotedOut` here.
    temps: &'a mut IndexVec<Local, TempState>,
    /// Statements to insert back into `source` once all candidates are done
    /// (insertion is deferred so statement indices stay valid).
    extra_statements: &'a mut Vec<(Location, Statement<'tcx>)>,

    /// Used to assemble the required_consts list while building the promoted.
    required_consts: Vec<ConstOperand<'tcx>>,

    /// If true, all nested temps are also kept in the
    /// source MIR, not moved to the promoted MIR.
    keep_original: bool,

    /// If true, add the new const (the promoted) to the required_consts of the parent MIR.
    /// This is initially false and then set by the visitor when it encounters a `Call` terminator.
    add_to_required: bool,
}
741
impl<'a, 'tcx> Promoter<'a, 'tcx> {
    /// Appends a fresh block (terminated by `Return`) to the promoted body and
    /// returns its index. `assign` always appends to the most recent block.
    fn new_block(&mut self) -> BasicBlock {
        let span = self.promoted.span;
        self.promoted.basic_blocks_mut().push(BasicBlockData::new(
            Some(Terminator {
                source_info: SourceInfo::outermost(span),
                kind: TerminatorKind::Return,
            }),
            false,
        ))
    }

    /// Pushes a `dest = rvalue` statement onto the last block of the promoted body.
    fn assign(&mut self, dest: Local, rvalue: Rvalue<'tcx>, span: Span) {
        let last = self.promoted.basic_blocks.last_index().unwrap();
        let data = &mut self.promoted[last];
        data.statements.push(Statement::new(
            SourceInfo::outermost(span),
            StatementKind::Assign(Box::new((Place::from(dest), rvalue))),
        ));
    }

    /// Whether `local` is a compiler-generated temporary in the source body
    /// (as opposed to a user variable, argument, or the return place).
    fn is_temp_kind(&self, local: Local) -> bool {
        self.source.local_kind(local) == LocalKind::Temp
    }

    /// Copies the initialization of this temp to the
    /// promoted MIR, recursing through temps.
    fn promote_temp(&mut self, temp: Local) -> Local {
        let old_keep_original = self.keep_original;
        let loc = match self.temps[temp] {
            TempState::Defined { location, uses, .. } if uses > 0 => {
                if uses > 1 {
                    // The temp has other uses left in the source MIR, so its
                    // initialization must be copied rather than moved out.
                    self.keep_original = true;
                }
                location
            }
            state => {
                span_bug!(self.promoted.span, "{:?} not promotable: {:?}", temp, state);
            }
        };
        if !self.keep_original {
            self.temps[temp] = TempState::PromotedOut;
        }

        let num_stmts = self.source[loc.block].statements.len();
        // Mirror the temp's declaration in the promoted body.
        let new_temp = self.promoted.local_decls.push(LocalDecl::new(
            self.source.local_decls[temp].ty,
            self.source.local_decls[temp].source_info.span,
        ));

        debug!("promote({:?} @ {:?}/{:?}, {:?})", temp, loc, num_stmts, self.keep_original);

        // First, take the Rvalue or Call out of the source MIR,
        // or duplicate it, depending on keep_original.
        if loc.statement_index < num_stmts {
            // The temp is initialized by an assignment statement.
            let (mut rvalue, source_info) = {
                let statement = &mut self.source[loc.block].statements[loc.statement_index];
                let StatementKind::Assign(box (_, rhs)) = &mut statement.kind else {
                    span_bug!(statement.source_info.span, "{:?} is not an assignment", statement);
                };

                (
                    if self.keep_original {
                        rhs.clone()
                    } else {
                        // Moving the rvalue out: leave a unit constant behind so
                        // the source statement stays well-formed.
                        let unit = Rvalue::Use(
                            Operand::Constant(Box::new(ConstOperand {
                                span: statement.source_info.span,
                                user_ty: None,
                                const_: Const::zero_sized(self.tcx.types.unit),
                            })),
                            WithRetag::Yes,
                        );
                        mem::replace(rhs, unit)
                    },
                    statement.source_info,
                )
            };

            // Recursively promote any temps the rvalue refers to (via `visit_local`).
            self.visit_rvalue(&mut rvalue, loc);
            self.assign(new_temp, rvalue, source_info.span);
        } else {
            // The temp is initialized by the block's terminator — this must be a `Call`.
            let terminator = if self.keep_original {
                self.source[loc.block].terminator().clone()
            } else {
                let terminator = self.source[loc.block].terminator_mut();
                let target = match &terminator.kind {
                    TerminatorKind::Call { target: Some(target), .. } => *target,
                    kind => {
                        span_bug!(terminator.source_info.span, "{:?} not promotable", kind);
                    }
                };
                // Take the call out of the source MIR, replacing it with a plain
                // jump to the call's old target.
                Terminator {
                    source_info: terminator.source_info,
                    kind: mem::replace(&mut terminator.kind, TerminatorKind::Goto { target }),
                }
            };

            match terminator.kind {
                TerminatorKind::Call {
                    mut func, mut args, call_source: desugar, fn_span, ..
                } => {
                    // This promoted involves a function call, so it may fail to evaluate. Let's
                    // make sure it is added to `required_consts` so that failure cannot get lost.
                    self.add_to_required = true;

                    // Recursively promote temps used by the callee and arguments.
                    self.visit_operand(&mut func, loc);
                    for arg in &mut args {
                        self.visit_operand(&mut arg.node, loc);
                    }

                    let last = self.promoted.basic_blocks.last_index().unwrap();
                    let new_target = self.new_block();

                    *self.promoted[last].terminator_mut() = Terminator {
                        kind: TerminatorKind::Call {
                            func,
                            args,
                            unwind: UnwindAction::Continue,
                            destination: Place::from(new_temp),
                            target: Some(new_target),
                            call_source: desugar,
                            fn_span,
                        },
                        source_info: SourceInfo::outermost(terminator.source_info.span),
                        ..terminator
                    };
                }
                kind => {
                    span_bug!(terminator.source_info.span, "{:?} not promotable", kind);
                }
            };
        };

        self.keep_original = old_keep_original;
        new_temp
    }

    /// Extracts `candidate` (a `Rvalue::Ref` assignment in the source body) into
    /// the promoted body and rewrites the source to borrow the promoted constant
    /// instead. Returns the finished promoted body.
    fn promote_candidate(
        mut self,
        candidate: Candidate,
        next_promoted_index: Promoted,
    ) -> Body<'tcx> {
        let def = self.source.source.def_id();
        let (mut rvalue, promoted_op) = {
            let promoted = &mut self.promoted;
            let tcx = self.tcx;
            // Builds the `ConstOperand` referring to the promoted body, and sets
            // the promoted body's span and return type.
            let mut promoted_operand = |ty, span| {
                promoted.span = span;
                promoted.local_decls[RETURN_PLACE] = LocalDecl::new(ty, span);
                let args =
                    tcx.erase_and_anonymize_regions(GenericArgs::identity_for_item(tcx, def));
                let uneval =
                    mir::UnevaluatedConst { def, args, promoted: Some(next_promoted_index) };

                ConstOperand { span, user_ty: None, const_: Const::Unevaluated(uneval, ty) }
            };

            let blocks = self.source.basic_blocks.as_mut();
            let local_decls = &mut self.source.local_decls;
            let loc = candidate.location;
            let statement = &mut blocks[loc.block].statements[loc.statement_index];
            let StatementKind::Assign(box (_, Rvalue::Ref(region, borrow_kind, place))) =
                &mut statement.kind
            else {
                bug!()
            };

            // Use the underlying local for this (necessarily interior) borrow.
            debug_assert!(region.is_erased());
            let ty = local_decls[place.local].ty;
            let span = statement.source_info.span;

            let ref_ty =
                Ty::new_ref(tcx, tcx.lifetimes.re_erased, ty, borrow_kind.to_mutbl_lossy());

            // The source borrow becomes a borrow through the promoted reference:
            // prepend a deref to its projection.
            let mut projection = vec![PlaceElem::Deref];
            projection.extend(place.projection);
            place.projection = tcx.mk_place_elems(&projection);

            // Create a temp to hold the promoted reference.
            // This is because `*r` requires `r` to be a local,
            // otherwise we would use the `promoted` directly.
            let mut promoted_ref = LocalDecl::new(ref_ty, span);
            promoted_ref.source_info = statement.source_info;
            let promoted_ref = local_decls.push(promoted_ref);
            // Keep `temps` in sync with `local_decls` — the new local is never itself promotable.
            assert_eq!(self.temps.push(TempState::Unpromotable), promoted_ref);

            let promoted_operand = promoted_operand(ref_ty, span);
            let promoted_ref_statement = Statement::new(
                statement.source_info,
                StatementKind::Assign(Box::new((
                    Place::from(promoted_ref),
                    // We can retag here because we wouldn't promote non-retagged values (they get
                    // rejected in validate_rvalue).
                    Rvalue::Use(Operand::Constant(Box::new(promoted_operand)), WithRetag::Yes),
                ))),
            );
            self.extra_statements.push((loc, promoted_ref_statement));

            (
                // The rvalue to be promoted: a borrow of the original local.
                // The source statement's place is redirected to the new
                // `promoted_ref` local here via `mem::replace`.
                Rvalue::Ref(
                    tcx.lifetimes.re_erased,
                    *borrow_kind,
                    Place {
                        local: mem::replace(&mut place.local, promoted_ref),
                        projection: List::empty(),
                    },
                ),
                promoted_operand,
            )
        };

        assert_eq!(self.new_block(), START_BLOCK);
        // Visiting the rvalue pulls the borrowed temp (and its dependencies)
        // into the promoted body. The statement index is a dummy; it is only
        // used for debug output.
        self.visit_rvalue(
            &mut rvalue,
            Location { block: START_BLOCK, statement_index: usize::MAX },
        );

        let span = self.promoted.span;
        self.assign(RETURN_PLACE, rvalue, span);

        // Now that we did promotion, we know whether we'll want to add this to `required_consts` of
        // the surrounding MIR body.
        if self.add_to_required {
            self.source.required_consts.as_mut().unwrap().push(promoted_op);
        }

        self.promoted.set_required_consts(self.required_consts);

        self.promoted
    }
}
975
976/// Replaces all temporaries with their promoted counterparts.
977impl<'a, 'tcx> MutVisitor<'tcx> for Promoter<'a, 'tcx> {
978    fn tcx(&self) -> TyCtxt<'tcx> {
979        self.tcx
980    }
981
982    fn visit_local(&mut self, local: &mut Local, _: PlaceContext, _: Location) {
983        if self.is_temp_kind(*local) {
984            *local = self.promote_temp(*local);
985        }
986    }
987
988    fn visit_const_operand(&mut self, constant: &mut ConstOperand<'tcx>, _location: Location) {
989        if constant.const_.is_required_const() {
990            self.required_consts.push(*constant);
991        }
992
993        // Skipping `super_constant` as the visitor is otherwise only looking for locals.
994    }
995}
996
/// Promotes every validated candidate out of `body`, returning the new promoted
/// bodies. Also patches `body` in place: inserts the deferred extra statements
/// and removes assignments to / drops of temps that were promoted out.
fn promote_candidates<'tcx>(
    body: &mut Body<'tcx>,
    tcx: TyCtxt<'tcx>,
    mut temps: IndexVec<Local, TempState>,
    candidates: Vec<Candidate>,
) -> IndexVec<Promoted, Body<'tcx>> {
    // Visit candidates in reverse, in case they're nested.
    debug!(promote_candidates = ?candidates);

    // eagerly fail fast
    if candidates.is_empty() {
        return IndexVec::new();
    }

    let mut promotions = IndexVec::new();

    let mut extra_statements = vec![];
    for candidate in candidates.into_iter().rev() {
        let Location { block, statement_index } = candidate.location;
        if let StatementKind::Assign(box (place, _)) = &body[block].statements[statement_index].kind
            && let Some(local) = place.as_local()
        {
            if temps[local] == TempState::PromotedOut {
                // Already promoted.
                continue;
            }
        }

        // Declare return place local so that `mir::Body::new` doesn't complain.
        // The real return type is filled in by `promote_candidate`.
        let initial_locals = iter::once(LocalDecl::new(tcx.types.never, body.span)).collect();

        // Give the promoted body a single root scope derived from the candidate's.
        let mut scope = body.source_scopes[body.source_info(candidate.location).scope].clone();
        scope.parent_scope = None;

        let mut promoted = Body::new(
            body.source, // `promoted` gets filled in below
            IndexVec::new(),
            IndexVec::from_elem_n(scope, 1),
            initial_locals,
            IndexVec::new(),
            0,
            vec![],
            body.span,
            None,
            body.tainted_by_errors,
        );
        promoted.phase = MirPhase::Analysis(AnalysisPhase::Initial);

        let promoter = Promoter {
            promoted,
            tcx,
            source: body,
            temps: &mut temps,
            extra_statements: &mut extra_statements,
            keep_original: false,
            add_to_required: false,
            required_consts: Vec::new(),
        };

        let mut promoted = promoter.promote_candidate(candidate, promotions.next_index());
        promoted.source.promoted = Some(promotions.next_index());
        promotions.push(promoted);
    }

    // Insert each of `extra_statements` before its indicated location, which
    // has to be done in reverse location order, to not invalidate the rest.
    extra_statements.sort_by_key(|&(loc, _)| cmp::Reverse(loc));
    for (loc, statement) in extra_statements {
        body[loc.block].statements.insert(loc.statement_index, statement);
    }

    // Eliminate assignments to, and drops of promoted temps.
    let promoted = |index: Local| temps[index] == TempState::PromotedOut;
    for block in body.basic_blocks_mut() {
        block.retain_statements(|statement| match &statement.kind {
            StatementKind::Assign(box (place, _)) => {
                if let Some(index) = place.as_local() {
                    !promoted(index)
                } else {
                    true
                }
            }
            StatementKind::StorageLive(index) | StatementKind::StorageDead(index) => {
                !promoted(*index)
            }
            _ => true,
        });
        let terminator = block.terminator_mut();
        // A drop of a promoted-out temp becomes a plain jump to its target.
        if let TerminatorKind::Drop { place, target, .. } = &terminator.kind
            && let Some(index) = place.as_local()
        {
            if promoted(index) {
                terminator.kind = TerminatorKind::Goto { target: *target };
            }
        }
    }

    promotions
}