// compiler/rustc_mir_transform/src/promote_consts.rs

1//! A pass that promotes borrows of constant rvalues.
2//!
3//! The rvalues considered constant are trees of temps, each with exactly one
4//! initialization, and holding a constant value with no interior mutability.
5//! They are placed into a new MIR constant body in `promoted` and the borrow
6//! rvalue is replaced with a `Literal::Promoted` using the index into
7//! `promoted` of that constant MIR.
8//!
9//! This pass assumes that every use is dominated by an initialization and can
10//! otherwise silence errors, if move analysis runs after promotion on broken
11//! MIR.
12
13use std::cell::Cell;
14use std::{cmp, iter, mem};
15
16use either::{Left, Right};
17use rustc_const_eval::check_consts::{ConstCx, qualifs};
18use rustc_data_structures::assert_matches;
19use rustc_data_structures::fx::FxHashSet;
20use rustc_hir as hir;
21use rustc_hir::def::DefKind;
22use rustc_index::{IndexSlice, IndexVec};
23use rustc_middle::mir::visit::{MutVisitor, MutatingUseContext, PlaceContext, Visitor};
24use rustc_middle::mir::*;
25use rustc_middle::ty::{self, GenericArgs, List, Ty, TyCtxt, TypeVisitableExt};
26use rustc_middle::{bug, mir, span_bug};
27use rustc_span::Span;
28use rustc_span::source_map::Spanned;
29use tracing::{debug, instrument};
30
/// A `MirPass` for promotion.
///
/// Promotion is the extraction of promotable temps into separate MIR bodies so they can have
/// `'static` lifetime.
///
/// After this pass is run, `promoted_fragments` will hold the MIR body corresponding to each
/// newly created `Constant`.
#[derive(Default)]
pub(super) struct PromoteTemps<'tcx> {
    // Must use `Cell` because `run_pass` takes `&self`, not `&mut self`.
    pub promoted_fragments: Cell<IndexVec<Promoted, Body<'tcx>>>,
}
43
44impl<'tcx> crate::MirPass<'tcx> for PromoteTemps<'tcx> {
45    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
46        // There's not really any point in promoting errorful MIR.
47        //
48        // This does not include MIR that failed const-checking, which we still try to promote.
49        if let Err(_) = body.return_ty().error_reported() {
50            debug!("PromoteTemps: MIR had errors");
51            return;
52        }
53        if body.source.promoted.is_some() {
54            return;
55        }
56
57        let ccx = ConstCx::new(tcx, body);
58        let (mut temps, all_candidates) = collect_temps_and_candidates(&ccx);
59
60        let promotable_candidates = validate_candidates(&ccx, &mut temps, all_candidates);
61
62        let promoted = promote_candidates(body, tcx, temps, promotable_candidates);
63        self.promoted_fragments.set(promoted);
64    }
65
66    fn is_required(&self) -> bool {
67        true
68    }
69}
70
/// State of a temporary during collection and promotion.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
enum TempState {
    /// No references to this temp.
    Undefined,
    /// One direct assignment and any number of direct uses.
    /// A borrow of this temp is promotable if the assigned
    /// value is qualified as constant.
    /// `valid` memoizes the outcome of `Validator::validate_local`.
    Defined { location: Location, uses: usize, valid: Result<(), ()> },
    /// Any other combination of assignments/uses.
    Unpromotable,
    /// This temp was part of an rvalue which got extracted
    /// during promotion and needs cleanup.
    PromotedOut,
}
86
/// A "root candidate" for promotion, which will become the
/// returned value in a promoted MIR, unless it's a subset
/// of a larger candidate.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
struct Candidate {
    /// Location of the `Rvalue::Ref` assignment this candidate was collected from.
    location: Location,
}
94
/// MIR visitor that records the `TempState` of every local and every
/// `Rvalue::Ref` location as a promotion candidate.
struct Collector<'a, 'tcx> {
    ccx: &'a ConstCx<'a, 'tcx>,
    /// Per-local promotion state, indexed by `Local`.
    temps: IndexVec<Local, TempState>,
    /// Locations of all `Rvalue::Ref`s seen so far (candidate roots).
    candidates: Vec<Candidate>,
}
100
101impl<'tcx> Visitor<'tcx> for Collector<'_, 'tcx> {
102    #[instrument(level = "debug", skip(self))]
103    fn visit_local(&mut self, index: Local, context: PlaceContext, location: Location) {
104        // We're only interested in temporaries and the return place
105        match self.ccx.body.local_kind(index) {
106            LocalKind::Arg => return,
107            LocalKind::Temp if self.ccx.body.local_decls[index].is_user_variable() => return,
108            LocalKind::ReturnPointer | LocalKind::Temp => {}
109        }
110
111        // Ignore drops, if the temp gets promoted,
112        // then it's constant and thus drop is noop.
113        // Non-uses are also irrelevant.
114        if context.is_drop() || !context.is_use() {
115            debug!(is_drop = context.is_drop(), is_use = context.is_use());
116            return;
117        }
118
119        let temp = &mut self.temps[index];
120        debug!(?temp);
121        *temp = match *temp {
122            TempState::Undefined => match context {
123                PlaceContext::MutatingUse(MutatingUseContext::Store | MutatingUseContext::Call) => {
124                    TempState::Defined { location, uses: 0, valid: Err(()) }
125                }
126                _ => TempState::Unpromotable,
127            },
128            TempState::Defined { ref mut uses, .. } => {
129                // We always allow borrows, even mutable ones, as we need
130                // to promote mutable borrows of some ZSTs e.g., `&mut []`.
131                let allowed_use = match context {
132                    PlaceContext::MutatingUse(MutatingUseContext::Borrow)
133                    | PlaceContext::NonMutatingUse(_) => true,
134                    PlaceContext::MutatingUse(_) | PlaceContext::NonUse(_) => false,
135                };
136                debug!(?allowed_use);
137                if allowed_use {
138                    *uses += 1;
139                    return;
140                }
141                TempState::Unpromotable
142            }
143            TempState::Unpromotable | TempState::PromotedOut => TempState::Unpromotable,
144        };
145        debug!(?temp);
146    }
147
148    fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
149        self.super_rvalue(rvalue, location);
150
151        if let Rvalue::Ref(..) = *rvalue {
152            self.candidates.push(Candidate { location });
153        }
154    }
155}
156
157fn collect_temps_and_candidates<'tcx>(
158    ccx: &ConstCx<'_, 'tcx>,
159) -> (IndexVec<Local, TempState>, Vec<Candidate>) {
160    let mut collector = Collector {
161        temps: IndexVec::from_elem(TempState::Undefined, &ccx.body.local_decls),
162        candidates: vec![],
163        ccx,
164    };
165    for (bb, data) in traversal::reverse_postorder(ccx.body) {
166        collector.visit_basic_block_data(bb, data);
167    }
168    (collector.temps, collector.candidates)
169}
170
/// Checks whether locals that appear in a promotion context (`Candidate`) are actually promotable.
///
/// This wraps an `Item`, and has access to all fields of that `Item` via `Deref` coercion.
struct Validator<'a, 'tcx> {
    ccx: &'a ConstCx<'a, 'tcx>,
    /// Mutable so that validation results can be memoized
    /// (see `TempState::Defined::valid`, updated in `validate_local`).
    temps: &'a mut IndexSlice<Local, TempState>,
    /// For backwards compatibility, we are promoting function calls in `const`/`static`
    /// initializers. But we want to avoid evaluating code that might panic and that otherwise would
    /// not have been evaluated, so we only promote such calls in basic blocks that are guaranteed
    /// to execute. In other words, we only promote such calls in basic blocks that are definitely
    /// not dead code. Here we cache the result of computing that set of basic blocks.
    promotion_safe_blocks: Option<FxHashSet<BasicBlock>>,
}
184
// Gives the validator direct access to `ConstCx` fields (`body`, `tcx`,
// `const_kind`, ...) via auto-deref.
impl<'a, 'tcx> std::ops::Deref for Validator<'a, 'tcx> {
    type Target = ConstCx<'a, 'tcx>;

    fn deref(&self) -> &Self::Target {
        self.ccx
    }
}

/// Zero-sized error type: "this candidate (or a part of it) cannot be promoted".
struct Unpromotable;
194
195impl<'tcx> Validator<'_, 'tcx> {
    /// Checks a single collected `Rvalue::Ref` candidate for promotability.
    fn validate_candidate(&mut self, candidate: Candidate) -> Result<(), Unpromotable> {
        // Candidates are only ever collected from `Rvalue::Ref` assignment statements.
        let Left(statement) = self.body.stmt_at(candidate.location) else { bug!() };
        let Some((_, Rvalue::Ref(_, kind, place))) = statement.kind.as_assign() else { bug!() };

        // We can only promote interior borrows of promotable temps (non-temps
        // don't get promoted anyway).
        self.validate_local(place.local)?;

        // The reference operation itself must be promotable.
        // (Needs to come after `validate_local` to avoid ICEs.)
        self.validate_ref(*kind, place)?;

        // We do not check all the projections (they do not get promoted anyway),
        // but we do stay away from promoting anything involving a dereference.
        if place.projection.contains(&ProjectionElem::Deref) {
            return Err(Unpromotable);
        }

        Ok(())
    }
216
    // FIXME(eddyb) maybe cache this?
    /// Computes the qualif `Q` (e.g. `NeedsDrop`, `HasMutInterior`) for `local`
    /// by inspecting its single initializing statement or `Call` terminator.
    fn qualif_local<Q: qualifs::Qualif>(&mut self, local: Local) -> bool {
        // Only single-assignment temps are tracked precisely; anything else is
        // unpromotable anyway, so conservatively report "no qualif".
        let TempState::Defined { location: loc, .. } = self.temps[local] else {
            return false;
        };

        let stmt_or_term = self.body.stmt_at(loc);
        match stmt_or_term {
            Left(statement) => {
                let Some((_, rhs)) = statement.kind.as_assign() else {
                    span_bug!(statement.source_info.span, "{:?} is not an assignment", statement)
                };
                // Recurse into the operands of the defining rvalue.
                qualifs::in_rvalue::<Q, _>(self.ccx, &mut |l| self.qualif_local::<Q>(l), rhs)
            }
            Right(terminator) => {
                assert_matches!(terminator.kind, TerminatorKind::Call { .. });
                // For a call we cannot look at the value, so fall back to the
                // qualif implied by the return type alone.
                let return_ty = self.body.local_decls[local].ty;
                Q::in_any_value_of_ty(self.ccx, return_ty)
            }
        }
    }
238
    /// Checks that `local` is a single-assignment temp whose defining
    /// statement/terminator is itself promotable. The outcome is memoized in
    /// `self.temps` (the `valid` field), so each temp is validated once.
    fn validate_local(&mut self, local: Local) -> Result<(), Unpromotable> {
        let TempState::Defined { location: loc, uses, valid } = self.temps[local] else {
            return Err(Unpromotable);
        };

        // We cannot promote things that need dropping, since the promoted value would not get
        // dropped.
        if self.qualif_local::<qualifs::NeedsDrop>(local) {
            return Err(Unpromotable);
        }

        // Already validated successfully on an earlier call.
        if valid.is_ok() {
            return Ok(());
        }

        let ok = {
            let stmt_or_term = self.body.stmt_at(loc);
            match stmt_or_term {
                Left(statement) => {
                    let Some((_, rhs)) = statement.kind.as_assign() else {
                        span_bug!(
                            statement.source_info.span,
                            "{:?} is not an assignment",
                            statement
                        )
                    };
                    self.validate_rvalue(rhs)
                }
                Right(terminator) => match &terminator.kind {
                    TerminatorKind::Call { func, args, .. } => {
                        self.validate_call(func, args, loc.block)
                    }
                    TerminatorKind::Yield { .. } => Err(Unpromotable),
                    kind => {
                        span_bug!(terminator.source_info.span, "{:?} not promotable", kind);
                    }
                },
            }
        };

        // Memoize: success is cached in `valid`; failure downgrades the temp
        // to `Unpromotable` outright.
        self.temps[local] = match ok {
            Ok(()) => TempState::Defined { location: loc, uses, valid: Ok(()) },
            Err(_) => TempState::Unpromotable,
        };

        ok
    }
286
    /// Validates a place by checking its outermost projection and then
    /// recursing into the base; a bare local bottoms out in `validate_local`.
    fn validate_place(&mut self, place: PlaceRef<'tcx>) -> Result<(), Unpromotable> {
        let Some((place_base, elem)) = place.last_projection() else {
            return self.validate_local(place.local);
        };

        // Validate topmost projection, then recurse.
        match elem {
            // Recurse directly.
            ProjectionElem::ConstantIndex { .. }
            | ProjectionElem::Subslice { .. }
            | ProjectionElem::UnwrapUnsafeBinder(_) => {}

            // Never recurse.
            ProjectionElem::OpaqueCast(..) | ProjectionElem::Downcast(..) => {
                return Err(Unpromotable);
            }

            ProjectionElem::Deref => {
                // When a static is used by-value, that gets desugared to `*STATIC_ADDR`,
                // and we need to be able to promote this. So check if this deref matches
                // that specific pattern.

                // We need to make sure this is a `Deref` of a local with no further projections.
                // Discussion can be found at
                // https://github.com/rust-lang/rust/pull/74945#discussion_r463063247
                if let Some(local) = place_base.as_local()
                    && let TempState::Defined { location, .. } = self.temps[local]
                    && let Left(def_stmt) = self.body.stmt_at(location)
                    && let Some((_, Rvalue::Use(Operand::Constant(c)))) = def_stmt.kind.as_assign()
                    && let Some(did) = c.check_static_ptr(self.tcx)
                    // Evaluating a promoted may not read statics except if it got
                    // promoted from a static (this is a CTFE check). So we
                    // can only promote static accesses inside statics.
                    && let Some(hir::ConstContext::Static(..)) = self.const_kind
                    && !self.tcx.is_thread_local_static(did)
                {
                    // Recurse.
                } else {
                    return Err(Unpromotable);
                }
            }
            ProjectionElem::Index(local) => {
                // Only accept if we can predict the index and are indexing an array.
                if let TempState::Defined { location: loc, .. } = self.temps[local]
                    && let Left(statement) = self.body.stmt_at(loc)
                    && let Some((_, Rvalue::Use(Operand::Constant(c)))) = statement.kind.as_assign()
                    && self.should_evaluate_for_promotion_checks(c.const_)
                    && let Some(idx) = c.const_.try_eval_target_usize(self.tcx, self.typing_env)
                    // Determine the type of the thing we are indexing.
                    && let ty::Array(_, len) = place_base.ty(self.body, self.tcx).ty.kind()
                    // It's an array; determine its length.
                    && let Some(len) = len.try_to_target_usize(self.tcx)
                    // If the index is in-bounds, go ahead.
                    && idx < len
                {
                    self.validate_local(local)?;
                    // Recurse.
                } else {
                    return Err(Unpromotable);
                }
            }

            ProjectionElem::Field(..) => {
                let base_ty = place_base.ty(self.body, self.tcx).ty;
                if base_ty.is_union() {
                    // No promotion of union field accesses.
                    return Err(Unpromotable);
                }
            }
        }

        self.validate_place(place_base)
    }
360
    /// Validates an operand: place operands recurse into `validate_place`;
    /// constants are checked for (thread-local) static references.
    fn validate_operand(&mut self, operand: &Operand<'tcx>) -> Result<(), Unpromotable> {
        match operand {
            Operand::Copy(place) | Operand::Move(place) => self.validate_place(place.as_ref()),

            // `RuntimeChecks` behaves different in const-eval and runtime MIR,
            // so we do not promote it.
            Operand::RuntimeChecks(_) => Err(Unpromotable),

            // The qualifs for a constant (e.g. `HasMutInterior`) are checked in
            // `validate_rvalue` upon access.
            Operand::Constant(c) => {
                if let Some(def_id) = c.check_static_ptr(self.tcx) {
                    // Only allow statics (not consts) to refer to other statics.
                    // FIXME(eddyb) does this matter at all for promotion?
                    // FIXME(RalfJung) it makes little sense to not promote this in `fn`/`const fn`,
                    // and in `const` this cannot occur anyway. The only concern is that we might
                    // promote even `let x = &STATIC` which would be useless, but this applies to
                    // promotion inside statics as well.
                    let is_static = matches!(self.const_kind, Some(hir::ConstContext::Static(_)));
                    if !is_static {
                        return Err(Unpromotable);
                    }

                    let is_thread_local = self.tcx.is_thread_local_static(def_id);
                    if is_thread_local {
                        return Err(Unpromotable);
                    }
                }

                Ok(())
            }
        }
    }
394
    /// Checks that the borrow itself (not the borrowed place) is promotable:
    /// shared borrows must not see interior mutability, mutable borrows are
    /// only allowed for empty arrays.
    fn validate_ref(&mut self, kind: BorrowKind, place: &Place<'tcx>) -> Result<(), Unpromotable> {
        match kind {
            // Reject these borrow types just to be safe.
            // FIXME(RalfJung): could we allow them? Should we? No point in it until we have a
            // usecase.
            BorrowKind::Fake(_) | BorrowKind::Mut { kind: MutBorrowKind::ClosureCapture } => {
                return Err(Unpromotable);
            }

            BorrowKind::Shared => {
                let has_mut_interior = self.qualif_local::<qualifs::HasMutInterior>(place.local);
                if has_mut_interior {
                    return Err(Unpromotable);
                }
            }

            // FIXME: consider changing this to only promote &mut [] for default borrows,
            // also forbidding two phase borrows
            BorrowKind::Mut { kind: MutBorrowKind::Default | MutBorrowKind::TwoPhaseBorrow } => {
                let ty = place.ty(self.body, self.tcx).ty;

                // In theory, any zero-sized value could be borrowed
                // mutably without consequences. However, only &mut []
                // is allowed right now.
                let ty::Array(_, len) = ty.kind() else { return Err(Unpromotable) };
                let Some(0) = len.try_to_target_usize(self.tcx) else { return Err(Unpromotable) };
            }
        }

        Ok(())
    }
426
    /// Checks whether the rvalue that initializes a promotable temp can itself
    /// be evaluated at compile time without failing (no div-by-zero, no
    /// pointer ops, no thread-locals, ...).
    fn validate_rvalue(&mut self, rvalue: &Rvalue<'tcx>) -> Result<(), Unpromotable> {
        match rvalue {
            Rvalue::Use(operand)
            | Rvalue::Repeat(operand, _)
            | Rvalue::WrapUnsafeBinder(operand, _) => {
                self.validate_operand(operand)?;
            }
            Rvalue::CopyForDeref(place) => {
                // Treated exactly like a copy of the place.
                let op = &Operand::Copy(*place);
                self.validate_operand(op)?
            }

            Rvalue::Discriminant(place) => self.validate_place(place.as_ref())?,

            Rvalue::ThreadLocalRef(_) => return Err(Unpromotable),

            // ptr-to-int casts are not possible in consts and thus not promotable
            Rvalue::Cast(CastKind::PointerExposeProvenance, _, _) => return Err(Unpromotable),

            // all other casts including int-to-ptr casts are fine, they just use the integer value
            // at pointer type.
            Rvalue::Cast(_, operand, _) => {
                self.validate_operand(operand)?;
            }

            Rvalue::ShallowInitBox(_, _) => return Err(Unpromotable),

            Rvalue::UnaryOp(op, operand) => {
                match op {
                    // These operations can never fail.
                    UnOp::Neg | UnOp::Not | UnOp::PtrMetadata => {}
                }

                self.validate_operand(operand)?;
            }

            Rvalue::BinaryOp(op, box (lhs, rhs)) => {
                let op = *op;
                let lhs_ty = lhs.ty(self.body, self.tcx);

                if let ty::RawPtr(_, _) | ty::FnPtr(..) = lhs_ty.kind() {
                    // Raw and fn pointer operations are not allowed inside consts and thus not
                    // promotable.
                    assert_matches!(
                        op,
                        BinOp::Eq
                            | BinOp::Ne
                            | BinOp::Le
                            | BinOp::Lt
                            | BinOp::Ge
                            | BinOp::Gt
                            | BinOp::Offset
                    );
                    return Err(Unpromotable);
                }

                match op {
                    BinOp::Div | BinOp::Rem => {
                        if lhs_ty.is_integral() {
                            let sz = lhs_ty.primitive_size(self.tcx);
                            // Integer division: the RHS must be a non-zero const.
                            let rhs_val = if let Operand::Constant(rhs_c) = rhs
                                && self.should_evaluate_for_promotion_checks(rhs_c.const_)
                                && let Some(rhs_val) =
                                    rhs_c.const_.try_eval_scalar_int(self.tcx, self.typing_env)
                                // for the zero test, int vs uint does not matter
                                && rhs_val.to_uint(sz) != 0
                            {
                                rhs_val
                            } else {
                                // value not known or 0 -- not okay
                                return Err(Unpromotable);
                            };
                            // Furthermore, for signed division, we also have to exclude `int::MIN /
                            // -1`.
                            if lhs_ty.is_signed() && rhs_val.to_int(sz) == -1 {
                                // The RHS is -1, so we have to be careful. But is the LHS int::MIN?
                                if let Operand::Constant(lhs_c) = lhs
                                    && self.should_evaluate_for_promotion_checks(lhs_c.const_)
                                    && let Some(lhs_val) =
                                        lhs_c.const_.try_eval_scalar_int(self.tcx, self.typing_env)
                                    && let lhs_min = sz.signed_int_min()
                                    && lhs_val.to_int(sz) != lhs_min
                                {
                                    // okay
                                } else {
                                    // value not known or int::MIN -- not okay
                                    return Err(Unpromotable);
                                }
                            }
                        }
                    }
                    // The remaining operations can never fail.
                    BinOp::Eq
                    | BinOp::Ne
                    | BinOp::Le
                    | BinOp::Lt
                    | BinOp::Ge
                    | BinOp::Gt
                    | BinOp::Cmp
                    | BinOp::Offset
                    | BinOp::Add
                    | BinOp::AddUnchecked
                    | BinOp::AddWithOverflow
                    | BinOp::Sub
                    | BinOp::SubUnchecked
                    | BinOp::SubWithOverflow
                    | BinOp::Mul
                    | BinOp::MulUnchecked
                    | BinOp::MulWithOverflow
                    | BinOp::BitXor
                    | BinOp::BitAnd
                    | BinOp::BitOr
                    | BinOp::Shl
                    | BinOp::ShlUnchecked
                    | BinOp::Shr
                    | BinOp::ShrUnchecked => {}
                }

                self.validate_operand(lhs)?;
                self.validate_operand(rhs)?;
            }

            Rvalue::RawPtr(_, place) => {
                // We accept `&raw *`, i.e., raw reborrows -- creating a raw pointer is
                // no problem, only using it is.
                if let Some((place_base, ProjectionElem::Deref)) = place.as_ref().last_projection()
                {
                    let base_ty = place_base.ty(self.body, self.tcx).ty;
                    if let ty::Ref(..) = base_ty.kind() {
                        return self.validate_place(place_base);
                    }
                }
                return Err(Unpromotable);
            }

            Rvalue::Ref(_, kind, place) => {
                // Special-case reborrows to be more like a copy of the reference.
                let mut place_simplified = place.as_ref();
                if let Some((place_base, ProjectionElem::Deref)) =
                    place_simplified.last_projection()
                {
                    let base_ty = place_base.ty(self.body, self.tcx).ty;
                    if let ty::Ref(..) = base_ty.kind() {
                        place_simplified = place_base;
                    }
                }

                self.validate_place(place_simplified)?;

                // Check that the reference is fine (using the original place!).
                // (Needs to come after `validate_place` to avoid ICEs.)
                self.validate_ref(*kind, place)?;
            }

            Rvalue::Aggregate(_, operands) => {
                for o in operands {
                    self.validate_operand(o)?;
                }
            }
        }

        Ok(())
    }
591
592    /// Computes the sets of blocks of this MIR that are definitely going to be executed
593    /// if the function returns successfully. That makes it safe to promote calls in them
594    /// that might fail.
595    fn promotion_safe_blocks(body: &mir::Body<'tcx>) -> FxHashSet<BasicBlock> {
596        let mut safe_blocks = FxHashSet::default();
597        let mut safe_block = START_BLOCK;
598        loop {
599            safe_blocks.insert(safe_block);
600            // Let's see if we can find another safe block.
601            safe_block = match body.basic_blocks[safe_block].terminator().kind {
602                TerminatorKind::Goto { target } => target,
603                TerminatorKind::Call { target: Some(target), .. }
604                | TerminatorKind::Drop { target, .. } => {
605                    // This calls a function or the destructor. `target` does not get executed if
606                    // the callee loops or panics. But in both cases the const already fails to
607                    // evaluate, so we are fine considering `target` a safe block for promotion.
608                    target
609                }
610                TerminatorKind::Assert { target, .. } => {
611                    // Similar to above, we only consider successful execution.
612                    target
613                }
614                _ => {
615                    // No next safe block.
616                    break;
617                }
618            };
619        }
620        safe_blocks
621    }
622
623    /// Returns whether the block is "safe" for promotion, which means it cannot be dead code.
624    /// We use this to avoid promoting operations that can fail in dead code.
625    fn is_promotion_safe_block(&mut self, block: BasicBlock) -> bool {
626        let body = self.body;
627        let safe_blocks =
628            self.promotion_safe_blocks.get_or_insert_with(|| Self::promotion_safe_blocks(body));
629        safe_blocks.contains(&block)
630    }
631
    /// Checks whether a `Call` terminator initializing a temp may be promoted:
    /// either the callee is `#[rustc_promotable]`, or (for backwards
    /// compatibility) it is a `const fn` called from a non-inline const/static
    /// initializer in a definitely-executed block.
    fn validate_call(
        &mut self,
        callee: &Operand<'tcx>,
        args: &[Spanned<Operand<'tcx>>],
        block: BasicBlock,
    ) -> Result<(), Unpromotable> {
        // Validate the operands. If they fail, there's no question -- we cannot promote.
        self.validate_operand(callee)?;
        for arg in args {
            self.validate_operand(&arg.node)?;
        }

        // Functions marked `#[rustc_promotable]` are explicitly allowed to be promoted, so we can
        // accept them at this point.
        let fn_ty = callee.ty(self.body, self.tcx);
        if let ty::FnDef(def_id, _) = *fn_ty.kind() {
            if self.tcx.is_promotable_const_fn(def_id) {
                return Ok(());
            }
        }

        // Ideally, we'd stop here and reject the rest.
        // But for backward compatibility, we have to accept some promotion in const/static
        // initializers. Inline consts are explicitly excluded, they are more recent so we have no
        // backwards compatibility reason to allow more promotion inside of them.
        let promote_all_fn = matches!(
            self.const_kind,
            Some(hir::ConstContext::Static(_) | hir::ConstContext::Const { inline: false })
        );
        if !promote_all_fn {
            return Err(Unpromotable);
        }
        // Make sure the callee is a `const fn`.
        let is_const_fn = match *fn_ty.kind() {
            ty::FnDef(def_id, _) => self.tcx.is_const_fn(def_id),
            _ => false,
        };
        if !is_const_fn {
            return Err(Unpromotable);
        }
        // The problem is, this may promote calls to functions that panic.
        // We don't want to introduce compilation errors if there's a panic in a call in dead code.
        // So we ensure that this is not dead code.
        if !self.is_promotion_safe_block(block) {
            return Err(Unpromotable);
        }
        // This passed all checks, so let's accept.
        Ok(())
    }
681
    /// Can we try to evaluate a given constant at this point in compilation? Attempting to evaluate
    /// a const block before borrow-checking will result in a query cycle (#150464).
    fn should_evaluate_for_promotion_checks(&self, constant: Const<'tcx>) -> bool {
        match constant {
            // `Const::Ty` is always a `ConstKind::Param` right now and that can never be turned
            // into a mir value for promotion
            // FIXME(mgca): do we want uses of type_const to be normalized during promotion?
            Const::Ty(..) => false,
            // Already-evaluated values are always safe to inspect.
            Const::Val(..) => true,
            // Evaluating a MIR constant requires borrow-checking it. For inline consts, as of
            // #138499, this means borrow-checking its typeck root. Since borrow-checking the
            // typeck root requires promoting its constants, trying to evaluate an inline const here
            // will result in a query cycle. To avoid the cycle, we can't evaluate const blocks yet.
            // Other kinds of unevaluated's can cause query cycles too when they arise from
            // self-reference in user code; e.g. evaluating a constant can require evaluating a
            // const function that uses that constant, again requiring evaluation of the constant.
            // However, this form of cycle renders both the constant and function unusable in
            // general, so we don't need to special-case it here.
            Const::Unevaluated(uc, _) => self.tcx.def_kind(uc.def) != DefKind::InlineConst,
        }
    }
703}
704
705fn validate_candidates(
706    ccx: &ConstCx<'_, '_>,
707    temps: &mut IndexSlice<Local, TempState>,
708    mut candidates: Vec<Candidate>,
709) -> Vec<Candidate> {
710    let mut validator = Validator { ccx, temps, promotion_safe_blocks: None };
711
712    candidates.retain(|&candidate| validator.validate_candidate(candidate).is_ok());
713    candidates
714}
715
// State for promoting one candidate: moves (or copies) temps out of `source`
// into the new constant body `promoted`.
struct Promoter<'a, 'tcx> {
    tcx: TyCtxt<'tcx>,
    // The body promotion is performed on; its statements/terminators are edited in place.
    source: &'a mut Body<'tcx>,
    // The new constant MIR body being assembled.
    promoted: Body<'tcx>,
    // Per-temp promotion state of `source`; temps moved out are marked `PromotedOut`.
    temps: &'a mut IndexVec<Local, TempState>,
    // Statements to splice back into `source` once all candidates are promoted
    // (inserted later, in reverse location order, by `promote_candidates`).
    extra_statements: &'a mut Vec<(Location, Statement<'tcx>)>,

    /// Used to assemble the required_consts list while building the promoted.
    required_consts: Vec<ConstOperand<'tcx>>,

    /// If true, all nested temps are also kept in the
    /// source MIR, not moved to the promoted MIR.
    keep_original: bool,

    /// If true, add the new const (the promoted) to the required_consts of the parent MIR.
    /// This is initially false and then set by the visitor when it encounters a `Call` terminator.
    add_to_required: bool,
}
734
impl<'a, 'tcx> Promoter<'a, 'tcx> {
    /// Appends a fresh, empty basic block to the promoted body, terminated by
    /// `Return`, and returns its index.
    fn new_block(&mut self) -> BasicBlock {
        let span = self.promoted.span;
        self.promoted.basic_blocks_mut().push(BasicBlockData::new(
            Some(Terminator {
                source_info: SourceInfo::outermost(span),
                kind: TerminatorKind::Return,
            }),
            false,
        ))
    }

    /// Pushes a `dest = rvalue` statement onto the end of the promoted body's
    /// last basic block.
    fn assign(&mut self, dest: Local, rvalue: Rvalue<'tcx>, span: Span) {
        let last = self.promoted.basic_blocks.last_index().unwrap();
        let data = &mut self.promoted[last];
        data.statements.push(Statement::new(
            SourceInfo::outermost(span),
            StatementKind::Assign(Box::new((Place::from(dest), rvalue))),
        ));
    }

    /// Whether `local` is a compiler-generated temporary in the source body.
    fn is_temp_kind(&self, local: Local) -> bool {
        self.source.local_kind(local) == LocalKind::Temp
    }

    /// Copies the initialization of this temp to the
    /// promoted MIR, recursing through temps.
    ///
    /// Returns the local in the promoted body that now holds the temp's value.
    fn promote_temp(&mut self, temp: Local) -> Local {
        let old_keep_original = self.keep_original;
        let loc = match self.temps[temp] {
            TempState::Defined { location, uses, .. } if uses > 0 => {
                if uses > 1 {
                    // The temp has other uses in the source MIR, so its
                    // initializer (and, transitively, everything it depends on)
                    // must be duplicated rather than moved out.
                    self.keep_original = true;
                }
                location
            }
            state => {
                span_bug!(self.promoted.span, "{:?} not promotable: {:?}", temp, state);
            }
        };
        if !self.keep_original {
            // Mark the temp as moved out so its assignments and drops get
            // stripped from the source body afterwards.
            self.temps[temp] = TempState::PromotedOut;
        }

        let num_stmts = self.source[loc.block].statements.len();
        // Mirror the temp's declaration in the promoted body.
        let new_temp = self.promoted.local_decls.push(LocalDecl::new(
            self.source.local_decls[temp].ty,
            self.source.local_decls[temp].source_info.span,
        ));

        debug!("promote({:?} @ {:?}/{:?}, {:?})", temp, loc, num_stmts, self.keep_original);

        // First, take the Rvalue or Call out of the source MIR,
        // or duplicate it, depending on keep_original.
        if loc.statement_index < num_stmts {
            // The temp is initialized by a statement: an `Assign`.
            let (mut rvalue, source_info) = {
                let statement = &mut self.source[loc.block].statements[loc.statement_index];
                let StatementKind::Assign(box (_, rhs)) = &mut statement.kind else {
                    span_bug!(statement.source_info.span, "{:?} is not an assignment", statement);
                };

                (
                    if self.keep_original {
                        rhs.clone()
                    } else {
                        // Move the rvalue out, leaving a unit assignment behind
                        // (the whole statement is stripped later anyway).
                        let unit = Rvalue::Use(Operand::Constant(Box::new(ConstOperand {
                            span: statement.source_info.span,
                            user_ty: None,
                            const_: Const::zero_sized(self.tcx.types.unit),
                        })));
                        mem::replace(rhs, unit)
                    },
                    statement.source_info,
                )
            };

            // Recurse into any temps used by the rvalue before emitting it.
            self.visit_rvalue(&mut rvalue, loc);
            self.assign(new_temp, rvalue, source_info.span);
        } else {
            // The temp is initialized by the block's terminator; only `Call`
            // (with a return target) is promotable here.
            let terminator = if self.keep_original {
                self.source[loc.block].terminator().clone()
            } else {
                let terminator = self.source[loc.block].terminator_mut();
                let target = match &terminator.kind {
                    TerminatorKind::Call { target: Some(target), .. } => *target,
                    kind => {
                        span_bug!(terminator.source_info.span, "{:?} not promotable", kind);
                    }
                };
                // Move the call out of the source, replacing it with a plain
                // jump to the call's original return target.
                Terminator {
                    source_info: terminator.source_info,
                    kind: mem::replace(&mut terminator.kind, TerminatorKind::Goto { target }),
                }
            };

            match terminator.kind {
                TerminatorKind::Call {
                    mut func, mut args, call_source: desugar, fn_span, ..
                } => {
                    // This promoted involves a function call, so it may fail to evaluate. Let's
                    // make sure it is added to `required_consts` so that failure cannot get lost.
                    self.add_to_required = true;

                    // Recurse into temps used by the callee and the arguments.
                    self.visit_operand(&mut func, loc);
                    for arg in &mut args {
                        self.visit_operand(&mut arg.node, loc);
                    }

                    // Rebuild the call in the promoted body: it writes into
                    // `new_temp` and continues in a fresh block.
                    let last = self.promoted.basic_blocks.last_index().unwrap();
                    let new_target = self.new_block();

                    *self.promoted[last].terminator_mut() = Terminator {
                        kind: TerminatorKind::Call {
                            func,
                            args,
                            unwind: UnwindAction::Continue,
                            destination: Place::from(new_temp),
                            target: Some(new_target),
                            call_source: desugar,
                            fn_span,
                        },
                        source_info: SourceInfo::outermost(terminator.source_info.span),
                        ..terminator
                    };
                }
                kind => {
                    span_bug!(terminator.source_info.span, "{:?} not promotable", kind);
                }
            };
        };

        self.keep_original = old_keep_original;
        new_temp
    }

    /// Promotes `candidate` (a `Rvalue::Ref` assignment in the source body) into
    /// the promoted body and returns that body. The source statement is rewritten
    /// in place to borrow a new temp initialized from the promoted constant.
    fn promote_candidate(
        mut self,
        candidate: Candidate,
        next_promoted_index: Promoted,
    ) -> Body<'tcx> {
        let def = self.source.source.def_id();
        let (mut rvalue, promoted_op) = {
            let promoted = &mut self.promoted;
            let tcx = self.tcx;
            // Builds the operand referring to the promoted body, and fixes up the
            // promoted body's span and return place to match.
            let mut promoted_operand = |ty, span| {
                promoted.span = span;
                promoted.local_decls[RETURN_PLACE] = LocalDecl::new(ty, span);
                let args =
                    tcx.erase_and_anonymize_regions(GenericArgs::identity_for_item(tcx, def));
                let uneval =
                    mir::UnevaluatedConst { def, args, promoted: Some(next_promoted_index) };

                ConstOperand { span, user_ty: None, const_: Const::Unevaluated(uneval, ty) }
            };

            let blocks = self.source.basic_blocks.as_mut();
            let local_decls = &mut self.source.local_decls;
            let loc = candidate.location;
            let statement = &mut blocks[loc.block].statements[loc.statement_index];
            // Candidates are always borrows; anything else is a caller bug.
            let StatementKind::Assign(box (_, Rvalue::Ref(region, borrow_kind, place))) =
                &mut statement.kind
            else {
                bug!()
            };

            // Use the underlying local for this (necessarily interior) borrow.
            debug_assert!(region.is_erased());
            let ty = local_decls[place.local].ty;
            let span = statement.source_info.span;

            let ref_ty =
                Ty::new_ref(tcx, tcx.lifetimes.re_erased, ty, borrow_kind.to_mutbl_lossy());

            // Rebase the borrowed place onto a deref of the promoted reference:
            // `base.proj...` becomes `(*promoted_ref).proj...`.
            let mut projection = vec![PlaceElem::Deref];
            projection.extend(place.projection);
            place.projection = tcx.mk_place_elems(&projection);

            // Create a temp to hold the promoted reference.
            // This is because `*r` requires `r` to be a local,
            // otherwise we would use the `promoted` directly.
            let mut promoted_ref = LocalDecl::new(ref_ty, span);
            promoted_ref.source_info = statement.source_info;
            let promoted_ref = local_decls.push(promoted_ref);
            // Keep `temps` aligned with `local_decls`; the new ref is itself unpromotable.
            assert_eq!(self.temps.push(TempState::Unpromotable), promoted_ref);

            // `promoted_ref = <promoted const>` gets spliced in right before the candidate.
            let promoted_operand = promoted_operand(ref_ty, span);
            let promoted_ref_statement = Statement::new(
                statement.source_info,
                StatementKind::Assign(Box::new((
                    Place::from(promoted_ref),
                    Rvalue::Use(Operand::Constant(Box::new(promoted_operand))),
                ))),
            );
            self.extra_statements.push((loc, promoted_ref_statement));

            // Redirect the candidate's borrow through `promoted_ref`, taking the
            // original base local out to serve as the promoted body's rvalue.
            (
                Rvalue::Ref(
                    tcx.lifetimes.re_erased,
                    *borrow_kind,
                    Place {
                        local: mem::replace(&mut place.local, promoted_ref),
                        projection: List::empty(),
                    },
                ),
                promoted_operand,
            )
        };

        assert_eq!(self.new_block(), START_BLOCK);
        // Pull all temps the rvalue uses into the promoted body; our visitor
        // ignores the `Location`, so a dummy statement index is fine here.
        self.visit_rvalue(
            &mut rvalue,
            Location { block: START_BLOCK, statement_index: usize::MAX },
        );

        let span = self.promoted.span;
        self.assign(RETURN_PLACE, rvalue, span);

        // Now that we did promotion, we know whether we'll want to add this to `required_consts` of
        // the surrounding MIR body.
        if self.add_to_required {
            self.source.required_consts.as_mut().unwrap().push(promoted_op);
        }

        self.promoted.set_required_consts(self.required_consts);

        self.promoted
    }
}
963
964/// Replaces all temporaries with their promoted counterparts.
965impl<'a, 'tcx> MutVisitor<'tcx> for Promoter<'a, 'tcx> {
966    fn tcx(&self) -> TyCtxt<'tcx> {
967        self.tcx
968    }
969
970    fn visit_local(&mut self, local: &mut Local, _: PlaceContext, _: Location) {
971        if self.is_temp_kind(*local) {
972            *local = self.promote_temp(*local);
973        }
974    }
975
976    fn visit_const_operand(&mut self, constant: &mut ConstOperand<'tcx>, _location: Location) {
977        if constant.const_.is_required_const() {
978            self.required_consts.push(*constant);
979        }
980
981        // Skipping `super_constant` as the visitor is otherwise only looking for locals.
982    }
983}
984
/// Promotes every validated candidate out of `body` into its own constant MIR
/// body, rewriting `body` in place, and returns the promoted bodies.
fn promote_candidates<'tcx>(
    body: &mut Body<'tcx>,
    tcx: TyCtxt<'tcx>,
    mut temps: IndexVec<Local, TempState>,
    candidates: Vec<Candidate>,
) -> IndexVec<Promoted, Body<'tcx>> {
    // Visit candidates in reverse, in case they're nested.
    debug!(promote_candidates = ?candidates);

    // eagerly fail fast
    if candidates.is_empty() {
        return IndexVec::new();
    }

    let mut promotions = IndexVec::new();

    let mut extra_statements = vec![];
    for candidate in candidates.into_iter().rev() {
        let Location { block, statement_index } = candidate.location;
        if let StatementKind::Assign(box (place, _)) = &body[block].statements[statement_index].kind
            && let Some(local) = place.as_local()
        {
            // A nested candidate may already have been promoted as part of an
            // enclosing one (we iterate in reverse); skip it then.
            if temps[local] == TempState::PromotedOut {
                // Already promoted.
                continue;
            }
        }

        // Declare return place local so that `mir::Body::new` doesn't complain.
        let initial_locals = iter::once(LocalDecl::new(tcx.types.never, body.span)).collect();

        // The promoted body gets a single root scope, copied from the candidate's scope.
        let mut scope = body.source_scopes[body.source_info(candidate.location).scope].clone();
        scope.parent_scope = None;

        let mut promoted = Body::new(
            body.source, // `promoted` gets filled in below
            IndexVec::new(),
            IndexVec::from_elem_n(scope, 1),
            initial_locals,
            IndexVec::new(),
            0,
            vec![],
            body.span,
            None,
            body.tainted_by_errors,
        );
        promoted.phase = MirPhase::Analysis(AnalysisPhase::Initial);

        let promoter = Promoter {
            promoted,
            tcx,
            source: body,
            temps: &mut temps,
            extra_statements: &mut extra_statements,
            keep_original: false,
            add_to_required: false,
            required_consts: Vec::new(),
        };

        // `next_promoted_index` is the slot this body will occupy in `promotions`.
        let mut promoted = promoter.promote_candidate(candidate, promotions.next_index());
        promoted.source.promoted = Some(promotions.next_index());
        promotions.push(promoted);
    }

    // Insert each of `extra_statements` before its indicated location, which
    // has to be done in reverse location order, to not invalidate the rest.
    extra_statements.sort_by_key(|&(loc, _)| cmp::Reverse(loc));
    for (loc, statement) in extra_statements {
        body[loc.block].statements.insert(loc.statement_index, statement);
    }

    // Eliminate assignments to, and drops of promoted temps.
    let promoted = |index: Local| temps[index] == TempState::PromotedOut;
    for block in body.basic_blocks_mut() {
        block.retain_statements(|statement| match &statement.kind {
            StatementKind::Assign(box (place, _)) => {
                // Drop assignments whose destination was moved into a promoted body.
                if let Some(index) = place.as_local() {
                    !promoted(index)
                } else {
                    true
                }
            }
            // Storage markers for promoted-out temps are meaningless now.
            StatementKind::StorageLive(index) | StatementKind::StorageDead(index) => {
                !promoted(*index)
            }
            _ => true,
        });
        // A `Drop` of a promoted temp becomes a plain jump to its target.
        let terminator = block.terminator_mut();
        if let TerminatorKind::Drop { place, target, .. } = &terminator.kind
            && let Some(index) = place.as_local()
        {
            if promoted(index) {
                terminator.kind = TerminatorKind::Goto { target: *target };
            }
        }
    }

    promotions
}