// compiler/rustc_mir_transform/src/promote_consts.rs

1//! A pass that promotes borrows of constant rvalues.
2//!
3//! The rvalues considered constant are trees of temps, each with exactly one
4//! initialization, and holding a constant value with no interior mutability.
5//! They are placed into a new MIR constant body in `promoted` and the borrow
6//! rvalue is replaced with a `Literal::Promoted` using the index into
7//! `promoted` of that constant MIR.
8//!
9//! This pass assumes that every use is dominated by an initialization and can
10//! otherwise silence errors, if move analysis runs after promotion on broken
11//! MIR.
12
13use std::assert_matches::assert_matches;
14use std::cell::Cell;
15use std::{cmp, iter, mem};
16
17use either::{Left, Right};
18use rustc_const_eval::check_consts::{ConstCx, qualifs};
19use rustc_data_structures::fx::FxHashSet;
20use rustc_hir as hir;
21use rustc_index::{IndexSlice, IndexVec};
22use rustc_middle::mir::visit::{MutVisitor, MutatingUseContext, PlaceContext, Visitor};
23use rustc_middle::mir::*;
24use rustc_middle::ty::{self, GenericArgs, List, Ty, TyCtxt, TypeVisitableExt};
25use rustc_middle::{bug, mir, span_bug};
26use rustc_span::Span;
27use rustc_span::source_map::Spanned;
28use tracing::{debug, instrument};
29
/// A `MirPass` for promotion.
///
/// Promotion is the extraction of promotable temps into separate MIR bodies so they can have
/// `'static` lifetime.
///
/// After this pass is run, `promoted_fragments` will hold the MIR body corresponding to each
/// newly created `Constant`.
#[derive(Default)]
pub(super) struct PromoteTemps<'tcx> {
    /// The promoted MIR bodies produced by `run_pass`, indexed by `Promoted`.
    // Must use `Cell` because `run_pass` takes `&self`, not `&mut self`.
    pub promoted_fragments: Cell<IndexVec<Promoted, Body<'tcx>>>,
}
42
43impl<'tcx> crate::MirPass<'tcx> for PromoteTemps<'tcx> {
44    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
45        // There's not really any point in promoting errorful MIR.
46        //
47        // This does not include MIR that failed const-checking, which we still try to promote.
48        if let Err(_) = body.return_ty().error_reported() {
49            debug!("PromoteTemps: MIR had errors");
50            return;
51        }
52        if body.source.promoted.is_some() {
53            return;
54        }
55
56        let ccx = ConstCx::new(tcx, body);
57        let (mut temps, all_candidates) = collect_temps_and_candidates(&ccx);
58
59        let promotable_candidates = validate_candidates(&ccx, &mut temps, all_candidates);
60
61        let promoted = promote_candidates(body, tcx, temps, promotable_candidates);
62        self.promoted_fragments.set(promoted);
63    }
64
65    fn is_required(&self) -> bool {
66        true
67    }
68}
69
/// State of a temporary during collection and promotion.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
enum TempState {
    /// No references to this temp.
    Undefined,
    /// One direct assignment and any number of direct uses.
    /// A borrow of this temp is promotable if the assigned
    /// value is qualified as constant.
    /// `valid` caches the result of `Validator::validate_local`:
    /// `Err(())` means "not validated yet", `Ok(())` means "known promotable".
    Defined { location: Location, uses: usize, valid: Result<(), ()> },
    /// Any other combination of assignments/uses.
    Unpromotable,
    /// This temp was part of an rvalue which got extracted
    /// during promotion and needs cleanup.
    PromotedOut,
}
85
/// A "root candidate" for promotion, which will become the
/// returned value in a promoted MIR, unless it's a subset
/// of a larger candidate.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
struct Candidate {
    /// Location of the `Rvalue::Ref` assignment being considered for promotion.
    location: Location,
}
93
/// MIR visitor that classifies every local (`TempState`) and records all
/// borrow rvalues as promotion candidates.
struct Collector<'a, 'tcx> {
    ccx: &'a ConstCx<'a, 'tcx>,
    /// Per-local classification, updated as definitions/uses are encountered.
    temps: IndexVec<Local, TempState>,
    /// Every `Rvalue::Ref` location seen so far.
    candidates: Vec<Candidate>,
}
99
100impl<'tcx> Visitor<'tcx> for Collector<'_, 'tcx> {
101    #[instrument(level = "debug", skip(self))]
102    fn visit_local(&mut self, index: Local, context: PlaceContext, location: Location) {
103        // We're only interested in temporaries and the return place
104        match self.ccx.body.local_kind(index) {
105            LocalKind::Arg => return,
106            LocalKind::Temp if self.ccx.body.local_decls[index].is_user_variable() => return,
107            LocalKind::ReturnPointer | LocalKind::Temp => {}
108        }
109
110        // Ignore drops, if the temp gets promoted,
111        // then it's constant and thus drop is noop.
112        // Non-uses are also irrelevant.
113        if context.is_drop() || !context.is_use() {
114            debug!(is_drop = context.is_drop(), is_use = context.is_use());
115            return;
116        }
117
118        let temp = &mut self.temps[index];
119        debug!(?temp);
120        *temp = match *temp {
121            TempState::Undefined => match context {
122                PlaceContext::MutatingUse(MutatingUseContext::Store | MutatingUseContext::Call) => {
123                    TempState::Defined { location, uses: 0, valid: Err(()) }
124                }
125                _ => TempState::Unpromotable,
126            },
127            TempState::Defined { ref mut uses, .. } => {
128                // We always allow borrows, even mutable ones, as we need
129                // to promote mutable borrows of some ZSTs e.g., `&mut []`.
130                let allowed_use = match context {
131                    PlaceContext::MutatingUse(MutatingUseContext::Borrow)
132                    | PlaceContext::NonMutatingUse(_) => true,
133                    PlaceContext::MutatingUse(_) | PlaceContext::NonUse(_) => false,
134                };
135                debug!(?allowed_use);
136                if allowed_use {
137                    *uses += 1;
138                    return;
139                }
140                TempState::Unpromotable
141            }
142            TempState::Unpromotable | TempState::PromotedOut => TempState::Unpromotable,
143        };
144        debug!(?temp);
145    }
146
147    fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
148        self.super_rvalue(rvalue, location);
149
150        if let Rvalue::Ref(..) = *rvalue {
151            self.candidates.push(Candidate { location });
152        }
153    }
154}
155
156fn collect_temps_and_candidates<'tcx>(
157    ccx: &ConstCx<'_, 'tcx>,
158) -> (IndexVec<Local, TempState>, Vec<Candidate>) {
159    let mut collector = Collector {
160        temps: IndexVec::from_elem(TempState::Undefined, &ccx.body.local_decls),
161        candidates: vec![],
162        ccx,
163    };
164    for (bb, data) in traversal::reverse_postorder(ccx.body) {
165        collector.visit_basic_block_data(bb, data);
166    }
167    (collector.temps, collector.candidates)
168}
169
/// Checks whether locals that appear in a promotion context (`Candidate`) are actually promotable.
///
/// This wraps an `Item`, and has access to all fields of that `Item` via `Deref` coercion.
struct Validator<'a, 'tcx> {
    ccx: &'a ConstCx<'a, 'tcx>,
    /// Temp classification produced by the `Collector`; validation results are
    /// cached back into it (see `TempState::Defined::valid`).
    temps: &'a mut IndexSlice<Local, TempState>,
    /// For backwards compatibility, we are promoting function calls in `const`/`static`
    /// initializers. But we want to avoid evaluating code that might panic and that otherwise would
    /// not have been evaluated, so we only promote such calls in basic blocks that are guaranteed
    /// to execute. In other words, we only promote such calls in basic blocks that are definitely
    /// not dead code. Here we cache the result of computing that set of basic blocks.
    promotion_safe_blocks: Option<FxHashSet<BasicBlock>>,
}
183
// Lets `Validator` methods access `ConstCx` fields (`self.body`, `self.tcx`,
// `self.const_kind`, ...) directly through deref coercion.
impl<'a, 'tcx> std::ops::Deref for Validator<'a, 'tcx> {
    type Target = ConstCx<'a, 'tcx>;

    fn deref(&self) -> &Self::Target {
        self.ccx
    }
}
191
192struct Unpromotable;
193
impl<'tcx> Validator<'_, 'tcx> {
    /// Checks whether the borrow at `candidate.location` may be promoted.
    ///
    /// The candidate must be an assignment statement whose RHS is an
    /// `Rvalue::Ref` (that is what the `Collector` records); anything else is
    /// a bug.
    fn validate_candidate(&mut self, candidate: Candidate) -> Result<(), Unpromotable> {
        let Left(statement) = self.body.stmt_at(candidate.location) else { bug!() };
        let Some((_, Rvalue::Ref(_, kind, place))) = statement.kind.as_assign() else { bug!() };

        // We can only promote interior borrows of promotable temps (non-temps
        // don't get promoted anyway).
        self.validate_local(place.local)?;

        // The reference operation itself must be promotable.
        // (Needs to come after `validate_local` to avoid ICEs.)
        self.validate_ref(*kind, place)?;

        // We do not check all the projections (they do not get promoted anyway),
        // but we do stay away from promoting anything involving a dereference.
        if place.projection.contains(&ProjectionElem::Deref) {
            return Err(Unpromotable);
        }

        Ok(())
    }

    /// Computes the qualif `Q` (e.g. `NeedsDrop`, `HasMutInterior`) for `local`
    /// by inspecting its single initialization; recurses through operand temps.
    /// Locals that are not in the `Defined` state report `false`.
    // FIXME(eddyb) maybe cache this?
    fn qualif_local<Q: qualifs::Qualif>(&mut self, local: Local) -> bool {
        let TempState::Defined { location: loc, .. } = self.temps[local] else {
            return false;
        };

        let stmt_or_term = self.body.stmt_at(loc);
        match stmt_or_term {
            Left(statement) => {
                let Some((_, rhs)) = statement.kind.as_assign() else {
                    span_bug!(statement.source_info.span, "{:?} is not an assignment", statement)
                };
                qualifs::in_rvalue::<Q, _>(self.ccx, &mut |l| self.qualif_local::<Q>(l), rhs)
            }
            Right(terminator) => {
                // The only terminator that can define a temp is a call; for
                // those we conservatively fall back to the qualif of the
                // return type.
                assert_matches!(terminator.kind, TerminatorKind::Call { .. });
                let return_ty = self.body.local_decls[local].ty;
                Q::in_any_value_of_ty(self.ccx, return_ty)
            }
        }
    }

    /// Checks whether `local`'s single initialization is promotable, caching
    /// the verdict in `self.temps` (`valid: Ok(())` on success, the local is
    /// downgraded to `Unpromotable` on failure).
    fn validate_local(&mut self, local: Local) -> Result<(), Unpromotable> {
        let TempState::Defined { location: loc, uses, valid } = self.temps[local] else {
            return Err(Unpromotable);
        };

        // We cannot promote things that need dropping, since the promoted value would not get
        // dropped.
        if self.qualif_local::<qualifs::NeedsDrop>(local) {
            return Err(Unpromotable);
        }

        // Already validated earlier? Then we are done.
        if valid.is_ok() {
            return Ok(());
        }

        let ok = {
            let stmt_or_term = self.body.stmt_at(loc);
            match stmt_or_term {
                Left(statement) => {
                    let Some((_, rhs)) = statement.kind.as_assign() else {
                        span_bug!(
                            statement.source_info.span,
                            "{:?} is not an assignment",
                            statement
                        )
                    };
                    self.validate_rvalue(rhs)
                }
                Right(terminator) => match &terminator.kind {
                    TerminatorKind::Call { func, args, .. } => {
                        self.validate_call(func, args, loc.block)
                    }
                    TerminatorKind::Yield { .. } => Err(Unpromotable),
                    kind => {
                        span_bug!(terminator.source_info.span, "{:?} not promotable", kind);
                    }
                },
            }
        };

        // Cache the result so repeated checks of the same local are cheap.
        self.temps[local] = match ok {
            Ok(()) => TempState::Defined { location: loc, uses, valid: Ok(()) },
            Err(_) => TempState::Unpromotable,
        };

        ok
    }

    /// Checks a place (base local plus projections) for promotability, peeling
    /// one projection per recursion step.
    fn validate_place(&mut self, place: PlaceRef<'tcx>) -> Result<(), Unpromotable> {
        let Some((place_base, elem)) = place.last_projection() else {
            return self.validate_local(place.local);
        };

        // Validate topmost projection, then recurse.
        match elem {
            // Recurse directly.
            ProjectionElem::ConstantIndex { .. }
            | ProjectionElem::Subslice { .. }
            | ProjectionElem::UnwrapUnsafeBinder(_) => {}

            // Never recurse.
            ProjectionElem::OpaqueCast(..) | ProjectionElem::Downcast(..) => {
                return Err(Unpromotable);
            }

            ProjectionElem::Deref => {
                // When a static is used by-value, that gets desugared to `*STATIC_ADDR`,
                // and we need to be able to promote this. So check if this deref matches
                // that specific pattern.

                // We need to make sure this is a `Deref` of a local with no further projections.
                // Discussion can be found at
                // https://github.com/rust-lang/rust/pull/74945#discussion_r463063247
                if let Some(local) = place_base.as_local()
                    && let TempState::Defined { location, .. } = self.temps[local]
                    && let Left(def_stmt) = self.body.stmt_at(location)
                    && let Some((_, Rvalue::Use(Operand::Constant(c)))) = def_stmt.kind.as_assign()
                    && let Some(did) = c.check_static_ptr(self.tcx)
                    // Evaluating a promoted may not read statics except if it got
                    // promoted from a static (this is a CTFE check). So we
                    // can only promote static accesses inside statics.
                    && let Some(hir::ConstContext::Static(..)) = self.const_kind
                    && !self.tcx.is_thread_local_static(did)
                {
                    // Recurse.
                } else {
                    return Err(Unpromotable);
                }
            }
            ProjectionElem::Index(local) => {
                // Only accept if we can predict the index and are indexing an array.
                if let TempState::Defined { location: loc, .. } = self.temps[local]
                    && let Left(statement) =  self.body.stmt_at(loc)
                    && let Some((_, Rvalue::Use(Operand::Constant(c)))) = statement.kind.as_assign()
                    && let Some(idx) = c.const_.try_eval_target_usize(self.tcx, self.typing_env)
                    // Determine the type of the thing we are indexing.
                    && let ty::Array(_, len) = place_base.ty(self.body, self.tcx).ty.kind()
                    // It's an array; determine its length.
                    && let Some(len) = len.try_to_target_usize(self.tcx)
                    // If the index is in-bounds, go ahead.
                    && idx < len
                {
                    self.validate_local(local)?;
                    // Recurse.
                } else {
                    return Err(Unpromotable);
                }
            }

            ProjectionElem::Field(..) => {
                let base_ty = place_base.ty(self.body, self.tcx).ty;
                if base_ty.is_union() {
                    // No promotion of union field accesses.
                    return Err(Unpromotable);
                }
            }
        }

        self.validate_place(place_base)
    }

    /// Checks an operand: copies/moves delegate to `validate_place`; constants
    /// are fine except for certain references to statics.
    fn validate_operand(&mut self, operand: &Operand<'tcx>) -> Result<(), Unpromotable> {
        match operand {
            Operand::Copy(place) | Operand::Move(place) => self.validate_place(place.as_ref()),

            // The qualifs for a constant (e.g. `HasMutInterior`) are checked in
            // `validate_rvalue` upon access.
            Operand::Constant(c) => {
                if let Some(def_id) = c.check_static_ptr(self.tcx) {
                    // Only allow statics (not consts) to refer to other statics.
                    // FIXME(eddyb) does this matter at all for promotion?
                    // FIXME(RalfJung) it makes little sense to not promote this in `fn`/`const fn`,
                    // and in `const` this cannot occur anyway. The only concern is that we might
                    // promote even `let x = &STATIC` which would be useless, but this applies to
                    // promotion inside statics as well.
                    let is_static = matches!(self.const_kind, Some(hir::ConstContext::Static(_)));
                    if !is_static {
                        return Err(Unpromotable);
                    }

                    let is_thread_local = self.tcx.is_thread_local_static(def_id);
                    if is_thread_local {
                        return Err(Unpromotable);
                    }
                }

                Ok(())
            }
        }
    }

    /// Checks whether a borrow of `place` with the given `kind` may be
    /// promoted (shared borrows must not see interior mutability; mutable
    /// borrows are only allowed for zero-length arrays).
    fn validate_ref(&mut self, kind: BorrowKind, place: &Place<'tcx>) -> Result<(), Unpromotable> {
        match kind {
            // Reject these borrow types just to be safe.
            // FIXME(RalfJung): could we allow them? Should we? No point in it until we have a
            // usecase.
            BorrowKind::Fake(_) | BorrowKind::Mut { kind: MutBorrowKind::ClosureCapture } => {
                return Err(Unpromotable);
            }

            BorrowKind::Shared => {
                let has_mut_interior = self.qualif_local::<qualifs::HasMutInterior>(place.local);
                if has_mut_interior {
                    return Err(Unpromotable);
                }
            }

            // FIXME: consider changing this to only promote &mut [] for default borrows,
            // also forbidding two phase borrows
            BorrowKind::Mut { kind: MutBorrowKind::Default | MutBorrowKind::TwoPhaseBorrow } => {
                let ty = place.ty(self.body, self.tcx).ty;

                // In theory, any zero-sized value could be borrowed
                // mutably without consequences. However, only &mut []
                // is allowed right now.
                if let ty::Array(_, len) = ty.kind() {
                    match len.try_to_target_usize(self.tcx) {
                        Some(0) => {}
                        _ => return Err(Unpromotable),
                    }
                } else {
                    return Err(Unpromotable);
                }
            }
        }

        Ok(())
    }

    /// Checks an rvalue for promotability. The interesting rejections are
    /// operations that could fail at CTFE time (pointer comparisons/casts,
    /// division by a non-constant or zero RHS, `int::MIN / -1`) and raw
    /// pointers/thread-locals.
    fn validate_rvalue(&mut self, rvalue: &Rvalue<'tcx>) -> Result<(), Unpromotable> {
        match rvalue {
            Rvalue::Use(operand)
            | Rvalue::Repeat(operand, _)
            | Rvalue::WrapUnsafeBinder(operand, _) => {
                self.validate_operand(operand)?;
            }
            Rvalue::CopyForDeref(place) => {
                // `CopyForDeref` behaves like a plain copy of the place.
                let op = &Operand::Copy(*place);
                self.validate_operand(op)?
            }

            Rvalue::Discriminant(place) => self.validate_place(place.as_ref())?,

            Rvalue::ThreadLocalRef(_) => return Err(Unpromotable),

            // ptr-to-int casts are not possible in consts and thus not promotable
            Rvalue::Cast(CastKind::PointerExposeProvenance, _, _) => return Err(Unpromotable),

            // all other casts including int-to-ptr casts are fine, they just use the integer value
            // at pointer type.
            Rvalue::Cast(_, operand, _) => {
                self.validate_operand(operand)?;
            }

            // These nullary ops cannot fail and take no operands.
            Rvalue::NullaryOp(op, _) => match op {
                NullOp::SizeOf => {}
                NullOp::AlignOf => {}
                NullOp::OffsetOf(_) => {}
                NullOp::UbChecks => {}
                NullOp::ContractChecks => {}
            },

            Rvalue::ShallowInitBox(_, _) => return Err(Unpromotable),

            Rvalue::UnaryOp(op, operand) => {
                match op {
                    // These operations can never fail.
                    UnOp::Neg | UnOp::Not | UnOp::PtrMetadata => {}
                }

                self.validate_operand(operand)?;
            }

            Rvalue::BinaryOp(op, box (lhs, rhs)) => {
                let op = *op;
                let lhs_ty = lhs.ty(self.body, self.tcx);

                if let ty::RawPtr(_, _) | ty::FnPtr(..) = lhs_ty.kind() {
                    // Raw and fn pointer operations are not allowed inside consts and thus not
                    // promotable.
                    assert_matches!(
                        op,
                        BinOp::Eq
                            | BinOp::Ne
                            | BinOp::Le
                            | BinOp::Lt
                            | BinOp::Ge
                            | BinOp::Gt
                            | BinOp::Offset
                    );
                    return Err(Unpromotable);
                }

                match op {
                    BinOp::Div | BinOp::Rem => {
                        if lhs_ty.is_integral() {
                            let sz = lhs_ty.primitive_size(self.tcx);
                            // Integer division: the RHS must be a non-zero const.
                            let rhs_val = match rhs {
                                Operand::Constant(c) => {
                                    c.const_.try_eval_scalar_int(self.tcx, self.typing_env)
                                }
                                _ => None,
                            };
                            match rhs_val.map(|x| x.to_uint(sz)) {
                                // for the zero test, int vs uint does not matter
                                Some(x) if x != 0 => {}        // okay
                                _ => return Err(Unpromotable), // value not known or 0 -- not okay
                            }
                            // Furthermore, for signed division, we also have to exclude `int::MIN /
                            // -1`.
                            if lhs_ty.is_signed() {
                                match rhs_val.map(|x| x.to_int(sz)) {
                                    Some(-1) | None => {
                                        // The RHS is -1 or unknown, so we have to be careful.
                                        // But is the LHS int::MIN?
                                        let lhs_val = match lhs {
                                            Operand::Constant(c) => c
                                                .const_
                                                .try_eval_scalar_int(self.tcx, self.typing_env),
                                            _ => None,
                                        };
                                        let lhs_min = sz.signed_int_min();
                                        match lhs_val.map(|x| x.to_int(sz)) {
                                            // okay
                                            Some(x) if x != lhs_min => {}

                                            // value not known or int::MIN -- not okay
                                            _ => return Err(Unpromotable),
                                        }
                                    }
                                    _ => {}
                                }
                            }
                        }
                    }
                    // The remaining operations can never fail.
                    BinOp::Eq
                    | BinOp::Ne
                    | BinOp::Le
                    | BinOp::Lt
                    | BinOp::Ge
                    | BinOp::Gt
                    | BinOp::Cmp
                    | BinOp::Offset
                    | BinOp::Add
                    | BinOp::AddUnchecked
                    | BinOp::AddWithOverflow
                    | BinOp::Sub
                    | BinOp::SubUnchecked
                    | BinOp::SubWithOverflow
                    | BinOp::Mul
                    | BinOp::MulUnchecked
                    | BinOp::MulWithOverflow
                    | BinOp::BitXor
                    | BinOp::BitAnd
                    | BinOp::BitOr
                    | BinOp::Shl
                    | BinOp::ShlUnchecked
                    | BinOp::Shr
                    | BinOp::ShrUnchecked => {}
                }

                self.validate_operand(lhs)?;
                self.validate_operand(rhs)?;
            }

            Rvalue::RawPtr(_, place) => {
                // We accept `&raw *`, i.e., raw reborrows -- creating a raw pointer is
                // no problem, only using it is.
                if let Some((place_base, ProjectionElem::Deref)) = place.as_ref().last_projection()
                {
                    let base_ty = place_base.ty(self.body, self.tcx).ty;
                    if let ty::Ref(..) = base_ty.kind() {
                        return self.validate_place(place_base);
                    }
                }
                return Err(Unpromotable);
            }

            Rvalue::Ref(_, kind, place) => {
                // Special-case reborrows to be more like a copy of the reference.
                let mut place_simplified = place.as_ref();
                if let Some((place_base, ProjectionElem::Deref)) =
                    place_simplified.last_projection()
                {
                    let base_ty = place_base.ty(self.body, self.tcx).ty;
                    if let ty::Ref(..) = base_ty.kind() {
                        place_simplified = place_base;
                    }
                }

                self.validate_place(place_simplified)?;

                // Check that the reference is fine (using the original place!).
                // (Needs to come after `validate_place` to avoid ICEs.)
                self.validate_ref(*kind, place)?;
            }

            Rvalue::Aggregate(_, operands) => {
                for o in operands {
                    self.validate_operand(o)?;
                }
            }
        }

        Ok(())
    }

    /// Computes the sets of blocks of this MIR that are definitely going to be executed
    /// if the function returns successfully. That makes it safe to promote calls in them
    /// that might fail.
    fn promotion_safe_blocks(body: &mir::Body<'tcx>) -> FxHashSet<BasicBlock> {
        let mut safe_blocks = FxHashSet::default();
        let mut safe_block = START_BLOCK;
        loop {
            safe_blocks.insert(safe_block);
            // Let's see if we can find another safe block.
            safe_block = match body.basic_blocks[safe_block].terminator().kind {
                TerminatorKind::Goto { target } => target,
                TerminatorKind::Call { target: Some(target), .. }
                | TerminatorKind::Drop { target, .. } => {
                    // This calls a function or the destructor. `target` does not get executed if
                    // the callee loops or panics. But in both cases the const already fails to
                    // evaluate, so we are fine considering `target` a safe block for promotion.
                    target
                }
                TerminatorKind::Assert { target, .. } => {
                    // Similar to above, we only consider successful execution.
                    target
                }
                _ => {
                    // No next safe block.
                    break;
                }
            };
        }
        safe_blocks
    }

    /// Returns whether the block is "safe" for promotion, which means it cannot be dead code.
    /// We use this to avoid promoting operations that can fail in dead code.
    fn is_promotion_safe_block(&mut self, block: BasicBlock) -> bool {
        let body = self.body;
        let safe_blocks =
            self.promotion_safe_blocks.get_or_insert_with(|| Self::promotion_safe_blocks(body));
        safe_blocks.contains(&block)
    }

    /// Checks whether a call terminator (defining a temp in `block`) is
    /// promotable: operands must validate, and the callee must either be
    /// `#[rustc_promotable]` or — for backward compatibility in non-inline
    /// `const`/`static` initializers — a `const fn` in a block that is
    /// guaranteed to execute.
    fn validate_call(
        &mut self,
        callee: &Operand<'tcx>,
        args: &[Spanned<Operand<'tcx>>],
        block: BasicBlock,
    ) -> Result<(), Unpromotable> {
        // Validate the operands. If they fail, there's no question -- we cannot promote.
        self.validate_operand(callee)?;
        for arg in args {
            self.validate_operand(&arg.node)?;
        }

        // Functions marked `#[rustc_promotable]` are explicitly allowed to be promoted, so we can
        // accept them at this point.
        let fn_ty = callee.ty(self.body, self.tcx);
        if let ty::FnDef(def_id, _) = *fn_ty.kind() {
            if self.tcx.is_promotable_const_fn(def_id) {
                return Ok(());
            }
        }

        // Ideally, we'd stop here and reject the rest.
        // But for backward compatibility, we have to accept some promotion in const/static
        // initializers. Inline consts are explicitly excluded, they are more recent so we have no
        // backwards compatibility reason to allow more promotion inside of them.
        let promote_all_fn = matches!(
            self.const_kind,
            Some(hir::ConstContext::Static(_) | hir::ConstContext::Const { inline: false })
        );
        if !promote_all_fn {
            return Err(Unpromotable);
        }
        // Make sure the callee is a `const fn`.
        let is_const_fn = match *fn_ty.kind() {
            ty::FnDef(def_id, _) => self.tcx.is_const_fn(def_id),
            _ => false,
        };
        if !is_const_fn {
            return Err(Unpromotable);
        }
        // The problem is, this may promote calls to functions that panic.
        // We don't want to introduce compilation errors if there's a panic in a call in dead code.
        // So we ensure that this is not dead code.
        if !self.is_promotion_safe_block(block) {
            return Err(Unpromotable);
        }
        // This passed all checks, so let's accept.
        Ok(())
    }
}
697
698fn validate_candidates(
699    ccx: &ConstCx<'_, '_>,
700    temps: &mut IndexSlice<Local, TempState>,
701    mut candidates: Vec<Candidate>,
702) -> Vec<Candidate> {
703    let mut validator = Validator { ccx, temps, promotion_safe_blocks: None };
704
705    candidates.retain(|&candidate| validator.validate_candidate(candidate).is_ok());
706    candidates
707}
708
/// Moves a validated candidate's defining statements out of the source MIR
/// and into a fresh promoted `Body`.
struct Promoter<'a, 'tcx> {
    tcx: TyCtxt<'tcx>,
    /// The MIR body the candidate is being extracted from.
    source: &'a mut Body<'tcx>,
    /// The promoted body under construction.
    promoted: Body<'tcx>,
    /// Temp classification shared with collection/validation; temps moved out
    /// are marked `PromotedOut` here.
    temps: &'a mut IndexVec<Local, TempState>,
    /// Statements to be spliced back into `source` after promotion.
    extra_statements: &'a mut Vec<(Location, Statement<'tcx>)>,

    /// Used to assemble the required_consts list while building the promoted.
    required_consts: Vec<ConstOperand<'tcx>>,

    /// If true, all nested temps are also kept in the
    /// source MIR, not moved to the promoted MIR.
    keep_original: bool,

    /// If true, add the new const (the promoted) to the required_consts of the parent MIR.
    /// This is initially false and then set by the visitor when it encounters a `Call` terminator.
    add_to_required: bool,
}
727
impl<'a, 'tcx> Promoter<'a, 'tcx> {
    /// Appends a new, empty basic block (terminated by `Return`) to the
    /// promoted body and returns its index. `assign` subsequently pushes
    /// statements onto the last block created here.
    fn new_block(&mut self) -> BasicBlock {
        let span = self.promoted.span;
        self.promoted.basic_blocks_mut().push(BasicBlockData::new(
            Some(Terminator {
                source_info: SourceInfo::outermost(span),
                kind: TerminatorKind::Return,
            }),
            false,
        ))
    }

    /// Pushes the statement `dest = rvalue` onto the promoted body's last block.
    fn assign(&mut self, dest: Local, rvalue: Rvalue<'tcx>, span: Span) {
        let last = self.promoted.basic_blocks.last_index().unwrap();
        let data = &mut self.promoted[last];
        data.statements.push(Statement::new(
            SourceInfo::outermost(span),
            StatementKind::Assign(Box::new((Place::from(dest), rvalue))),
        ));
    }

    /// Returns whether `local` is a temporary of the source body; only temps
    /// are pulled into the promoted body (see `visit_local`).
    fn is_temp_kind(&self, local: Local) -> bool {
        self.source.local_kind(local) == LocalKind::Temp
    }

    /// Copies the initialization of this temp to the
    /// promoted MIR, recursing through temps.
    fn promote_temp(&mut self, temp: Local) -> Local {
        let old_keep_original = self.keep_original;
        let loc = match self.temps[temp] {
            TempState::Defined { location, uses, .. } if uses > 0 => {
                if uses > 1 {
                    // A temp with multiple uses cannot be moved out of the
                    // source body: the remaining uses still need it, so copy
                    // its initializer instead of detaching it.
                    self.keep_original = true;
                }
                location
            }
            state => {
                span_bug!(self.promoted.span, "{:?} not promotable: {:?}", temp, state);
            }
        };
        if !self.keep_original {
            // The initializer is moved out below; mark the temp so
            // `promote_candidates` can later eliminate its leftover
            // assignments, storage markers, and drops.
            self.temps[temp] = TempState::PromotedOut;
        }

        let num_stmts = self.source[loc.block].statements.len();
        let new_temp = self.promoted.local_decls.push(LocalDecl::new(
            self.source.local_decls[temp].ty,
            self.source.local_decls[temp].source_info.span,
        ));

        debug!("promote({:?} @ {:?}/{:?}, {:?})", temp, loc, num_stmts, self.keep_original);

        // First, take the Rvalue or Call out of the source MIR,
        // or duplicate it, depending on keep_original.
        if loc.statement_index < num_stmts {
            // The temp was initialized by an `Assign` statement.
            let (mut rvalue, source_info) = {
                let statement = &mut self.source[loc.block].statements[loc.statement_index];
                let StatementKind::Assign(box (_, rhs)) = &mut statement.kind else {
                    span_bug!(statement.source_info.span, "{:?} is not an assignment", statement);
                };

                (
                    if self.keep_original {
                        rhs.clone()
                    } else {
                        // Replace the moved-out rvalue with a zero-sized unit
                        // constant so the source statement remains a valid
                        // `Assign`.
                        let unit = Rvalue::Use(Operand::Constant(Box::new(ConstOperand {
                            span: statement.source_info.span,
                            user_ty: None,
                            const_: Const::zero_sized(self.tcx.types.unit),
                        })));
                        mem::replace(rhs, unit)
                    },
                    statement.source_info,
                )
            };

            // Recurse: any source temps the rvalue mentions get promoted too
            // (via `visit_local`) before the rvalue lands in the promoted body.
            self.visit_rvalue(&mut rvalue, loc);
            self.assign(new_temp, rvalue, source_info.span);
        } else {
            // The temp was initialized by the block's terminator, which must
            // be a `Call`. Either clone it or detach it, replacing it in the
            // source with a `Goto` to the call's former target.
            let terminator = if self.keep_original {
                self.source[loc.block].terminator().clone()
            } else {
                let terminator = self.source[loc.block].terminator_mut();
                let target = match &terminator.kind {
                    TerminatorKind::Call { target: Some(target), .. } => *target,
                    kind => {
                        span_bug!(terminator.source_info.span, "{:?} not promotable", kind);
                    }
                };
                Terminator {
                    source_info: terminator.source_info,
                    kind: mem::replace(&mut terminator.kind, TerminatorKind::Goto { target }),
                }
            };

            match terminator.kind {
                TerminatorKind::Call {
                    mut func, mut args, call_source: desugar, fn_span, ..
                } => {
                    // This promoted involves a function call, so it may fail to evaluate. Let's
                    // make sure it is added to `required_consts` so that failure cannot get lost.
                    self.add_to_required = true;

                    // Promote temps used by the callee and argument operands.
                    self.visit_operand(&mut func, loc);
                    for arg in &mut args {
                        self.visit_operand(&mut arg.node, loc);
                    }

                    // Rebuild the call in the promoted body: it writes to the
                    // new temp and continues in a fresh `Return` block.
                    let last = self.promoted.basic_blocks.last_index().unwrap();
                    let new_target = self.new_block();

                    *self.promoted[last].terminator_mut() = Terminator {
                        kind: TerminatorKind::Call {
                            func,
                            args,
                            unwind: UnwindAction::Continue,
                            destination: Place::from(new_temp),
                            target: Some(new_target),
                            call_source: desugar,
                            fn_span,
                        },
                        source_info: SourceInfo::outermost(terminator.source_info.span),
                        ..terminator
                    };
                }
                kind => {
                    span_bug!(terminator.source_info.span, "{:?} not promotable", kind);
                }
            };
        };

        self.keep_original = old_keep_original;
        new_temp
    }

    /// Promotes `candidate`: rewrites the `Rvalue::Ref` at its location in the
    /// source body to borrow a freshly created local that is initialized from
    /// the promoted constant, then builds and returns the promoted MIR body
    /// (whose `RETURN_PLACE` holds a reference to the promoted contents).
    fn promote_candidate(
        mut self,
        candidate: Candidate,
        next_promoted_index: Promoted,
    ) -> Body<'tcx> {
        let def = self.source.source.def_id();
        let (mut rvalue, promoted_op) = {
            let promoted = &mut self.promoted;
            let tcx = self.tcx;
            // Builds the `ConstOperand` referring to this promoted body, and
            // fixes up the promoted body's span and return type to match.
            let mut promoted_operand = |ty, span| {
                promoted.span = span;
                promoted.local_decls[RETURN_PLACE] = LocalDecl::new(ty, span);
                let args =
                    tcx.erase_and_anonymize_regions(GenericArgs::identity_for_item(tcx, def));
                let uneval =
                    mir::UnevaluatedConst { def, args, promoted: Some(next_promoted_index) };

                ConstOperand { span, user_ty: None, const_: Const::Unevaluated(uneval, ty) }
            };

            let blocks = self.source.basic_blocks.as_mut();
            let local_decls = &mut self.source.local_decls;
            let loc = candidate.location;
            let statement = &mut blocks[loc.block].statements[loc.statement_index];
            // A candidate is always a borrow assignment (see the validator).
            let StatementKind::Assign(box (_, Rvalue::Ref(region, borrow_kind, place))) =
                &mut statement.kind
            else {
                bug!()
            };

            // Use the underlying local for this (necessarily interior) borrow.
            debug_assert!(region.is_erased());
            let ty = local_decls[place.local].ty;
            let span = statement.source_info.span;

            let ref_ty =
                Ty::new_ref(tcx, tcx.lifetimes.re_erased, ty, borrow_kind.to_mutbl_lossy());

            // Redirect the borrowed place through a dereference, so that
            // together with the `place.local` swap below the source borrow
            // becomes `&(*promoted_ref).<original projection>`.
            let mut projection = vec![PlaceElem::Deref];
            projection.extend(place.projection);
            place.projection = tcx.mk_place_elems(&projection);

            // Create a temp to hold the promoted reference.
            // This is because `*r` requires `r` to be a local,
            // otherwise we would use the `promoted` directly.
            let mut promoted_ref = LocalDecl::new(ref_ty, span);
            promoted_ref.source_info = statement.source_info;
            let promoted_ref = local_decls.push(promoted_ref);
            // Keep `temps` index-aligned with `local_decls`; the new reference
            // local is itself not promotable.
            assert_eq!(self.temps.push(TempState::Unpromotable), promoted_ref);

            // `promoted_ref = <promoted const>` — buffered for insertion
            // before the candidate's statement.
            let promoted_operand = promoted_operand(ref_ty, span);
            let promoted_ref_statement = Statement::new(
                statement.source_info,
                StatementKind::Assign(Box::new((
                    Place::from(promoted_ref),
                    Rvalue::Use(Operand::Constant(Box::new(promoted_operand))),
                ))),
            );
            self.extra_statements.push((loc, promoted_ref_statement));

            (
                // The rvalue for the promoted body's return place: a borrow of
                // the original (pre-swap) local, whose initialization will be
                // pulled in by the visitor below.
                Rvalue::Ref(
                    tcx.lifetimes.re_erased,
                    *borrow_kind,
                    Place {
                        local: mem::replace(&mut place.local, promoted_ref),
                        projection: List::empty(),
                    },
                ),
                promoted_operand,
            )
        };

        // The promoted body starts out empty; create its entry block.
        assert_eq!(self.new_block(), START_BLOCK);
        // The visitor ignores the `Location`, so a dummy one suffices here.
        self.visit_rvalue(
            &mut rvalue,
            Location { block: START_BLOCK, statement_index: usize::MAX },
        );

        let span = self.promoted.span;
        self.assign(RETURN_PLACE, rvalue, span);

        // Now that we did promotion, we know whether we'll want to add this to `required_consts` of
        // the surrounding MIR body.
        if self.add_to_required {
            self.source.required_consts.as_mut().unwrap().push(promoted_op);
        }

        self.promoted.set_required_consts(self.required_consts);

        self.promoted
    }
}
956
957/// Replaces all temporaries with their promoted counterparts.
958impl<'a, 'tcx> MutVisitor<'tcx> for Promoter<'a, 'tcx> {
959    fn tcx(&self) -> TyCtxt<'tcx> {
960        self.tcx
961    }
962
963    fn visit_local(&mut self, local: &mut Local, _: PlaceContext, _: Location) {
964        if self.is_temp_kind(*local) {
965            *local = self.promote_temp(*local);
966        }
967    }
968
969    fn visit_const_operand(&mut self, constant: &mut ConstOperand<'tcx>, _location: Location) {
970        if constant.const_.is_required_const() {
971            self.required_consts.push(*constant);
972        }
973
974        // Skipping `super_constant` as the visitor is otherwise only looking for locals.
975    }
976}
977
/// Promotes every candidate in `candidates` out of `body`, returning the new
/// promoted MIR bodies. Afterwards, assignments to, storage markers for, and
/// drops of promoted-out temps are removed from `body`.
fn promote_candidates<'tcx>(
    body: &mut Body<'tcx>,
    tcx: TyCtxt<'tcx>,
    mut temps: IndexVec<Local, TempState>,
    candidates: Vec<Candidate>,
) -> IndexVec<Promoted, Body<'tcx>> {
    // Visit candidates in reverse, in case they're nested.
    debug!(promote_candidates = ?candidates);

    // eagerly fail fast
    if candidates.is_empty() {
        return IndexVec::new();
    }

    let mut promotions = IndexVec::new();

    let mut extra_statements = vec![];
    for candidate in candidates.into_iter().rev() {
        let Location { block, statement_index } = candidate.location;
        if let StatementKind::Assign(box (place, _)) = &body[block].statements[statement_index].kind
            && let Some(local) = place.as_local()
        {
            // A nested candidate may have been pulled into the promoted body
            // of an outer candidate handled earlier in this reverse walk.
            if temps[local] == TempState::PromotedOut {
                // Already promoted.
                continue;
            }
        }

        // Declare return place local so that `mir::Body::new` doesn't complain.
        let initial_locals = iter::once(LocalDecl::new(tcx.types.never, body.span)).collect();

        // Give the promoted body a single root scope, detached from the
        // parent body's scope tree.
        let mut scope = body.source_scopes[body.source_info(candidate.location).scope].clone();
        scope.parent_scope = None;

        let mut promoted = Body::new(
            body.source, // `promoted` gets filled in below
            IndexVec::new(),
            IndexVec::from_elem_n(scope, 1),
            initial_locals,
            IndexVec::new(),
            0,
            vec![],
            body.span,
            None,
            body.tainted_by_errors,
        );
        promoted.phase = MirPhase::Analysis(AnalysisPhase::Initial);

        let promoter = Promoter {
            promoted,
            tcx,
            source: body,
            temps: &mut temps,
            extra_statements: &mut extra_statements,
            keep_original: false,
            add_to_required: false,
            required_consts: Vec::new(),
        };

        // `next_index` is still this promoted's own index, since the push
        // happens just below.
        let mut promoted = promoter.promote_candidate(candidate, promotions.next_index());
        promoted.source.promoted = Some(promotions.next_index());
        promotions.push(promoted);
    }

    // Insert each of `extra_statements` before its indicated location, which
    // has to be done in reverse location order, to not invalidate the rest.
    extra_statements.sort_by_key(|&(loc, _)| cmp::Reverse(loc));
    for (loc, statement) in extra_statements {
        body[loc.block].statements.insert(loc.statement_index, statement);
    }

    // Eliminate assignments to, and drops of promoted temps.
    let promoted = |index: Local| temps[index] == TempState::PromotedOut;
    for block in body.basic_blocks_mut() {
        block.retain_statements(|statement| match &statement.kind {
            StatementKind::Assign(box (place, _)) => {
                if let Some(index) = place.as_local() {
                    !promoted(index)
                } else {
                    true
                }
            }
            StatementKind::StorageLive(index) | StatementKind::StorageDead(index) => {
                !promoted(*index)
            }
            _ => true,
        });
        // A `Drop` of a promoted temp becomes a plain `Goto` to its target.
        let terminator = block.terminator_mut();
        if let TerminatorKind::Drop { place, target, .. } = &terminator.kind
            && let Some(index) = place.as_local()
        {
            if promoted(index) {
                terminator.kind = TerminatorKind::Goto { target: *target };
            }
        }
    }

    promotions
}