rustc_mir_transform/coroutine.rs

//! This is the implementation of the pass which transforms coroutines into state machines.
//!
//! MIR generation for coroutines creates a function which has a `self` argument that is
//! passed by value. This argument is effectively a coroutine type which only contains upvars,
//! and within the MIR for the coroutine it is only used as this argument.
//! It is passed by value to enable upvars to be moved out of it. Drop elaboration runs on that
//! MIR before this pass and creates drop flags for MIR locals.
//! It will also drop the coroutine argument (which only consists of upvars) if any of the upvars
//! are moved out of. This pass elaborates the drops of upvars / coroutine argument in the case
//! that none of the upvars were moved out of. This is because we cannot have any drops of this
//! coroutine in the MIR, since it is used to create the drop glue for the coroutine. We'd get
//! infinite recursion otherwise.
//!
//! This pass creates the implementation for either the `Coroutine::resume` or `Future::poll`
//! function and the drop shim for the coroutine based on the MIR input.
//! It converts the coroutine argument from `Self` to `&mut Self`, adding derefs in the MIR as needed.
//! It computes the final layout of the coroutine struct, which looks like this:
//!     First the upvars are stored.
//!     It is followed by the coroutine state field.
//!     Then finally the MIR locals which are live across a suspension point are stored.
//!     ```ignore (illustrative)
//!     struct Coroutine {
//!         upvars...,
//!         state: u32,
//!         mir_locals...,
//!     }
//!     ```
//! This pass computes the meaning of the state field and the MIR locals which are live
//! across a suspension point. There are however three hardcoded coroutine states:
//!     0 - Coroutine has not been resumed yet
//!     1 - Coroutine has returned / is completed
//!     2 - Coroutine has been poisoned
//!
//! It also rewrites `return x` and `yield y` as setting a new coroutine state and returning
//! `CoroutineState::Complete(x)` and `CoroutineState::Yielded(y)`,
//! or `Poll::Ready(x)` and `Poll::Pending` respectively.
//! MIR locals which are live across a suspension point are moved to the coroutine struct,
//! and references to them are updated to point into the coroutine struct.
//!
//! The pass creates two functions which have a switch on the coroutine state giving
//! the action to take.
//!
//! One of them is the implementation of `Coroutine::resume` / `Future::poll`.
//! For coroutines with state 0 (unresumed) it starts the execution of the coroutine.
//! For coroutines with state 1 (returned) and state 2 (poisoned) it panics.
//! Otherwise it continues the execution from the last suspension point.
//!
//! The other function is the drop glue for the coroutine.
//! For coroutines with state 0 (unresumed) it drops the upvars of the coroutine.
//! For coroutines with state 1 (returned) and state 2 (poisoned) it does nothing.
//! Otherwise it drops all the values in scope at the last suspension point.
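//!
//! As a rough illustration only (simplified surface Rust, not the exact MIR this pass emits),
//! the generated `resume`/`poll` body dispatches on the state field along these lines:
//!     ```ignore (illustrative)
//!     fn resume(self: Pin<&mut Coroutine>, arg: ResumeArg) -> CoroutineState<Y, R> {
//!         match self.state {
//!             0 => { /* start executing the coroutine body from the beginning */ }
//!             1 | 2 => panic!("coroutine resumed after completion or after panicking"),
//!             n => { /* restore saved locals and continue at suspension point `n` */ }
//!         }
//!     }
//!     ```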

mod by_move_body;
mod drop;
use std::{iter, ops};

pub(super) use by_move_body::coroutine_by_move_body_def_id;
use drop::{
    cleanup_async_drops, create_coroutine_drop_shim, create_coroutine_drop_shim_async,
    create_coroutine_drop_shim_proxy_async, elaborate_coroutine_drops, expand_async_drops,
    has_expandable_async_drops, insert_clean_drop,
};
use rustc_abi::{FieldIdx, VariantIdx};
use rustc_data_structures::fx::FxHashSet;
use rustc_errors::pluralize;
use rustc_hir as hir;
use rustc_hir::lang_items::LangItem;
use rustc_hir::{CoroutineDesugaring, CoroutineKind};
use rustc_index::bit_set::{BitMatrix, DenseBitSet, GrowableBitSet};
use rustc_index::{Idx, IndexVec, indexvec};
use rustc_middle::mir::visit::{MutVisitor, MutatingUseContext, PlaceContext, Visitor};
use rustc_middle::mir::*;
use rustc_middle::ty::util::Discr;
use rustc_middle::ty::{
    self, CoroutineArgs, CoroutineArgsExt, GenericArgsRef, InstanceKind, Ty, TyCtxt, TypingMode,
};
use rustc_middle::{bug, span_bug};
use rustc_mir_dataflow::impls::{
    MaybeBorrowedLocals, MaybeLiveLocals, MaybeRequiresStorage, MaybeStorageLive,
    always_storage_live_locals,
};
use rustc_mir_dataflow::{
    Analysis, Results, ResultsCursor, ResultsVisitor, visit_reachable_results,
};
use rustc_span::def_id::{DefId, LocalDefId};
use rustc_span::source_map::dummy_spanned;
use rustc_span::symbol::sym;
use rustc_span::{DUMMY_SP, Span};
use rustc_trait_selection::error_reporting::InferCtxtErrorExt;
use rustc_trait_selection::infer::TyCtxtInferExt as _;
use rustc_trait_selection::traits::{ObligationCause, ObligationCauseCode, ObligationCtxt};
use tracing::{debug, instrument, trace};

use crate::deref_separator::deref_finder;
use crate::{abort_unwinding_calls, errors, pass_manager as pm, simplify};

pub(super) struct StateTransform;

struct RenameLocalVisitor<'tcx> {
    from: Local,
    to: Local,
    tcx: TyCtxt<'tcx>,
}

impl<'tcx> MutVisitor<'tcx> for RenameLocalVisitor<'tcx> {
    fn tcx(&self) -> TyCtxt<'tcx> {
        self.tcx
    }

    fn visit_local(&mut self, local: &mut Local, _: PlaceContext, _: Location) {
        if *local == self.from {
            *local = self.to;
        } else if *local == self.to {
            *local = self.from;
        }
    }

    fn visit_terminator(&mut self, terminator: &mut Terminator<'tcx>, location: Location) {
        match terminator.kind {
            TerminatorKind::Return => {
                // Do not replace the implicit `_0` access here, as that's not possible. The
                // transform already handles `return` correctly.
            }
            _ => self.super_terminator(terminator, location),
        }
    }
}

struct SelfArgVisitor<'tcx> {
    tcx: TyCtxt<'tcx>,
    new_base: Place<'tcx>,
}

impl<'tcx> SelfArgVisitor<'tcx> {
    fn new(tcx: TyCtxt<'tcx>, elem: ProjectionElem<Local, Ty<'tcx>>) -> Self {
        Self { tcx, new_base: Place { local: SELF_ARG, projection: tcx.mk_place_elems(&[elem]) } }
    }
}

impl<'tcx> MutVisitor<'tcx> for SelfArgVisitor<'tcx> {
    fn tcx(&self) -> TyCtxt<'tcx> {
        self.tcx
    }

    fn visit_local(&mut self, local: &mut Local, _: PlaceContext, _: Location) {
        assert_ne!(*local, SELF_ARG);
    }

    fn visit_place(&mut self, place: &mut Place<'tcx>, context: PlaceContext, location: Location) {
        if place.local == SELF_ARG {
            replace_base(place, self.new_base, self.tcx);
        } else {
            self.visit_local(&mut place.local, context, location);

            for elem in place.projection.iter() {
                if let PlaceElem::Index(local) = elem {
                    assert_ne!(local, SELF_ARG);
                }
            }
        }
    }
}

#[tracing::instrument(level = "trace", skip(tcx))]
fn replace_base<'tcx>(place: &mut Place<'tcx>, new_base: Place<'tcx>, tcx: TyCtxt<'tcx>) {
    place.local = new_base.local;

    let mut new_projection = new_base.projection.to_vec();
    new_projection.append(&mut place.projection.to_vec());

    place.projection = tcx.mk_place_elems(&new_projection);
    tracing::trace!(?place);
}

const SELF_ARG: Local = Local::from_u32(1);
const CTX_ARG: Local = Local::from_u32(2);

/// A `yield` point in the coroutine.
struct SuspensionPoint<'tcx> {
    /// State discriminant used when suspending or resuming at this point.
    state: usize,
    /// The block to jump to after resumption.
    resume: BasicBlock,
    /// Where to move the resume argument after resumption.
    resume_arg: Place<'tcx>,
    /// Which block to jump to if the coroutine is dropped in this state.
    drop: Option<BasicBlock>,
    /// Set of locals that have live storage while at this suspension point.
    storage_liveness: GrowableBitSet<Local>,
}

struct TransformVisitor<'tcx> {
    tcx: TyCtxt<'tcx>,
    coroutine_kind: hir::CoroutineKind,

    // The type of the discriminant in the coroutine struct
    discr_ty: Ty<'tcx>,

    // Mapping from Local to (type of local, coroutine struct index)
    remap: IndexVec<Local, Option<(Ty<'tcx>, VariantIdx, FieldIdx)>>,

    // A map from a suspension point in a block to the locals which have live storage at that point
    storage_liveness: IndexVec<BasicBlock, Option<DenseBitSet<Local>>>,

    // A list of suspension points, generated during the transform
    suspension_points: Vec<SuspensionPoint<'tcx>>,

    // The set of locals that have no `StorageLive`/`StorageDead` annotations.
    always_live_locals: DenseBitSet<Local>,

    // A new local created to hold the `CoroutineState` value.
    new_ret_local: Local,

    old_yield_ty: Ty<'tcx>,

    old_ret_ty: Ty<'tcx>,
}

impl<'tcx> TransformVisitor<'tcx> {
    fn insert_none_ret_block(&self, body: &mut Body<'tcx>) -> BasicBlock {
        let block = body.basic_blocks.next_index();
        let source_info = SourceInfo::outermost(body.span);

        let none_value = match self.coroutine_kind {
            CoroutineKind::Desugared(CoroutineDesugaring::Async, _) => {
                span_bug!(body.span, "`Future`s are not fused inherently")
            }
            CoroutineKind::Coroutine(_) => span_bug!(body.span, "`Coroutine`s cannot be fused"),
            // `gen` continues to return `None`
            CoroutineKind::Desugared(CoroutineDesugaring::Gen, _) => {
                let option_def_id = self.tcx.require_lang_item(LangItem::Option, body.span);
                make_aggregate_adt(
                    option_def_id,
                    VariantIdx::ZERO,
                    self.tcx.mk_args(&[self.old_yield_ty.into()]),
                    IndexVec::new(),
                )
            }
            // `async gen` continues to return `Poll::Ready(None)`
            CoroutineKind::Desugared(CoroutineDesugaring::AsyncGen, _) => {
                let ty::Adt(_poll_adt, args) = *self.old_yield_ty.kind() else { bug!() };
                let ty::Adt(_option_adt, args) = *args.type_at(0).kind() else { bug!() };
                let yield_ty = args.type_at(0);
                Rvalue::Use(Operand::Constant(Box::new(ConstOperand {
                    span: source_info.span,
                    const_: Const::Unevaluated(
                        UnevaluatedConst::new(
                            self.tcx.require_lang_item(LangItem::AsyncGenFinished, body.span),
                            self.tcx.mk_args(&[yield_ty.into()]),
                        ),
                        self.old_yield_ty,
                    ),
                    user_ty: None,
                })))
            }
        };

        let statements = vec![Statement::new(
            source_info,
            StatementKind::Assign(Box::new((Place::return_place(), none_value))),
        )];

        body.basic_blocks_mut().push(BasicBlockData::new_stmts(
            statements,
            Some(Terminator { source_info, kind: TerminatorKind::Return }),
            false,
        ));

        block
    }

    // Make a `CoroutineState` or `Poll` variant assignment.
    //
    // `core::ops::CoroutineState` only has single element tuple variants,
    // so we can just write to the downcasted first field and then set the
    // discriminant to the appropriate variant.
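    //
    // Illustratively (a simplified sketch, not the exact MIR), for a plain coroutine this
    // builds either
    //   _ret = CoroutineState::<Y, R>::Yielded(val)   // for `yield`
    // or
    //   _ret = CoroutineState::<Y, R>::Complete(val)  // for `return`
    // and the caller then updates the state discriminant and returns.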
    #[tracing::instrument(level = "trace", skip(self, statements))]
    fn make_state(
        &self,
        val: Operand<'tcx>,
        source_info: SourceInfo,
        is_return: bool,
        statements: &mut Vec<Statement<'tcx>>,
    ) {
        const ZERO: VariantIdx = VariantIdx::ZERO;
        const ONE: VariantIdx = VariantIdx::from_usize(1);
        let rvalue = match self.coroutine_kind {
            CoroutineKind::Desugared(CoroutineDesugaring::Async, _) => {
                let poll_def_id = self.tcx.require_lang_item(LangItem::Poll, source_info.span);
                let args = self.tcx.mk_args(&[self.old_ret_ty.into()]);
                let (variant_idx, operands) = if is_return {
                    (ZERO, indexvec![val]) // Poll::Ready(val)
                } else {
                    (ONE, IndexVec::new()) // Poll::Pending
                };
                make_aggregate_adt(poll_def_id, variant_idx, args, operands)
            }
            CoroutineKind::Desugared(CoroutineDesugaring::Gen, _) => {
                let option_def_id = self.tcx.require_lang_item(LangItem::Option, source_info.span);
                let args = self.tcx.mk_args(&[self.old_yield_ty.into()]);
                let (variant_idx, operands) = if is_return {
                    (ZERO, IndexVec::new()) // None
                } else {
                    (ONE, indexvec![val]) // Some(val)
                };
                make_aggregate_adt(option_def_id, variant_idx, args, operands)
            }
            CoroutineKind::Desugared(CoroutineDesugaring::AsyncGen, _) => {
                if is_return {
                    let ty::Adt(_poll_adt, args) = *self.old_yield_ty.kind() else { bug!() };
                    let ty::Adt(_option_adt, args) = *args.type_at(0).kind() else { bug!() };
                    let yield_ty = args.type_at(0);
                    Rvalue::Use(Operand::Constant(Box::new(ConstOperand {
                        span: source_info.span,
                        const_: Const::Unevaluated(
                            UnevaluatedConst::new(
                                self.tcx.require_lang_item(
                                    LangItem::AsyncGenFinished,
                                    source_info.span,
                                ),
                                self.tcx.mk_args(&[yield_ty.into()]),
                            ),
                            self.old_yield_ty,
                        ),
                        user_ty: None,
                    })))
                } else {
                    Rvalue::Use(val)
                }
            }
            CoroutineKind::Coroutine(_) => {
                let coroutine_state_def_id =
                    self.tcx.require_lang_item(LangItem::CoroutineState, source_info.span);
                let args = self.tcx.mk_args(&[self.old_yield_ty.into(), self.old_ret_ty.into()]);
                let variant_idx = if is_return {
                    ONE // CoroutineState::Complete(val)
                } else {
                    ZERO // CoroutineState::Yielded(val)
                };
                make_aggregate_adt(coroutine_state_def_id, variant_idx, args, indexvec![val])
            }
        };

        // Assign to `new_ret_local`, which will be replaced by `RETURN_PLACE` later.
        statements.push(Statement::new(
            source_info,
            StatementKind::Assign(Box::new((self.new_ret_local.into(), rvalue))),
        ));
    }

    // Create a Place referencing a coroutine struct field
    #[tracing::instrument(level = "trace", skip(self), ret)]
    fn make_field(&self, variant_index: VariantIdx, idx: FieldIdx, ty: Ty<'tcx>) -> Place<'tcx> {
        let self_place = Place::from(SELF_ARG);
        let base = self.tcx.mk_place_downcast_unnamed(self_place, variant_index);
        let mut projection = base.projection.to_vec();
        projection.push(ProjectionElem::Field(idx, ty));

        Place { local: base.local, projection: self.tcx.mk_place_elems(&projection) }
    }

    // Create a statement which changes the discriminant
    #[tracing::instrument(level = "trace", skip(self))]
    fn set_discr(&self, state_disc: VariantIdx, source_info: SourceInfo) -> Statement<'tcx> {
        let self_place = Place::from(SELF_ARG);
        Statement::new(
            source_info,
            StatementKind::SetDiscriminant {
                place: Box::new(self_place),
                variant_index: state_disc,
            },
        )
    }

    // Create a statement which reads the discriminant into a temporary
    #[tracing::instrument(level = "trace", skip(self, body))]
    fn get_discr(&self, body: &mut Body<'tcx>) -> (Statement<'tcx>, Place<'tcx>) {
        let temp_decl = LocalDecl::new(self.discr_ty, body.span);
        let local_decls_len = body.local_decls.push(temp_decl);
        let temp = Place::from(local_decls_len);

        let self_place = Place::from(SELF_ARG);
        let assign = Statement::new(
            SourceInfo::outermost(body.span),
            StatementKind::Assign(Box::new((temp, Rvalue::Discriminant(self_place)))),
        );
        (assign, temp)
    }

    /// Swaps all references to `old_local` and `new_local`.
    #[tracing::instrument(level = "trace", skip(self, body))]
    fn replace_local(&mut self, old_local: Local, new_local: Local, body: &mut Body<'tcx>) {
        body.local_decls.swap(old_local, new_local);

        let mut visitor = RenameLocalVisitor { from: old_local, to: new_local, tcx: self.tcx };
        visitor.visit_body(body);
        for suspension in &mut self.suspension_points {
            let ctxt = PlaceContext::MutatingUse(MutatingUseContext::Yield);
            let location = Location { block: START_BLOCK, statement_index: 0 };
            visitor.visit_place(&mut suspension.resume_arg, ctxt, location);
        }
    }
}

impl<'tcx> MutVisitor<'tcx> for TransformVisitor<'tcx> {
    fn tcx(&self) -> TyCtxt<'tcx> {
        self.tcx
    }

    #[tracing::instrument(level = "trace", skip(self), ret)]
    fn visit_local(&mut self, local: &mut Local, _: PlaceContext, _location: Location) {
        assert!(!self.remap.contains(*local));
    }

    #[tracing::instrument(level = "trace", skip(self), ret)]
    fn visit_place(&mut self, place: &mut Place<'tcx>, _: PlaceContext, _location: Location) {
        // Replace a `Local` from the remap with a coroutine struct access
        if let Some(&Some((ty, variant_index, idx))) = self.remap.get(place.local) {
            replace_base(place, self.make_field(variant_index, idx, ty), self.tcx);
        }
    }

    #[tracing::instrument(level = "trace", skip(self, stmt), ret)]
    fn visit_statement(&mut self, stmt: &mut Statement<'tcx>, location: Location) {
        // Remove StorageLive and StorageDead statements for remapped locals
        if let StatementKind::StorageLive(l) | StatementKind::StorageDead(l) = stmt.kind
            && self.remap.contains(l)
        {
            stmt.make_nop(true);
        }
        self.super_statement(stmt, location);
    }

    #[tracing::instrument(level = "trace", skip(self, term), ret)]
    fn visit_terminator(&mut self, term: &mut Terminator<'tcx>, location: Location) {
        if let TerminatorKind::Return = term.kind {
            // `visit_basic_block_data` introduces `Return` terminators which read `RETURN_PLACE`.
            // But this `RETURN_PLACE` is already remapped, so we should not touch it again.
            return;
        }
        self.super_terminator(term, location);
    }

    #[tracing::instrument(level = "trace", skip(self, data), ret)]
    fn visit_basic_block_data(&mut self, block: BasicBlock, data: &mut BasicBlockData<'tcx>) {
        match data.terminator().kind {
            TerminatorKind::Return => {
                let source_info = data.terminator().source_info;
                // We must assign the value first in case it gets declared dead below
                self.make_state(
                    Operand::Move(Place::return_place()),
                    source_info,
                    true,
                    &mut data.statements,
                );
                // Return state.
                let state = VariantIdx::new(CoroutineArgs::RETURNED);
                data.statements.push(self.set_discr(state, source_info));
                data.terminator_mut().kind = TerminatorKind::Return;
            }
            TerminatorKind::Yield { ref value, resume, mut resume_arg, drop } => {
                let source_info = data.terminator().source_info;
                // We must assign the value first in case it gets declared dead below
                self.make_state(value.clone(), source_info, false, &mut data.statements);
                // Yield state.
                let state = CoroutineArgs::RESERVED_VARIANTS + self.suspension_points.len();

                // The resume arg target location might itself be remapped if its base local is
                // live across a yield.
                if let Some(&Some((ty, variant, idx))) = self.remap.get(resume_arg.local) {
                    replace_base(&mut resume_arg, self.make_field(variant, idx, ty), self.tcx);
                }

                let storage_liveness: GrowableBitSet<Local> =
                    self.storage_liveness[block].clone().unwrap().into();

                for i in 0..self.always_live_locals.domain_size() {
                    let l = Local::new(i);
                    let needs_storage_dead = storage_liveness.contains(l)
                        && !self.remap.contains(l)
                        && !self.always_live_locals.contains(l);
                    if needs_storage_dead {
                        data.statements
                            .push(Statement::new(source_info, StatementKind::StorageDead(l)));
                    }
                }

                self.suspension_points.push(SuspensionPoint {
                    state,
                    resume,
                    resume_arg,
                    drop,
                    storage_liveness,
                });

                let state = VariantIdx::new(state);
                data.statements.push(self.set_discr(state, source_info));
                data.terminator_mut().kind = TerminatorKind::Return;
            }
            _ => {}
        }

        self.super_basic_block_data(block, data);
    }
}

fn make_aggregate_adt<'tcx>(
    def_id: DefId,
    variant_idx: VariantIdx,
    args: GenericArgsRef<'tcx>,
    operands: IndexVec<FieldIdx, Operand<'tcx>>,
) -> Rvalue<'tcx> {
    Rvalue::Aggregate(Box::new(AggregateKind::Adt(def_id, variant_idx, args, None, None)), operands)
}

#[tracing::instrument(level = "trace", skip(tcx, body))]
fn make_coroutine_state_argument_indirect<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
    let coroutine_ty = body.local_decls.raw[1].ty;

    let ref_coroutine_ty = Ty::new_mut_ref(tcx, tcx.lifetimes.re_erased, coroutine_ty);

    // Replace the by value coroutine argument
    body.local_decls.raw[1].ty = ref_coroutine_ty;

    // Add a deref to accesses of the coroutine state
    SelfArgVisitor::new(tcx, ProjectionElem::Deref).visit_body(body);
}

#[tracing::instrument(level = "trace", skip(tcx, body))]
fn make_coroutine_state_argument_pinned<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
    let ref_coroutine_ty = body.local_decls.raw[1].ty;

    let pin_did = tcx.require_lang_item(LangItem::Pin, body.span);
    let pin_adt_ref = tcx.adt_def(pin_did);
    let args = tcx.mk_args(&[ref_coroutine_ty.into()]);
    let pin_ref_coroutine_ty = Ty::new_adt(tcx, pin_adt_ref, args);

    // Replace the by ref coroutine argument
    body.local_decls.raw[1].ty = pin_ref_coroutine_ty;

    // Add the Pin field access to accesses of the coroutine state
    SelfArgVisitor::new(tcx, ProjectionElem::Field(FieldIdx::ZERO, ref_coroutine_ty))
        .visit_body(body);
}

/// Transforms the `body` of the coroutine applying the following transforms:
///
/// - Eliminates all the `get_context` calls that async lowering created.
/// - Replaces all `Local` `ResumeTy` types with `&mut Context<'_>` (`context_mut_ref`).
///
/// The `Local`s that have their types replaced are:
/// - The `resume` argument itself.
/// - The argument to `get_context`.
/// - The yielded value of a `yield`.
///
/// The `ResumeTy` hides a `&mut Context<'_>` behind an unsafe raw pointer, and the
/// `get_context` function is being used to convert that back to a `&mut Context<'_>`.
///
/// Ideally the async lowering would not use the `ResumeTy`/`get_context` indirection,
/// but rather directly use `&mut Context<'_>`; however, that would currently
/// lead to higher-kinded lifetime errors.
/// See <https://github.com/rust-lang/rust/issues/105501>.
///
/// The async lowering step and the type / lifetime inference / checking are
/// still using the `ResumeTy` indirection for the time being, and that indirection
/// is removed here. After this transform, the coroutine body only knows about `&mut Context<'_>`.
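///
/// As a rough sketch (illustrative only, not exact MIR syntax), a call terminator like
///
/// ```ignore (illustrative)
/// _dest = get_context(move _arg) -> bb2;
/// ```
///
/// is rewritten into a plain assignment `_dest = move _arg;` followed by a goto to `bb2`,
/// once the types of `_arg` and the `resume` argument have been replaced by `&mut Context<'_>`.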
#[tracing::instrument(level = "trace", skip(tcx, body), ret)]
fn transform_async_context<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) -> Ty<'tcx> {
    let context_mut_ref = Ty::new_task_context(tcx);

    // replace the type of the `resume` argument
    replace_resume_ty_local(tcx, body, CTX_ARG, context_mut_ref);

    let get_context_def_id = tcx.require_lang_item(LangItem::GetContext, body.span);

    for bb in body.basic_blocks.indices() {
        let bb_data = &body[bb];
        if bb_data.is_cleanup {
            continue;
        }

        match &bb_data.terminator().kind {
            TerminatorKind::Call { func, .. } => {
                let func_ty = func.ty(body, tcx);
                if let ty::FnDef(def_id, _) = *func_ty.kind()
                    && def_id == get_context_def_id
                {
                    let local = eliminate_get_context_call(&mut body[bb]);
                    replace_resume_ty_local(tcx, body, local, context_mut_ref);
                }
            }
            TerminatorKind::Yield { resume_arg, .. } => {
                replace_resume_ty_local(tcx, body, resume_arg.local, context_mut_ref);
            }
            _ => {}
        }
    }
    context_mut_ref
}

fn eliminate_get_context_call<'tcx>(bb_data: &mut BasicBlockData<'tcx>) -> Local {
    let terminator = bb_data.terminator.take().unwrap();
    let TerminatorKind::Call { args, destination, target, .. } = terminator.kind else {
        bug!();
    };
    let [arg] = *Box::try_from(args).unwrap();
    let local = arg.node.place().unwrap().local;

    let arg = Rvalue::Use(arg.node);
    let assign =
        Statement::new(terminator.source_info, StatementKind::Assign(Box::new((destination, arg))));
    bb_data.statements.push(assign);
    bb_data.terminator = Some(Terminator {
        source_info: terminator.source_info,
        kind: TerminatorKind::Goto { target: target.unwrap() },
    });
    local
}

#[cfg_attr(not(debug_assertions), allow(unused))]
#[tracing::instrument(level = "trace", skip(tcx, body), ret)]
fn replace_resume_ty_local<'tcx>(
    tcx: TyCtxt<'tcx>,
    body: &mut Body<'tcx>,
    local: Local,
    context_mut_ref: Ty<'tcx>,
) {
    let local_ty = std::mem::replace(&mut body.local_decls[local].ty, context_mut_ref);
    // We have to replace the `ResumeTy` that is used for type and borrow checking
    // with `&mut Context<'_>` in MIR.
    #[cfg(debug_assertions)]
    {
        if let ty::Adt(resume_ty_adt, _) = local_ty.kind() {
            let expected_adt = tcx.adt_def(tcx.require_lang_item(LangItem::ResumeTy, body.span));
            assert_eq!(*resume_ty_adt, expected_adt);
        } else {
            panic!("expected `ResumeTy`, found `{:?}`", local_ty);
        };
    }
}

/// Transforms the `body` of the coroutine applying the following transform:
///
/// - Remove the `resume` argument.
///
/// Ideally the async lowering would not add the `resume` argument.
///
/// The async lowering step and the type / lifetime inference / checking are
/// still using the `resume` argument for the time being. After this transform,
/// the coroutine body doesn't have the `resume` argument.
fn transform_gen_context<'tcx>(body: &mut Body<'tcx>) {
    // This leaves the local representing the `resume` argument in place,
    // but turns it into a regular local variable. This is cheaper than
    // adjusting all local references in the body after removing it.
    body.arg_count = 1;
}

struct LivenessInfo {
    /// Which locals are live across any suspension point.
    saved_locals: CoroutineSavedLocals,

    /// The set of saved locals live at each suspension point.
    live_locals_at_suspension_points: Vec<DenseBitSet<CoroutineSavedLocal>>,

    /// Parallel vec to the above with SourceInfo for each yield terminator.
    source_info_at_suspension_points: Vec<SourceInfo>,

    /// For every saved local, the set of other saved locals that are
    /// storage-live at the same time as this local. We cannot overlap locals in
    /// the layout which have conflicting storage.
    storage_conflicts: BitMatrix<CoroutineSavedLocal, CoroutineSavedLocal>,

    /// For every suspending block, the locals which are storage-live across
    /// that suspension point.
    storage_liveness: IndexVec<BasicBlock, Option<DenseBitSet<Local>>>,
}

/// Computes which locals have to be stored in the state-machine for the
/// given coroutine.
///
/// The basic idea is as follows:
/// - a local is live until we encounter a `StorageDead` statement. In
///   case none exist, the local is considered to be always live.
/// - a local has to be stored if it is either directly used after the
///   suspend point, or if it is live and has been previously borrowed.
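///
/// As a small illustrative example (hypothetical code, not tied to any exact MIR):
///
/// ```ignore (illustrative)
/// let a = String::new();
/// let b = 0u32;
/// let _ = b;      // last use of `b` before the suspension point
/// yield ();
/// drop(a);        // `a` is used after the suspension point
/// ```
///
/// Only `a` has to be saved in the coroutine state; `b` is neither live across the
/// `yield` nor borrowed, so it can stay a regular local.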
#[tracing::instrument(level = "trace", skip(tcx, body))]
fn locals_live_across_suspend_points<'tcx>(
    tcx: TyCtxt<'tcx>,
    body: &Body<'tcx>,
    always_live_locals: &DenseBitSet<Local>,
    movable: bool,
) -> LivenessInfo {
    // Calculate when MIR locals have live storage. This gives us an upper bound of their
    // lifetimes.
    let mut storage_live = MaybeStorageLive::new(std::borrow::Cow::Borrowed(always_live_locals))
        .iterate_to_fixpoint(tcx, body, None)
        .into_results_cursor(body);

    // Calculate the MIR locals that have been previously borrowed (even if they are still active).
    let borrowed_locals = MaybeBorrowedLocals.iterate_to_fixpoint(tcx, body, Some("coroutine"));
    let mut borrowed_locals_analysis1 = borrowed_locals.analysis;
    let mut borrowed_locals_analysis2 = borrowed_locals_analysis1.clone(); // trivial
    let borrowed_locals_cursor1 = ResultsCursor::new_borrowing(
        body,
        &mut borrowed_locals_analysis1,
        &borrowed_locals.results,
    );
    let mut borrowed_locals_cursor2 = ResultsCursor::new_borrowing(
        body,
        &mut borrowed_locals_analysis2,
        &borrowed_locals.results,
    );

    // Calculate the MIR locals that we need to keep storage around for.
    let mut requires_storage =
        MaybeRequiresStorage::new(borrowed_locals_cursor1).iterate_to_fixpoint(tcx, body, None);
    let mut requires_storage_cursor = ResultsCursor::new_borrowing(
        body,
        &mut requires_storage.analysis,
        &requires_storage.results,
    );

    // Calculate the liveness of MIR locals ignoring borrows.
    let mut liveness =
        MaybeLiveLocals.iterate_to_fixpoint(tcx, body, Some("coroutine")).into_results_cursor(body);

    let mut storage_liveness_map = IndexVec::from_elem(None, &body.basic_blocks);
    let mut live_locals_at_suspension_points = Vec::new();
    let mut source_info_at_suspension_points = Vec::new();
    let mut live_locals_at_any_suspension_point = DenseBitSet::new_empty(body.local_decls.len());

    for (block, data) in body.basic_blocks.iter_enumerated() {
        if let TerminatorKind::Yield { .. } = data.terminator().kind {
            let loc = Location { block, statement_index: data.statements.len() };

            liveness.seek_to_block_end(block);
            let mut live_locals = liveness.get().clone();

            if !movable {
                // The `liveness` variable contains the liveness of MIR locals ignoring borrows.
                // This is correct for movable coroutines since borrows cannot live across
                // suspension points. However for immovable coroutines we need to account for
                // borrows, so we conservatively assume that all borrowed locals are live until
                // we find a StorageDead statement referencing the locals.
                // To do this we just union our `liveness` result with `borrowed_locals`, which
                // contains all the locals which have been borrowed before this suspension point.
                // If a borrow is converted to a raw reference, we must also assume that it lives
                // forever. Note that the final liveness is still bounded by the storage liveness
                // of the local, which happens using the `intersect` operation below.
                borrowed_locals_cursor2.seek_before_primary_effect(loc);
                live_locals.union(borrowed_locals_cursor2.get());
            }

            // Store the storage liveness for later use so we can restore the state
            // after a suspension point
            storage_live.seek_before_primary_effect(loc);
            storage_liveness_map[block] = Some(storage_live.get().clone());

            // Locals are live at this point only if they are used across
            // suspension points (the `liveness` variable)
            // and their storage is required (the `storage_required` variable)
            requires_storage_cursor.seek_before_primary_effect(loc);
            live_locals.intersect(requires_storage_cursor.get());

            // The coroutine argument is ignored.
            live_locals.remove(SELF_ARG);

            debug!("loc = {:?}, live_locals = {:?}", loc, live_locals);

            // Add the locals live at this suspension point to the set of locals which live across
            // any suspension points
            live_locals_at_any_suspension_point.union(&live_locals);

            live_locals_at_suspension_points.push(live_locals);
            source_info_at_suspension_points.push(data.terminator().source_info);
        }
    }

    debug!("live_locals_anywhere = {:?}", live_locals_at_any_suspension_point);
    let saved_locals = CoroutineSavedLocals(live_locals_at_any_suspension_point);

    // Renumber our liveness_map bitsets to include only the locals we are
    // saving.
    let live_locals_at_suspension_points = live_locals_at_suspension_points
        .iter()
        .map(|live_here| saved_locals.renumber_bitset(live_here))
        .collect();

    let storage_conflicts = compute_storage_conflicts(
        body,
        &saved_locals,
        always_live_locals.clone(),
        &mut requires_storage.analysis,
        &requires_storage.results,
    );

    LivenessInfo {
        saved_locals,
        live_locals_at_suspension_points,
        source_info_at_suspension_points,
        storage_conflicts,
        storage_liveness: storage_liveness_map,
    }
}

/// The set of `Local`s that must be saved across yield points.
///
/// `CoroutineSavedLocal` is indexed in terms of the elements in this set;
/// i.e. `CoroutineSavedLocal::new(1)` corresponds to the second local
/// included in this set.
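///
/// For example (illustrative), if the saved locals are `{_2, _5, _7}`, then
/// `CoroutineSavedLocal::new(0)` refers to `_2`, `new(1)` to `_5`, and `new(2)` to `_7`.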
struct CoroutineSavedLocals(DenseBitSet<Local>);

impl CoroutineSavedLocals {
    /// Returns an iterator over each `CoroutineSavedLocal` along with the `Local` it corresponds
    /// to.
    fn iter_enumerated(&self) -> impl '_ + Iterator<Item = (CoroutineSavedLocal, Local)> {
        self.iter().enumerate().map(|(i, l)| (CoroutineSavedLocal::from(i), l))
    }

    /// Transforms a `DenseBitSet<Local>` that contains only locals saved across yield points to the
    /// equivalent `DenseBitSet<CoroutineSavedLocal>`.
    fn renumber_bitset(&self, input: &DenseBitSet<Local>) -> DenseBitSet<CoroutineSavedLocal> {
        assert!(self.superset(input), "{:?} not a superset of {:?}", self.0, input);
        let mut out = DenseBitSet::new_empty(self.count());
        for (saved_local, local) in self.iter_enumerated() {
            if input.contains(local) {
                out.insert(saved_local);
            }
        }
        out
    }

    fn get(&self, local: Local) -> Option<CoroutineSavedLocal> {
        if !self.contains(local) {
            return None;
        }

        let idx = self.iter().take_while(|&l| l < local).count();
        Some(CoroutineSavedLocal::new(idx))
    }
}

impl ops::Deref for CoroutineSavedLocals {
    type Target = DenseBitSet<Local>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}


/// For every saved local, looks for which locals are StorageLive at the same
/// time. Generates a bitset for every local of all the other locals that may be
/// StorageLive simultaneously with that local. This is used in the layout
/// computation; see `CoroutineLayout` for more.
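///
/// Illustratively (hypothetical code, not exact MIR):
///
/// ```ignore (illustrative)
/// let a = make_a();
/// yield ();       // `a` is saved and storage-live here
/// drop(a);
/// let b = make_b();
/// yield ();       // `b` is saved; the storage of `a` is already dead
/// drop(b);
/// ```
///
/// `a` and `b` are never StorageLive at the same time, so they do not conflict and the
/// layout computation may place them in overlapping slots.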
fn compute_storage_conflicts<'mir, 'tcx>(
    body: &'mir Body<'tcx>,
    saved_locals: &'mir CoroutineSavedLocals,
    always_live_locals: DenseBitSet<Local>,
    analysis: &mut MaybeRequiresStorage<'mir, 'tcx>,
    results: &Results<DenseBitSet<Local>>,
) -> BitMatrix<CoroutineSavedLocal, CoroutineSavedLocal> {
    assert_eq!(body.local_decls.len(), saved_locals.domain_size());

    debug!("compute_storage_conflicts({:?})", body.span);
    debug!("always_live = {:?}", always_live_locals);

    // Locals that are always live or ones that need to be stored across
    // suspension points are not eligible for overlap.
    let mut ineligible_locals = always_live_locals;
    ineligible_locals.intersect(&**saved_locals);

    // Compute the storage conflicts for all eligible locals.
    let mut visitor = StorageConflictVisitor {
        body,
        saved_locals,
        local_conflicts: BitMatrix::from_row_n(&ineligible_locals, body.local_decls.len()),
        eligible_storage_live: DenseBitSet::new_empty(body.local_decls.len()),
    };

    visit_reachable_results(body, analysis, results, &mut visitor);

    let local_conflicts = visitor.local_conflicts;

    // Compress the matrix using only stored locals (Local -> CoroutineSavedLocal).
    //
    // NOTE: Today we store a full conflict bitset for every local. Technically
    // this is twice as many bits as we need, since the relation is symmetric.
    // However, in practice these bitsets are not usually large. The layout code
    // also needs to keep track of how many conflicts each local has, so it's
    // simpler to keep it this way for now.
    let mut storage_conflicts = BitMatrix::new(saved_locals.count(), saved_locals.count());
    for (saved_local_a, local_a) in saved_locals.iter_enumerated() {
        if ineligible_locals.contains(local_a) {
            // Conflicts with everything.
            storage_conflicts.insert_all_into_row(saved_local_a);
        } else {
            // Keep overlap information only for stored locals.
            for (saved_local_b, local_b) in saved_locals.iter_enumerated() {
                if local_conflicts.contains(local_a, local_b) {
                    storage_conflicts.insert(saved_local_a, saved_local_b);
                }
            }
        }
    }
    storage_conflicts
}

struct StorageConflictVisitor<'a, 'tcx> {
    body: &'a Body<'tcx>,
    saved_locals: &'a CoroutineSavedLocals,
    // FIXME(tmandry): Consider using sparse bitsets here once we have good
    // benchmarks for coroutines.
    local_conflicts: BitMatrix<Local, Local>,
    // We keep this bitset as a buffer to avoid reallocating memory.
    eligible_storage_live: DenseBitSet<Local>,
}

impl<'a, 'tcx> ResultsVisitor<'tcx, MaybeRequiresStorage<'a, 'tcx>>
    for StorageConflictVisitor<'a, 'tcx>
{
    fn visit_after_early_statement_effect(
        &mut self,
        _analysis: &mut MaybeRequiresStorage<'a, 'tcx>,
        state: &DenseBitSet<Local>,
        _statement: &Statement<'tcx>,
        loc: Location,
    ) {
        self.apply_state(state, loc);
    }

    fn visit_after_early_terminator_effect(
        &mut self,
        _analysis: &mut MaybeRequiresStorage<'a, 'tcx>,
        state: &DenseBitSet<Local>,
        _terminator: &Terminator<'tcx>,
        loc: Location,
    ) {
        self.apply_state(state, loc);
    }
}

impl StorageConflictVisitor<'_, '_> {
    fn apply_state(&mut self, state: &DenseBitSet<Local>, loc: Location) {
        // Ignore unreachable blocks.
        if let TerminatorKind::Unreachable = self.body.basic_blocks[loc.block].terminator().kind {
            return;
        }

        self.eligible_storage_live.clone_from(state);
        self.eligible_storage_live.intersect(&**self.saved_locals);

        for local in self.eligible_storage_live.iter() {
            self.local_conflicts.union_row_with(&self.eligible_storage_live, local);
        }

        if self.eligible_storage_live.count() > 1 {
            trace!("at {:?}, eligible_storage_live={:?}", loc, self.eligible_storage_live);
        }
    }
}

#[tracing::instrument(level = "trace", skip(liveness, body))]
fn compute_layout<'tcx>(
    liveness: LivenessInfo,
    body: &Body<'tcx>,
) -> (
    IndexVec<Local, Option<(Ty<'tcx>, VariantIdx, FieldIdx)>>,
    CoroutineLayout<'tcx>,
    IndexVec<BasicBlock, Option<DenseBitSet<Local>>>,
) {
    let LivenessInfo {
        saved_locals,
        live_locals_at_suspension_points,
        source_info_at_suspension_points,
        storage_conflicts,
        storage_liveness,
    } = liveness;

    // Gather live local types and their indices.
    let mut locals = IndexVec::<CoroutineSavedLocal, _>::new();
    let mut tys = IndexVec::<CoroutineSavedLocal, _>::new();
    for (saved_local, local) in saved_locals.iter_enumerated() {
        debug!("coroutine saved local {:?} => {:?}", saved_local, local);

        locals.push(local);
        let decl = &body.local_decls[local];
        debug!(?decl);

        // Do not `unwrap_crate_local` here, as post-borrowck cleanup may have already cleared
        // the information. This is alright, since `ignore_for_traits` is only relevant when
        // this code runs on pre-cleanup MIR, and `ignore_for_traits = false` is the safer
        // default.
        let ignore_for_traits = match decl.local_info {
            // Do not include raw pointers created from accessing `static` items, as those could
            // well be re-created by another access to the same static.
            ClearCrossCrate::Set(box LocalInfo::StaticRef { is_thread_local, .. }) => {
                !is_thread_local
            }
            // Fake borrows are only read by fake reads, so do not have any reality in
            // post-analysis MIR.
            ClearCrossCrate::Set(box LocalInfo::FakeBorrow) => true,
            _ => false,
        };
        let decl =
            CoroutineSavedTy { ty: decl.ty, source_info: decl.source_info, ignore_for_traits };
        debug!(?decl);

        tys.push(decl);
    }

    // Leave empty variants for the UNRESUMED, RETURNED, and POISONED states.
    // In debuginfo, these will correspond to the beginning (UNRESUMED) or end
    // (RETURNED, POISONED) of the function.
    let body_span = body.source_scopes[OUTERMOST_SOURCE_SCOPE].span;
    let mut variant_source_info: IndexVec<VariantIdx, SourceInfo> = [
        SourceInfo::outermost(body_span.shrink_to_lo()),
        SourceInfo::outermost(body_span.shrink_to_hi()),
        SourceInfo::outermost(body_span.shrink_to_hi()),
    ]
    .iter()
    .copied()
    .collect();

    // Build the coroutine variant field list.
    // Create a map from local indices to coroutine struct indices.
    let mut variant_fields: IndexVec<VariantIdx, IndexVec<FieldIdx, CoroutineSavedLocal>> =
        iter::repeat(IndexVec::new()).take(CoroutineArgs::RESERVED_VARIANTS).collect();
    let mut remap = IndexVec::from_elem_n(None, saved_locals.domain_size());
    for (suspension_point_idx, live_locals) in live_locals_at_suspension_points.iter().enumerate() {
        let variant_index =
            VariantIdx::from(CoroutineArgs::RESERVED_VARIANTS + suspension_point_idx);
        let mut fields = IndexVec::new();
        for (idx, saved_local) in live_locals.iter().enumerate() {
            fields.push(saved_local);
            // Note that if a field is included in multiple variants, we will
            // just use the first one here. That's fine; fields do not move
            // around inside coroutines, so it doesn't matter which variant
            // index we access them by.
            let idx = FieldIdx::from_usize(idx);
            remap[locals[saved_local]] = Some((tys[saved_local].ty, variant_index, idx));
        }
        variant_fields.push(fields);
        variant_source_info.push(source_info_at_suspension_points[suspension_point_idx]);
    }
    debug!("coroutine variant_fields = {:?}", variant_fields);
    debug!("coroutine storage_conflicts = {:#?}", storage_conflicts);

    let mut field_names = IndexVec::from_elem(None, &tys);
    for var in &body.var_debug_info {
        let VarDebugInfoContents::Place(place) = &var.value else { continue };
        let Some(local) = place.as_local() else { continue };
        let Some(&Some((_, variant, field))) = remap.get(local) else {
            continue;
        };

        let saved_local = variant_fields[variant][field];
        field_names.get_or_insert_with(saved_local, || var.name);
    }

    let layout = CoroutineLayout {
        field_tys: tys,
        field_names,
        variant_fields,
        variant_source_info,
        storage_conflicts,
    };
    debug!(?remap);
    debug!(?layout);
    debug!(?storage_liveness);

    (remap, layout, storage_liveness)
}

/// Replaces the entry point of `body` with a block that switches on the coroutine discriminant and
/// dispatches to blocks according to `cases`.
///
/// After this function, the former entry point of the function will be bb1.
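///
/// The inserted entry block looks roughly like this (illustrative, not exact MIR syntax):
///
/// ```ignore (illustrative)
/// bb0: {
///     _tmp = discriminant(_1);
///     switchInt(move _tmp) -> [0: bb_unresumed, 1: bb_returned, ..., otherwise: bb_default];
/// }
/// ```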
fn insert_switch<'tcx>(
    body: &mut Body<'tcx>,
    cases: Vec<(usize, BasicBlock)>,
    transform: &TransformVisitor<'tcx>,
    default_block: BasicBlock,
) {
    let (assign, discr) = transform.get_discr(body);
    let switch_targets =
        SwitchTargets::new(cases.iter().map(|(i, bb)| ((*i) as u128, *bb)), default_block);
    let switch = TerminatorKind::SwitchInt { discr: Operand::Move(discr), targets: switch_targets };

    let source_info = SourceInfo::outermost(body.span);
    body.basic_blocks_mut().raw.insert(
        0,
        BasicBlockData::new_stmts(
            vec![assign],
            Some(Terminator { source_info, kind: switch }),
            false,
        ),
    );

    for b in body.basic_blocks_mut().iter_mut() {
        b.terminator_mut().successors_mut(|target| *target += 1);
    }
}

fn insert_term_block<'tcx>(body: &mut Body<'tcx>, kind: TerminatorKind<'tcx>) -> BasicBlock {
    let source_info = SourceInfo::outermost(body.span);
    body.basic_blocks_mut().push(BasicBlockData::new(Some(Terminator { source_info, kind }), false))
}

fn return_poll_ready_assign<'tcx>(tcx: TyCtxt<'tcx>, source_info: SourceInfo) -> Statement<'tcx> {
    // Poll::Ready(())
    let poll_def_id = tcx.require_lang_item(LangItem::Poll, source_info.span);
    let args = tcx.mk_args(&[tcx.types.unit.into()]);
    let val = Operand::Constant(Box::new(ConstOperand {
        span: source_info.span,
        user_ty: None,
        const_: Const::zero_sized(tcx.types.unit),
    }));
    let ready_val = Rvalue::Aggregate(
        Box::new(AggregateKind::Adt(poll_def_id, VariantIdx::from_usize(0), args, None, None)),
        indexvec![val],
    );
    Statement::new(source_info, StatementKind::Assign(Box::new((Place::return_place(), ready_val))))
}

fn insert_poll_ready_block<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) -> BasicBlock {
    let source_info = SourceInfo::outermost(body.span);
    body.basic_blocks_mut().push(BasicBlockData::new_stmts(
        [return_poll_ready_assign(tcx, source_info)].to_vec(),
        Some(Terminator { source_info, kind: TerminatorKind::Return }),
        false,
    ))
}

fn insert_panic_block<'tcx>(
    tcx: TyCtxt<'tcx>,
    body: &mut Body<'tcx>,
    message: AssertMessage<'tcx>,
) -> BasicBlock {
    let assert_block = body.basic_blocks.next_index();
    let kind = TerminatorKind::Assert {
        cond: Operand::Constant(Box::new(ConstOperand {
            span: body.span,
            user_ty: None,
            const_: Const::from_bool(tcx, false),
        })),
        expected: true,
        msg: Box::new(message),
        target: assert_block,
        unwind: UnwindAction::Continue,
    };

    insert_term_block(body, kind)
}

fn can_return<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>, typing_env: ty::TypingEnv<'tcx>) -> bool {
    // Returning from a function with an uninhabited return type is undefined behavior.
    if body.return_ty().is_privately_uninhabited(tcx, typing_env) {
        return false;
    }

    // If there's a return terminator the function may return.
    body.basic_blocks.iter().any(|block| matches!(block.terminator().kind, TerminatorKind::Return))
    // Otherwise the function can't return.
}

fn can_unwind<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>) -> bool {
    // Nothing can unwind when landing pads are off.
    if !tcx.sess.panic_strategy().unwinds() {
        return false;
    }

    // Unwinds can only start at certain terminators.
    for block in body.basic_blocks.iter() {
        match block.terminator().kind {
            // These never unwind.
            TerminatorKind::Goto { .. }
            | TerminatorKind::SwitchInt { .. }
            | TerminatorKind::UnwindTerminate(_)
            | TerminatorKind::Return
            | TerminatorKind::Unreachable
            | TerminatorKind::CoroutineDrop
            | TerminatorKind::FalseEdge { .. }
            | TerminatorKind::FalseUnwind { .. } => {}

            // Resume will *continue* unwinding, but if there's no other unwinding terminator it
            // will never be reached.
            TerminatorKind::UnwindResume => {}

            TerminatorKind::Yield { .. } => {
                unreachable!("`can_unwind` called before coroutine transform")
            }

            // These may unwind.
            TerminatorKind::Drop { .. }
            | TerminatorKind::Call { .. }
            | TerminatorKind::InlineAsm { .. }
            | TerminatorKind::Assert { .. } => return true,

            TerminatorKind::TailCall { .. } => {
                unreachable!("tail calls can't be present in generators")
            }
        }
    }

    // If we didn't find an unwinding terminator, the function cannot unwind.
    false
}

// Poison the coroutine when it unwinds
fn generate_poison_block_and_redirect_unwinds_there<'tcx>(
    transform: &TransformVisitor<'tcx>,
    body: &mut Body<'tcx>,
) {
    let source_info = SourceInfo::outermost(body.span);
    let poison_block = body.basic_blocks_mut().push(BasicBlockData::new_stmts(
        vec![transform.set_discr(VariantIdx::new(CoroutineArgs::POISONED), source_info)],
        Some(Terminator { source_info, kind: TerminatorKind::UnwindResume }),
        true,
    ));

    for (idx, block) in body.basic_blocks_mut().iter_enumerated_mut() {
        let source_info = block.terminator().source_info;

        if let TerminatorKind::UnwindResume = block.terminator().kind {
            // An existing `Resume` terminator is redirected to jump to our dedicated
            // "poisoning block" above.
            if idx != poison_block {
                *block.terminator_mut() =
                    Terminator { source_info, kind: TerminatorKind::Goto { target: poison_block } };
            }
        } else if !block.is_cleanup
            // Any terminators that *can* unwind but don't have an unwind target set are also
            // pointed at our poisoning block (unless they're part of the cleanup path).
            && let Some(unwind @ UnwindAction::Continue) = block.terminator_mut().unwind_mut()
        {
            *unwind = UnwindAction::Cleanup(poison_block);
        }
    }
}

#[tracing::instrument(level = "trace", skip(tcx, transform, body))]
fn create_coroutine_resume_function<'tcx>(
    tcx: TyCtxt<'tcx>,
    transform: TransformVisitor<'tcx>,
    body: &mut Body<'tcx>,
    can_return: bool,
    can_unwind: bool,
) {
    // Poison the coroutine when it unwinds
    if can_unwind {
        generate_poison_block_and_redirect_unwinds_there(&transform, body);
    }

    let mut cases = create_cases(body, &transform, Operation::Resume);

    use rustc_middle::mir::AssertKind::{ResumedAfterPanic, ResumedAfterReturn};

    // Jump to the entry point on the unresumed state
    cases.insert(0, (CoroutineArgs::UNRESUMED, START_BLOCK));

    // Panic when resumed on the returned or poisoned state
    if can_unwind {
        cases.insert(
            1,
            (
                CoroutineArgs::POISONED,
                insert_panic_block(tcx, body, ResumedAfterPanic(transform.coroutine_kind)),
            ),
        );
    }

1272    if can_return {
1273        let block = match transform.coroutine_kind {
1274            CoroutineKind::Desugared(CoroutineDesugaring::Async, _)
1275            | CoroutineKind::Coroutine(_) => {
1276                // For `async_drop_in_place<T>::{closure}` we just keep return Poll::Ready,
1277                // because async drop of such coroutine keeps polling original coroutine
1278                if tcx.is_async_drop_in_place_coroutine(body.source.def_id()) {
1279                    insert_poll_ready_block(tcx, body)
1280                } else {
1281                    insert_panic_block(tcx, body, ResumedAfterReturn(transform.coroutine_kind))
1282                }
1283            }
1284            CoroutineKind::Desugared(CoroutineDesugaring::AsyncGen, _)
1285            | CoroutineKind::Desugared(CoroutineDesugaring::Gen, _) => {
1286                transform.insert_none_ret_block(body)
1287            }
1288        };
1289        cases.insert(1, (CoroutineArgs::RETURNED, block));
1290    }
1291
1292    let default_block = insert_term_block(body, TerminatorKind::Unreachable);
1293    insert_switch(body, cases, &transform, default_block);
1294
1295    make_coroutine_state_argument_indirect(tcx, body);
1296
1297    match transform.coroutine_kind {
1298        CoroutineKind::Coroutine(_)
1299        | CoroutineKind::Desugared(CoroutineDesugaring::Async | CoroutineDesugaring::AsyncGen, _) =>
1300        {
1301            make_coroutine_state_argument_pinned(tcx, body);
1302        }
1303        // `Iterator::next` doesn't take a pinned argument,
1304        // unlike the resume functions for all other coroutine kinds.
1305        CoroutineKind::Desugared(CoroutineDesugaring::Gen, _) => {}
1306    }
1307
1308    // Remove dead blocks so that no unreachable code left over from the drop
1309    // part of the function remains.
1310    simplify::remove_dead_blocks(body);
1311
1312    pm::run_passes_no_validate(tcx, body, &[&abort_unwinding_calls::AbortUnwindingCalls], None);
1313
1314    if let Some(dumper) = MirDumper::new(tcx, "coroutine_resume", body) {
1315        dumper.dump_mir(body);
1316    }
1317}
1318
1319/// An operation that can be performed on a coroutine.
1320#[derive(PartialEq, Copy, Clone, Debug)]
1321enum Operation {
1322    Resume,
1323    Drop,
1324}
1325
1326impl Operation {
1327    fn target_block(self, point: &SuspensionPoint<'_>) -> Option<BasicBlock> {
1328        match self {
1329            Operation::Resume => Some(point.resume),
1330            Operation::Drop => point.drop,
1331        }
1332    }
1333}
1334
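/// For each suspension point that has a target for `operation`, this builds a small block of
/// the rough shape below (illustrative; here `_2` stands for the resume/context argument) and
/// pairs it with that point's state value:
/// ```ignore (illustrative)
/// bb_case: {
///     StorageLive(_local);          // for every local with live storage at the point
///     resume_arg_place = move _2;   // only for `Operation::Resume`, when needed
///     goto -> target;               // the point's resume or drop target
/// }
/// ```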
1335#[tracing::instrument(level = "trace", skip(transform, body))]
1336fn create_cases<'tcx>(
1337    body: &mut Body<'tcx>,
1338    transform: &TransformVisitor<'tcx>,
1339    operation: Operation,
1340) -> Vec<(usize, BasicBlock)> {
1341    let source_info = SourceInfo::outermost(body.span);
1342
1343    transform
1344        .suspension_points
1345        .iter()
1346        .filter_map(|point| {
1347            // Find the target for this suspension point, if applicable
1348            operation.target_block(point).map(|target| {
1349                let mut statements = Vec::new();
1350
1351                // Create StorageLive instructions for locals with live storage
1352                for l in body.local_decls.indices() {
1353                    let needs_storage_live = point.storage_liveness.contains(l)
1354                        && !transform.remap.contains(l)
1355                        && !transform.always_live_locals.contains(l);
1356                    if needs_storage_live {
1357                        statements.push(Statement::new(source_info, StatementKind::StorageLive(l)));
1358                    }
1359                }
1360
1361                if operation == Operation::Resume && point.resume_arg != CTX_ARG.into() {
1362                    // Move the resume argument to the destination place of the `Yield` terminator
1363                    statements.push(Statement::new(
1364                        source_info,
1365                        StatementKind::Assign(Box::new((
1366                            point.resume_arg,
1367                            Rvalue::Use(Operand::Move(CTX_ARG.into())),
1368                        ))),
1369                    ));
1370                }
1371
1372                // Then jump to the real target
1373                let block = body.basic_blocks_mut().push(BasicBlockData::new_stmts(
1374                    statements,
1375                    Some(Terminator { source_info, kind: TerminatorKind::Goto { target } }),
1376                    false,
1377                ));
1378
1379                (point.state, block)
1380            })
1381        })
1382        .collect()
1383}
1384
1385#[instrument(level = "debug", skip(tcx), ret)]
1386pub(crate) fn mir_coroutine_witnesses<'tcx>(
1387    tcx: TyCtxt<'tcx>,
1388    def_id: LocalDefId,
1389) -> Option<CoroutineLayout<'tcx>> {
1390    let (body, _) = tcx.mir_promoted(def_id);
1391    let body = body.borrow();
1392    let body = &*body;
1393
1394    // The first argument is the coroutine type passed by value
1395    let coroutine_ty = body.local_decls[ty::CAPTURE_STRUCT_LOCAL].ty;
1396
1397    let movable = match *coroutine_ty.kind() {
1398        ty::Coroutine(def_id, _) => tcx.coroutine_movability(def_id) == hir::Movability::Movable,
1399        ty::Error(_) => return None,
1400        _ => span_bug!(body.span, "unexpected coroutine type {}", coroutine_ty),
1401    };
1402
1403    // The witness simply contains all locals live across suspend points.
1404
1405    let always_live_locals = always_storage_live_locals(body);
1406    let liveness_info = locals_live_across_suspend_points(tcx, body, &always_live_locals, movable);
1407
1408    // Extract locals which are live across a suspension point into `layout`
1409    // `remap` gives a mapping from local indices onto coroutine struct indices
1410    // `storage_liveness` tells us which locals have live storage at suspension points
1411    let (_, coroutine_layout, _) = compute_layout(liveness_info, body);
1412
1413    check_suspend_tys(tcx, &coroutine_layout, body);
1414    check_field_tys_sized(tcx, &coroutine_layout, def_id);
1415
1416    Some(coroutine_layout)
1417}
1418
1419fn check_field_tys_sized<'tcx>(
1420    tcx: TyCtxt<'tcx>,
1421    coroutine_layout: &CoroutineLayout<'tcx>,
1422    def_id: LocalDefId,
1423) {
1424    // There is no need to check when `unsized_fn_params` is disabled,
1425    // since we will already have errored during typeck.
1426    if !tcx.features().unsized_fn_params() {
1427        return;
1428    }
1429
1430    // FIXME(#132279): @lcnr believes that we may want to support coroutines
1431    // whose `Sized`-ness relies on the hidden types of opaques defined by the
1432    // parent function. In this case we'd have to be able to reveal only these
1433    // opaques here.
1434    let infcx = tcx.infer_ctxt().ignoring_regions().build(TypingMode::non_body_analysis());
1435    let param_env = tcx.param_env(def_id);
1436
1437    let ocx = ObligationCtxt::new_with_diagnostics(&infcx);
1438    for field_ty in &coroutine_layout.field_tys {
1439        ocx.register_bound(
1440            ObligationCause::new(
1441                field_ty.source_info.span,
1442                def_id,
1443                ObligationCauseCode::SizedCoroutineInterior(def_id),
1444            ),
1445            param_env,
1446            field_ty.ty,
1447            tcx.require_lang_item(hir::LangItem::Sized, field_ty.source_info.span),
1448        );
1449    }
1450
1451    let errors = ocx.evaluate_obligations_error_on_ambiguity();
1452    debug!(?errors);
1453    if !errors.is_empty() {
1454        infcx.err_ctxt().report_fulfillment_errors(errors);
1455    }
1456}
1457
1458impl<'tcx> crate::MirPass<'tcx> for StateTransform {
1459    #[instrument(level = "debug", skip(self, tcx, body), ret)]
1460    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
1461        debug!(def_id = ?body.source.def_id());
1462
1463        let Some(old_yield_ty) = body.yield_ty() else {
1464            // This only applies to coroutines
1465            return;
1466        };
1467        tracing::trace!(def_id = ?body.source.def_id());
1468
1469        let old_ret_ty = body.return_ty();
1470
1471        assert!(body.coroutine_drop().is_none() && body.coroutine_drop_async().is_none());
1472
1473        if let Some(dumper) = MirDumper::new(tcx, "coroutine_before", body) {
1474            dumper.dump_mir(body);
1475        }
1476
1477        // The first argument is the coroutine type passed by value
1478        let coroutine_ty = body.local_decls.raw[1].ty;
1479        let coroutine_kind = body.coroutine_kind().unwrap();
1480
1481        // Get the discriminant type and args which typeck computed
1482        let ty::Coroutine(_, args) = coroutine_ty.kind() else {
1483            tcx.dcx().span_bug(body.span, format!("unexpected coroutine type {coroutine_ty}"));
1484        };
1485        let discr_ty = args.as_coroutine().discr_ty(tcx);
1486
1487        let new_ret_ty = match coroutine_kind {
1488            CoroutineKind::Desugared(CoroutineDesugaring::Async, _) => {
1489                // Compute Poll<return_ty>
1490                let poll_did = tcx.require_lang_item(LangItem::Poll, body.span);
1491                let poll_adt_ref = tcx.adt_def(poll_did);
1492                let poll_args = tcx.mk_args(&[old_ret_ty.into()]);
1493                Ty::new_adt(tcx, poll_adt_ref, poll_args)
1494            }
1495            CoroutineKind::Desugared(CoroutineDesugaring::Gen, _) => {
1496                // Compute Option<yield_ty>
1497                let option_did = tcx.require_lang_item(LangItem::Option, body.span);
1498                let option_adt_ref = tcx.adt_def(option_did);
1499                let option_args = tcx.mk_args(&[old_yield_ty.into()]);
1500                Ty::new_adt(tcx, option_adt_ref, option_args)
1501            }
1502            CoroutineKind::Desugared(CoroutineDesugaring::AsyncGen, _) => {
1503                // The yield ty is already `Poll<Option<yield_ty>>`
1504                old_yield_ty
1505            }
1506            CoroutineKind::Coroutine(_) => {
1507                // Compute CoroutineState<yield_ty, return_ty>
1508                let state_did = tcx.require_lang_item(LangItem::CoroutineState, body.span);
1509                let state_adt_ref = tcx.adt_def(state_did);
1510                let state_args = tcx.mk_args(&[old_yield_ty.into(), old_ret_ty.into()]);
1511                Ty::new_adt(tcx, state_adt_ref, state_args)
1512            }
1513        };
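        // To summarize the mapping above (illustrative):
        // ```ignore (illustrative)
        // async       -> Poll<old_ret_ty>
        // gen         -> Option<old_yield_ty>
        // async gen   -> old_yield_ty          (already `Poll<Option<_>>`)
        // coroutine   -> CoroutineState<old_yield_ty, old_ret_ty>
        // ```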
1514
1515        // We need to insert a clean drop for the unresumed state and perform drop elaboration
1516        // (ultimately in `open_drop_for_tuple`) before async drop expansion.
1517        // The async drops produced by this drop elaboration will be expanded,
1518        // and the corresponding futures will be kept in the layout.
1519        let has_async_drops = matches!(
1520            coroutine_kind,
1521            CoroutineKind::Desugared(CoroutineDesugaring::Async | CoroutineDesugaring::AsyncGen, _)
1522        ) && has_expandable_async_drops(tcx, body, coroutine_ty);
1523
1524        // Replace all occurrences of `ResumeTy` with `&mut Context<'_>` within async bodies.
1525        if matches!(
1526            coroutine_kind,
1527            CoroutineKind::Desugared(CoroutineDesugaring::Async | CoroutineDesugaring::AsyncGen, _)
1528        ) {
1529            let context_mut_ref = transform_async_context(tcx, body);
1530            expand_async_drops(tcx, body, context_mut_ref, coroutine_kind, coroutine_ty);
1531
1532            if let Some(dumper) = MirDumper::new(tcx, "coroutine_async_drop_expand", body) {
1533                dumper.dump_mir(body);
1534            }
1535        } else {
1536            cleanup_async_drops(body);
1537        }
1538
1539        let always_live_locals = always_storage_live_locals(body);
1540        let movable = coroutine_kind.movability() == hir::Movability::Movable;
1541        let liveness_info =
1542            locals_live_across_suspend_points(tcx, body, &always_live_locals, movable);
1543
1544        if tcx.sess.opts.unstable_opts.validate_mir {
1545            let mut vis = EnsureCoroutineFieldAssignmentsNeverAlias {
1546                assigned_local: None,
1547                saved_locals: &liveness_info.saved_locals,
1548                storage_conflicts: &liveness_info.storage_conflicts,
1549            };
1550
1551            vis.visit_body(body);
1552        }
1553
1554        // Extract locals which are live across a suspension point into `layout`
1555        // `remap` gives a mapping from local indices onto coroutine struct indices
1556        // `storage_liveness` tells us which locals have live storage at suspension points
1557        let (remap, layout, storage_liveness) = compute_layout(liveness_info, body);
1558
1559        let can_return = can_return(tcx, body, body.typing_env(tcx));
1560
1561        // We rename `RETURN_PLACE`, which has type `mir.return_ty`, to `new_ret_local`.
1562        // `RETURN_PLACE` then becomes a fresh, unused local with type `new_ret_ty`.
1563        let new_ret_local = body.local_decls.push(LocalDecl::new(new_ret_ty, body.span));
1564        tracing::trace!(?new_ret_local);
1565
1566        // Run the transformation which converts Places from Local to coroutine struct
1567        // accesses for locals in `remap`.
1568        // It also rewrites `return x` and `yield y` to write the new coroutine state and return
1569        // `CoroutineState::Complete(x)` / `CoroutineState::Yielded(y)`,
1570        // or `Poll::Ready(x)` / `Poll::Pending`, depending on the coroutine kind.
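        // For example (an illustrative sketch; the actual rewrite happens on MIR, not on
        // surface Rust):
        // ```ignore (illustrative)
        // yield y   =>   state = SUSPEND_N; return CoroutineState::Yielded(y)   // or `Poll::Pending`
        // return x  =>   state = RETURNED;  return CoroutineState::Complete(x)  // or `Poll::Ready(x)`
        // ```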
1571        let mut transform = TransformVisitor {
1572            tcx,
1573            coroutine_kind,
1574            remap,
1575            storage_liveness,
1576            always_live_locals,
1577            suspension_points: Vec::new(),
1578            discr_ty,
1579            new_ret_local,
1580            old_ret_ty,
1581            old_yield_ty,
1582        };
1583        transform.visit_body(body);
1584
1585        // Swap the actual `RETURN_PLACE` and the provisional `new_ret_local`.
1586        transform.replace_local(RETURN_PLACE, new_ret_local, body);
1587
1588        // MIR parameters are not explicitly assigned-to when entering the MIR body.
1589        // If we want to save their values inside the coroutine state, we need to do so explicitly.
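        // Roughly, for every argument local `_k` that was remapped into the coroutine layout,
        // a statement of this shape is spliced at the start of `START_BLOCK` (illustrative;
        // `coroutine_field_for` is just notation for the remapped field place):
        // ```ignore (illustrative)
        // coroutine_field_for(_k) = move _k;
        // ```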
1590        let source_info = SourceInfo::outermost(body.span);
1591        let args_iter = body.args_iter();
1592        body.basic_blocks.as_mut()[START_BLOCK].statements.splice(
1593            0..0,
1594            args_iter.filter_map(|local| {
1595                let (ty, variant_index, idx) = transform.remap[local]?;
1596                let lhs = transform.make_field(variant_index, idx, ty);
1597                let rhs = Rvalue::Use(Operand::Move(local.into()));
1598                let assign = StatementKind::Assign(Box::new((lhs, rhs)));
1599                Some(Statement::new(source_info, assign))
1600            }),
1601        );
1602
1603        // Update our MIR struct to reflect the changes we've made
1604        body.arg_count = 2; // self, resume arg
1605        body.spread_arg = None;
1606
1607        // Remove the context argument within `gen` coroutine bodies.
1608        if matches!(coroutine_kind, CoroutineKind::Desugared(CoroutineDesugaring::Gen, _)) {
1609            transform_gen_context(body);
1610        }
1611
1612        // The original arguments to the function are no longer arguments; mark them as such.
1613        // Otherwise they'll conflict with our new arguments which, although they don't have
1614        // `argument_index` set, will get emitted as unnamed arguments.
1615        for var in &mut body.var_debug_info {
1616            var.argument_index = None;
1617        }
1618
1619        body.coroutine.as_mut().unwrap().yield_ty = None;
1620        body.coroutine.as_mut().unwrap().resume_ty = None;
1621        body.coroutine.as_mut().unwrap().coroutine_layout = Some(layout);
1622
1623        // FIXME: The drops produced by insert_clean_drop + elaborate_coroutine_drops are
1624        // currently sync-only. To allow them to be async, we need to move those calls
1625        // before expand_async_drops and fix the related problems.
1626        //
1627        // Insert `drop(coroutine_struct)` which is used to drop upvars for coroutines in
1628        // the unresumed state.
1629        // This is expanded to a drop ladder in `elaborate_coroutine_drops`.
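        // Roughly, the inserted code looks like this (illustrative, not actual MIR syntax):
        // ```ignore (illustrative)
        // drop_clean: {
        //     drop(coroutine_struct) -> [return: ..., unwind: ...];
        // }
        // ```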
1630        let drop_clean = insert_clean_drop(tcx, body, has_async_drops);
1631
1632        if let Some(dumper) = MirDumper::new(tcx, "coroutine_pre-elab", body) {
1633            dumper.dump_mir(body);
1634        }
1635
1636        // Expand `drop(coroutine_struct)` to a drop ladder which destroys upvars.
1637        // If any upvars are moved out of, drop elaboration will handle upvar destruction.
1638        // However, we also need to elaborate the code generated by `insert_clean_drop`.
1639        elaborate_coroutine_drops(tcx, body);
1640
1641        if let Some(dumper) = MirDumper::new(tcx, "coroutine_post-transform", body) {
1642            dumper.dump_mir(body);
1643        }
1644
1645        let can_unwind = can_unwind(tcx, body);
1646
1647        // Create a copy of our MIR and use it to create the drop shim for the coroutine
1648        if has_async_drops {
1649            // If the coroutine has async drops, generate an async drop shim.
1650            let mut drop_shim =
1651                create_coroutine_drop_shim_async(tcx, &transform, body, drop_clean, can_unwind);
1652            // Run the derefer to fix derefs that do not appear as the first projection.
1653            deref_finder(tcx, &mut drop_shim, false);
1654            body.coroutine.as_mut().unwrap().coroutine_drop_async = Some(drop_shim);
1655        } else {
1656            // If the coroutine has no async drops, generate a sync drop shim.
1657            let mut drop_shim =
1658                create_coroutine_drop_shim(tcx, &transform, coroutine_ty, body, drop_clean);
1659            // Run the derefer to fix derefs that do not appear as the first projection.
1660            deref_finder(tcx, &mut drop_shim, false);
1661            body.coroutine.as_mut().unwrap().coroutine_drop = Some(drop_shim);
1662
1663            // For a coroutine with a sync drop, generate an async proxy for the `future_drop_poll` call.
1664            let mut proxy_shim = create_coroutine_drop_shim_proxy_async(tcx, body);
1665            deref_finder(tcx, &mut proxy_shim, false);
1666            body.coroutine.as_mut().unwrap().coroutine_drop_proxy_async = Some(proxy_shim);
1667        }
1668
1669        // Create the Coroutine::resume / Future::poll function
1670        create_coroutine_resume_function(tcx, transform, body, can_return, can_unwind);
1671
1672        // Run the derefer to fix derefs that do not appear as the first projection.
1673        deref_finder(tcx, body, false);
1674    }
1675
1676    fn is_required(&self) -> bool {
1677        true
1678    }
1679}
1680
1681/// Looks for any assignments between locals (e.g., `_4 = _5`) that will both be converted to fields
1682/// in the coroutine state machine but whose storage is not marked as conflicting.
1683///
1684/// Validation needs to happen immediately *before* `TransformVisitor` is invoked, not after.
1685///
1686/// This condition would arise when the assignment is the last use of `_5` but the initial
1687/// definition of `_4` if we weren't extra careful to mark all locals used inside a statement as
1688/// conflicting. Non-conflicting coroutine saved locals may be stored at the same location within
1689/// the coroutine state machine, which would result in ill-formed MIR: the left-hand and right-hand
1690/// sides of an assignment may not alias. This caused a miscompilation in [#73137].
1691///
1692/// [#73137]: https://github.com/rust-lang/rust/issues/73137
1693struct EnsureCoroutineFieldAssignmentsNeverAlias<'a> {
1694    saved_locals: &'a CoroutineSavedLocals,
1695    storage_conflicts: &'a BitMatrix<CoroutineSavedLocal, CoroutineSavedLocal>,
1696    assigned_local: Option<CoroutineSavedLocal>,
1697}
1698
1699impl EnsureCoroutineFieldAssignmentsNeverAlias<'_> {
1700    fn saved_local_for_direct_place(&self, place: Place<'_>) -> Option<CoroutineSavedLocal> {
1701        if place.is_indirect() {
1702            return None;
1703        }
1704
1705        self.saved_locals.get(place.local)
1706    }
1707
1708    fn check_assigned_place(&mut self, place: Place<'_>, f: impl FnOnce(&mut Self)) {
1709        if let Some(assigned_local) = self.saved_local_for_direct_place(place) {
1710            assert!(self.assigned_local.is_none(), "`check_assigned_place` must not recurse");
1711
1712            self.assigned_local = Some(assigned_local);
1713            f(self);
1714            self.assigned_local = None;
1715        }
1716    }
1717}
1718
1719impl<'tcx> Visitor<'tcx> for EnsureCoroutineFieldAssignmentsNeverAlias<'_> {
1720    fn visit_place(&mut self, place: &Place<'tcx>, context: PlaceContext, location: Location) {
1721        let Some(lhs) = self.assigned_local else {
1722            // This visitor only invokes `visit_place` for the right-hand side of an assignment
1723            // and only after setting `self.assigned_local`. However, the default impl of
1724            // `Visitor::super_body` may call `visit_place` with a `NonUseContext` for places
1725            // with debuginfo. Ignore them here.
1726            assert!(!context.is_use());
1727            return;
1728        };
1729
1730        let Some(rhs) = self.saved_local_for_direct_place(*place) else { return };
1731
1732        if !self.storage_conflicts.contains(lhs, rhs) {
1733            bug!(
1734                "Assignment between coroutine saved locals whose storage is not \
1735                    marked as conflicting: {:?}: {:?} = {:?}",
1736                location,
1737                lhs,
1738                rhs,
1739            );
1740        }
1741    }
1742
1743    fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
1744        match &statement.kind {
1745            StatementKind::Assign(box (lhs, rhs)) => {
1746                self.check_assigned_place(*lhs, |this| this.visit_rvalue(rhs, location));
1747            }
1748
1749            StatementKind::FakeRead(..)
1750            | StatementKind::SetDiscriminant { .. }
1751            | StatementKind::StorageLive(_)
1752            | StatementKind::StorageDead(_)
1753            | StatementKind::Retag(..)
1754            | StatementKind::AscribeUserType(..)
1755            | StatementKind::PlaceMention(..)
1756            | StatementKind::Coverage(..)
1757            | StatementKind::Intrinsic(..)
1758            | StatementKind::ConstEvalCounter
1759            | StatementKind::BackwardIncompatibleDropHint { .. }
1760            | StatementKind::Nop => {}
1761        }
1762    }
1763
1764    fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
1765        // Checking for aliasing in terminators is probably overkill, but until we have actual
1766        // semantics, we should be conservative here.
1767        match &terminator.kind {
1768            TerminatorKind::Call {
1769                func,
1770                args,
1771                destination,
1772                target: Some(_),
1773                unwind: _,
1774                call_source: _,
1775                fn_span: _,
1776            } => {
1777                self.check_assigned_place(*destination, |this| {
1778                    this.visit_operand(func, location);
1779                    for arg in args {
1780                        this.visit_operand(&arg.node, location);
1781                    }
1782                });
1783            }
1784
1785            TerminatorKind::Yield { value, resume: _, resume_arg, drop: _ } => {
1786                self.check_assigned_place(*resume_arg, |this| this.visit_operand(value, location));
1787            }
1788
1789            // FIXME: Does `asm!` have any aliasing requirements?
1790            TerminatorKind::InlineAsm { .. } => {}
1791
1792            TerminatorKind::Call { .. }
1793            | TerminatorKind::Goto { .. }
1794            | TerminatorKind::SwitchInt { .. }
1795            | TerminatorKind::UnwindResume
1796            | TerminatorKind::UnwindTerminate(_)
1797            | TerminatorKind::Return
1798            | TerminatorKind::TailCall { .. }
1799            | TerminatorKind::Unreachable
1800            | TerminatorKind::Drop { .. }
1801            | TerminatorKind::Assert { .. }
1802            | TerminatorKind::CoroutineDrop
1803            | TerminatorKind::FalseEdge { .. }
1804            | TerminatorKind::FalseUnwind { .. } => {}
1805        }
1806    }
1807}
1808
1809fn check_suspend_tys<'tcx>(tcx: TyCtxt<'tcx>, layout: &CoroutineLayout<'tcx>, body: &Body<'tcx>) {
1810    let mut linted_tys = FxHashSet::default();
1811
1812    for (variant, yield_source_info) in
1813        layout.variant_fields.iter().zip(&layout.variant_source_info)
1814    {
1815        debug!(?variant);
1816        for &local in variant {
1817            let decl = &layout.field_tys[local];
1818            debug!(?decl);
1819
1820            if !decl.ignore_for_traits && linted_tys.insert(decl.ty) {
1821                let Some(hir_id) = decl.source_info.scope.lint_root(&body.source_scopes) else {
1822                    continue;
1823                };
1824
1825                check_must_not_suspend_ty(
1826                    tcx,
1827                    decl.ty,
1828                    hir_id,
1829                    SuspendCheckData {
1830                        source_span: decl.source_info.span,
1831                        yield_span: yield_source_info.span,
1832                        plural_len: 1,
1833                        ..Default::default()
1834                    },
1835                );
1836            }
1837        }
1838    }
1839}
1840
1841#[derive(Default)]
1842struct SuspendCheckData<'a> {
1843    source_span: Span,
1844    yield_span: Span,
1845    descr_pre: &'a str,
1846    descr_post: &'a str,
1847    plural_len: usize,
1848}
1849
1850// Returns whether it emitted a diagnostic or not.
1851// Note that this fn and the following one are based on the code
1852// for creating `must_use` diagnostics.
1853//
1854// Note that this technique was chosen over things like a `Suspend` marker trait
1855// as it is simpler and has precedent in the compiler.
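// For instance (an illustrative sketch), a type annotated like this triggers the lint whenever
// a value of it is held across a suspension point:
// ```ignore (illustrative)
// #[must_not_suspend = "holding the guard across a suspend point may deadlock"]
// struct Guard;
// ```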
1856fn check_must_not_suspend_ty<'tcx>(
1857    tcx: TyCtxt<'tcx>,
1858    ty: Ty<'tcx>,
1859    hir_id: hir::HirId,
1860    data: SuspendCheckData<'_>,
1861) -> bool {
1862    if ty.is_unit() {
1863        return false;
1864    }
1865
1866    let plural_suffix = pluralize!(data.plural_len);
1867
1868    debug!("Checking must_not_suspend for {}", ty);
1869
1870    match *ty.kind() {
1871        ty::Adt(_, args) if ty.is_box() => {
1872            let boxed_ty = args.type_at(0);
1873            let allocator_ty = args.type_at(1);
1874            check_must_not_suspend_ty(
1875                tcx,
1876                boxed_ty,
1877                hir_id,
1878                SuspendCheckData { descr_pre: &format!("{}boxed ", data.descr_pre), ..data },
1879            ) || check_must_not_suspend_ty(
1880                tcx,
1881                allocator_ty,
1882                hir_id,
1883                SuspendCheckData { descr_pre: &format!("{}allocator ", data.descr_pre), ..data },
1884            )
1885        }
1886        ty::Adt(def, _) => check_must_not_suspend_def(tcx, def.did(), hir_id, data),
1887        // FIXME: support adding the attribute to TAITs
1888        ty::Alias(ty::Opaque, ty::AliasTy { def_id: def, .. }) => {
1889            let mut has_emitted = false;
1890            for &(predicate, _) in tcx.explicit_item_bounds(def).skip_binder() {
1891                // We only look at the `DefId`, so it is safe to skip the binder here.
1892                if let ty::ClauseKind::Trait(ref poly_trait_predicate) =
1893                    predicate.kind().skip_binder()
1894                {
1895                    let def_id = poly_trait_predicate.trait_ref.def_id;
1896                    let descr_pre = &format!("{}implementer{} of ", data.descr_pre, plural_suffix);
1897                    if check_must_not_suspend_def(
1898                        tcx,
1899                        def_id,
1900                        hir_id,
1901                        SuspendCheckData { descr_pre, ..data },
1902                    ) {
1903                        has_emitted = true;
1904                        break;
1905                    }
1906                }
1907            }
1908            has_emitted
1909        }
1910        ty::Dynamic(binder, _) => {
1911            let mut has_emitted = false;
1912            for predicate in binder.iter() {
1913                if let ty::ExistentialPredicate::Trait(ref trait_ref) = predicate.skip_binder() {
1914                    let def_id = trait_ref.def_id;
1915                    let descr_post = &format!(" trait object{}{}", plural_suffix, data.descr_post);
1916                    if check_must_not_suspend_def(
1917                        tcx,
1918                        def_id,
1919                        hir_id,
1920                        SuspendCheckData { descr_post, ..data },
1921                    ) {
1922                        has_emitted = true;
1923                        break;
1924                    }
1925                }
1926            }
1927            has_emitted
1928        }
1929        ty::Tuple(fields) => {
1930            let mut has_emitted = false;
1931            for (i, ty) in fields.iter().enumerate() {
1932                let descr_post = &format!(" in tuple element {i}");
1933                if check_must_not_suspend_ty(
1934                    tcx,
1935                    ty,
1936                    hir_id,
1937                    SuspendCheckData { descr_post, ..data },
1938                ) {
1939                    has_emitted = true;
1940                }
1941            }
1942            has_emitted
1943        }
1944        ty::Array(ty, len) => {
1945            let descr_pre = &format!("{}array{} of ", data.descr_pre, plural_suffix);
1946            check_must_not_suspend_ty(
1947                tcx,
1948                ty,
1949                hir_id,
1950                SuspendCheckData {
1951                    descr_pre,
1952                    // FIXME(must_not_suspend): This is wrong. We should handle printing unevaluated consts.
1953                    plural_len: len.try_to_target_usize(tcx).unwrap_or(0) as usize + 1,
1954                    ..data
1955                },
1956            )
1957        }
1958        // If drop tracking is enabled, we want to look through references, since the referent
1959        // may not be considered live across the await point.
1960        ty::Ref(_region, ty, _mutability) => {
1961            let descr_pre = &format!("{}reference{} to ", data.descr_pre, plural_suffix);
1962            check_must_not_suspend_ty(tcx, ty, hir_id, SuspendCheckData { descr_pre, ..data })
1963        }
1964        _ => false,
1965    }
1966}
1967
1968fn check_must_not_suspend_def(
1969    tcx: TyCtxt<'_>,
1970    def_id: DefId,
1971    hir_id: hir::HirId,
1972    data: SuspendCheckData<'_>,
1973) -> bool {
1974    if let Some(attr) = tcx.get_attr(def_id, sym::must_not_suspend) {
1975        let reason = attr.value_str().map(|s| errors::MustNotSuspendReason {
1976            span: data.source_span,
1977            reason: s.as_str().to_string(),
1978        });
1979        tcx.emit_node_span_lint(
1980            rustc_session::lint::builtin::MUST_NOT_SUSPEND,
1981            hir_id,
1982            data.source_span,
1983            errors::MustNotSupend {
1984                tcx,
1985                yield_sp: data.yield_span,
1986                reason,
1987                src_sp: data.source_span,
1988                pre: data.descr_pre,
1989                def_id,
1990                post: data.descr_post,
1991            },
1992        );
1993
1994        true
1995    } else {
1996        false
1997    }
1998}