//! This is the implementation of the pass which transforms coroutines into state machines.
//!
//! MIR generation for coroutines creates a function which has a self argument that is
//! passed by value. This argument is effectively a coroutine type which only contains upvars
//! and is only used as the type of this argument inside the MIR for the coroutine.
//! It is passed by value to enable upvars to be moved out of it. Drop elaboration runs on that
//! MIR before this pass and creates drop flags for MIR locals.
//! It will also drop the coroutine argument (which only consists of upvars) if any of the upvars
//! are moved out of. This pass elaborates the drops of upvars / coroutine argument in the case
//! that none of the upvars were moved out of. This is because we cannot have any drops of this
//! coroutine in the MIR, since it is used to create the drop glue for the coroutine. We'd get
//! infinite recursion otherwise.
//!
//! This pass creates the implementation for either the `Coroutine::resume` or `Future::poll`
//! function and the drop shim for the coroutine based on the MIR input.
//! It converts the coroutine argument from Self to &mut Self, adding derefs in the MIR as needed.
//! It computes the final layout of the coroutine struct, which looks like this:
//!     First the upvars are stored.
//!     They are followed by the coroutine state field.
//!     Then finally the MIR locals which are live across a suspension point are stored.
//!     ```ignore (illustrative)
//!     struct Coroutine {
//!         upvars...,
//!         state: u32,
//!         mir_locals...,
//!     }
//!     ```
//! This pass computes the meaning of the state field and the MIR locals which are live
//! across a suspension point. There are, however, three hardcoded coroutine states:
//!     0 - Coroutine has not been resumed yet
//!     1 - Coroutine has returned / is completed
//!     2 - Coroutine has been poisoned
//!
//! It also rewrites `return x` and `yield y` as setting a new coroutine state and returning
//! `CoroutineState::Complete(x)` and `CoroutineState::Yielded(y)`,
//! or `Poll::Ready(x)` and `Poll::Pending` respectively.
//! MIR locals which are live across a suspension point are moved to the coroutine struct
//! with references to them being updated with references to the coroutine struct.
//!
//! The pass creates two functions which have a switch on the coroutine state giving
//! the action to take.
//!
//! One of them is the implementation of `Coroutine::resume` / `Future::poll`.
//! For coroutines in state 0 (unresumed) it starts the execution of the coroutine.
//! For coroutines in state 1 (returned) and state 2 (poisoned) it panics.
//! Otherwise it continues the execution from the last suspension point.
//!
//! The other function is the drop glue for the coroutine.
//! For coroutines in state 0 (unresumed) it drops the upvars of the coroutine.
//! For coroutines in state 1 (returned) and state 2 (poisoned) it does nothing.
//! Otherwise it drops all the values in scope at the last suspension point.
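//!
//! As a rough, illustrative sketch (not the exact MIR this pass emits), the generated
//! resume/poll function is shaped like this:
//!     ```ignore (illustrative)
//!     // Pseudocode; state numbers and names are illustrative.
//!     fn resume(self: Pin<&mut Coroutine>, ...) -> ... {
//!         match self.state {
//!             0 => { /* start executing the coroutine body */ }
//!             1 => panic!("coroutine resumed after completion"),
//!             2 => panic!("coroutine resumed after panicking"),
//!             n => { /* re-create storage and jump to the resume block for state `n` */ }
//!         }
//!     }
//!     ```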

mod by_move_body;
mod drop;
use std::{iter, ops};

pub(super) use by_move_body::coroutine_by_move_body_def_id;
use drop::{
    cleanup_async_drops, create_coroutine_drop_shim, create_coroutine_drop_shim_async,
    create_coroutine_drop_shim_proxy_async, elaborate_coroutine_drops, expand_async_drops,
    has_expandable_async_drops, insert_clean_drop,
};
use rustc_abi::{FieldIdx, VariantIdx};
use rustc_data_structures::fx::FxHashSet;
use rustc_errors::pluralize;
use rustc_hir as hir;
use rustc_hir::lang_items::LangItem;
use rustc_hir::{CoroutineDesugaring, CoroutineKind};
use rustc_index::bit_set::{BitMatrix, DenseBitSet, GrowableBitSet};
use rustc_index::{Idx, IndexVec};
use rustc_middle::mir::visit::{MutVisitor, PlaceContext, Visitor};
use rustc_middle::mir::*;
use rustc_middle::ty::util::Discr;
use rustc_middle::ty::{
    self, CoroutineArgs, CoroutineArgsExt, GenericArgsRef, InstanceKind, Ty, TyCtxt, TypingMode,
};
use rustc_middle::{bug, span_bug};
use rustc_mir_dataflow::impls::{
    MaybeBorrowedLocals, MaybeLiveLocals, MaybeRequiresStorage, MaybeStorageLive,
    always_storage_live_locals,
};
use rustc_mir_dataflow::{
    Analysis, Results, ResultsCursor, ResultsVisitor, visit_reachable_results,
};
use rustc_span::def_id::{DefId, LocalDefId};
use rustc_span::source_map::dummy_spanned;
use rustc_span::symbol::sym;
use rustc_span::{DUMMY_SP, Span};
use rustc_trait_selection::error_reporting::InferCtxtErrorExt;
use rustc_trait_selection::infer::TyCtxtInferExt as _;
use rustc_trait_selection::traits::{ObligationCause, ObligationCauseCode, ObligationCtxt};
use tracing::{debug, instrument, trace};

use crate::deref_separator::deref_finder;
use crate::{abort_unwinding_calls, errors, pass_manager as pm, simplify};

pub(super) struct StateTransform;

struct RenameLocalVisitor<'tcx> {
    from: Local,
    to: Local,
    tcx: TyCtxt<'tcx>,
}

impl<'tcx> MutVisitor<'tcx> for RenameLocalVisitor<'tcx> {
    fn tcx(&self) -> TyCtxt<'tcx> {
        self.tcx
    }

    fn visit_local(&mut self, local: &mut Local, _: PlaceContext, _: Location) {
        if *local == self.from {
            *local = self.to;
        }
    }

    fn visit_terminator(&mut self, terminator: &mut Terminator<'tcx>, location: Location) {
        match terminator.kind {
            TerminatorKind::Return => {
                // Do not replace the implicit `_0` access here, as that's not possible. The
                // transform already handles `return` correctly.
            }
            _ => self.super_terminator(terminator, location),
        }
    }
}

struct SelfArgVisitor<'tcx> {
    tcx: TyCtxt<'tcx>,
    new_base: Place<'tcx>,
}

impl<'tcx> SelfArgVisitor<'tcx> {
    fn new(tcx: TyCtxt<'tcx>, elem: ProjectionElem<Local, Ty<'tcx>>) -> Self {
        Self { tcx, new_base: Place { local: SELF_ARG, projection: tcx.mk_place_elems(&[elem]) } }
    }
}

impl<'tcx> MutVisitor<'tcx> for SelfArgVisitor<'tcx> {
    fn tcx(&self) -> TyCtxt<'tcx> {
        self.tcx
    }

    fn visit_local(&mut self, local: &mut Local, _: PlaceContext, _: Location) {
        assert_ne!(*local, SELF_ARG);
    }

    fn visit_place(&mut self, place: &mut Place<'tcx>, context: PlaceContext, location: Location) {
        if place.local == SELF_ARG {
            replace_base(place, self.new_base, self.tcx);
        } else {
            self.visit_local(&mut place.local, context, location);

            for elem in place.projection.iter() {
                if let PlaceElem::Index(local) = elem {
                    assert_ne!(local, SELF_ARG);
                }
            }
        }
    }
}

fn replace_base<'tcx>(place: &mut Place<'tcx>, new_base: Place<'tcx>, tcx: TyCtxt<'tcx>) {
    place.local = new_base.local;

    let mut new_projection = new_base.projection.to_vec();
    new_projection.append(&mut place.projection.to_vec());

    place.projection = tcx.mk_place_elems(&new_projection);
}

const SELF_ARG: Local = Local::from_u32(1);
const CTX_ARG: Local = Local::from_u32(2);

/// A `yield` point in the coroutine.
struct SuspensionPoint<'tcx> {
    /// State discriminant used when suspending or resuming at this point.
    state: usize,
    /// The block to jump to after resumption.
    resume: BasicBlock,
    /// Where to move the resume argument after resumption.
    resume_arg: Place<'tcx>,
    /// Which block to jump to if the coroutine is dropped in this state.
    drop: Option<BasicBlock>,
    /// Set of locals that have live storage while at this suspension point.
    storage_liveness: GrowableBitSet<Local>,
}

struct TransformVisitor<'tcx> {
    tcx: TyCtxt<'tcx>,
    coroutine_kind: hir::CoroutineKind,

    // The type of the discriminant in the coroutine struct
    discr_ty: Ty<'tcx>,

    // Mapping from Local to (type of local, variant index, field index) in the coroutine layout
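    // For example (illustrative): `remap[_4] == Some((ty, variant, field))` means that every
    // use of `_4` is rewritten to the coroutine field `(self as variant).field` of type `ty`.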
    remap: IndexVec<Local, Option<(Ty<'tcx>, VariantIdx, FieldIdx)>>,

    // A map from a suspension point in a block to the locals which have live storage at that point
    storage_liveness: IndexVec<BasicBlock, Option<DenseBitSet<Local>>>,

    // A list of suspension points, generated during the transform
    suspension_points: Vec<SuspensionPoint<'tcx>>,

    // The set of locals that have no `StorageLive`/`StorageDead` annotations.
    always_live_locals: DenseBitSet<Local>,

    // The original RETURN_PLACE local
    old_ret_local: Local,

    old_yield_ty: Ty<'tcx>,

    old_ret_ty: Ty<'tcx>,
}

impl<'tcx> TransformVisitor<'tcx> {
    fn insert_none_ret_block(&self, body: &mut Body<'tcx>) -> BasicBlock {
        let block = body.basic_blocks.next_index();
        let source_info = SourceInfo::outermost(body.span);

        let none_value = match self.coroutine_kind {
            CoroutineKind::Desugared(CoroutineDesugaring::Async, _) => {
                span_bug!(body.span, "`Future`s are not fused inherently")
            }
            CoroutineKind::Coroutine(_) => span_bug!(body.span, "`Coroutine`s cannot be fused"),
            // `gen` continues to return `None`
            CoroutineKind::Desugared(CoroutineDesugaring::Gen, _) => {
                let option_def_id = self.tcx.require_lang_item(LangItem::Option, body.span);
                make_aggregate_adt(
                    option_def_id,
                    VariantIdx::ZERO,
                    self.tcx.mk_args(&[self.old_yield_ty.into()]),
                    IndexVec::new(),
                )
            }
            // `async gen` continues to return `Poll::Ready(None)`
            CoroutineKind::Desugared(CoroutineDesugaring::AsyncGen, _) => {
                let ty::Adt(_poll_adt, args) = *self.old_yield_ty.kind() else { bug!() };
                let ty::Adt(_option_adt, args) = *args.type_at(0).kind() else { bug!() };
                let yield_ty = args.type_at(0);
                Rvalue::Use(Operand::Constant(Box::new(ConstOperand {
                    span: source_info.span,
                    const_: Const::Unevaluated(
                        UnevaluatedConst::new(
                            self.tcx.require_lang_item(LangItem::AsyncGenFinished, body.span),
                            self.tcx.mk_args(&[yield_ty.into()]),
                        ),
                        self.old_yield_ty,
                    ),
                    user_ty: None,
                })))
            }
        };

        let statements = vec![Statement::new(
            source_info,
            StatementKind::Assign(Box::new((Place::return_place(), none_value))),
        )];

        body.basic_blocks_mut().push(BasicBlockData::new_stmts(
            statements,
            Some(Terminator { source_info, kind: TerminatorKind::Return }),
            false,
        ));

        block
    }

    // Make a `CoroutineState` or `Poll` variant assignment.
    //
    // `core::ops::CoroutineState` only has single element tuple variants,
    // so we can just write to the downcasted first field and then set the
    // discriminant to the appropriate variant.
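    //
    // Concretely (following the match below): for a plain coroutine, `yield val` becomes
    // `_0 = CoroutineState::Yielded(val)` and `return val` becomes
    // `_0 = CoroutineState::Complete(val)`; `async` produces `Poll::Ready(val)` / `Poll::Pending`,
    // and `gen` produces `Some(val)` / `None`, all built here as ADT aggregates.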
    fn make_state(
        &self,
        val: Operand<'tcx>,
        source_info: SourceInfo,
        is_return: bool,
        statements: &mut Vec<Statement<'tcx>>,
    ) {
        const ZERO: VariantIdx = VariantIdx::ZERO;
        const ONE: VariantIdx = VariantIdx::from_usize(1);
        let rvalue = match self.coroutine_kind {
            CoroutineKind::Desugared(CoroutineDesugaring::Async, _) => {
                let poll_def_id = self.tcx.require_lang_item(LangItem::Poll, source_info.span);
                let args = self.tcx.mk_args(&[self.old_ret_ty.into()]);
                let (variant_idx, operands) = if is_return {
                    (ZERO, IndexVec::from_raw(vec![val])) // Poll::Ready(val)
                } else {
                    (ONE, IndexVec::new()) // Poll::Pending
                };
                make_aggregate_adt(poll_def_id, variant_idx, args, operands)
            }
            CoroutineKind::Desugared(CoroutineDesugaring::Gen, _) => {
                let option_def_id = self.tcx.require_lang_item(LangItem::Option, source_info.span);
                let args = self.tcx.mk_args(&[self.old_yield_ty.into()]);
                let (variant_idx, operands) = if is_return {
                    (ZERO, IndexVec::new()) // None
                } else {
                    (ONE, IndexVec::from_raw(vec![val])) // Some(val)
                };
                make_aggregate_adt(option_def_id, variant_idx, args, operands)
            }
            CoroutineKind::Desugared(CoroutineDesugaring::AsyncGen, _) => {
                if is_return {
                    let ty::Adt(_poll_adt, args) = *self.old_yield_ty.kind() else { bug!() };
                    let ty::Adt(_option_adt, args) = *args.type_at(0).kind() else { bug!() };
                    let yield_ty = args.type_at(0);
                    Rvalue::Use(Operand::Constant(Box::new(ConstOperand {
                        span: source_info.span,
                        const_: Const::Unevaluated(
                            UnevaluatedConst::new(
                                self.tcx.require_lang_item(
                                    LangItem::AsyncGenFinished,
                                    source_info.span,
                                ),
                                self.tcx.mk_args(&[yield_ty.into()]),
                            ),
                            self.old_yield_ty,
                        ),
                        user_ty: None,
                    })))
                } else {
                    Rvalue::Use(val)
                }
            }
            CoroutineKind::Coroutine(_) => {
                let coroutine_state_def_id =
                    self.tcx.require_lang_item(LangItem::CoroutineState, source_info.span);
                let args = self.tcx.mk_args(&[self.old_yield_ty.into(), self.old_ret_ty.into()]);
                let variant_idx = if is_return {
                    ONE // CoroutineState::Complete(val)
                } else {
                    ZERO // CoroutineState::Yielded(val)
                };
                make_aggregate_adt(
                    coroutine_state_def_id,
                    variant_idx,
                    args,
                    IndexVec::from_raw(vec![val]),
                )
            }
        };

        statements.push(Statement::new(
            source_info,
            StatementKind::Assign(Box::new((Place::return_place(), rvalue))),
        ));
    }

    // Create a Place referencing a coroutine struct field
    fn make_field(&self, variant_index: VariantIdx, idx: FieldIdx, ty: Ty<'tcx>) -> Place<'tcx> {
        let self_place = Place::from(SELF_ARG);
        let base = self.tcx.mk_place_downcast_unnamed(self_place, variant_index);
        let mut projection = base.projection.to_vec();
        projection.push(ProjectionElem::Field(idx, ty));

        Place { local: base.local, projection: self.tcx.mk_place_elems(&projection) }
    }

    // Create a statement which changes the discriminant
    fn set_discr(&self, state_disc: VariantIdx, source_info: SourceInfo) -> Statement<'tcx> {
        let self_place = Place::from(SELF_ARG);
        Statement::new(
            source_info,
            StatementKind::SetDiscriminant {
                place: Box::new(self_place),
                variant_index: state_disc,
            },
        )
    }

    // Create a statement which reads the discriminant into a temporary
    fn get_discr(&self, body: &mut Body<'tcx>) -> (Statement<'tcx>, Place<'tcx>) {
        let temp_decl = LocalDecl::new(self.discr_ty, body.span);
        let local_decls_len = body.local_decls.push(temp_decl);
        let temp = Place::from(local_decls_len);

        let self_place = Place::from(SELF_ARG);
        let assign = Statement::new(
            SourceInfo::outermost(body.span),
            StatementKind::Assign(Box::new((temp, Rvalue::Discriminant(self_place)))),
        );
        (assign, temp)
    }
}

impl<'tcx> MutVisitor<'tcx> for TransformVisitor<'tcx> {
    fn tcx(&self) -> TyCtxt<'tcx> {
        self.tcx
    }

    fn visit_local(&mut self, local: &mut Local, _: PlaceContext, _: Location) {
        assert!(!self.remap.contains(*local));
    }

    fn visit_place(
        &mut self,
        place: &mut Place<'tcx>,
        _context: PlaceContext,
        _location: Location,
    ) {
        // Replace a `Local` in the remap with a coroutine struct access
        if let Some(&Some((ty, variant_index, idx))) = self.remap.get(place.local) {
            replace_base(place, self.make_field(variant_index, idx, ty), self.tcx);
        }
    }

    fn visit_basic_block_data(&mut self, block: BasicBlock, data: &mut BasicBlockData<'tcx>) {
        // Remove StorageLive and StorageDead statements for remapped locals
        for s in &mut data.statements {
            if let StatementKind::StorageLive(l) | StatementKind::StorageDead(l) = s.kind
                && self.remap.contains(l)
            {
                s.make_nop();
            }
        }

        let ret_val = match data.terminator().kind {
            TerminatorKind::Return => {
                Some((true, None, Operand::Move(Place::from(self.old_ret_local)), None))
            }
            TerminatorKind::Yield { ref value, resume, resume_arg, drop } => {
                Some((false, Some((resume, resume_arg)), value.clone(), drop))
            }
            _ => None,
        };

        if let Some((is_return, resume, v, drop)) = ret_val {
            let source_info = data.terminator().source_info;
            // We must assign the value first in case it gets declared dead below
            self.make_state(v, source_info, is_return, &mut data.statements);
            let state = if let Some((resume, mut resume_arg)) = resume {
                // Yield
                let state = CoroutineArgs::RESERVED_VARIANTS + self.suspension_points.len();

                // The resume arg target location might itself be remapped if its base local is
                // live across a yield.
                if let Some(&Some((ty, variant, idx))) = self.remap.get(resume_arg.local) {
                    replace_base(&mut resume_arg, self.make_field(variant, idx, ty), self.tcx);
                }

                let storage_liveness: GrowableBitSet<Local> =
                    self.storage_liveness[block].clone().unwrap().into();

                for i in 0..self.always_live_locals.domain_size() {
                    let l = Local::new(i);
                    let needs_storage_dead = storage_liveness.contains(l)
                        && !self.remap.contains(l)
                        && !self.always_live_locals.contains(l);
                    if needs_storage_dead {
                        data.statements
                            .push(Statement::new(source_info, StatementKind::StorageDead(l)));
                    }
                }

                self.suspension_points.push(SuspensionPoint {
                    state,
                    resume,
                    resume_arg,
                    drop,
                    storage_liveness,
                });

                VariantIdx::new(state)
            } else {
                // Return
                VariantIdx::new(CoroutineArgs::RETURNED) // state for returned
            };
            data.statements.push(self.set_discr(state, source_info));
            data.terminator_mut().kind = TerminatorKind::Return;
        }

        self.super_basic_block_data(block, data);
    }
}

fn make_aggregate_adt<'tcx>(
    def_id: DefId,
    variant_idx: VariantIdx,
    args: GenericArgsRef<'tcx>,
    operands: IndexVec<FieldIdx, Operand<'tcx>>,
) -> Rvalue<'tcx> {
    Rvalue::Aggregate(Box::new(AggregateKind::Adt(def_id, variant_idx, args, None, None)), operands)
}

fn make_coroutine_state_argument_indirect<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
    let coroutine_ty = body.local_decls.raw[1].ty;

    let ref_coroutine_ty = Ty::new_mut_ref(tcx, tcx.lifetimes.re_erased, coroutine_ty);

    // Replace the by-value coroutine argument
    body.local_decls.raw[1].ty = ref_coroutine_ty;

    // Add a deref to accesses of the coroutine state
    SelfArgVisitor::new(tcx, ProjectionElem::Deref).visit_body(body);
}

fn make_coroutine_state_argument_pinned<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
    let ref_coroutine_ty = body.local_decls.raw[1].ty;

    let pin_did = tcx.require_lang_item(LangItem::Pin, body.span);
    let pin_adt_ref = tcx.adt_def(pin_did);
    let args = tcx.mk_args(&[ref_coroutine_ty.into()]);
    let pin_ref_coroutine_ty = Ty::new_adt(tcx, pin_adt_ref, args);

    // Replace the by-ref coroutine argument
    body.local_decls.raw[1].ty = pin_ref_coroutine_ty;

    // Add the Pin field access to accesses of the coroutine state
    SelfArgVisitor::new(tcx, ProjectionElem::Field(FieldIdx::ZERO, ref_coroutine_ty))
        .visit_body(body);
}
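
// Taken together (illustrative place syntax, not verbatim MIR): a coroutine field access that
// starts out as `_1.f` becomes `(*_1).f` after `make_coroutine_state_argument_indirect`, and
// `(*(_1.0)).f` once `make_coroutine_state_argument_pinned` wraps the reference in `Pin`.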

/// Allocates a new local and replaces all references to `local` with it. Returns the new local.
///
/// `local` will be changed to a new local decl with type `ty`.
///
/// Note that the new local will be uninitialized. It is the caller's responsibility to assign some
/// valid value to it before its first use.
fn replace_local<'tcx>(
    local: Local,
    ty: Ty<'tcx>,
    body: &mut Body<'tcx>,
    tcx: TyCtxt<'tcx>,
) -> Local {
    let new_decl = LocalDecl::new(ty, body.span);
    let new_local = body.local_decls.push(new_decl);
    body.local_decls.swap(local, new_local);

    RenameLocalVisitor { from: local, to: new_local, tcx }.visit_body(body);

    new_local
}

/// Transforms the `body` of the coroutine applying the following transforms:
///
/// - Eliminates all the `get_context` calls that async lowering created.
/// - Replaces all `Local` `ResumeTy` types with `&mut Context<'_>` (`context_mut_ref`).
///
/// The `Local`s that have their types replaced are:
/// - The `resume` argument itself.
/// - The argument to `get_context`.
/// - The yielded value of a `yield`.
///
/// The `ResumeTy` hides a `&mut Context<'_>` behind an unsafe raw pointer, and the
/// `get_context` function is being used to convert that back to a `&mut Context<'_>`.
///
/// Ideally the async lowering would not use the `ResumeTy`/`get_context` indirection,
/// but rather directly use `&mut Context<'_>`, however that would currently
/// lead to higher-kinded lifetime errors.
/// See <https://github.com/rust-lang/rust/issues/105501>.
///
/// The async lowering step and the type / lifetime inference / checking are
/// still using the `ResumeTy` indirection for the time being, and that indirection
/// is removed here. After this transform, the coroutine body only knows about `&mut Context<'_>`.
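///
/// As a rough illustration (local and block names are illustrative, not verbatim MIR):
/// ```ignore (illustrative)
/// // Before: the resume argument `_2: ResumeTy` is converted via `get_context`:
/// //     _3 = get_context(move _2) -> [return: bb1];
/// // After: `_2` is retyped as `&mut Context<'_>` and the call collapses to:
/// //     _3 = move _2;
/// //     goto -> bb1;
/// ```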
fn transform_async_context<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) -> Ty<'tcx> {
    let context_mut_ref = Ty::new_task_context(tcx);

    // replace the type of the `resume` argument
    replace_resume_ty_local(tcx, body, CTX_ARG, context_mut_ref);

    let get_context_def_id = tcx.require_lang_item(LangItem::GetContext, body.span);

    for bb in body.basic_blocks.indices() {
        let bb_data = &body[bb];
        if bb_data.is_cleanup {
            continue;
        }

        match &bb_data.terminator().kind {
            TerminatorKind::Call { func, .. } => {
                let func_ty = func.ty(body, tcx);
                if let ty::FnDef(def_id, _) = *func_ty.kind()
                    && def_id == get_context_def_id
                {
                    let local = eliminate_get_context_call(&mut body[bb]);
                    replace_resume_ty_local(tcx, body, local, context_mut_ref);
                }
            }
            TerminatorKind::Yield { resume_arg, .. } => {
                replace_resume_ty_local(tcx, body, resume_arg.local, context_mut_ref);
            }
            _ => {}
        }
    }
    context_mut_ref
}

fn eliminate_get_context_call<'tcx>(bb_data: &mut BasicBlockData<'tcx>) -> Local {
    let terminator = bb_data.terminator.take().unwrap();
    let TerminatorKind::Call { args, destination, target, .. } = terminator.kind else {
        bug!();
    };
    let [arg] = *Box::try_from(args).unwrap();
    let local = arg.node.place().unwrap().local;

    let arg = Rvalue::Use(arg.node);
    let assign =
        Statement::new(terminator.source_info, StatementKind::Assign(Box::new((destination, arg))));
    bb_data.statements.push(assign);
    bb_data.terminator = Some(Terminator {
        source_info: terminator.source_info,
        kind: TerminatorKind::Goto { target: target.unwrap() },
    });
    local
}

#[cfg_attr(not(debug_assertions), allow(unused))]
fn replace_resume_ty_local<'tcx>(
    tcx: TyCtxt<'tcx>,
    body: &mut Body<'tcx>,
    local: Local,
    context_mut_ref: Ty<'tcx>,
) {
    let local_ty = std::mem::replace(&mut body.local_decls[local].ty, context_mut_ref);
    // We have to replace the `ResumeTy` that is used for type and borrow checking
    // with `&mut Context<'_>` in MIR.
    #[cfg(debug_assertions)]
    {
        if let ty::Adt(resume_ty_adt, _) = local_ty.kind() {
            let expected_adt = tcx.adt_def(tcx.require_lang_item(LangItem::ResumeTy, body.span));
            assert_eq!(*resume_ty_adt, expected_adt);
        } else {
            panic!("expected `ResumeTy`, found `{:?}`", local_ty);
        };
    }
}

/// Transforms the `body` of the coroutine applying the following transform:
///
/// - Remove the `resume` argument.
///
/// Ideally the async lowering would not add the `resume` argument.
///
/// The async lowering step and the type / lifetime inference / checking are
/// still using the `resume` argument for the time being. After this transform,
/// the coroutine body doesn't have the `resume` argument.
fn transform_gen_context<'tcx>(body: &mut Body<'tcx>) {
    // This leaves the local representing the `resume` argument in place,
    // but turns it into a regular local variable. This is cheaper than
    // adjusting all local references in the body after removing it.
    body.arg_count = 1;
}

struct LivenessInfo {
    /// Which locals are live across any suspension point.
    saved_locals: CoroutineSavedLocals,

    /// The set of saved locals live at each suspension point.
    live_locals_at_suspension_points: Vec<DenseBitSet<CoroutineSavedLocal>>,

    /// Parallel vec to the above with SourceInfo for each yield terminator.
    source_info_at_suspension_points: Vec<SourceInfo>,

    /// For every saved local, the set of other saved locals that are
    /// storage-live at the same time as this local. We cannot overlap locals in
    /// the layout which have conflicting storage.
    storage_conflicts: BitMatrix<CoroutineSavedLocal, CoroutineSavedLocal>,

    /// For every suspending block, the locals which are storage-live across
    /// that suspension point.
    storage_liveness: IndexVec<BasicBlock, Option<DenseBitSet<Local>>>,
}

/// Computes which locals have to be stored in the state-machine for the
/// given coroutine.
///
/// The basic idea is as follows:
/// - a local is live until we encounter a `StorageDead` statement. In
///   case none exist, the local is considered to be always live.
/// - a local has to be stored if it is either directly used after the
///   suspend point, or if it is live and has been previously borrowed.
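///
/// For example (illustrative; the precise result depends on the storage and borrow analyses):
/// ```ignore (illustrative)
/// let a = String::new(); // used after the yield: must be saved in the coroutine
/// let b = compute();     // never used after the yield and never borrowed: not saved
/// yield ();
/// drop(a);
/// ```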
fn locals_live_across_suspend_points<'tcx>(
    tcx: TyCtxt<'tcx>,
    body: &Body<'tcx>,
    always_live_locals: &DenseBitSet<Local>,
    movable: bool,
) -> LivenessInfo {
    // Calculate when MIR locals have live storage. This gives us an upper bound of their
    // lifetimes.
    let mut storage_live = MaybeStorageLive::new(std::borrow::Cow::Borrowed(always_live_locals))
        .iterate_to_fixpoint(tcx, body, None)
        .into_results_cursor(body);

    // Calculate the MIR locals that have been previously borrowed (even if they are still active).
    let borrowed_locals = MaybeBorrowedLocals.iterate_to_fixpoint(tcx, body, Some("coroutine"));
    let mut borrowed_locals_analysis1 = borrowed_locals.analysis;
    let mut borrowed_locals_analysis2 = borrowed_locals_analysis1.clone(); // trivial
    let borrowed_locals_cursor1 = ResultsCursor::new_borrowing(
        body,
        &mut borrowed_locals_analysis1,
        &borrowed_locals.results,
    );
    let mut borrowed_locals_cursor2 = ResultsCursor::new_borrowing(
        body,
        &mut borrowed_locals_analysis2,
        &borrowed_locals.results,
    );

    // Calculate the MIR locals that we need to keep storage around for.
    let mut requires_storage =
        MaybeRequiresStorage::new(borrowed_locals_cursor1).iterate_to_fixpoint(tcx, body, None);
    let mut requires_storage_cursor = ResultsCursor::new_borrowing(
        body,
        &mut requires_storage.analysis,
        &requires_storage.results,
    );

    // Calculate the liveness of MIR locals ignoring borrows.
    let mut liveness =
        MaybeLiveLocals.iterate_to_fixpoint(tcx, body, Some("coroutine")).into_results_cursor(body);

    let mut storage_liveness_map = IndexVec::from_elem(None, &body.basic_blocks);
    let mut live_locals_at_suspension_points = Vec::new();
    let mut source_info_at_suspension_points = Vec::new();
    let mut live_locals_at_any_suspension_point = DenseBitSet::new_empty(body.local_decls.len());

    for (block, data) in body.basic_blocks.iter_enumerated() {
        if let TerminatorKind::Yield { .. } = data.terminator().kind {
            let loc = Location { block, statement_index: data.statements.len() };

            liveness.seek_to_block_end(block);
            let mut live_locals = liveness.get().clone();

            if !movable {
                // The `liveness` variable contains the liveness of MIR locals ignoring borrows.
                // This is correct for movable coroutines since borrows cannot live across
                // suspension points. However for immovable coroutines we need to account for
                // borrows, so we conservatively assume that all borrowed locals are live until
                // we find a StorageDead statement referencing the locals.
                // To do this we just union our `liveness` result with `borrowed_locals`, which
                // contains all the locals which have been borrowed before this suspension point.
                // If a borrow is converted to a raw reference, we must also assume that it lives
                // forever. Note that the final liveness is still bounded by the storage liveness
                // of the local, which happens using the `intersect` operation below.
                borrowed_locals_cursor2.seek_before_primary_effect(loc);
                live_locals.union(borrowed_locals_cursor2.get());
            }

            // Store the storage liveness for later use so we can restore the state
            // after a suspension point
            storage_live.seek_before_primary_effect(loc);
            storage_liveness_map[block] = Some(storage_live.get().clone());

            // Locals are live at this point only if they are used across
            // suspension points (the `liveness` variable)
            // and their storage is required (the `requires_storage_cursor` variable)
            requires_storage_cursor.seek_before_primary_effect(loc);
            live_locals.intersect(requires_storage_cursor.get());

            // The coroutine argument is ignored.
            live_locals.remove(SELF_ARG);

            debug!("loc = {:?}, live_locals = {:?}", loc, live_locals);

            // Add the locals live at this suspension point to the set of locals which live across
            // any suspension points
            live_locals_at_any_suspension_point.union(&live_locals);

            live_locals_at_suspension_points.push(live_locals);
            source_info_at_suspension_points.push(data.terminator().source_info);
        }
    }

    debug!("live_locals_anywhere = {:?}", live_locals_at_any_suspension_point);
    let saved_locals = CoroutineSavedLocals(live_locals_at_any_suspension_point);

    // Renumber our liveness_map bitsets to include only the locals we are
    // saving.
    let live_locals_at_suspension_points = live_locals_at_suspension_points
        .iter()
        .map(|live_here| saved_locals.renumber_bitset(live_here))
        .collect();

    let storage_conflicts = compute_storage_conflicts(
        body,
        &saved_locals,
        always_live_locals.clone(),
        &mut requires_storage.analysis,
        &requires_storage.results,
    );

    LivenessInfo {
        saved_locals,
        live_locals_at_suspension_points,
        source_info_at_suspension_points,
        storage_conflicts,
        storage_liveness: storage_liveness_map,
    }
}

/// The set of `Local`s that must be saved across yield points.
///
/// `CoroutineSavedLocal` is indexed in terms of the elements in this set;
/// i.e. `CoroutineSavedLocal::new(1)` corresponds to the second local
/// included in this set.
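///
/// For example (with illustrative locals): if the saved set is `{_2, _4, _7}`, then
/// `CoroutineSavedLocal::new(0)` corresponds to `_2`, `1` to `_4`, and `2` to `_7`, and
/// `renumber_bitset` maps a `DenseBitSet<Local>` such as `{_4, _7}` to the
/// `DenseBitSet<CoroutineSavedLocal>` `{1, 2}`.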
struct CoroutineSavedLocals(DenseBitSet<Local>);

impl CoroutineSavedLocals {
    /// Returns an iterator over each `CoroutineSavedLocal` along with the `Local` it corresponds
    /// to.
    fn iter_enumerated(&self) -> impl '_ + Iterator<Item = (CoroutineSavedLocal, Local)> {
        self.iter().enumerate().map(|(i, l)| (CoroutineSavedLocal::from(i), l))
    }

    /// Transforms a `DenseBitSet<Local>` that contains only locals saved across yield points to the
    /// equivalent `DenseBitSet<CoroutineSavedLocal>`.
    fn renumber_bitset(&self, input: &DenseBitSet<Local>) -> DenseBitSet<CoroutineSavedLocal> {
        assert!(self.superset(input), "{:?} not a superset of {:?}", self.0, input);
        let mut out = DenseBitSet::new_empty(self.count());
        for (saved_local, local) in self.iter_enumerated() {
            if input.contains(local) {
                out.insert(saved_local);
            }
        }
        out
    }

    fn get(&self, local: Local) -> Option<CoroutineSavedLocal> {
        if !self.contains(local) {
            return None;
        }

        let idx = self.iter().take_while(|&l| l < local).count();
        Some(CoroutineSavedLocal::new(idx))
    }
}

impl ops::Deref for CoroutineSavedLocals {
    type Target = DenseBitSet<Local>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

/// For every saved local, looks for which locals are StorageLive at the same
/// time. Generates a bitset for every local of all the other locals that may be
/// StorageLive simultaneously with that local. This is used in the layout
/// computation; see `CoroutineLayout` for more.
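///
/// For example (illustrative): if saved locals `_3` and `_5` are ever StorageLive at the same
/// program point, the matrix records a conflict between them and the layout code will not let
/// their fields overlap; if their storage ranges are disjoint, the layout may reuse the same
/// slot for both.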
fn compute_storage_conflicts<'mir, 'tcx>(
    body: &'mir Body<'tcx>,
    saved_locals: &'mir CoroutineSavedLocals,
    always_live_locals: DenseBitSet<Local>,
    analysis: &mut MaybeRequiresStorage<'mir, 'tcx>,
    results: &Results<DenseBitSet<Local>>,
) -> BitMatrix<CoroutineSavedLocal, CoroutineSavedLocal> {
    assert_eq!(body.local_decls.len(), saved_locals.domain_size());

    debug!("compute_storage_conflicts({:?})", body.span);
    debug!("always_live = {:?}", always_live_locals);

    // Locals that are always live or ones that need to be stored across
    // suspension points are not eligible for overlap.
    let mut ineligible_locals = always_live_locals;
    ineligible_locals.intersect(&**saved_locals);

    // Compute the storage conflicts for all eligible locals.
    let mut visitor = StorageConflictVisitor {
        body,
        saved_locals,
        local_conflicts: BitMatrix::from_row_n(&ineligible_locals, body.local_decls.len()),
        eligible_storage_live: DenseBitSet::new_empty(body.local_decls.len()),
    };

    visit_reachable_results(body, analysis, results, &mut visitor);

    let local_conflicts = visitor.local_conflicts;

    // Compress the matrix using only stored locals (Local -> CoroutineSavedLocal).
    //
    // NOTE: Today we store a full conflict bitset for every local. Technically
    // this is twice as many bits as we need, since the relation is symmetric.
    // However, in practice these bitsets are not usually large. The layout code
    // also needs to keep track of how many conflicts each local has, so it's
    // simpler to keep it this way for now.
    let mut storage_conflicts = BitMatrix::new(saved_locals.count(), saved_locals.count());
    for (saved_local_a, local_a) in saved_locals.iter_enumerated() {
        if ineligible_locals.contains(local_a) {
            // Conflicts with everything.
            storage_conflicts.insert_all_into_row(saved_local_a);
        } else {
            // Keep overlap information only for stored locals.
            for (saved_local_b, local_b) in saved_locals.iter_enumerated() {
                if local_conflicts.contains(local_a, local_b) {
                    storage_conflicts.insert(saved_local_a, saved_local_b);
                }
            }
        }
    }
    storage_conflicts
}

struct StorageConflictVisitor<'a, 'tcx> {
    body: &'a Body<'tcx>,
    saved_locals: &'a CoroutineSavedLocals,
    // FIXME(tmandry): Consider using sparse bitsets here once we have good
    // benchmarks for coroutines.
    local_conflicts: BitMatrix<Local, Local>,
    // We keep this bitset as a buffer to avoid reallocating memory.
    eligible_storage_live: DenseBitSet<Local>,
}

impl<'a, 'tcx> ResultsVisitor<'tcx, MaybeRequiresStorage<'a, 'tcx>>
    for StorageConflictVisitor<'a, 'tcx>
{
    fn visit_after_early_statement_effect(
        &mut self,
        _analysis: &mut MaybeRequiresStorage<'a, 'tcx>,
        state: &DenseBitSet<Local>,
        _statement: &Statement<'tcx>,
        loc: Location,
    ) {
        self.apply_state(state, loc);
    }

    fn visit_after_early_terminator_effect(
        &mut self,
        _analysis: &mut MaybeRequiresStorage<'a, 'tcx>,
        state: &DenseBitSet<Local>,
        _terminator: &Terminator<'tcx>,
        loc: Location,
    ) {
        self.apply_state(state, loc);
    }
}

impl StorageConflictVisitor<'_, '_> {
    fn apply_state(&mut self, state: &DenseBitSet<Local>, loc: Location) {
        // Ignore unreachable blocks.
        if let TerminatorKind::Unreachable = self.body.basic_blocks[loc.block].terminator().kind {
            return;
        }

        self.eligible_storage_live.clone_from(state);
        self.eligible_storage_live.intersect(&**self.saved_locals);

        for local in self.eligible_storage_live.iter() {
            self.local_conflicts.union_row_with(&self.eligible_storage_live, local);
        }

        if self.eligible_storage_live.count() > 1 {
            trace!("at {:?}, eligible_storage_live={:?}", loc, self.eligible_storage_live);
        }
    }
}

fn compute_layout<'tcx>(
    liveness: LivenessInfo,
    body: &Body<'tcx>,
) -> (
    IndexVec<Local, Option<(Ty<'tcx>, VariantIdx, FieldIdx)>>,
    CoroutineLayout<'tcx>,
    IndexVec<BasicBlock, Option<DenseBitSet<Local>>>,
) {
    let LivenessInfo {
        saved_locals,
        live_locals_at_suspension_points,
        source_info_at_suspension_points,
        storage_conflicts,
        storage_liveness,
    } = liveness;

    // Gather live local types and their indices.
    let mut locals = IndexVec::<CoroutineSavedLocal, _>::new();
    let mut tys = IndexVec::<CoroutineSavedLocal, _>::new();
    for (saved_local, local) in saved_locals.iter_enumerated() {
        debug!("coroutine saved local {:?} => {:?}", saved_local, local);

        locals.push(local);
        let decl = &body.local_decls[local];
        debug!(?decl);

        // Do not `unwrap_crate_local` here, as post-borrowck cleanup may have already cleared
        // the information. This is alright, since `ignore_for_traits` is only relevant when
        // this code runs on pre-cleanup MIR, and `ignore_for_traits = false` is the safer
        // default.
        let ignore_for_traits = match decl.local_info {
            // Do not include raw pointers created from accessing `static` items, as those could
            // well be re-created by another access to the same static.
            ClearCrossCrate::Set(box LocalInfo::StaticRef { is_thread_local, .. }) => {
                !is_thread_local
            }
            // Fake borrows are only read by fake reads, so do not have any reality in
            // post-analysis MIR.
            ClearCrossCrate::Set(box LocalInfo::FakeBorrow) => true,
            _ => false,
        };
        let decl =
            CoroutineSavedTy { ty: decl.ty, source_info: decl.source_info, ignore_for_traits };
        debug!(?decl);

        tys.push(decl);
    }

    // Leave empty variants for the UNRESUMED, RETURNED, and POISONED states.
    // In debuginfo, these will correspond to the beginning (UNRESUMED) or end
    // (RETURNED, POISONED) of the function.
    let body_span = body.source_scopes[OUTERMOST_SOURCE_SCOPE].span;
    let mut variant_source_info: IndexVec<VariantIdx, SourceInfo> = [
        SourceInfo::outermost(body_span.shrink_to_lo()),
        SourceInfo::outermost(body_span.shrink_to_hi()),
        SourceInfo::outermost(body_span.shrink_to_hi()),
    ]
    .iter()
    .copied()
    .collect();

    // Build the coroutine variant field list.
    // Create a map from local indices to coroutine struct indices.
    let mut variant_fields: IndexVec<VariantIdx, IndexVec<FieldIdx, CoroutineSavedLocal>> =
        iter::repeat(IndexVec::new()).take(CoroutineArgs::RESERVED_VARIANTS).collect();
    let mut remap = IndexVec::from_elem_n(None, saved_locals.domain_size());
    for (suspension_point_idx, live_locals) in live_locals_at_suspension_points.iter().enumerate() {
        let variant_index =
            VariantIdx::from(CoroutineArgs::RESERVED_VARIANTS + suspension_point_idx);
        let mut fields = IndexVec::new();
        for (idx, saved_local) in live_locals.iter().enumerate() {
            fields.push(saved_local);
            // Note that if a field is included in multiple variants, we will
            // just use the first one here. That's fine; fields do not move
            // around inside coroutines, so it doesn't matter which variant
            // index we access them by.
            let idx = FieldIdx::from_usize(idx);
            remap[locals[saved_local]] = Some((tys[saved_local].ty, variant_index, idx));
        }
        variant_fields.push(fields);
        variant_source_info.push(source_info_at_suspension_points[suspension_point_idx]);
    }
    debug!("coroutine variant_fields = {:?}", variant_fields);
    debug!("coroutine storage_conflicts = {:#?}", storage_conflicts);

    let mut field_names = IndexVec::from_elem(None, &tys);
    for var in &body.var_debug_info {
        let VarDebugInfoContents::Place(place) = &var.value else { continue };
        let Some(local) = place.as_local() else { continue };
        let Some(&Some((_, variant, field))) = remap.get(local) else {
            continue;
        };

        let saved_local = variant_fields[variant][field];
        field_names.get_or_insert_with(saved_local, || var.name);
    }

    let layout = CoroutineLayout {
        field_tys: tys,
        field_names,
        variant_fields,
        variant_source_info,
        storage_conflicts,
    };
    debug!(?layout);

    (remap, layout, storage_liveness)
}

/// Replaces the entry point of `body` with a block that switches on the coroutine discriminant and
/// dispatches to blocks according to `cases`.
///
/// After this function, the former entry point of the function will be bb1.
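///
/// A rough sketch of the inserted dispatcher block (the temporary, the state values, and the
/// block names are illustrative):
/// ```ignore (illustrative)
/// bb0: {
///     _tmp = discriminant(self);   // reads the coroutine state field
///     switchInt(move _tmp) -> [0: bb1, 1: bb_returned, 3: bb_suspend_0, otherwise: bb_unreachable];
/// }
/// ```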
fn insert_switch<'tcx>(
    body: &mut Body<'tcx>,
    cases: Vec<(usize, BasicBlock)>,
    transform: &TransformVisitor<'tcx>,
    default_block: BasicBlock,
) {
    let (assign, discr) = transform.get_discr(body);
    let switch_targets =
        SwitchTargets::new(cases.iter().map(|(i, bb)| ((*i) as u128, *bb)), default_block);
    let switch = TerminatorKind::SwitchInt { discr: Operand::Move(discr), targets: switch_targets };

    let source_info = SourceInfo::outermost(body.span);
    body.basic_blocks_mut().raw.insert(
        0,
        BasicBlockData::new_stmts(
            vec![assign],
            Some(Terminator { source_info, kind: switch }),
            false,
        ),
    );

    for b in body.basic_blocks_mut().iter_mut() {
        b.terminator_mut().successors_mut(|target| *target += 1);
    }
}

fn insert_term_block<'tcx>(body: &mut Body<'tcx>, kind: TerminatorKind<'tcx>) -> BasicBlock {
    let source_info = SourceInfo::outermost(body.span);
    body.basic_blocks_mut().push(BasicBlockData::new(Some(Terminator { source_info, kind }), false))
}

fn return_poll_ready_assign<'tcx>(tcx: TyCtxt<'tcx>, source_info: SourceInfo) -> Statement<'tcx> {
    // Poll::Ready(())
    let poll_def_id = tcx.require_lang_item(LangItem::Poll, source_info.span);
    let args = tcx.mk_args(&[tcx.types.unit.into()]);
    let val = Operand::Constant(Box::new(ConstOperand {
        span: source_info.span,
        user_ty: None,
        const_: Const::zero_sized(tcx.types.unit),
    }));
    let ready_val = Rvalue::Aggregate(
        Box::new(AggregateKind::Adt(poll_def_id, VariantIdx::from_usize(0), args, None, None)),
        IndexVec::from_raw(vec![val]),
    );
    Statement::new(source_info, StatementKind::Assign(Box::new((Place::return_place(), ready_val))))
}

fn insert_poll_ready_block<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) -> BasicBlock {
    let source_info = SourceInfo::outermost(body.span);
    body.basic_blocks_mut().push(BasicBlockData::new_stmts(
        [return_poll_ready_assign(tcx, source_info)].to_vec(),
        Some(Terminator { source_info, kind: TerminatorKind::Return }),
        false,
    ))
}

fn insert_panic_block<'tcx>(
    tcx: TyCtxt<'tcx>,
    body: &mut Body<'tcx>,
    message: AssertMessage<'tcx>,
) -> BasicBlock {
    let assert_block = body.basic_blocks.next_index();
    let kind = TerminatorKind::Assert {
        cond: Operand::Constant(Box::new(ConstOperand {
            span: body.span,
            user_ty: None,
            const_: Const::from_bool(tcx, false),
        })),
        expected: true,
        msg: Box::new(message),
        target: assert_block,
        unwind: UnwindAction::Continue,
    };

    insert_term_block(body, kind)
}

fn can_return<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>, typing_env: ty::TypingEnv<'tcx>) -> bool {
    // Returning from a function with an uninhabited return type is undefined behavior.
    if body.return_ty().is_privately_uninhabited(tcx, typing_env) {
        return false;
    }

    // If there's a return terminator the function may return.
    body.basic_blocks.iter().any(|block| matches!(block.terminator().kind, TerminatorKind::Return))
    // Otherwise the function can't return.
}

fn can_unwind<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>) -> bool {
    // Nothing can unwind when landing pads are off.
    if !tcx.sess.panic_strategy().unwinds() {
        return false;
    }

    // Unwinds can only start at certain terminators.
    for block in body.basic_blocks.iter() {
        match block.terminator().kind {
            // These never unwind.
            TerminatorKind::Goto { .. }
            | TerminatorKind::SwitchInt { .. }
            | TerminatorKind::UnwindTerminate(_)
            | TerminatorKind::Return
            | TerminatorKind::Unreachable
            | TerminatorKind::CoroutineDrop
            | TerminatorKind::FalseEdge { .. }
            | TerminatorKind::FalseUnwind { .. } => {}

            // Resume will *continue* unwinding, but if there's no other unwinding terminator it
            // will never be reached.
            TerminatorKind::UnwindResume => {}

            TerminatorKind::Yield { .. } => {
                unreachable!("`can_unwind` called before coroutine transform")
            }

            // These may unwind.
            TerminatorKind::Drop { .. }
            | TerminatorKind::Call { .. }
            | TerminatorKind::InlineAsm { .. }
            | TerminatorKind::Assert { .. } => return true,

            TerminatorKind::TailCall { .. } => {
                unreachable!("tail calls can't be present in coroutines")
            }
        }
    }

    // If we didn't find an unwinding terminator, the function cannot unwind.
    false
}

// Poison the coroutine when it unwinds
fn generate_poison_block_and_redirect_unwinds_there<'tcx>(
    transform: &TransformVisitor<'tcx>,
    body: &mut Body<'tcx>,
) {
    let source_info = SourceInfo::outermost(body.span);
    let poison_block = body.basic_blocks_mut().push(BasicBlockData::new_stmts(
        vec![transform.set_discr(VariantIdx::new(CoroutineArgs::POISONED), source_info)],
        Some(Terminator { source_info, kind: TerminatorKind::UnwindResume }),
        true,
    ));

    for (idx, block) in body.basic_blocks_mut().iter_enumerated_mut() {
        let source_info = block.terminator().source_info;

        if let TerminatorKind::UnwindResume = block.terminator().kind {
            // An existing `Resume` terminator is redirected to jump to our dedicated
            // "poisoning block" above.
            if idx != poison_block {
                *block.terminator_mut() =
                    Terminator { source_info, kind: TerminatorKind::Goto { target: poison_block } };
            }
        } else if !block.is_cleanup
            // Any terminators that *can* unwind but don't have an unwind target set are also
            // pointed at our poisoning block (unless they're part of the cleanup path).
            && let Some(unwind @ UnwindAction::Continue) = block.terminator_mut().unwind_mut()
        {
            *unwind = UnwindAction::Cleanup(poison_block);
        }
    }
}

fn create_coroutine_resume_function<'tcx>(
    tcx: TyCtxt<'tcx>,
    transform: TransformVisitor<'tcx>,
    body: &mut Body<'tcx>,
    can_return: bool,
    can_unwind: bool,
) {
    // Poison the coroutine when it unwinds
    if can_unwind {
        generate_poison_block_and_redirect_unwinds_there(&transform, body);
    }

    let mut cases = create_cases(body, &transform, Operation::Resume);

    use rustc_middle::mir::AssertKind::{ResumedAfterPanic, ResumedAfterReturn};

    // Jump to the entry point in the unresumed state
    cases.insert(0, (CoroutineArgs::UNRESUMED, START_BLOCK));

    // Panic when resumed on the returned or poisoned state
    if can_unwind {
        cases.insert(
            1,
            (
                CoroutineArgs::POISONED,
                insert_panic_block(tcx, body, ResumedAfterPanic(transform.coroutine_kind)),
            ),
        );
    }

    if can_return {
        let block = match transform.coroutine_kind {
            CoroutineKind::Desugared(CoroutineDesugaring::Async, _)
            | CoroutineKind::Coroutine(_) => {
                // For `async_drop_in_place<T>::{closure}` we just keep returning `Poll::Ready`,
                // because the async drop of such a coroutine keeps polling the original coroutine.
1260                if tcx.is_async_drop_in_place_coroutine(body.source.def_id()) {
1261                    insert_poll_ready_block(tcx, body)
1262                } else {
1263                    insert_panic_block(tcx, body, ResumedAfterReturn(transform.coroutine_kind))
1264                }
1265            }
1266            CoroutineKind::Desugared(CoroutineDesugaring::AsyncGen, _)
1267            | CoroutineKind::Desugared(CoroutineDesugaring::Gen, _) => {
1268                transform.insert_none_ret_block(body)
1269            }
1270        };
1271        cases.insert(1, (CoroutineArgs::RETURNED, block));
1272    }
1273
1274    let default_block = insert_term_block(body, TerminatorKind::Unreachable);
1275    insert_switch(body, cases, &transform, default_block);
1276
1277    make_coroutine_state_argument_indirect(tcx, body);
1278
1279    match transform.coroutine_kind {
1280        CoroutineKind::Coroutine(_)
1281        | CoroutineKind::Desugared(CoroutineDesugaring::Async | CoroutineDesugaring::AsyncGen, _) =>
1282        {
1283            make_coroutine_state_argument_pinned(tcx, body);
1284        }
1285        // Iterator::next doesn't accept a pinned argument,
1286        // unlike for all other coroutine kinds.
1287        CoroutineKind::Desugared(CoroutineDesugaring::Gen, _) => {}
1288    }
1289
1290    // Make sure we remove dead blocks to remove
1291    // unrelated code from the drop part of the function
1292    simplify::remove_dead_blocks(body);
1293
1294    pm::run_passes_no_validate(tcx, body, &[&abort_unwinding_calls::AbortUnwindingCalls], None);
1295
1296    if let Some(dumper) = MirDumper::new(tcx, "coroutine_resume", body) {
1297        dumper.dump_mir(body);
1298    }
1299}
1300
1301/// An operation that can be performed on a coroutine.
1302#[derive(PartialEq, Copy, Clone)]
1303enum Operation {
1304    Resume,
1305    Drop,
1306}
1307
1308impl Operation {
1309    fn target_block(self, point: &SuspensionPoint<'_>) -> Option<BasicBlock> {
1310        match self {
1311            Operation::Resume => Some(point.resume),
1312            Operation::Drop => point.drop,
1313        }
1314    }
1315}
1316
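/// For each suspension point, creates the landing block used by `operation` and collects the
/// `(state, block)` pairs that the caller switches on.
///
/// A sketch of one such landing block (illustrative; which statements are emitted depends on the
/// conditions checked in the body below):
///
/// ```ignore (illustrative)
/// bbN: {
///     StorageLive(_local);             // for each live-storage local that is neither remapped
///                                      // into the coroutine nor always live
///     resume_arg_place = move CTX_ARG; // only for `Operation::Resume`
///     goto -> target;                  // the suspension point's resume or drop block
/// }
/// ```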
1317fn create_cases<'tcx>(
1318    body: &mut Body<'tcx>,
1319    transform: &TransformVisitor<'tcx>,
1320    operation: Operation,
1321) -> Vec<(usize, BasicBlock)> {
1322    let source_info = SourceInfo::outermost(body.span);
1323
1324    transform
1325        .suspension_points
1326        .iter()
1327        .filter_map(|point| {
1328            // Find the target for this suspension point, if applicable
1329            operation.target_block(point).map(|target| {
1330                let mut statements = Vec::new();
1331
                // Create `StorageLive` statements for locals with live storage
1333                for l in body.local_decls.indices() {
1334                    let needs_storage_live = point.storage_liveness.contains(l)
1335                        && !transform.remap.contains(l)
1336                        && !transform.always_live_locals.contains(l);
1337                    if needs_storage_live {
1338                        statements.push(Statement::new(source_info, StatementKind::StorageLive(l)));
1339                    }
1340                }
1341
1342                if operation == Operation::Resume && point.resume_arg != CTX_ARG.into() {
1343                    // Move the resume argument to the destination place of the `Yield` terminator
1344                    statements.push(Statement::new(
1345                        source_info,
1346                        StatementKind::Assign(Box::new((
1347                            point.resume_arg,
1348                            Rvalue::Use(Operand::Move(CTX_ARG.into())),
1349                        ))),
1350                    ));
1351                }
1352
1353                // Then jump to the real target
1354                let block = body.basic_blocks_mut().push(BasicBlockData::new_stmts(
1355                    statements,
1356                    Some(Terminator { source_info, kind: TerminatorKind::Goto { target } }),
1357                    false,
1358                ));
1359
1360                (point.state, block)
1361            })
1362        })
1363        .collect()
1364}
1365
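/// Computes the coroutine's witness layout from the `mir_promoted` body: the types of all locals
/// that are live across a suspension point, before the state transform itself runs, along with
/// the `must_not_suspend` and `Sized` checks on those types.
///
/// For example (illustrative; `some_future` is a stand-in), `s` below is live across the `.await`
/// and therefore shows up as a field of the witness:
///
/// ```ignore (illustrative)
/// async {
///     let s = String::new();
///     some_future.await; // `s` is live across this suspension point
///     drop(s);
/// }
/// ```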
1366#[instrument(level = "debug", skip(tcx), ret)]
1367pub(crate) fn mir_coroutine_witnesses<'tcx>(
1368    tcx: TyCtxt<'tcx>,
1369    def_id: LocalDefId,
1370) -> Option<CoroutineLayout<'tcx>> {
1371    let (body, _) = tcx.mir_promoted(def_id);
1372    let body = body.borrow();
1373    let body = &*body;
1374
1375    // The first argument is the coroutine type passed by value
1376    let coroutine_ty = body.local_decls[ty::CAPTURE_STRUCT_LOCAL].ty;
1377
1378    let movable = match *coroutine_ty.kind() {
1379        ty::Coroutine(def_id, _) => tcx.coroutine_movability(def_id) == hir::Movability::Movable,
1380        ty::Error(_) => return None,
1381        _ => span_bug!(body.span, "unexpected coroutine type {}", coroutine_ty),
1382    };
1383
1384    // The witness simply contains all locals live across suspend points.
1385
1386    let always_live_locals = always_storage_live_locals(body);
1387    let liveness_info = locals_live_across_suspend_points(tcx, body, &always_live_locals, movable);
1388
    // Extract the locals which are live across a suspension point into `coroutine_layout`.
    // The `remap` and `storage_liveness` results computed alongside it are not needed here.
1392    let (_, coroutine_layout, _) = compute_layout(liveness_info, body);
1393
1394    check_suspend_tys(tcx, &coroutine_layout, body);
1395    check_field_tys_sized(tcx, &coroutine_layout, def_id);
1396
1397    Some(coroutine_layout)
1398}
1399
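/// With `unsized_fn_params` enabled, typeck no longer guarantees that every value stored in the
/// coroutine is `Sized`, so we register a `Sized` obligation for every field of the computed
/// layout and report an error if any of them fail.
///
/// A sketch of the kind of code this guards against (illustrative only; whether a given snippet
/// reaches this check depends on earlier phases):
///
/// ```ignore (illustrative)
/// #![feature(unsized_fn_params)]
/// async fn f(x: dyn std::fmt::Debug) {
///     async {}.await;     // `x` would have to live, unsized, inside the coroutine
///     println!("{x:?}");
/// }
/// ```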
1400fn check_field_tys_sized<'tcx>(
1401    tcx: TyCtxt<'tcx>,
1402    coroutine_layout: &CoroutineLayout<'tcx>,
1403    def_id: LocalDefId,
1404) {
    // If `unsized_fn_params` is disabled there is no need to check here,
    // since typeck will already have errored.
1407    if !tcx.features().unsized_fn_params() {
1408        return;
1409    }
1410
1411    // FIXME(#132279): @lcnr believes that we may want to support coroutines
1412    // whose `Sized`-ness relies on the hidden types of opaques defined by the
1413    // parent function. In this case we'd have to be able to reveal only these
1414    // opaques here.
1415    let infcx = tcx.infer_ctxt().ignoring_regions().build(TypingMode::non_body_analysis());
1416    let param_env = tcx.param_env(def_id);
1417
1418    let ocx = ObligationCtxt::new_with_diagnostics(&infcx);
1419    for field_ty in &coroutine_layout.field_tys {
1420        ocx.register_bound(
1421            ObligationCause::new(
1422                field_ty.source_info.span,
1423                def_id,
1424                ObligationCauseCode::SizedCoroutineInterior(def_id),
1425            ),
1426            param_env,
1427            field_ty.ty,
1428            tcx.require_lang_item(hir::LangItem::Sized, field_ty.source_info.span),
1429        );
1430    }
1431
1432    let errors = ocx.select_all_or_error();
1433    debug!(?errors);
1434    if !errors.is_empty() {
1435        infcx.err_ctxt().report_fulfillment_errors(errors);
1436    }
1437}
1438
1439impl<'tcx> crate::MirPass<'tcx> for StateTransform {
1440    #[instrument(level = "debug", skip(self, tcx, body), ret)]
1441    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
1442        debug!(def_id = ?body.source.def_id());
1443
1444        let Some(old_yield_ty) = body.yield_ty() else {
1445            // This only applies to coroutines
1446            return;
1447        };
1448        let old_ret_ty = body.return_ty();
1449
1450        assert!(body.coroutine_drop().is_none() && body.coroutine_drop_async().is_none());
1451
1452        if let Some(dumper) = MirDumper::new(tcx, "coroutine_before", body) {
1453            dumper.dump_mir(body);
1454        }
1455
1456        // The first argument is the coroutine type passed by value
1457        let coroutine_ty = body.local_decls.raw[1].ty;
1458        let coroutine_kind = body.coroutine_kind().unwrap();
1459
1460        // Get the discriminant type and args which typeck computed
1461        let ty::Coroutine(_, args) = coroutine_ty.kind() else {
1462            tcx.dcx().span_bug(body.span, format!("unexpected coroutine type {coroutine_ty}"));
1463        };
1464        let discr_ty = args.as_coroutine().discr_ty(tcx);
1465
1466        let new_ret_ty = match coroutine_kind {
1467            CoroutineKind::Desugared(CoroutineDesugaring::Async, _) => {
1468                // Compute Poll<return_ty>
1469                let poll_did = tcx.require_lang_item(LangItem::Poll, body.span);
1470                let poll_adt_ref = tcx.adt_def(poll_did);
1471                let poll_args = tcx.mk_args(&[old_ret_ty.into()]);
1472                Ty::new_adt(tcx, poll_adt_ref, poll_args)
1473            }
1474            CoroutineKind::Desugared(CoroutineDesugaring::Gen, _) => {
1475                // Compute Option<yield_ty>
1476                let option_did = tcx.require_lang_item(LangItem::Option, body.span);
1477                let option_adt_ref = tcx.adt_def(option_did);
1478                let option_args = tcx.mk_args(&[old_yield_ty.into()]);
1479                Ty::new_adt(tcx, option_adt_ref, option_args)
1480            }
1481            CoroutineKind::Desugared(CoroutineDesugaring::AsyncGen, _) => {
1482                // The yield ty is already `Poll<Option<yield_ty>>`
1483                old_yield_ty
1484            }
1485            CoroutineKind::Coroutine(_) => {
1486                // Compute CoroutineState<yield_ty, return_ty>
1487                let state_did = tcx.require_lang_item(LangItem::CoroutineState, body.span);
1488                let state_adt_ref = tcx.adt_def(state_did);
1489                let state_args = tcx.mk_args(&[old_yield_ty.into(), old_ret_ty.into()]);
1490                Ty::new_adt(tcx, state_adt_ref, state_args)
1491            }
1492        };
1493
        // We rename RETURN_PLACE, which has the old return type, to `old_ret_local`.
        // RETURN_PLACE then becomes a fresh, unused local with type `new_ret_ty`.
1496        let old_ret_local = replace_local(RETURN_PLACE, new_ret_ty, body, tcx);
1497
        // We need to insert a clean drop for the unresumed state and perform drop elaboration
        // (ultimately in `open_drop_for_tuple`) before async drop expansion.
        // The async drops produced by that drop elaboration will be expanded,
        // and the corresponding futures kept in the layout.
1502        let has_async_drops = matches!(
1503            coroutine_kind,
1504            CoroutineKind::Desugared(CoroutineDesugaring::Async | CoroutineDesugaring::AsyncGen, _)
1505        ) && has_expandable_async_drops(tcx, body, coroutine_ty);
1506
1507        // Replace all occurrences of `ResumeTy` with `&mut Context<'_>` within async bodies.
1508        if matches!(
1509            coroutine_kind,
1510            CoroutineKind::Desugared(CoroutineDesugaring::Async | CoroutineDesugaring::AsyncGen, _)
1511        ) {
1512            let context_mut_ref = transform_async_context(tcx, body);
1513            expand_async_drops(tcx, body, context_mut_ref, coroutine_kind, coroutine_ty);
1514
1515            if let Some(dumper) = MirDumper::new(tcx, "coroutine_async_drop_expand", body) {
1516                dumper.dump_mir(body);
1517            }
1518        } else {
1519            cleanup_async_drops(body);
1520        }
1521
1522        let always_live_locals = always_storage_live_locals(body);
1523        let movable = coroutine_kind.movability() == hir::Movability::Movable;
1524        let liveness_info =
1525            locals_live_across_suspend_points(tcx, body, &always_live_locals, movable);
1526
1527        if tcx.sess.opts.unstable_opts.validate_mir {
1528            let mut vis = EnsureCoroutineFieldAssignmentsNeverAlias {
1529                assigned_local: None,
1530                saved_locals: &liveness_info.saved_locals,
1531                storage_conflicts: &liveness_info.storage_conflicts,
1532            };
1533
1534            vis.visit_body(body);
1535        }
1536
        // Extract the locals which are live across a suspension point into `layout`.
        // `remap` gives a mapping from local indices onto coroutine struct field indices.
        // `storage_liveness` tells us which locals have live storage at suspension points.
1540        let (remap, layout, storage_liveness) = compute_layout(liveness_info, body);
1541
1542        let can_return = can_return(tcx, body, body.typing_env(tcx));
1543
        // Run the transformation which converts places based on locals in `remap`
        // into accesses of the coroutine struct.
        // It also rewrites `return x` and `yield y` to set the new coroutine state and return
        // either `CoroutineState::Complete(x)` / `CoroutineState::Yielded(y)`,
        // or `Poll::Ready(x)` / `Poll::Pending`, depending on the coroutine kind.
1549        let mut transform = TransformVisitor {
1550            tcx,
1551            coroutine_kind,
1552            remap,
1553            storage_liveness,
1554            always_live_locals,
1555            suspension_points: Vec::new(),
1556            old_ret_local,
1557            discr_ty,
1558            old_ret_ty,
1559            old_yield_ty,
1560        };
1561        transform.visit_body(body);
1562
1563        // MIR parameters are not explicitly assigned-to when entering the MIR body.
1564        // If we want to save their values inside the coroutine state, we need to do so explicitly.
1565        let source_info = SourceInfo::outermost(body.span);
1566        let args_iter = body.args_iter();
1567        body.basic_blocks.as_mut()[START_BLOCK].statements.splice(
1568            0..0,
1569            args_iter.filter_map(|local| {
1570                let (ty, variant_index, idx) = transform.remap[local]?;
1571                let lhs = transform.make_field(variant_index, idx, ty);
1572                let rhs = Rvalue::Use(Operand::Move(local.into()));
1573                let assign = StatementKind::Assign(Box::new((lhs, rhs)));
1574                Some(Statement::new(source_info, assign))
1575            }),
1576        );
1577
1578        // Update our MIR struct to reflect the changes we've made
1579        body.arg_count = 2; // self, resume arg
1580        body.spread_arg = None;
1581
        // Remove the context argument within `gen` coroutine bodies.
1583        if matches!(coroutine_kind, CoroutineKind::Desugared(CoroutineDesugaring::Gen, _)) {
1584            transform_gen_context(body);
1585        }
1586
        // The original arguments to the function are no longer arguments; mark them as such.
        // Otherwise they'd conflict with our new arguments which, although they don't have
        // `argument_index` set, will get emitted as unnamed arguments.
1590        for var in &mut body.var_debug_info {
1591            var.argument_index = None;
1592        }
1593
1594        body.coroutine.as_mut().unwrap().yield_ty = None;
1595        body.coroutine.as_mut().unwrap().resume_ty = None;
1596        body.coroutine.as_mut().unwrap().coroutine_layout = Some(layout);
1597
        // FIXME: The drops produced by `insert_clean_drop` + `elaborate_coroutine_drops`
        // are currently sync-only. To allow them to be async, we need to move those calls
        // before `expand_async_drops` and fix the related problems.
1601        //
1602        // Insert `drop(coroutine_struct)` which is used to drop upvars for coroutines in
1603        // the unresumed state.
1604        // This is expanded to a drop ladder in `elaborate_coroutine_drops`.
1605        let drop_clean = insert_clean_drop(tcx, body, has_async_drops);
1606
1607        if let Some(dumper) = MirDumper::new(tcx, "coroutine_pre-elab", body) {
1608            dumper.dump_mir(body);
1609        }
1610
        // Expand `drop(coroutine_struct)` to a drop ladder which destroys upvars.
        // If any upvars are moved out of, drop elaboration will handle upvar destruction.
        // However, we also need to elaborate the code generated by `insert_clean_drop`.
1614        elaborate_coroutine_drops(tcx, body);
1615
1616        if let Some(dumper) = MirDumper::new(tcx, "coroutine_post-transform", body) {
1617            dumper.dump_mir(body);
1618        }
1619
1620        let can_unwind = can_unwind(tcx, body);
1621
1622        // Create a copy of our MIR and use it to create the drop shim for the coroutine
1623        if has_async_drops {
            // If the coroutine has async drops, generate an async drop shim.
1625            let mut drop_shim =
1626                create_coroutine_drop_shim_async(tcx, &transform, body, drop_clean, can_unwind);
            // Run the derefer to fix `Deref` projections that are not the first projection of their place
1628            deref_finder(tcx, &mut drop_shim);
1629            body.coroutine.as_mut().unwrap().coroutine_drop_async = Some(drop_shim);
1630        } else {
            // If the coroutine has no async drops, generate a sync drop shim.
1632            let mut drop_shim =
1633                create_coroutine_drop_shim(tcx, &transform, coroutine_ty, body, drop_clean);
            // Run the derefer to fix `Deref` projections that are not the first projection of their place
1635            deref_finder(tcx, &mut drop_shim);
1636            body.coroutine.as_mut().unwrap().coroutine_drop = Some(drop_shim);
1637
            // For a coroutine with a sync drop, generate an async proxy for the `future_drop_poll` call
1639            let mut proxy_shim = create_coroutine_drop_shim_proxy_async(tcx, body);
1640            deref_finder(tcx, &mut proxy_shim);
1641            body.coroutine.as_mut().unwrap().coroutine_drop_proxy_async = Some(proxy_shim);
1642        }
1643
1644        // Create the Coroutine::resume / Future::poll function
1645        create_coroutine_resume_function(tcx, transform, body, can_return, can_unwind);
1646
        // Run the derefer to fix `Deref` projections that are not the first projection of their place
1648        deref_finder(tcx, body);
1649    }
1650
1651    fn is_required(&self) -> bool {
1652        true
1653    }
1654}
1655
1656/// Looks for any assignments between locals (e.g., `_4 = _5`) that will both be converted to fields
/// in the coroutine state machine but whose storage is not marked as conflicting.
1658///
1659/// Validation needs to happen immediately *before* `TransformVisitor` is invoked, not after.
1660///
1661/// This condition would arise when the assignment is the last use of `_5` but the initial
1662/// definition of `_4` if we weren't extra careful to mark all locals used inside a statement as
1663/// conflicting. Non-conflicting coroutine saved locals may be stored at the same location within
1664/// the coroutine state machine, which would result in ill-formed MIR: the left-hand and right-hand
1665/// sides of an assignment may not alias. This caused a miscompilation in [#73137].
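///
/// A sketch of the MIR shape this guards against (illustrative): if `_4` and `_5` are both saved
/// in the coroutine but their storage is not marked as conflicting, they may be assigned the same
/// field, turning the assignment below into an aliasing self-assignment:
///
/// ```ignore (illustrative)
/// _4 = move _5; // ill-formed if `_4` and `_5` end up sharing a coroutine field
/// ```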
1666///
1667/// [#73137]: https://github.com/rust-lang/rust/issues/73137
1668struct EnsureCoroutineFieldAssignmentsNeverAlias<'a> {
1669    saved_locals: &'a CoroutineSavedLocals,
1670    storage_conflicts: &'a BitMatrix<CoroutineSavedLocal, CoroutineSavedLocal>,
1671    assigned_local: Option<CoroutineSavedLocal>,
1672}
1673
1674impl EnsureCoroutineFieldAssignmentsNeverAlias<'_> {
1675    fn saved_local_for_direct_place(&self, place: Place<'_>) -> Option<CoroutineSavedLocal> {
1676        if place.is_indirect() {
1677            return None;
1678        }
1679
1680        self.saved_locals.get(place.local)
1681    }
1682
1683    fn check_assigned_place(&mut self, place: Place<'_>, f: impl FnOnce(&mut Self)) {
1684        if let Some(assigned_local) = self.saved_local_for_direct_place(place) {
1685            assert!(self.assigned_local.is_none(), "`check_assigned_place` must not recurse");
1686
1687            self.assigned_local = Some(assigned_local);
1688            f(self);
1689            self.assigned_local = None;
1690        }
1691    }
1692}
1693
1694impl<'tcx> Visitor<'tcx> for EnsureCoroutineFieldAssignmentsNeverAlias<'_> {
1695    fn visit_place(&mut self, place: &Place<'tcx>, context: PlaceContext, location: Location) {
1696        let Some(lhs) = self.assigned_local else {
1697            // This visitor only invokes `visit_place` for the right-hand side of an assignment
1698            // and only after setting `self.assigned_local`. However, the default impl of
1699            // `Visitor::super_body` may call `visit_place` with a `NonUseContext` for places
1700            // with debuginfo. Ignore them here.
1701            assert!(!context.is_use());
1702            return;
1703        };
1704
1705        let Some(rhs) = self.saved_local_for_direct_place(*place) else { return };
1706
1707        if !self.storage_conflicts.contains(lhs, rhs) {
1708            bug!(
1709                "Assignment between coroutine saved locals whose storage is not \
1710                    marked as conflicting: {:?}: {:?} = {:?}",
1711                location,
1712                lhs,
1713                rhs,
1714            );
1715        }
1716    }
1717
1718    fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
1719        match &statement.kind {
1720            StatementKind::Assign(box (lhs, rhs)) => {
1721                self.check_assigned_place(*lhs, |this| this.visit_rvalue(rhs, location));
1722            }
1723
1724            StatementKind::FakeRead(..)
1725            | StatementKind::SetDiscriminant { .. }
1726            | StatementKind::Deinit(..)
1727            | StatementKind::StorageLive(_)
1728            | StatementKind::StorageDead(_)
1729            | StatementKind::Retag(..)
1730            | StatementKind::AscribeUserType(..)
1731            | StatementKind::PlaceMention(..)
1732            | StatementKind::Coverage(..)
1733            | StatementKind::Intrinsic(..)
1734            | StatementKind::ConstEvalCounter
1735            | StatementKind::BackwardIncompatibleDropHint { .. }
1736            | StatementKind::Nop => {}
1737        }
1738    }
1739
1740    fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
1741        // Checking for aliasing in terminators is probably overkill, but until we have actual
1742        // semantics, we should be conservative here.
1743        match &terminator.kind {
1744            TerminatorKind::Call {
1745                func,
1746                args,
1747                destination,
1748                target: Some(_),
1749                unwind: _,
1750                call_source: _,
1751                fn_span: _,
1752            } => {
1753                self.check_assigned_place(*destination, |this| {
1754                    this.visit_operand(func, location);
1755                    for arg in args {
1756                        this.visit_operand(&arg.node, location);
1757                    }
1758                });
1759            }
1760
1761            TerminatorKind::Yield { value, resume: _, resume_arg, drop: _ } => {
1762                self.check_assigned_place(*resume_arg, |this| this.visit_operand(value, location));
1763            }
1764
1765            // FIXME: Does `asm!` have any aliasing requirements?
1766            TerminatorKind::InlineAsm { .. } => {}
1767
1768            TerminatorKind::Call { .. }
1769            | TerminatorKind::Goto { .. }
1770            | TerminatorKind::SwitchInt { .. }
1771            | TerminatorKind::UnwindResume
1772            | TerminatorKind::UnwindTerminate(_)
1773            | TerminatorKind::Return
1774            | TerminatorKind::TailCall { .. }
1775            | TerminatorKind::Unreachable
1776            | TerminatorKind::Drop { .. }
1777            | TerminatorKind::Assert { .. }
1778            | TerminatorKind::CoroutineDrop
1779            | TerminatorKind::FalseEdge { .. }
1780            | TerminatorKind::FalseUnwind { .. } => {}
1781        }
1782    }
1783}
1784
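/// Walks every field of the computed coroutine layout and fires the `must_not_suspend` lint for
/// types held across a suspension point whose definition carries `#[must_not_suspend]`.
///
/// A sketch of code this lints on (illustrative; `Guard` and `some_future` are hypothetical, and
/// the attribute requires the unstable `must_not_suspend` feature):
///
/// ```ignore (illustrative)
/// #[must_not_suspend = "the guard should be dropped before suspending"]
/// struct Guard;
///
/// async fn f() {
///     let g = Guard;
///     some_future.await; // lint: `Guard` is held across this suspension point
///     drop(g);
/// }
/// ```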
1785fn check_suspend_tys<'tcx>(tcx: TyCtxt<'tcx>, layout: &CoroutineLayout<'tcx>, body: &Body<'tcx>) {
1786    let mut linted_tys = FxHashSet::default();
1787
1788    for (variant, yield_source_info) in
1789        layout.variant_fields.iter().zip(&layout.variant_source_info)
1790    {
1791        debug!(?variant);
1792        for &local in variant {
1793            let decl = &layout.field_tys[local];
1794            debug!(?decl);
1795
1796            if !decl.ignore_for_traits && linted_tys.insert(decl.ty) {
1797                let Some(hir_id) = decl.source_info.scope.lint_root(&body.source_scopes) else {
1798                    continue;
1799                };
1800
1801                check_must_not_suspend_ty(
1802                    tcx,
1803                    decl.ty,
1804                    hir_id,
1805                    SuspendCheckData {
1806                        source_span: decl.source_info.span,
1807                        yield_span: yield_source_info.span,
1808                        plural_len: 1,
1809                        ..Default::default()
1810                    },
1811                );
1812            }
1813        }
1814    }
1815}
1816
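/// Context threaded through the `must_not_suspend` check: the source and yield spans for the
/// lint, the `descr_pre`/`descr_post` fragments spliced around the type description (e.g.
/// "boxed ", "reference to ", " in tuple element 0"), and a count used for pluralization.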
1817#[derive(Default)]
1818struct SuspendCheckData<'a> {
1819    source_span: Span,
1820    yield_span: Span,
1821    descr_pre: &'a str,
1822    descr_post: &'a str,
1823    plural_len: usize,
1824}
1825
// Returns whether it emitted a diagnostic or not.
// Note that this fn and the following one are based on the code
// for creating `must_use` diagnostics.
1829//
1830// Note that this technique was chosen over things like a `Suspend` marker trait
1831// as it is simpler and has precedent in the compiler
1832fn check_must_not_suspend_ty<'tcx>(
1833    tcx: TyCtxt<'tcx>,
1834    ty: Ty<'tcx>,
1835    hir_id: hir::HirId,
1836    data: SuspendCheckData<'_>,
1837) -> bool {
1838    if ty.is_unit() {
1839        return false;
1840    }
1841
1842    let plural_suffix = pluralize!(data.plural_len);
1843
1844    debug!("Checking must_not_suspend for {}", ty);
1845
1846    match *ty.kind() {
1847        ty::Adt(_, args) if ty.is_box() => {
1848            let boxed_ty = args.type_at(0);
1849            let allocator_ty = args.type_at(1);
1850            check_must_not_suspend_ty(
1851                tcx,
1852                boxed_ty,
1853                hir_id,
1854                SuspendCheckData { descr_pre: &format!("{}boxed ", data.descr_pre), ..data },
1855            ) || check_must_not_suspend_ty(
1856                tcx,
1857                allocator_ty,
1858                hir_id,
1859                SuspendCheckData { descr_pre: &format!("{}allocator ", data.descr_pre), ..data },
1860            )
1861        }
1862        ty::Adt(def, _) => check_must_not_suspend_def(tcx, def.did(), hir_id, data),
1863        // FIXME: support adding the attribute to TAITs
1864        ty::Alias(ty::Opaque, ty::AliasTy { def_id: def, .. }) => {
1865            let mut has_emitted = false;
1866            for &(predicate, _) in tcx.explicit_item_bounds(def).skip_binder() {
1867                // We only look at the `DefId`, so it is safe to skip the binder here.
1868                if let ty::ClauseKind::Trait(ref poly_trait_predicate) =
1869                    predicate.kind().skip_binder()
1870                {
1871                    let def_id = poly_trait_predicate.trait_ref.def_id;
1872                    let descr_pre = &format!("{}implementer{} of ", data.descr_pre, plural_suffix);
1873                    if check_must_not_suspend_def(
1874                        tcx,
1875                        def_id,
1876                        hir_id,
1877                        SuspendCheckData { descr_pre, ..data },
1878                    ) {
1879                        has_emitted = true;
1880                        break;
1881                    }
1882                }
1883            }
1884            has_emitted
1885        }
1886        ty::Dynamic(binder, _) => {
1887            let mut has_emitted = false;
1888            for predicate in binder.iter() {
1889                if let ty::ExistentialPredicate::Trait(ref trait_ref) = predicate.skip_binder() {
1890                    let def_id = trait_ref.def_id;
1891                    let descr_post = &format!(" trait object{}{}", plural_suffix, data.descr_post);
1892                    if check_must_not_suspend_def(
1893                        tcx,
1894                        def_id,
1895                        hir_id,
1896                        SuspendCheckData { descr_post, ..data },
1897                    ) {
1898                        has_emitted = true;
1899                        break;
1900                    }
1901                }
1902            }
1903            has_emitted
1904        }
1905        ty::Tuple(fields) => {
1906            let mut has_emitted = false;
1907            for (i, ty) in fields.iter().enumerate() {
1908                let descr_post = &format!(" in tuple element {i}");
1909                if check_must_not_suspend_ty(
1910                    tcx,
1911                    ty,
1912                    hir_id,
1913                    SuspendCheckData { descr_post, ..data },
1914                ) {
1915                    has_emitted = true;
1916                }
1917            }
1918            has_emitted
1919        }
1920        ty::Array(ty, len) => {
1921            let descr_pre = &format!("{}array{} of ", data.descr_pre, plural_suffix);
1922            check_must_not_suspend_ty(
1923                tcx,
1924                ty,
1925                hir_id,
1926                SuspendCheckData {
1927                    descr_pre,
1928                    // FIXME(must_not_suspend): This is wrong. We should handle printing unevaluated consts.
1929                    plural_len: len.try_to_target_usize(tcx).unwrap_or(0) as usize + 1,
1930                    ..data
1931                },
1932            )
1933        }
1934        // If drop tracking is enabled, we want to look through references, since the referent
1935        // may not be considered live across the await point.
1936        ty::Ref(_region, ty, _mutability) => {
1937            let descr_pre = &format!("{}reference{} to ", data.descr_pre, plural_suffix);
1938            check_must_not_suspend_ty(tcx, ty, hir_id, SuspendCheckData { descr_pre, ..data })
1939        }
1940        _ => false,
1941    }
1942}
1943
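/// Emits the `must_not_suspend` lint for `def_id` if its definition carries a
/// `#[must_not_suspend]` attribute, using the attribute's optional string value as the reason
/// shown in the diagnostic.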
1944fn check_must_not_suspend_def(
1945    tcx: TyCtxt<'_>,
1946    def_id: DefId,
1947    hir_id: hir::HirId,
1948    data: SuspendCheckData<'_>,
1949) -> bool {
1950    if let Some(attr) = tcx.get_attr(def_id, sym::must_not_suspend) {
1951        let reason = attr.value_str().map(|s| errors::MustNotSuspendReason {
1952            span: data.source_span,
1953            reason: s.as_str().to_string(),
1954        });
1955        tcx.emit_node_span_lint(
1956            rustc_session::lint::builtin::MUST_NOT_SUSPEND,
1957            hir_id,
1958            data.source_span,
1959            errors::MustNotSupend {
1960                tcx,
1961                yield_sp: data.yield_span,
1962                reason,
1963                src_sp: data.source_span,
1964                pre: data.descr_pre,
1965                def_id,
1966                post: data.descr_post,
1967            },
1968        );
1969
1970        true
1971    } else {
1972        false
1973    }
1974}