rustc_mir_transform/
coroutine.rs

1//! This is the implementation of the pass which transforms coroutines into state machines.
2//!
//! MIR generation for coroutines creates a function which has a self argument that is
//! passed by value. This argument is effectively a coroutine type which only contains upvars
//! and is only used for this argument inside the MIR for the coroutine.
6//! It is passed by value to enable upvars to be moved out of it. Drop elaboration runs on that
7//! MIR before this pass and creates drop flags for MIR locals.
8//! It will also drop the coroutine argument (which only consists of upvars) if any of the upvars
9//! are moved out of. This pass elaborates the drops of upvars / coroutine argument in the case
10//! that none of the upvars were moved out of. This is because we cannot have any drops of this
11//! coroutine in the MIR, since it is used to create the drop glue for the coroutine. We'd get
12//! infinite recursion otherwise.
13//!
14//! This pass creates the implementation for either the `Coroutine::resume` or `Future::poll`
15//! function and the drop shim for the coroutine based on the MIR input.
16//! It converts the coroutine argument from Self to &mut Self adding derefs in the MIR as needed.
17//! It computes the final layout of the coroutine struct which looks like this:
18//!     First upvars are stored
19//!     It is followed by the coroutine state field.
20//!     Then finally the MIR locals which are live across a suspension point are stored.
21//!     ```ignore (illustrative)
22//!     struct Coroutine {
23//!         upvars...,
24//!         state: u32,
25//!         mir_locals...,
26//!     }
27//!     ```
28//! This pass computes the meaning of the state field and the MIR locals which are live
29//! across a suspension point. There are however three hardcoded coroutine states:
//!     0 - Coroutine has not been resumed yet
//!     1 - Coroutine has returned / is completed
//!     2 - Coroutine has been poisoned
33//!
34//! It also rewrites `return x` and `yield y` as setting a new coroutine state and returning
35//! `CoroutineState::Complete(x)` and `CoroutineState::Yielded(y)`,
36//! or `Poll::Ready(x)` and `Poll::Pending` respectively.
37//! MIR locals which are live across a suspension point are moved to the coroutine struct
38//! with references to them being updated with references to the coroutine struct.
39//!
40//! The pass creates two functions which have a switch on the coroutine state giving
41//! the action to take.
42//!
43//! One of them is the implementation of `Coroutine::resume` / `Future::poll`.
44//! For coroutines with state 0 (unresumed) it starts the execution of the coroutine.
45//! For coroutines with state 1 (returned) and state 2 (poisoned) it panics.
46//! Otherwise it continues the execution from the last suspension point.
47//!
48//! The other function is the drop glue for the coroutine.
49//! For coroutines with state 0 (unresumed) it drops the upvars of the coroutine.
50//! For coroutines with state 1 (returned) and state 2 (poisoned) it does nothing.
51//! Otherwise it drops all the values in scope at the last suspension point.
52
53mod by_move_body;
54mod drop;
55use std::ops;
56
57pub(super) use by_move_body::coroutine_by_move_body_def_id;
58use drop::{
59    cleanup_async_drops, create_coroutine_drop_shim, create_coroutine_drop_shim_async,
60    create_coroutine_drop_shim_proxy_async, elaborate_coroutine_drops, expand_async_drops,
61    has_expandable_async_drops, insert_clean_drop,
62};
63use itertools::izip;
64use rustc_abi::{FieldIdx, VariantIdx};
65use rustc_data_structures::fx::FxHashSet;
66use rustc_errors::pluralize;
67use rustc_hir as hir;
68use rustc_hir::lang_items::LangItem;
69use rustc_hir::{CoroutineDesugaring, CoroutineKind};
70use rustc_index::bit_set::{BitMatrix, DenseBitSet, GrowableBitSet};
71use rustc_index::{Idx, IndexVec, indexvec};
72use rustc_middle::mir::visit::{MutVisitor, MutatingUseContext, PlaceContext, Visitor};
73use rustc_middle::mir::*;
74use rustc_middle::ty::util::Discr;
75use rustc_middle::ty::{
76    self, CoroutineArgs, CoroutineArgsExt, GenericArgsRef, InstanceKind, Ty, TyCtxt, TypingMode,
77};
78use rustc_middle::{bug, span_bug};
79use rustc_mir_dataflow::impls::{
80    MaybeBorrowedLocals, MaybeLiveLocals, MaybeRequiresStorage, MaybeStorageLive,
81    always_storage_live_locals,
82};
83use rustc_mir_dataflow::{
84    Analysis, Results, ResultsCursor, ResultsVisitor, visit_reachable_results,
85};
86use rustc_span::def_id::{DefId, LocalDefId};
87use rustc_span::source_map::dummy_spanned;
88use rustc_span::symbol::sym;
89use rustc_span::{DUMMY_SP, Span};
90use rustc_trait_selection::error_reporting::InferCtxtErrorExt;
91use rustc_trait_selection::infer::TyCtxtInferExt as _;
92use rustc_trait_selection::traits::{ObligationCause, ObligationCauseCode, ObligationCtxt};
93use tracing::{debug, instrument, trace};
94
95use crate::deref_separator::deref_finder;
96use crate::{abort_unwinding_calls, errors, pass_manager as pm, simplify};
97
/// MIR pass that transforms a coroutine body into its `resume`/`poll`
/// state-machine implementation. See the module-level documentation.
pub(super) struct StateTransform;
99
/// Visitor that *swaps* all uses of two locals: occurrences of `from` become
/// `to` and occurrences of `to` become `from` (see `visit_local`).
struct RenameLocalVisitor<'tcx> {
    from: Local,
    to: Local,
    tcx: TyCtxt<'tcx>,
}
105
106impl<'tcx> MutVisitor<'tcx> for RenameLocalVisitor<'tcx> {
107    fn tcx(&self) -> TyCtxt<'tcx> {
108        self.tcx
109    }
110
111    fn visit_local(&mut self, local: &mut Local, _: PlaceContext, _: Location) {
112        if *local == self.from {
113            *local = self.to;
114        } else if *local == self.to {
115            *local = self.from;
116        }
117    }
118
119    fn visit_terminator(&mut self, terminator: &mut Terminator<'tcx>, location: Location) {
120        match terminator.kind {
121            TerminatorKind::Return => {
122                // Do not replace the implicit `_0` access here, as that's not possible. The
123                // transform already handles `return` correctly.
124            }
125            _ => self.super_terminator(terminator, location),
126        }
127    }
128}
129
/// Visitor that rewrites every place based on the coroutine self argument
/// (`SELF_ARG`) so that it goes through `new_base` instead.
struct SelfArgVisitor<'tcx> {
    tcx: TyCtxt<'tcx>,
    new_base: Place<'tcx>,
}
134
135impl<'tcx> SelfArgVisitor<'tcx> {
136    fn new(tcx: TyCtxt<'tcx>, new_base: Place<'tcx>) -> Self {
137        Self { tcx, new_base }
138    }
139}
140
impl<'tcx> MutVisitor<'tcx> for SelfArgVisitor<'tcx> {
    fn tcx(&self) -> TyCtxt<'tcx> {
        self.tcx
    }

    fn visit_local(&mut self, local: &mut Local, _: PlaceContext, _: Location) {
        // A bare (non-place) use of `SELF_ARG` should be impossible: every
        // use must flow through `visit_place` below and get rebased there.
        assert_ne!(*local, SELF_ARG);
    }

    fn visit_place(&mut self, place: &mut Place<'tcx>, _: PlaceContext, _: Location) {
        // Rebase accesses of the coroutine self argument onto `new_base`.
        if place.local == SELF_ARG {
            replace_base(place, self.new_base, self.tcx);
        }

        // `SELF_ARG` must not appear as an array-index operand either, since
        // `replace_base` only rewrites the place's base local.
        for elem in place.projection.iter() {
            if let PlaceElem::Index(local) = elem {
                assert_ne!(local, SELF_ARG);
            }
        }
    }
}
162
163#[tracing::instrument(level = "trace", skip(tcx))]
164fn replace_base<'tcx>(place: &mut Place<'tcx>, new_base: Place<'tcx>, tcx: TyCtxt<'tcx>) {
165    place.local = new_base.local;
166
167    let mut new_projection = new_base.projection.to_vec();
168    new_projection.append(&mut place.projection.to_vec());
169
170    place.projection = tcx.mk_place_elems(&new_projection);
171    tracing::trace!(?place);
172}
173
/// The coroutine object itself, passed as the first argument (`_1`).
const SELF_ARG: Local = Local::from_u32(1);
/// The resume argument (`_2`); for async bodies its `ResumeTy` type is later
/// replaced with `&mut Context<'_>` (see `transform_async_context`).
const CTX_ARG: Local = Local::from_u32(2);
176
/// A `yield` point in the coroutine.
struct SuspensionPoint<'tcx> {
    /// State discriminant used when suspending or resuming at this point.
    /// Always at least `CoroutineArgs::RESERVED_VARIANTS`.
    state: usize,
    /// The block to jump to after resumption.
    resume: BasicBlock,
    /// Where to move the resume argument after resumption.
    resume_arg: Place<'tcx>,
    /// Which block to jump to if the coroutine is dropped in this state.
    drop: Option<BasicBlock>,
    /// Set of locals that have live storage while at this suspension point.
    storage_liveness: GrowableBitSet<Local>,
}
190
/// Visitor performing the main state-machine rewrite: saved locals are
/// replaced by coroutine-struct field accesses, and `yield`/`return`
/// terminators become discriminant updates followed by `Return`.
struct TransformVisitor<'tcx> {
    tcx: TyCtxt<'tcx>,
    coroutine_kind: hir::CoroutineKind,

    // The type of the discriminant in the coroutine struct
    discr_ty: Ty<'tcx>,

    // Mapping from Local to (type of local, coroutine struct variant, field index)
    remap: IndexVec<Local, Option<(Ty<'tcx>, VariantIdx, FieldIdx)>>,

    // A map from a suspension point in a block to the locals which have live storage at that point
    storage_liveness: IndexVec<BasicBlock, Option<DenseBitSet<Local>>>,

    // A list of suspension points, generated during the transform
    suspension_points: Vec<SuspensionPoint<'tcx>>,

    // The set of locals that have no `StorageLive`/`StorageDead` annotations.
    always_live_locals: DenseBitSet<Local>,

    // New local just created to hold the `CoroutineState` value.
    new_ret_local: Local,

    // Yield type of the body before the transform rewrote it.
    old_yield_ty: Ty<'tcx>,

    // Return type of the body before the transform rewrote it.
    old_ret_ty: Ty<'tcx>,
}
217
impl<'tcx> TransformVisitor<'tcx> {
    /// Appends a block that assigns the "exhausted" value (`None` for `gen`,
    /// the `AsyncGenFinished` constant for `async gen`) to `RETURN_PLACE` and
    /// returns. Futures and plain coroutines are not fused, so they
    /// `span_bug!` here.
    fn insert_none_ret_block(&self, body: &mut Body<'tcx>) -> BasicBlock {
        // The index the new block will receive once pushed below.
        let block = body.basic_blocks.next_index();
        let source_info = SourceInfo::outermost(body.span);

        let none_value = match self.coroutine_kind {
            CoroutineKind::Desugared(CoroutineDesugaring::Async, _) => {
                span_bug!(body.span, "`Future`s are not fused inherently")
            }
            CoroutineKind::Coroutine(_) => span_bug!(body.span, "`Coroutine`s cannot be fused"),
            // `gen` coroutines continue to return `None` once exhausted
            CoroutineKind::Desugared(CoroutineDesugaring::Gen, _) => {
                let option_def_id = self.tcx.require_lang_item(LangItem::Option, body.span);
                make_aggregate_adt(
                    option_def_id,
                    VariantIdx::ZERO,
                    self.tcx.mk_args(&[self.old_yield_ty.into()]),
                    IndexVec::new(),
                )
            }
            // `async gen` continues to return `Poll::Ready(None)`
            CoroutineKind::Desugared(CoroutineDesugaring::AsyncGen, _) => {
                // Peel `old_yield_ty` (`Poll<Option<_>>`) down to the raw
                // yield type so the `AsyncGenFinished` constant can be
                // instantiated for it.
                let ty::Adt(_poll_adt, args) = *self.old_yield_ty.kind() else { bug!() };
                let ty::Adt(_option_adt, args) = *args.type_at(0).kind() else { bug!() };
                let yield_ty = args.type_at(0);
                Rvalue::Use(Operand::Constant(Box::new(ConstOperand {
                    span: source_info.span,
                    const_: Const::Unevaluated(
                        UnevaluatedConst::new(
                            self.tcx.require_lang_item(LangItem::AsyncGenFinished, body.span),
                            self.tcx.mk_args(&[yield_ty.into()]),
                        ),
                        self.old_yield_ty,
                    ),
                    user_ty: None,
                })))
            }
        };

        let statements = vec![Statement::new(
            source_info,
            StatementKind::Assign(Box::new((Place::return_place(), none_value))),
        )];

        body.basic_blocks_mut().push(BasicBlockData::new_stmts(
            statements,
            Some(Terminator { source_info, kind: TerminatorKind::Return }),
            false,
        ));

        block
    }

    // Make a `CoroutineState` or `Poll` variant assignment.
    //
    // `core::ops::CoroutineState` only has single element tuple variants,
    // so we can just write to the downcasted first field and then set the
    // discriminant to the appropriate variant.
    #[tracing::instrument(level = "trace", skip(self, statements))]
    fn make_state(
        &self,
        val: Operand<'tcx>,
        source_info: SourceInfo,
        is_return: bool,
        statements: &mut Vec<Statement<'tcx>>,
    ) {
        const ZERO: VariantIdx = VariantIdx::ZERO;
        const ONE: VariantIdx = VariantIdx::from_usize(1);
        let rvalue = match self.coroutine_kind {
            CoroutineKind::Desugared(CoroutineDesugaring::Async, _) => {
                let poll_def_id = self.tcx.require_lang_item(LangItem::Poll, source_info.span);
                let args = self.tcx.mk_args(&[self.old_ret_ty.into()]);
                let (variant_idx, operands) = if is_return {
                    (ZERO, indexvec![val]) // Poll::Ready(val)
                } else {
                    (ONE, IndexVec::new()) // Poll::Pending
                };
                make_aggregate_adt(poll_def_id, variant_idx, args, operands)
            }
            CoroutineKind::Desugared(CoroutineDesugaring::Gen, _) => {
                let option_def_id = self.tcx.require_lang_item(LangItem::Option, source_info.span);
                let args = self.tcx.mk_args(&[self.old_yield_ty.into()]);
                let (variant_idx, operands) = if is_return {
                    (ZERO, IndexVec::new()) // None
                } else {
                    (ONE, indexvec![val]) // Some(val)
                };
                make_aggregate_adt(option_def_id, variant_idx, args, operands)
            }
            CoroutineKind::Desugared(CoroutineDesugaring::AsyncGen, _) => {
                if is_return {
                    // A `return` from `async gen` produces the sentinel
                    // `AsyncGenFinished` constant instead of wrapping `val`.
                    let ty::Adt(_poll_adt, args) = *self.old_yield_ty.kind() else { bug!() };
                    let ty::Adt(_option_adt, args) = *args.type_at(0).kind() else { bug!() };
                    let yield_ty = args.type_at(0);
                    Rvalue::Use(Operand::Constant(Box::new(ConstOperand {
                        span: source_info.span,
                        const_: Const::Unevaluated(
                            UnevaluatedConst::new(
                                self.tcx.require_lang_item(
                                    LangItem::AsyncGenFinished,
                                    source_info.span,
                                ),
                                self.tcx.mk_args(&[yield_ty.into()]),
                            ),
                            self.old_yield_ty,
                        ),
                        user_ty: None,
                    })))
                } else {
                    // Yielded values are already wrapped by the lowering.
                    Rvalue::Use(val)
                }
            }
            CoroutineKind::Coroutine(_) => {
                let coroutine_state_def_id =
                    self.tcx.require_lang_item(LangItem::CoroutineState, source_info.span);
                let args = self.tcx.mk_args(&[self.old_yield_ty.into(), self.old_ret_ty.into()]);
                let variant_idx = if is_return {
                    ONE // CoroutineState::Complete(val)
                } else {
                    ZERO // CoroutineState::Yielded(val)
                };
                make_aggregate_adt(coroutine_state_def_id, variant_idx, args, indexvec![val])
            }
        };

        // Assign to `new_ret_local`, which will be replaced by `RETURN_PLACE` later.
        statements.push(Statement::new(
            source_info,
            StatementKind::Assign(Box::new((self.new_ret_local.into(), rvalue))),
        ));
    }

    // Create a Place referencing a coroutine struct field
    #[tracing::instrument(level = "trace", skip(self), ret)]
    fn make_field(&self, variant_index: VariantIdx, idx: FieldIdx, ty: Ty<'tcx>) -> Place<'tcx> {
        // Downcast the self place to the given variant, then project to the field.
        let self_place = Place::from(SELF_ARG);
        let base = self.tcx.mk_place_downcast_unnamed(self_place, variant_index);
        let mut projection = base.projection.to_vec();
        projection.push(ProjectionElem::Field(idx, ty));

        Place { local: base.local, projection: self.tcx.mk_place_elems(&projection) }
    }

    // Create a statement which changes the discriminant
    #[tracing::instrument(level = "trace", skip(self))]
    fn set_discr(&self, state_disc: VariantIdx, source_info: SourceInfo) -> Statement<'tcx> {
        let self_place = Place::from(SELF_ARG);
        Statement::new(
            source_info,
            StatementKind::SetDiscriminant {
                place: Box::new(self_place),
                variant_index: state_disc,
            },
        )
    }

    // Create a statement which reads the discriminant into a temporary
    #[tracing::instrument(level = "trace", skip(self, body))]
    fn get_discr(&self, body: &mut Body<'tcx>) -> (Statement<'tcx>, Place<'tcx>) {
        // Fresh temporary of the discriminant type to hold the read value.
        let temp_decl = LocalDecl::new(self.discr_ty, body.span);
        let local_decls_len = body.local_decls.push(temp_decl);
        let temp = Place::from(local_decls_len);

        let self_place = Place::from(SELF_ARG);
        let assign = Statement::new(
            SourceInfo::outermost(body.span),
            StatementKind::Assign(Box::new((temp, Rvalue::Discriminant(self_place)))),
        );
        (assign, temp)
    }

    /// Swaps all references of `old_local` and `new_local`.
    #[tracing::instrument(level = "trace", skip(self, body))]
    fn replace_local(&mut self, old_local: Local, new_local: Local, body: &mut Body<'tcx>) {
        body.local_decls.swap(old_local, new_local);

        let mut visitor = RenameLocalVisitor { from: old_local, to: new_local, tcx: self.tcx };
        visitor.visit_body(body);
        // The recorded resume-arg places are stored outside the body, so the
        // visitor has to be applied to them explicitly as well.
        for suspension in &mut self.suspension_points {
            let ctxt = PlaceContext::MutatingUse(MutatingUseContext::Yield);
            let location = Location { block: START_BLOCK, statement_index: 0 };
            visitor.visit_place(&mut suspension.resume_arg, ctxt, location);
        }
    }
}
403
impl<'tcx> MutVisitor<'tcx> for TransformVisitor<'tcx> {
    fn tcx(&self) -> TyCtxt<'tcx> {
        self.tcx
    }

    #[tracing::instrument(level = "trace", skip(self), ret)]
    fn visit_local(&mut self, local: &mut Local, _: PlaceContext, _location: Location) {
        // A bare use of a remapped local should be impossible: such uses are
        // rewritten into field projections by `visit_place` below.
        assert!(!self.remap.contains(*local));
    }

    #[tracing::instrument(level = "trace", skip(self), ret)]
    fn visit_place(&mut self, place: &mut Place<'tcx>, _: PlaceContext, _location: Location) {
        // Replace a Local in the remap with a coroutine struct access
        if let Some(&Some((ty, variant_index, idx))) = self.remap.get(place.local) {
            replace_base(place, self.make_field(variant_index, idx, ty), self.tcx);
        }
    }

    #[tracing::instrument(level = "trace", skip(self, stmt), ret)]
    fn visit_statement(&mut self, stmt: &mut Statement<'tcx>, location: Location) {
        // Remove StorageLive and StorageDead statements for remapped locals:
        // their storage now lives inside the coroutine struct.
        if let StatementKind::StorageLive(l) | StatementKind::StorageDead(l) = stmt.kind
            && self.remap.contains(l)
        {
            stmt.make_nop(true);
        }
        self.super_statement(stmt, location);
    }

    #[tracing::instrument(level = "trace", skip(self, term), ret)]
    fn visit_terminator(&mut self, term: &mut Terminator<'tcx>, location: Location) {
        if let TerminatorKind::Return = term.kind {
            // `visit_basic_block_data` introduces `Return` terminators which read `RETURN_PLACE`.
            // But this `RETURN_PLACE` is already remapped, so we should not touch it again.
            return;
        }
        self.super_terminator(term, location);
    }

    #[tracing::instrument(level = "trace", skip(self, data), ret)]
    fn visit_basic_block_data(&mut self, block: BasicBlock, data: &mut BasicBlockData<'tcx>) {
        match data.terminator().kind {
            TerminatorKind::Return => {
                let source_info = data.terminator().source_info;
                // We must assign the value first in case it gets declared dead below
                self.make_state(
                    Operand::Move(Place::return_place()),
                    source_info,
                    true,
                    &mut data.statements,
                );
                // Return state.
                let state = VariantIdx::new(CoroutineArgs::RETURNED);
                data.statements.push(self.set_discr(state, source_info));
                data.terminator_mut().kind = TerminatorKind::Return;
            }
            TerminatorKind::Yield { ref value, resume, mut resume_arg, drop } => {
                let source_info = data.terminator().source_info;
                // We must assign the value first in case it gets declared dead below
                self.make_state(value.clone(), source_info, false, &mut data.statements);
                // Yield state: each suspension point gets the next discriminant
                // after the reserved (unresumed/returned/poisoned) variants.
                let state = CoroutineArgs::RESERVED_VARIANTS + self.suspension_points.len();

                // The resume arg target location might itself be remapped if its base local is
                // live across a yield.
                if let Some(&Some((ty, variant, idx))) = self.remap.get(resume_arg.local) {
                    replace_base(&mut resume_arg, self.make_field(variant, idx, ty), self.tcx);
                }

                let storage_liveness: GrowableBitSet<Local> =
                    self.storage_liveness[block].clone().unwrap().into();

                // End the storage of every local that has live storage here but
                // is neither saved in the coroutine struct nor always live.
                for i in 0..self.always_live_locals.domain_size() {
                    let l = Local::new(i);
                    let needs_storage_dead = storage_liveness.contains(l)
                        && !self.remap.contains(l)
                        && !self.always_live_locals.contains(l);
                    if needs_storage_dead {
                        data.statements
                            .push(Statement::new(source_info, StatementKind::StorageDead(l)));
                    }
                }

                self.suspension_points.push(SuspensionPoint {
                    state,
                    resume,
                    resume_arg,
                    drop,
                    storage_liveness,
                });

                let state = VariantIdx::new(state);
                data.statements.push(self.set_discr(state, source_info));
                data.terminator_mut().kind = TerminatorKind::Return;
            }
            _ => {}
        }

        self.super_basic_block_data(block, data);
    }
}
505
506fn make_aggregate_adt<'tcx>(
507    def_id: DefId,
508    variant_idx: VariantIdx,
509    args: GenericArgsRef<'tcx>,
510    operands: IndexVec<FieldIdx, Operand<'tcx>>,
511) -> Rvalue<'tcx> {
512    Rvalue::Aggregate(Box::new(AggregateKind::Adt(def_id, variant_idx, args, None, None)), operands)
513}
514
515#[tracing::instrument(level = "trace", skip(tcx, body))]
516fn make_coroutine_state_argument_indirect<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
517    let coroutine_ty = body.local_decls[SELF_ARG].ty;
518
519    let ref_coroutine_ty = Ty::new_mut_ref(tcx, tcx.lifetimes.re_erased, coroutine_ty);
520
521    // Replace the by value coroutine argument
522    body.local_decls[SELF_ARG].ty = ref_coroutine_ty;
523
524    // Add a deref to accesses of the coroutine state
525    SelfArgVisitor::new(tcx, tcx.mk_place_deref(SELF_ARG.into())).visit_body(body);
526}
527
#[tracing::instrument(level = "trace", skip(tcx, body))]
/// Changes the coroutine self argument from `&mut Coroutine` to
/// `Pin<&mut Coroutine>`. State accesses are rewritten to go through a fresh
/// local holding the `&mut Coroutine` extracted from the `Pin`'s first field,
/// which is initialized at the top of the start block.
fn make_coroutine_state_argument_pinned<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
    let coroutine_ty = body.local_decls[SELF_ARG].ty;

    let ref_coroutine_ty = Ty::new_mut_ref(tcx, tcx.lifetimes.re_erased, coroutine_ty);

    let pin_did = tcx.require_lang_item(LangItem::Pin, body.span);
    let pin_adt_ref = tcx.adt_def(pin_did);
    let args = tcx.mk_args(&[ref_coroutine_ty.into()]);
    let pin_ref_coroutine_ty = Ty::new_adt(tcx, pin_adt_ref, args);

    // Replace the by ref coroutine argument
    body.local_decls[SELF_ARG].ty = pin_ref_coroutine_ty;

    // New local that holds the unpinned `&mut Coroutine`.
    let unpinned_local = body.local_decls.push(LocalDecl::new(ref_coroutine_ty, body.span));

    // Add the Pin field access to accesses of the coroutine state
    SelfArgVisitor::new(tcx, tcx.mk_place_deref(unpinned_local.into())).visit_body(body);

    let source_info = SourceInfo::outermost(body.span);
    // `Pin` is `#[repr(transparent)]`-like here: its single field at index 0
    // holds the wrapped `&mut Coroutine`.
    let pin_field = tcx.mk_place_field(SELF_ARG.into(), FieldIdx::ZERO, ref_coroutine_ty);

    let statements = &mut body.basic_blocks.as_mut_preserves_cfg()[START_BLOCK].statements;
    // Miri requires retags to be the very first thing in the body.
    // We insert this assignment just after.
    let insert_point = statements
        .iter()
        .position(|stmt| !matches!(stmt.kind, StatementKind::Retag(..)))
        .unwrap_or(statements.len());
    statements.insert(
        insert_point,
        Statement::new(
            source_info,
            StatementKind::Assign(Box::new((
                unpinned_local.into(),
                Rvalue::Use(Operand::Copy(pin_field)),
            ))),
        ),
    );
}
568
/// Transforms the `body` of the coroutine applying the following transforms:
///
/// - Eliminates all the `get_context` calls that async lowering created.
/// - Replace all `Local` `ResumeTy` types with `&mut Context<'_>` (`context_mut_ref`).
///
/// The `Local`s that have their types replaced are:
/// - The `resume` argument itself.
/// - The argument to `get_context`.
/// - The yielded value of a `yield`.
///
/// The `ResumeTy` hides a `&mut Context<'_>` behind an unsafe raw pointer, and the
/// `get_context` function is being used to convert that back to a `&mut Context<'_>`.
///
/// Ideally the async lowering would not use the `ResumeTy`/`get_context` indirection,
/// but rather directly use `&mut Context<'_>`, however that would currently
/// lead to higher-kinded lifetime errors.
/// See <https://github.com/rust-lang/rust/issues/105501>.
///
/// The async lowering step and the type / lifetime inference / checking are
/// still using the `ResumeTy` indirection for the time being, and that indirection
/// is removed here. After this transform, the coroutine body only knows about `&mut Context<'_>`.
#[tracing::instrument(level = "trace", skip(tcx, body), ret)]
fn transform_async_context<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) -> Ty<'tcx> {
    let context_mut_ref = Ty::new_task_context(tcx);

    // replace the type of the `resume` argument
    replace_resume_ty_local(tcx, body, CTX_ARG, context_mut_ref);

    let get_context_def_id = tcx.require_lang_item(LangItem::GetContext, body.span);

    for bb in body.basic_blocks.indices() {
        let bb_data = &body[bb];
        // Cleanup (unwind) blocks never contain `get_context` calls or yields.
        if bb_data.is_cleanup {
            continue;
        }

        match &bb_data.terminator().kind {
            TerminatorKind::Call { func, .. } => {
                let func_ty = func.ty(body, tcx);
                if let ty::FnDef(def_id, _) = *func_ty.kind()
                    && def_id == get_context_def_id
                {
                    // Replace the `get_context` call with a plain assignment
                    // and retype its argument local.
                    let local = eliminate_get_context_call(&mut body[bb]);
                    replace_resume_ty_local(tcx, body, local, context_mut_ref);
                }
            }
            TerminatorKind::Yield { resume_arg, .. } => {
                replace_resume_ty_local(tcx, body, resume_arg.local, context_mut_ref);
            }
            _ => {}
        }
    }
    context_mut_ref
}
623
624fn eliminate_get_context_call<'tcx>(bb_data: &mut BasicBlockData<'tcx>) -> Local {
625    let terminator = bb_data.terminator.take().unwrap();
626    let TerminatorKind::Call { args, destination, target, .. } = terminator.kind else {
627        bug!();
628    };
629    let [arg] = *Box::try_from(args).unwrap();
630    let local = arg.node.place().unwrap().local;
631
632    let arg = Rvalue::Use(arg.node);
633    let assign =
634        Statement::new(terminator.source_info, StatementKind::Assign(Box::new((destination, arg))));
635    bb_data.statements.push(assign);
636    bb_data.terminator = Some(Terminator {
637        source_info: terminator.source_info,
638        kind: TerminatorKind::Goto { target: target.unwrap() },
639    });
640    local
641}
642
/// Replaces the type of `local` with `context_mut_ref` (`&mut Context<'_>`).
/// In debug builds, asserts that the previous type was the `ResumeTy` lang item.
#[cfg_attr(not(debug_assertions), allow(unused))]
#[tracing::instrument(level = "trace", skip(tcx, body), ret)]
fn replace_resume_ty_local<'tcx>(
    tcx: TyCtxt<'tcx>,
    body: &mut Body<'tcx>,
    local: Local,
    context_mut_ref: Ty<'tcx>,
) {
    let local_ty = std::mem::replace(&mut body.local_decls[local].ty, context_mut_ref);
    // We have to replace the `ResumeTy` that is used for type and borrow checking
    // with `&mut Context<'_>` in MIR.
    #[cfg(debug_assertions)]
    {
        if let ty::Adt(resume_ty_adt, _) = local_ty.kind() {
            let expected_adt = tcx.adt_def(tcx.require_lang_item(LangItem::ResumeTy, body.span));
            assert_eq!(*resume_ty_adt, expected_adt);
        } else {
            panic!("expected `ResumeTy`, found `{:?}`", local_ty);
        };
    }
}
664
/// Transforms the `body` of the coroutine applying the following transform:
///
/// - Remove the `resume` argument.
///
/// Ideally the async lowering would not add the `resume` argument.
///
/// The async lowering step and the type / lifetime inference / checking are
/// still using the `resume` argument for the time being. After this transform,
/// the coroutine body doesn't have the `resume` argument.
fn transform_gen_context<'tcx>(body: &mut Body<'tcx>) {
    // This leaves the local representing the `resume` argument in place,
    // but turns it into a regular local variable. This is cheaper than
    // adjusting all local references in the body after removing it.
    body.arg_count = 1;
}
680
/// Liveness facts computed for a coroutine body, used to decide which locals
/// must be stored in the coroutine struct and which may overlap in its layout.
struct LivenessInfo {
    /// Which locals are live across any suspension point.
    saved_locals: CoroutineSavedLocals,

    /// The set of saved locals live at each suspension point.
    live_locals_at_suspension_points: Vec<DenseBitSet<CoroutineSavedLocal>>,

    /// Parallel vec to the above with SourceInfo for each yield terminator.
    source_info_at_suspension_points: Vec<SourceInfo>,

    /// For every saved local, the set of other saved locals that are
    /// storage-live at the same time as this local. We cannot overlap locals in
    /// the layout which have conflicting storage.
    storage_conflicts: BitMatrix<CoroutineSavedLocal, CoroutineSavedLocal>,

    /// For every suspending block, the locals which are storage-live across
    /// that suspension point.
    storage_liveness: IndexVec<BasicBlock, Option<DenseBitSet<Local>>>,
}
700
701/// Computes which locals have to be stored in the state-machine for the
702/// given coroutine.
703///
704/// The basic idea is as follows:
705/// - a local is live until we encounter a `StorageDead` statement. In
706///   case none exist, the local is considered to be always live.
/// - a local has to be stored if it is either directly used after the
///   suspend point, or if it is live and has been previously borrowed.
709#[tracing::instrument(level = "trace", skip(tcx, body))]
710fn locals_live_across_suspend_points<'tcx>(
711    tcx: TyCtxt<'tcx>,
712    body: &Body<'tcx>,
713    always_live_locals: &DenseBitSet<Local>,
714    movable: bool,
715) -> LivenessInfo {
716    // Calculate when MIR locals have live storage. This gives us an upper bound of their
717    // lifetimes.
718    let mut storage_live = MaybeStorageLive::new(std::borrow::Cow::Borrowed(always_live_locals))
719        .iterate_to_fixpoint(tcx, body, None)
720        .into_results_cursor(body);
721
722    // Calculate the MIR locals that have been previously borrowed (even if they are still active).
723    let borrowed_locals = MaybeBorrowedLocals.iterate_to_fixpoint(tcx, body, Some("coroutine"));
724    let borrowed_locals_cursor1 = ResultsCursor::new_borrowing(body, &borrowed_locals);
725    let mut borrowed_locals_cursor2 = ResultsCursor::new_borrowing(body, &borrowed_locals);
726
727    // Calculate the MIR locals that we need to keep storage around for.
728    let requires_storage =
729        MaybeRequiresStorage::new(borrowed_locals_cursor1).iterate_to_fixpoint(tcx, body, None);
730    let mut requires_storage_cursor = ResultsCursor::new_borrowing(body, &requires_storage);
731
732    // Calculate the liveness of MIR locals ignoring borrows.
733    let mut liveness =
734        MaybeLiveLocals.iterate_to_fixpoint(tcx, body, Some("coroutine")).into_results_cursor(body);
735
736    let mut storage_liveness_map = IndexVec::from_elem(None, &body.basic_blocks);
737    let mut live_locals_at_suspension_points = Vec::new();
738    let mut source_info_at_suspension_points = Vec::new();
739    let mut live_locals_at_any_suspension_point = DenseBitSet::new_empty(body.local_decls.len());
740
741    for (block, data) in body.basic_blocks.iter_enumerated() {
742        let TerminatorKind::Yield { .. } = data.terminator().kind else { continue };
743
744        let loc = Location { block, statement_index: data.statements.len() };
745
746        liveness.seek_to_block_end(block);
747        let mut live_locals = liveness.get().clone();
748
749        if !movable {
750            // The `liveness` variable contains the liveness of MIR locals ignoring borrows.
751            // This is correct for movable coroutines since borrows cannot live across
752            // suspension points. However for immovable coroutines we need to account for
753            // borrows, so we conservatively assume that all borrowed locals are live until
754            // we find a StorageDead statement referencing the locals.
755            // To do this we just union our `liveness` result with `borrowed_locals`, which
756            // contains all the locals which has been borrowed before this suspension point.
757            // If a borrow is converted to a raw reference, we must also assume that it lives
758            // forever. Note that the final liveness is still bounded by the storage liveness
759            // of the local, which happens using the `intersect` operation below.
760            borrowed_locals_cursor2.seek_before_primary_effect(loc);
761            live_locals.union(borrowed_locals_cursor2.get());
762        }
763
764        // Store the storage liveness for later use so we can restore the state
765        // after a suspension point
766        storage_live.seek_before_primary_effect(loc);
767        storage_liveness_map[block] = Some(storage_live.get().clone());
768
769        // Locals live are live at this point only if they are used across
770        // suspension points (the `liveness` variable)
771        // and their storage is required (the `storage_required` variable)
772        requires_storage_cursor.seek_before_primary_effect(loc);
773        live_locals.intersect(requires_storage_cursor.get());
774
775        // The coroutine argument is ignored.
776        live_locals.remove(SELF_ARG);
777
778        debug!(?loc, ?live_locals);
779
780        // Add the locals live at this suspension point to the set of locals which live across
781        // any suspension points
782        live_locals_at_any_suspension_point.union(&live_locals);
783
784        live_locals_at_suspension_points.push(live_locals);
785        source_info_at_suspension_points.push(data.terminator().source_info);
786    }
787
788    debug!(?live_locals_at_any_suspension_point);
789    let saved_locals = CoroutineSavedLocals(live_locals_at_any_suspension_point);
790
791    // Renumber our liveness_map bitsets to include only the locals we are
792    // saving.
793    let live_locals_at_suspension_points = live_locals_at_suspension_points
794        .iter()
795        .map(|live_here| saved_locals.renumber_bitset(live_here))
796        .collect();
797
798    let storage_conflicts = compute_storage_conflicts(
799        body,
800        &saved_locals,
801        always_live_locals.clone(),
802        &requires_storage,
803    );
804
805    LivenessInfo {
806        saved_locals,
807        live_locals_at_suspension_points,
808        source_info_at_suspension_points,
809        storage_conflicts,
810        storage_liveness: storage_liveness_map,
811    }
812}
813
/// The set of `Local`s that must be saved across yield points.
///
/// `CoroutineSavedLocal` is indexed in terms of the elements in this set;
/// i.e. `CoroutineSavedLocal::new(1)` corresponds to the second local
/// included in this set.
///
/// Derefs to the underlying `DenseBitSet<Local>`, so set operations such as
/// `contains`, `iter` and `count` are available directly.
struct CoroutineSavedLocals(DenseBitSet<Local>);
820
821impl CoroutineSavedLocals {
822    /// Returns an iterator over each `CoroutineSavedLocal` along with the `Local` it corresponds
823    /// to.
824    fn iter_enumerated(&self) -> impl '_ + Iterator<Item = (CoroutineSavedLocal, Local)> {
825        self.iter().enumerate().map(|(i, l)| (CoroutineSavedLocal::from(i), l))
826    }
827
828    /// Transforms a `DenseBitSet<Local>` that contains only locals saved across yield points to the
829    /// equivalent `DenseBitSet<CoroutineSavedLocal>`.
830    fn renumber_bitset(&self, input: &DenseBitSet<Local>) -> DenseBitSet<CoroutineSavedLocal> {
831        assert!(self.superset(input), "{:?} not a superset of {:?}", self.0, input);
832        let mut out = DenseBitSet::new_empty(self.count());
833        for (saved_local, local) in self.iter_enumerated() {
834            if input.contains(local) {
835                out.insert(saved_local);
836            }
837        }
838        out
839    }
840
841    fn get(&self, local: Local) -> Option<CoroutineSavedLocal> {
842        if !self.contains(local) {
843            return None;
844        }
845
846        let idx = self.iter().take_while(|&l| l < local).count();
847        Some(CoroutineSavedLocal::new(idx))
848    }
849}
850
/// Allow a `CoroutineSavedLocals` to be used wherever the underlying
/// `DenseBitSet<Local>` is expected (e.g. `contains`, `iter`, `count`).
impl ops::Deref for CoroutineSavedLocals {
    type Target = DenseBitSet<Local>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
858
/// For every saved local, looks for which locals are StorageLive at the same
/// time. Generates a bitset for every local of all the other locals that may be
/// StorageLive simultaneously with that local. This is used in the layout
/// computation; see `CoroutineLayout` for more.
fn compute_storage_conflicts<'mir, 'tcx>(
    body: &'mir Body<'tcx>,
    saved_locals: &'mir CoroutineSavedLocals,
    always_live_locals: DenseBitSet<Local>,
    results: &Results<'tcx, MaybeRequiresStorage<'mir, 'tcx>>,
) -> BitMatrix<CoroutineSavedLocal, CoroutineSavedLocal> {
    assert_eq!(body.local_decls.len(), saved_locals.domain_size());

    debug!("compute_storage_conflicts({:?})", body.span);
    debug!("always_live = {:?}", always_live_locals);

    // Locals that are always live or ones that need to be stored across
    // suspension points are not eligible for overlap.
    // Restrict to the saved locals: only they participate in the layout.
    let mut ineligible_locals = always_live_locals;
    ineligible_locals.intersect(&**saved_locals);

    // Compute the storage conflicts for all eligible locals.
    let mut visitor = StorageConflictVisitor {
        body,
        saved_locals,
        // Seed every row with the ineligible locals so they conflict with everything.
        local_conflicts: BitMatrix::from_row_n(&ineligible_locals, body.local_decls.len()),
        eligible_storage_live: DenseBitSet::new_empty(body.local_decls.len()),
    };

    visit_reachable_results(body, results, &mut visitor);

    let local_conflicts = visitor.local_conflicts;

    // Compress the matrix using only stored locals (Local -> CoroutineSavedLocal).
    //
    // NOTE: Today we store a full conflict bitset for every local. Technically
    // this is twice as many bits as we need, since the relation is symmetric.
    // However, in practice these bitsets are not usually large. The layout code
    // also needs to keep track of how many conflicts each local has, so it's
    // simpler to keep it this way for now.
    let mut storage_conflicts = BitMatrix::new(saved_locals.count(), saved_locals.count());
    for (saved_local_a, local_a) in saved_locals.iter_enumerated() {
        if ineligible_locals.contains(local_a) {
            // Conflicts with everything.
            storage_conflicts.insert_all_into_row(saved_local_a);
        } else {
            // Keep overlap information only for stored locals.
            for (saved_local_b, local_b) in saved_locals.iter_enumerated() {
                if local_conflicts.contains(local_a, local_b) {
                    storage_conflicts.insert(saved_local_a, saved_local_b);
                }
            }
        }
    }
    storage_conflicts
}
914
/// Dataflow results visitor that accumulates, for every pair of locals, whether
/// their storage may be live at the same time. Driven by
/// `compute_storage_conflicts` via `visit_reachable_results`.
struct StorageConflictVisitor<'a, 'tcx> {
    body: &'a Body<'tcx>,
    saved_locals: &'a CoroutineSavedLocals,
    // FIXME(tmandry): Consider using sparse bitsets here once we have good
    // benchmarks for coroutines.
    local_conflicts: BitMatrix<Local, Local>,
    // We keep this bitset as a buffer to avoid reallocating memory.
    eligible_storage_live: DenseBitSet<Local>,
}
924
impl<'a, 'tcx> ResultsVisitor<'tcx, MaybeRequiresStorage<'a, 'tcx>>
    for StorageConflictVisitor<'a, 'tcx>
{
    // Both hooks just forward the storage-liveness state at each program point
    // to `apply_state`, which does the actual conflict accumulation.
    fn visit_after_early_statement_effect(
        &mut self,
        _analysis: &MaybeRequiresStorage<'a, 'tcx>,
        state: &DenseBitSet<Local>,
        _statement: &Statement<'tcx>,
        loc: Location,
    ) {
        self.apply_state(state, loc);
    }

    fn visit_after_early_terminator_effect(
        &mut self,
        _analysis: &MaybeRequiresStorage<'a, 'tcx>,
        state: &DenseBitSet<Local>,
        _terminator: &Terminator<'tcx>,
        loc: Location,
    ) {
        self.apply_state(state, loc);
    }
}
948
949impl StorageConflictVisitor<'_, '_> {
950    fn apply_state(&mut self, state: &DenseBitSet<Local>, loc: Location) {
951        // Ignore unreachable blocks.
952        if let TerminatorKind::Unreachable = self.body.basic_blocks[loc.block].terminator().kind {
953            return;
954        }
955
956        self.eligible_storage_live.clone_from(state);
957        self.eligible_storage_live.intersect(&**self.saved_locals);
958
959        for local in self.eligible_storage_live.iter() {
960            self.local_conflicts.union_row_with(&self.eligible_storage_live, local);
961        }
962
963        if self.eligible_storage_live.count() > 1 {
964            trace!("at {:?}, eligible_storage_live={:?}", loc, self.eligible_storage_live);
965        }
966    }
967}
968
/// Computes where each saved local is stored in the coroutine state machine.
///
/// Returns `(remap, layout, storage_liveness)` where:
/// - `remap`: for every saved MIR local, its type and the (variant, field)
///   under which it is stored (`None` for locals that are not saved),
/// - `layout`: the resulting `CoroutineLayout`,
/// - `storage_liveness`: per-block storage liveness, passed through from
///   `LivenessInfo`.
#[tracing::instrument(level = "trace", skip(liveness, body))]
fn compute_layout<'tcx>(
    liveness: LivenessInfo,
    body: &Body<'tcx>,
) -> (
    IndexVec<Local, Option<(Ty<'tcx>, VariantIdx, FieldIdx)>>,
    CoroutineLayout<'tcx>,
    IndexVec<BasicBlock, Option<DenseBitSet<Local>>>,
) {
    let LivenessInfo {
        saved_locals,
        live_locals_at_suspension_points,
        source_info_at_suspension_points,
        storage_conflicts,
        storage_liveness,
    } = liveness;

    // Gather live local types and their indices.
    let mut locals = IndexVec::<CoroutineSavedLocal, _>::with_capacity(saved_locals.domain_size());
    let mut tys = IndexVec::<CoroutineSavedLocal, _>::with_capacity(saved_locals.domain_size());
    for (saved_local, local) in saved_locals.iter_enumerated() {
        debug!("coroutine saved local {:?} => {:?}", saved_local, local);

        locals.push(local);
        let decl = &body.local_decls[local];
        debug!(?decl);

        // Do not `unwrap_crate_local` here, as post-borrowck cleanup may have already cleared
        // the information. This is alright, since `ignore_for_traits` is only relevant when
        // this code runs on pre-cleanup MIR, and `ignore_for_traits = false` is the safer
        // default.
        let ignore_for_traits = match decl.local_info {
            // Do not include raw pointers created from accessing `static` items, as those could
            // well be re-created by another access to the same static.
            ClearCrossCrate::Set(box LocalInfo::StaticRef { is_thread_local, .. }) => {
                !is_thread_local
            }
            // Fake borrows are only read by fake reads, so do not have any reality in
            // post-analysis MIR.
            ClearCrossCrate::Set(box LocalInfo::FakeBorrow) => true,
            _ => false,
        };
        let decl =
            CoroutineSavedTy { ty: decl.ty, source_info: decl.source_info, ignore_for_traits };
        debug!(?decl);

        tys.push(decl);
    }

    // Leave empty variants for the UNRESUMED, RETURNED, and POISONED states.
    // In debuginfo, these will correspond to the beginning (UNRESUMED) or end
    // (RETURNED, POISONED) of the function.
    let body_span = body.source_scopes[OUTERMOST_SOURCE_SCOPE].span;
    let mut variant_source_info: IndexVec<VariantIdx, SourceInfo> = IndexVec::with_capacity(
        CoroutineArgs::RESERVED_VARIANTS + live_locals_at_suspension_points.len(),
    );
    variant_source_info.extend([
        SourceInfo::outermost(body_span.shrink_to_lo()),
        SourceInfo::outermost(body_span.shrink_to_hi()),
        SourceInfo::outermost(body_span.shrink_to_hi()),
    ]);

    // Build the coroutine variant field list.
    // Create a map from local indices to coroutine struct indices.
    let mut variant_fields: IndexVec<VariantIdx, _> = IndexVec::from_elem_n(
        IndexVec::new(),
        CoroutineArgs::RESERVED_VARIANTS + live_locals_at_suspension_points.len(),
    );
    let mut remap = IndexVec::from_elem_n(None, saved_locals.domain_size());
    // One variant per suspension point, after the reserved ones.
    for (live_locals, &source_info_at_suspension_point, (variant_index, fields)) in izip!(
        &live_locals_at_suspension_points,
        &source_info_at_suspension_points,
        variant_fields.iter_enumerated_mut().skip(CoroutineArgs::RESERVED_VARIANTS)
    ) {
        *fields = live_locals.iter().collect();
        for (idx, &saved_local) in fields.iter_enumerated() {
            // Note that if a field is included in multiple variants, we will
            // just use the first one here. That's fine; fields do not move
            // around inside coroutines, so it doesn't matter which variant
            // index we access them by.
            remap[locals[saved_local]] = Some((tys[saved_local].ty, variant_index, idx));
        }
        variant_source_info.push(source_info_at_suspension_point);
    }
    debug!(?variant_fields);
    debug!(?storage_conflicts);

    // Recover a debug name for each saved field from the body's debuginfo,
    // keeping the first name seen for any given field.
    let mut field_names = IndexVec::from_elem(None, &tys);
    for var in &body.var_debug_info {
        let VarDebugInfoContents::Place(place) = &var.value else { continue };
        let Some(local) = place.as_local() else { continue };
        let Some(&Some((_, variant, field))) = remap.get(local) else {
            continue;
        };

        let saved_local = variant_fields[variant][field];
        field_names.get_or_insert_with(saved_local, || var.name);
    }

    let layout = CoroutineLayout {
        field_tys: tys,
        field_names,
        variant_fields,
        variant_source_info,
        storage_conflicts,
    };
    debug!(?remap);
    debug!(?layout);
    debug!(?storage_liveness);

    (remap, layout, storage_liveness)
}
1081
/// Replaces the entry point of `body` with a block that switches on the coroutine discriminant and
/// dispatches to blocks according to `cases`.
///
/// After this function, the former entry point of the function will be bb1.
fn insert_switch<'tcx>(
    body: &mut Body<'tcx>,
    cases: Vec<(usize, BasicBlock)>,
    transform: &TransformVisitor<'tcx>,
    default_block: BasicBlock,
) {
    // `assign` reads the discriminant into a fresh local; `discr` is that local.
    let (assign, discr) = transform.get_discr(body);
    let switch_targets =
        SwitchTargets::new(cases.iter().map(|(i, bb)| ((*i) as u128, *bb)), default_block);
    let switch = TerminatorKind::SwitchInt { discr: Operand::Move(discr), targets: switch_targets };

    let source_info = SourceInfo::outermost(body.span);
    body.basic_blocks_mut().raw.insert(
        0,
        BasicBlockData::new_stmts(
            vec![assign],
            Some(Terminator { source_info, kind: switch }),
            false,
        ),
    );

    // Inserting at index 0 shifted every pre-existing block up by one, so
    // retarget all successor edges accordingly.
    for b in body.basic_blocks_mut().iter_mut() {
        b.terminator_mut().successors_mut(|target| *target += 1);
    }
}
1111
1112fn insert_term_block<'tcx>(body: &mut Body<'tcx>, kind: TerminatorKind<'tcx>) -> BasicBlock {
1113    let source_info = SourceInfo::outermost(body.span);
1114    body.basic_blocks_mut().push(BasicBlockData::new(Some(Terminator { source_info, kind }), false))
1115}
1116
/// Builds the statement `_0 = Poll::Ready(())`, i.e. an assignment of
/// `Poll::Ready(())` to the return place.
fn return_poll_ready_assign<'tcx>(tcx: TyCtxt<'tcx>, source_info: SourceInfo) -> Statement<'tcx> {
    // Poll::Ready(())
    let poll_def_id = tcx.require_lang_item(LangItem::Poll, source_info.span);
    // Instantiate `Poll` with the unit type: `Poll<()>`.
    let args = tcx.mk_args(&[tcx.types.unit.into()]);
    // The `()` payload of the `Ready` variant.
    let val = Operand::Constant(Box::new(ConstOperand {
        span: source_info.span,
        user_ty: None,
        const_: Const::zero_sized(tcx.types.unit),
    }));
    // Variant 0 of `Poll` is `Ready`.
    let ready_val = Rvalue::Aggregate(
        Box::new(AggregateKind::Adt(poll_def_id, VariantIdx::from_usize(0), args, None, None)),
        indexvec![val],
    );
    Statement::new(source_info, StatementKind::Assign(Box::new((Place::return_place(), ready_val))))
}
1132
1133fn insert_poll_ready_block<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) -> BasicBlock {
1134    let source_info = SourceInfo::outermost(body.span);
1135    body.basic_blocks_mut().push(BasicBlockData::new_stmts(
1136        [return_poll_ready_assign(tcx, source_info)].to_vec(),
1137        Some(Terminator { source_info, kind: TerminatorKind::Return }),
1138        false,
1139    ))
1140}
1141
/// Appends a block that unconditionally panics with `message` and returns its
/// index.
fn insert_panic_block<'tcx>(
    tcx: TyCtxt<'tcx>,
    body: &mut Body<'tcx>,
    message: AssertMessage<'tcx>,
) -> BasicBlock {
    // The index the new block will be pushed at by `insert_term_block` below.
    let assert_block = body.basic_blocks.next_index();
    let kind = TerminatorKind::Assert {
        // The condition is the constant `false` while `expected` is `true`,
        // so the assertion always fails...
        cond: Operand::Constant(Box::new(ConstOperand {
            span: body.span,
            user_ty: None,
            const_: Const::from_bool(tcx, false),
        })),
        expected: true,
        msg: Box::new(message),
        // ...which means this self-referential success edge is never taken.
        target: assert_block,
        unwind: UnwindAction::Continue,
    };

    insert_term_block(body, kind)
}
1162
1163fn can_return<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>, typing_env: ty::TypingEnv<'tcx>) -> bool {
1164    // Returning from a function with an uninhabited return type is undefined behavior.
1165    if body.return_ty().is_privately_uninhabited(tcx, typing_env) {
1166        return false;
1167    }
1168
1169    // If there's a return terminator the function may return.
1170    body.basic_blocks.iter().any(|block| matches!(block.terminator().kind, TerminatorKind::Return))
1171    // Otherwise the function can't return.
1172}
1173
1174fn can_unwind<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>) -> bool {
1175    // Nothing can unwind when landing pads are off.
1176    if !tcx.sess.panic_strategy().unwinds() {
1177        return false;
1178    }
1179
1180    // Unwinds can only start at certain terminators.
1181    for block in body.basic_blocks.iter() {
1182        match block.terminator().kind {
1183            // These never unwind.
1184            TerminatorKind::Goto { .. }
1185            | TerminatorKind::SwitchInt { .. }
1186            | TerminatorKind::UnwindTerminate(_)
1187            | TerminatorKind::Return
1188            | TerminatorKind::Unreachable
1189            | TerminatorKind::CoroutineDrop
1190            | TerminatorKind::FalseEdge { .. }
1191            | TerminatorKind::FalseUnwind { .. } => {}
1192
1193            // Resume will *continue* unwinding, but if there's no other unwinding terminator it
1194            // will never be reached.
1195            TerminatorKind::UnwindResume => {}
1196
1197            TerminatorKind::Yield { .. } => {
1198                unreachable!("`can_unwind` called before coroutine transform")
1199            }
1200
1201            // These may unwind.
1202            TerminatorKind::Drop { .. }
1203            | TerminatorKind::Call { .. }
1204            | TerminatorKind::InlineAsm { .. }
1205            | TerminatorKind::Assert { .. } => return true,
1206
1207            TerminatorKind::TailCall { .. } => {
1208                unreachable!("tail calls can't be present in generators")
1209            }
1210        }
1211    }
1212
1213    // If we didn't find an unwinding terminator, the function cannot unwind.
1214    false
1215}
1216
/// Poison the coroutine when it unwinds: appends a cleanup block that sets the
/// discriminant to `POISONED` and resumes unwinding, then redirects every
/// unwind path in `body` to that block.
fn generate_poison_block_and_redirect_unwinds_there<'tcx>(
    transform: &TransformVisitor<'tcx>,
    body: &mut Body<'tcx>,
) {
    let source_info = SourceInfo::outermost(body.span);
    let poison_block = body.basic_blocks_mut().push(BasicBlockData::new_stmts(
        vec![transform.set_discr(VariantIdx::new(CoroutineArgs::POISONED), source_info)],
        Some(Terminator { source_info, kind: TerminatorKind::UnwindResume }),
        true,
    ));

    for (idx, block) in body.basic_blocks_mut().iter_enumerated_mut() {
        let source_info = block.terminator().source_info;

        if let TerminatorKind::UnwindResume = block.terminator().kind {
            // An existing `Resume` terminator is redirected to jump to our dedicated
            // "poisoning block" above.
            // (Skip the poison block itself, whose terminator is also `UnwindResume`.)
            if idx != poison_block {
                *block.terminator_mut() =
                    Terminator { source_info, kind: TerminatorKind::Goto { target: poison_block } };
            }
        } else if !block.is_cleanup
            // Any terminators that *can* unwind but don't have an unwind target set are also
            // pointed at our poisoning block (unless they're part of the cleanup path).
            && let Some(unwind @ UnwindAction::Continue) = block.terminator_mut().unwind_mut()
        {
            *unwind = UnwindAction::Cleanup(poison_block);
        }
    }
}
1248
/// Rewrites `body` into the coroutine's resume function (`Coroutine::resume`,
/// `Future::poll`, `Iterator::next`, ...): installs poison-on-unwind plumbing,
/// builds the entry switch over the state discriminant, and adjusts the `self`
/// argument to the calling convention of the coroutine kind.
#[tracing::instrument(level = "trace", skip(tcx, transform, body))]
fn create_coroutine_resume_function<'tcx>(
    tcx: TyCtxt<'tcx>,
    transform: TransformVisitor<'tcx>,
    body: &mut Body<'tcx>,
    can_return: bool,
    can_unwind: bool,
) {
    // Poison the coroutine when it unwinds
    if can_unwind {
        generate_poison_block_and_redirect_unwinds_there(&transform, body);
    }

    // One case per suspension point, dispatching to its resume block.
    let mut cases = create_cases(body, &transform, Operation::Resume);

    use rustc_middle::mir::AssertKind::{ResumedAfterPanic, ResumedAfterReturn};

    // Jump to the entry point on the unresumed
    cases.insert(0, (CoroutineArgs::UNRESUMED, START_BLOCK));

    // Panic when resumed on the returned or poisoned state
    if can_unwind {
        cases.insert(
            1,
            (
                CoroutineArgs::POISONED,
                insert_panic_block(tcx, body, ResumedAfterPanic(transform.coroutine_kind)),
            ),
        );
    }

    if can_return {
        let block = match transform.coroutine_kind {
            CoroutineKind::Desugared(CoroutineDesugaring::Async, _)
            | CoroutineKind::Coroutine(_) => {
                // For `async_drop_in_place<T>::{closure}` we just keep return Poll::Ready,
                // because async drop of such coroutine keeps polling original coroutine
                if tcx.is_async_drop_in_place_coroutine(body.source.def_id()) {
                    insert_poll_ready_block(tcx, body)
                } else {
                    insert_panic_block(tcx, body, ResumedAfterReturn(transform.coroutine_kind))
                }
            }
            CoroutineKind::Desugared(CoroutineDesugaring::AsyncGen, _)
            | CoroutineKind::Desugared(CoroutineDesugaring::Gen, _) => {
                transform.insert_none_ret_block(body)
            }
        };
        cases.insert(1, (CoroutineArgs::RETURNED, block));
    }

    // Any other discriminant value is impossible.
    let default_block = insert_term_block(body, TerminatorKind::Unreachable);
    insert_switch(body, cases, &transform, default_block);

    match transform.coroutine_kind {
        CoroutineKind::Coroutine(_)
        | CoroutineKind::Desugared(CoroutineDesugaring::Async | CoroutineDesugaring::AsyncGen, _) =>
        {
            make_coroutine_state_argument_pinned(tcx, body);
        }
        // Iterator::next doesn't accept a pinned argument,
        // unlike for all other coroutine kinds.
        CoroutineKind::Desugared(CoroutineDesugaring::Gen, _) => {
            make_coroutine_state_argument_indirect(tcx, body);
        }
    }

    // Make sure we remove dead blocks to remove
    // unrelated code from the drop part of the function
    simplify::remove_dead_blocks(body);

    pm::run_passes_no_validate(tcx, body, &[&abort_unwinding_calls::AbortUnwindingCalls], None);

    if let Some(dumper) = MirDumper::new(tcx, "coroutine_resume", body) {
        dumper.dump_mir(body);
    }
}
1326
/// An operation that can be performed on a coroutine.
#[derive(PartialEq, Copy, Clone, Debug)]
enum Operation {
    /// Resuming the suspended coroutine.
    Resume,
    /// Dropping the suspended coroutine.
    Drop,
}
1333
1334impl Operation {
1335    fn target_block(self, point: &SuspensionPoint<'_>) -> Option<BasicBlock> {
1336        match self {
1337            Operation::Resume => Some(point.resume),
1338            Operation::Drop => point.drop,
1339        }
1340    }
1341}
1342
/// For each suspension point that `operation` applies to, creates a block that
/// re-establishes storage liveness (and, when resuming, moves the resume
/// argument into its destination) before jumping to the real target.
///
/// Returns `(state, block)` pairs suitable for `insert_switch`.
#[tracing::instrument(level = "trace", skip(transform, body))]
fn create_cases<'tcx>(
    body: &mut Body<'tcx>,
    transform: &TransformVisitor<'tcx>,
    operation: Operation,
) -> Vec<(usize, BasicBlock)> {
    let source_info = SourceInfo::outermost(body.span);

    transform
        .suspension_points
        .iter()
        .filter_map(|point| {
            // Find the target for this suspension point, if applicable
            operation.target_block(point).map(|target| {
                let mut statements = Vec::new();

                // Create StorageLive instructions for locals with live storage
                for l in body.local_decls.indices() {
                    // Saved locals (`transform.remap`) live in the coroutine
                    // struct and always-live locals need no StorageLive.
                    let needs_storage_live = point.storage_liveness.contains(l)
                        && !transform.remap.contains(l)
                        && !transform.always_live_locals.contains(l);
                    if needs_storage_live {
                        statements.push(Statement::new(source_info, StatementKind::StorageLive(l)));
                    }
                }

                if operation == Operation::Resume && point.resume_arg != CTX_ARG.into() {
                    // Move the resume argument to the destination place of the `Yield` terminator
                    statements.push(Statement::new(
                        source_info,
                        StatementKind::Assign(Box::new((
                            point.resume_arg,
                            Rvalue::Use(Operand::Move(CTX_ARG.into())),
                        ))),
                    ));
                }

                // Then jump to the real target
                let block = body.basic_blocks_mut().push(BasicBlockData::new_stmts(
                    statements,
                    Some(Terminator { source_info, kind: TerminatorKind::Goto { target } }),
                    false,
                ));

                (point.state, block)
            })
        })
        .collect()
}
1392
/// Computes the witness `CoroutineLayout` for a coroutine: the types of all
/// locals live across its suspension points, with their storage conflicts.
/// Returns `None` if the coroutine type is already tainted by errors.
#[instrument(level = "debug", skip(tcx), ret)]
pub(crate) fn mir_coroutine_witnesses<'tcx>(
    tcx: TyCtxt<'tcx>,
    def_id: LocalDefId,
) -> Option<CoroutineLayout<'tcx>> {
    let (body, _) = tcx.mir_promoted(def_id);
    let body = body.borrow();
    let body = &*body;

    // The first argument is the coroutine type passed by value
    let coroutine_ty = body.local_decls[ty::CAPTURE_STRUCT_LOCAL].ty;

    let movable = match *coroutine_ty.kind() {
        ty::Coroutine(def_id, _) => tcx.coroutine_movability(def_id) == hir::Movability::Movable,
        ty::Error(_) => return None,
        _ => span_bug!(body.span, "unexpected coroutine type {}", coroutine_ty),
    };

    // The witness simply contains all locals live across suspend points.

    let always_live_locals = always_storage_live_locals(body);
    let liveness_info = locals_live_across_suspend_points(tcx, body, &always_live_locals, movable);

    // Extract locals which are live across suspension point into `layout`
    // `remap` gives a mapping from local indices onto coroutine struct indices
    // `storage_liveness` tells us which locals have live storage at suspension points
    let (_, coroutine_layout, _) = compute_layout(liveness_info, body);

    check_suspend_tys(tcx, &coroutine_layout, body);
    check_field_tys_sized(tcx, &coroutine_layout, def_id);

    Some(coroutine_layout)
}
1426
/// Checks that every field stored in the coroutine layout is `Sized`, and
/// reports fulfillment errors otherwise. Only relevant when the
/// `unsized_fn_params` feature is enabled; without it, typeck errors first.
fn check_field_tys_sized<'tcx>(
    tcx: TyCtxt<'tcx>,
    coroutine_layout: &CoroutineLayout<'tcx>,
    def_id: LocalDefId,
) {
    // No need to check if unsized_fn_params is disabled,
    // since we will error during typeck.
    if !tcx.features().unsized_fn_params() {
        return;
    }

    // FIXME(#132279): @lcnr believes that we may want to support coroutines
    // whose `Sized`-ness relies on the hidden types of opaques defined by the
    // parent function. In this case we'd have to be able to reveal only these
    // opaques here.
    let infcx = tcx.infer_ctxt().ignoring_regions().build(TypingMode::non_body_analysis());
    let param_env = tcx.param_env(def_id);

    let ocx = ObligationCtxt::new_with_diagnostics(&infcx);
    // Register a `Sized` obligation for every saved field, at its span.
    for field_ty in &coroutine_layout.field_tys {
        ocx.register_bound(
            ObligationCause::new(
                field_ty.source_info.span,
                def_id,
                ObligationCauseCode::SizedCoroutineInterior(def_id),
            ),
            param_env,
            field_ty.ty,
            tcx.require_lang_item(hir::LangItem::Sized, field_ty.source_info.span),
        );
    }

    let errors = ocx.evaluate_obligations_error_on_ambiguity();
    debug!(?errors);
    if !errors.is_empty() {
        infcx.err_ctxt().report_fulfillment_errors(errors);
    }
}
1465
impl<'tcx> crate::MirPass<'tcx> for StateTransform {
    #[instrument(level = "debug", skip(self, tcx, body), ret)]
    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
        debug!(def_id = ?body.source.def_id());

        let Some(old_yield_ty) = body.yield_ty() else {
            // This only applies to coroutines
            return;
        };
        tracing::trace!(def_id = ?body.source.def_id());

        let old_ret_ty = body.return_ty();

        // The drop shims for this coroutine are created further down in this very
        // pass, so none may exist yet.
        assert!(body.coroutine_drop().is_none() && body.coroutine_drop_async().is_none());

        if let Some(dumper) = MirDumper::new(tcx, "coroutine_before", body) {
            dumper.dump_mir(body);
        }

        // The first argument is the coroutine type passed by value
        let coroutine_ty = body.local_decls.raw[1].ty;
        let coroutine_kind = body.coroutine_kind().unwrap();

        // Get the discriminant type and args which typeck computed
        let ty::Coroutine(_, args) = coroutine_ty.kind() else {
            tcx.dcx().span_bug(body.span, format!("unexpected coroutine type {coroutine_ty}"));
        };
        let discr_ty = args.as_coroutine().discr_ty(tcx);

        // Compute the return type of the transformed function: the old return /
        // yield types get wrapped depending on the coroutine flavor.
        let new_ret_ty = match coroutine_kind {
            CoroutineKind::Desugared(CoroutineDesugaring::Async, _) => {
                // Compute Poll<return_ty>
                let poll_did = tcx.require_lang_item(LangItem::Poll, body.span);
                let poll_adt_ref = tcx.adt_def(poll_did);
                let poll_args = tcx.mk_args(&[old_ret_ty.into()]);
                Ty::new_adt(tcx, poll_adt_ref, poll_args)
            }
            CoroutineKind::Desugared(CoroutineDesugaring::Gen, _) => {
                // Compute Option<yield_ty>
                let option_did = tcx.require_lang_item(LangItem::Option, body.span);
                let option_adt_ref = tcx.adt_def(option_did);
                let option_args = tcx.mk_args(&[old_yield_ty.into()]);
                Ty::new_adt(tcx, option_adt_ref, option_args)
            }
            CoroutineKind::Desugared(CoroutineDesugaring::AsyncGen, _) => {
                // The yield ty is already `Poll<Option<yield_ty>>`
                old_yield_ty
            }
            CoroutineKind::Coroutine(_) => {
                // Compute CoroutineState<yield_ty, return_ty>
                let state_did = tcx.require_lang_item(LangItem::CoroutineState, body.span);
                let state_adt_ref = tcx.adt_def(state_did);
                let state_args = tcx.mk_args(&[old_yield_ty.into(), old_ret_ty.into()]);
                Ty::new_adt(tcx, state_adt_ref, state_args)
            }
        };

        // We need to insert clean drop for unresumed state and perform drop elaboration
        // (finally in open_drop_for_tuple) before async drop expansion.
        // Async drops, produced by this drop elaboration, will be expanded,
        // and corresponding futures kept in layout.
        let has_async_drops = matches!(
            coroutine_kind,
            CoroutineKind::Desugared(CoroutineDesugaring::Async | CoroutineDesugaring::AsyncGen, _)
        ) && has_expandable_async_drops(tcx, body, coroutine_ty);

        // Replace all occurrences of `ResumeTy` with `&mut Context<'_>` within async bodies.
        if matches!(
            coroutine_kind,
            CoroutineKind::Desugared(CoroutineDesugaring::Async | CoroutineDesugaring::AsyncGen, _)
        ) {
            let context_mut_ref = transform_async_context(tcx, body);
            expand_async_drops(tcx, body, context_mut_ref, coroutine_kind, coroutine_ty);

            if let Some(dumper) = MirDumper::new(tcx, "coroutine_async_drop_expand", body) {
                dumper.dump_mir(body);
            }
        } else {
            cleanup_async_drops(body);
        }

        // Figure out which locals are live across a suspension point. Immovable
        // coroutines are analyzed differently from movable ones.
        let always_live_locals = always_storage_live_locals(body);
        let movable = coroutine_kind.movability() == hir::Movability::Movable;
        let liveness_info =
            locals_live_across_suspend_points(tcx, body, &always_live_locals, movable);

        // Under -Zvalidate-mir, double-check that no assignment exists between two
        // saved locals whose storage is not marked as conflicting (see
        // `EnsureCoroutineFieldAssignmentsNeverAlias`). This must run *before*
        // `TransformVisitor` rewrites locals into coroutine fields.
        if tcx.sess.opts.unstable_opts.validate_mir {
            let mut vis = EnsureCoroutineFieldAssignmentsNeverAlias {
                assigned_local: None,
                saved_locals: &liveness_info.saved_locals,
                storage_conflicts: &liveness_info.storage_conflicts,
            };

            vis.visit_body(body);
        }

        // Extract locals which are live across suspension point into `layout`
        // `remap` gives a mapping from local indices onto coroutine struct indices
        // `storage_liveness` tells us which locals have live storage at suspension points
        let (remap, layout, storage_liveness) = compute_layout(liveness_info, body);

        let can_return = can_return(tcx, body, body.typing_env(tcx));

        // We rename RETURN_PLACE which has type mir.return_ty to new_ret_local
        // RETURN_PLACE then is a fresh unused local with type ret_ty.
        let new_ret_local = body.local_decls.push(LocalDecl::new(new_ret_ty, body.span));
        tracing::trace!(?new_ret_local);

        // Run the transformation which converts Places from Local to coroutine struct
        // accesses for locals in `remap`.
        // It also rewrites `return x` and `yield y` as writing a new coroutine state and returning
        // either `CoroutineState::Complete(x)` and `CoroutineState::Yielded(y)`,
        // or `Poll::Ready(x)` and `Poll::Pending` respectively depending on the coroutine kind.
        let mut transform = TransformVisitor {
            tcx,
            coroutine_kind,
            remap,
            storage_liveness,
            always_live_locals,
            suspension_points: Vec::new(),
            discr_ty,
            new_ret_local,
            old_ret_ty,
            old_yield_ty,
        };
        transform.visit_body(body);

        // Swap the actual `RETURN_PLACE` and the provisional `new_ret_local`.
        transform.replace_local(RETURN_PLACE, new_ret_local, body);

        // MIR parameters are not explicitly assigned-to when entering the MIR body.
        // If we want to save their values inside the coroutine state, we need to do so explicitly.
        let source_info = SourceInfo::outermost(body.span);
        let args_iter = body.args_iter();
        body.basic_blocks.as_mut()[START_BLOCK].statements.splice(
            0..0,
            args_iter.filter_map(|local| {
                // Only arguments that were remapped into the coroutine struct
                // need to be stored; the rest stay ordinary locals.
                let (ty, variant_index, idx) = transform.remap[local]?;
                let lhs = transform.make_field(variant_index, idx, ty);
                let rhs = Rvalue::Use(Operand::Move(local.into()));
                let assign = StatementKind::Assign(Box::new((lhs, rhs)));
                Some(Statement::new(source_info, assign))
            }),
        );

        // Update our MIR struct to reflect the changes we've made
        body.arg_count = 2; // self, resume arg
        body.spread_arg = None;

        // Remove the context argument within `gen` coroutine bodies.
        if matches!(coroutine_kind, CoroutineKind::Desugared(CoroutineDesugaring::Gen, _)) {
            transform_gen_context(body);
        }

        // The original arguments to the function are no longer arguments, mark them as such.
        // Otherwise they'll conflict with our new arguments, which although they don't have
        // argument_index set, will get emitted as unnamed arguments.
        for var in &mut body.var_debug_info {
            var.argument_index = None;
        }

        // The transformed function no longer yields or takes a resume type;
        // record the computed layout instead.
        body.coroutine.as_mut().unwrap().yield_ty = None;
        body.coroutine.as_mut().unwrap().resume_ty = None;
        body.coroutine.as_mut().unwrap().coroutine_layout = Some(layout);

        // FIXME: Drops, produced by insert_clean_drop + elaborate_coroutine_drops,
        // are currently sync only. To allow async for them, we need to move those calls
        // before expand_async_drops, and fix the related problems.
        //
        // Insert `drop(coroutine_struct)` which is used to drop upvars for coroutines in
        // the unresumed state.
        // This is expanded to a drop ladder in `elaborate_coroutine_drops`.
        let drop_clean = insert_clean_drop(tcx, body, has_async_drops);

        if let Some(dumper) = MirDumper::new(tcx, "coroutine_pre-elab", body) {
            dumper.dump_mir(body);
        }

        // Expand `drop(coroutine_struct)` to a drop ladder which destroys upvars.
        // If any upvars are moved out of, drop elaboration will handle upvar destruction.
        // However we need to also elaborate the code generated by `insert_clean_drop`.
        elaborate_coroutine_drops(tcx, body);

        if let Some(dumper) = MirDumper::new(tcx, "coroutine_post-transform", body) {
            dumper.dump_mir(body);
        }

        let can_unwind = can_unwind(tcx, body);

        // Create a copy of our MIR and use it to create the drop shim for the coroutine
        if has_async_drops {
            // If coroutine has async drops, generating async drop shim
            let mut drop_shim =
                create_coroutine_drop_shim_async(tcx, &transform, body, drop_clean, can_unwind);
            // Run derefer to fix Derefs that are not in the first place
            deref_finder(tcx, &mut drop_shim, false);
            body.coroutine.as_mut().unwrap().coroutine_drop_async = Some(drop_shim);
        } else {
            // If coroutine has no async drops, generating sync drop shim
            let mut drop_shim =
                create_coroutine_drop_shim(tcx, &transform, coroutine_ty, body, drop_clean);
            // Run derefer to fix Derefs that are not in the first place
            deref_finder(tcx, &mut drop_shim, false);
            body.coroutine.as_mut().unwrap().coroutine_drop = Some(drop_shim);

            // For coroutine with sync drop, generating async proxy for `future_drop_poll` call
            let mut proxy_shim = create_coroutine_drop_shim_proxy_async(tcx, body);
            deref_finder(tcx, &mut proxy_shim, false);
            body.coroutine.as_mut().unwrap().coroutine_drop_proxy_async = Some(proxy_shim);
        }

        // Create the Coroutine::resume / Future::poll function
        create_coroutine_resume_function(tcx, transform, body, can_return, can_unwind);

        // Run derefer to fix Derefs that are not in the first place
        deref_finder(tcx, body, false);
    }

    fn is_required(&self) -> bool {
        // Not optional: the rest of the compiler expects coroutine bodies to have
        // been lowered to state machines by this pass.
        true
    }
}
1688
/// Looks for any assignments between locals (e.g., `_4 = _5`) that will both be converted to fields
/// in the coroutine state machine but whose storage is not marked as conflicting
///
/// Validation needs to happen immediately *before* `TransformVisitor` is invoked, not after.
///
/// This condition would arise when the assignment is the last use of `_5` but the initial
/// definition of `_4` if we weren't extra careful to mark all locals used inside a statement as
/// conflicting. Non-conflicting coroutine saved locals may be stored at the same location within
/// the coroutine state machine, which would result in ill-formed MIR: the left-hand and right-hand
/// sides of an assignment may not alias. This caused a miscompilation in [#73137].
///
/// [#73137]: https://github.com/rust-lang/rust/issues/73137
struct EnsureCoroutineFieldAssignmentsNeverAlias<'a> {
    // Which locals are saved into the coroutine state, keyed by `Local`.
    saved_locals: &'a CoroutineSavedLocals,
    // `(a, b)` is set when the storage of saved locals `a` and `b` is marked as
    // conflicting (i.e. they must not be laid out at the same offset).
    storage_conflicts: &'a BitMatrix<CoroutineSavedLocal, CoroutineSavedLocal>,
    // The saved local on the left-hand side of the assignment currently being
    // visited, if any; set by `check_assigned_place` around the RHS visit.
    assigned_local: Option<CoroutineSavedLocal>,
}
1706
1707impl EnsureCoroutineFieldAssignmentsNeverAlias<'_> {
1708    fn saved_local_for_direct_place(&self, place: Place<'_>) -> Option<CoroutineSavedLocal> {
1709        if place.is_indirect() {
1710            return None;
1711        }
1712
1713        self.saved_locals.get(place.local)
1714    }
1715
1716    fn check_assigned_place(&mut self, place: Place<'_>, f: impl FnOnce(&mut Self)) {
1717        if let Some(assigned_local) = self.saved_local_for_direct_place(place) {
1718            assert!(self.assigned_local.is_none(), "`check_assigned_place` must not recurse");
1719
1720            self.assigned_local = Some(assigned_local);
1721            f(self);
1722            self.assigned_local = None;
1723        }
1724    }
1725}
1726
impl<'tcx> Visitor<'tcx> for EnsureCoroutineFieldAssignmentsNeverAlias<'_> {
    // Invoked (via the `super_*` defaults) for each place on the right-hand side
    // of an assignment being checked; `self.assigned_local` holds the LHS.
    fn visit_place(&mut self, place: &Place<'tcx>, context: PlaceContext, location: Location) {
        let Some(lhs) = self.assigned_local else {
            // This visitor only invokes `visit_place` for the right-hand side of an assignment
            // and only after setting `self.assigned_local`. However, the default impl of
            // `Visitor::super_body` may call `visit_place` with a `NonUseContext` for places
            // with debuginfo. Ignore them here.
            assert!(!context.is_use());
            return;
        };

        let Some(rhs) = self.saved_local_for_direct_place(*place) else { return };

        // Both sides are saved locals: their storage must have been marked as
        // conflicting, otherwise they might be overlapped in the layout.
        if !self.storage_conflicts.contains(lhs, rhs) {
            bug!(
                "Assignment between coroutine saved locals whose storage is not \
                    marked as conflicting: {:?}: {:?} = {:?}",
                location,
                lhs,
                rhs,
            );
        }
    }

    // Only plain assignments can introduce an aliasing LHS/RHS pair among
    // statements; all other statement kinds are deliberately ignored.
    fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
        match &statement.kind {
            StatementKind::Assign(box (lhs, rhs)) => {
                self.check_assigned_place(*lhs, |this| this.visit_rvalue(rhs, location));
            }

            StatementKind::FakeRead(..)
            | StatementKind::SetDiscriminant { .. }
            | StatementKind::StorageLive(_)
            | StatementKind::StorageDead(_)
            | StatementKind::Retag(..)
            | StatementKind::AscribeUserType(..)
            | StatementKind::PlaceMention(..)
            | StatementKind::Coverage(..)
            | StatementKind::Intrinsic(..)
            | StatementKind::ConstEvalCounter
            | StatementKind::BackwardIncompatibleDropHint { .. }
            | StatementKind::Nop => {}
        }
    }

    fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
        // Checking for aliasing in terminators is probably overkill, but until we have actual
        // semantics, we should be conservative here.
        match &terminator.kind {
            // A call writes its result to `destination`; treat the function and
            // argument operands as the "right-hand side".
            TerminatorKind::Call {
                func,
                args,
                destination,
                target: Some(_),
                unwind: _,
                call_source: _,
                fn_span: _,
            } => {
                self.check_assigned_place(*destination, |this| {
                    this.visit_operand(func, location);
                    for arg in args {
                        this.visit_operand(&arg.node, location);
                    }
                });
            }

            // A yield writes the resume value to `resume_arg`; the yielded
            // `value` is the "right-hand side".
            TerminatorKind::Yield { value, resume: _, resume_arg, drop: _ } => {
                self.check_assigned_place(*resume_arg, |this| this.visit_operand(value, location));
            }

            // FIXME: Does `asm!` have any aliasing requirements?
            TerminatorKind::InlineAsm { .. } => {}

            // Note: this arm also covers diverging calls (`target: None`),
            // which cannot observe their destination.
            TerminatorKind::Call { .. }
            | TerminatorKind::Goto { .. }
            | TerminatorKind::SwitchInt { .. }
            | TerminatorKind::UnwindResume
            | TerminatorKind::UnwindTerminate(_)
            | TerminatorKind::Return
            | TerminatorKind::TailCall { .. }
            | TerminatorKind::Unreachable
            | TerminatorKind::Drop { .. }
            | TerminatorKind::Assert { .. }
            | TerminatorKind::CoroutineDrop
            | TerminatorKind::FalseEdge { .. }
            | TerminatorKind::FalseUnwind { .. } => {}
        }
    }
}
1816
1817fn check_suspend_tys<'tcx>(tcx: TyCtxt<'tcx>, layout: &CoroutineLayout<'tcx>, body: &Body<'tcx>) {
1818    let mut linted_tys = FxHashSet::default();
1819
1820    for (variant, yield_source_info) in
1821        layout.variant_fields.iter().zip(&layout.variant_source_info)
1822    {
1823        debug!(?variant);
1824        for &local in variant {
1825            let decl = &layout.field_tys[local];
1826            debug!(?decl);
1827
1828            if !decl.ignore_for_traits && linted_tys.insert(decl.ty) {
1829                let Some(hir_id) = decl.source_info.scope.lint_root(&body.source_scopes) else {
1830                    continue;
1831                };
1832
1833                check_must_not_suspend_ty(
1834                    tcx,
1835                    decl.ty,
1836                    hir_id,
1837                    SuspendCheckData {
1838                        source_span: decl.source_info.span,
1839                        yield_span: yield_source_info.span,
1840                        plural_len: 1,
1841                        ..Default::default()
1842                    },
1843                );
1844            }
1845        }
1846    }
1847}
1848
// Context threaded through the recursive `must_not_suspend` check.
#[derive(Default)]
struct SuspendCheckData<'a> {
    // Span of the value that is held across the suspension point.
    source_span: Span,
    // Span of the suspension point (yield/await) itself.
    yield_span: Span,
    // Text prepended to the lint's description, e.g. "boxed " or "reference to ".
    descr_pre: &'a str,
    // Text appended to the lint's description, e.g. " trait object".
    descr_post: &'a str,
    // Element count used to pluralize the description (see `pluralize!`).
    plural_len: usize,
}
1857
// Returns whether it emitted a diagnostic or not
// Note that this fn and the following one are based on the code
// for creating must_use diagnostics
//
// Note that this technique was chosen over things like a `Suspend` marker trait
// as it is simpler and has precedent in the compiler
fn check_must_not_suspend_ty<'tcx>(
    tcx: TyCtxt<'tcx>,
    ty: Ty<'tcx>,
    hir_id: hir::HirId,
    data: SuspendCheckData<'_>,
) -> bool {
    if ty.is_unit() {
        return false;
    }

    let plural_suffix = pluralize!(data.plural_len);

    debug!("Checking must_not_suspend for {}", ty);

    match *ty.kind() {
        // `Box<T, A>`: check the boxed type first; the allocator is only
        // checked if the boxed type did not already emit (`||` short-circuits).
        ty::Adt(_, args) if ty.is_box() => {
            let boxed_ty = args.type_at(0);
            let allocator_ty = args.type_at(1);
            check_must_not_suspend_ty(
                tcx,
                boxed_ty,
                hir_id,
                SuspendCheckData { descr_pre: &format!("{}boxed ", data.descr_pre), ..data },
            ) || check_must_not_suspend_ty(
                tcx,
                allocator_ty,
                hir_id,
                SuspendCheckData { descr_pre: &format!("{}allocator ", data.descr_pre), ..data },
            )
        }
        ty::Adt(def, _) => check_must_not_suspend_def(tcx, def.did(), hir_id, data),
        // FIXME: support adding the attribute to TAITs
        // Opaque types: lint if any trait bound of the opaque carries the
        // attribute; stop at the first bound that emits.
        ty::Alias(ty::Opaque, ty::AliasTy { def_id: def, .. }) => {
            let mut has_emitted = false;
            for &(predicate, _) in tcx.explicit_item_bounds(def).skip_binder() {
                // We only look at the `DefId`, so it is safe to skip the binder here.
                if let ty::ClauseKind::Trait(ref poly_trait_predicate) =
                    predicate.kind().skip_binder()
                {
                    let def_id = poly_trait_predicate.trait_ref.def_id;
                    let descr_pre = &format!("{}implementer{} of ", data.descr_pre, plural_suffix);
                    if check_must_not_suspend_def(
                        tcx,
                        def_id,
                        hir_id,
                        SuspendCheckData { descr_pre, ..data },
                    ) {
                        has_emitted = true;
                        break;
                    }
                }
            }
            has_emitted
        }
        // Trait objects: lint if any trait in the object type carries the
        // attribute; stop at the first one that emits.
        ty::Dynamic(binder, _) => {
            let mut has_emitted = false;
            for predicate in binder.iter() {
                if let ty::ExistentialPredicate::Trait(ref trait_ref) = predicate.skip_binder() {
                    let def_id = trait_ref.def_id;
                    let descr_post = &format!(" trait object{}{}", plural_suffix, data.descr_post);
                    if check_must_not_suspend_def(
                        tcx,
                        def_id,
                        hir_id,
                        SuspendCheckData { descr_post, ..data },
                    ) {
                        has_emitted = true;
                        break;
                    }
                }
            }
            has_emitted
        }
        // Tuples: check every element (no early exit, so all offenders are
        // reported), mentioning the element index in the message.
        ty::Tuple(fields) => {
            let mut has_emitted = false;
            for (i, ty) in fields.iter().enumerate() {
                let descr_post = &format!(" in tuple element {i}");
                if check_must_not_suspend_ty(
                    tcx,
                    ty,
                    hir_id,
                    SuspendCheckData { descr_post, ..data },
                ) {
                    has_emitted = true;
                }
            }
            has_emitted
        }
        // Arrays: check the element type, pluralized by the array length.
        ty::Array(ty, len) => {
            let descr_pre = &format!("{}array{} of ", data.descr_pre, plural_suffix);
            check_must_not_suspend_ty(
                tcx,
                ty,
                hir_id,
                SuspendCheckData {
                    descr_pre,
                    // FIXME(must_not_suspend): This is wrong. We should handle printing unevaluated consts.
                    plural_len: len.try_to_target_usize(tcx).unwrap_or(0) as usize + 1,
                    ..data
                },
            )
        }
        // If drop tracking is enabled, we want to look through references, since the referent
        // may not be considered live across the await point.
        ty::Ref(_region, ty, _mutability) => {
            let descr_pre = &format!("{}reference{} to ", data.descr_pre, plural_suffix);
            check_must_not_suspend_ty(tcx, ty, hir_id, SuspendCheckData { descr_pre, ..data })
        }
        _ => false,
    }
}
1975
1976fn check_must_not_suspend_def(
1977    tcx: TyCtxt<'_>,
1978    def_id: DefId,
1979    hir_id: hir::HirId,
1980    data: SuspendCheckData<'_>,
1981) -> bool {
1982    if let Some(attr) = tcx.get_attr(def_id, sym::must_not_suspend) {
1983        let reason = attr.value_str().map(|s| errors::MustNotSuspendReason {
1984            span: data.source_span,
1985            reason: s.as_str().to_string(),
1986        });
1987        tcx.emit_node_span_lint(
1988            rustc_session::lint::builtin::MUST_NOT_SUSPEND,
1989            hir_id,
1990            data.source_span,
1991            errors::MustNotSupend {
1992                tcx,
1993                yield_sp: data.yield_span,
1994                reason,
1995                src_sp: data.source_span,
1996                pre: data.descr_pre,
1997                def_id,
1998                post: data.descr_post,
1999            },
2000        );
2001
2002        true
2003    } else {
2004        false
2005    }
2006}