// rustc_ast_lowering/expr.rs — lowering of AST expressions to HIR.

1use std::mem;
2use std::ops::ControlFlow;
3use std::sync::Arc;
4
5use rustc_ast::node_id::NodeMap;
6use rustc_ast::*;
7use rustc_ast_pretty::pprust::expr_to_string;
8use rustc_data_structures::stack::ensure_sufficient_stack;
9use rustc_errors::msg;
10use rustc_hir as hir;
11use rustc_hir::def::{DefKind, Res};
12use rustc_hir::{HirId, Target, find_attr};
13use rustc_middle::span_bug;
14use rustc_middle::ty::TyCtxt;
15use rustc_session::errors::report_lit_error;
16use rustc_span::{ByteSymbol, DUMMY_SP, DesugaringKind, Ident, Span, Spanned, Symbol, respan, sym};
17use thin_vec::{ThinVec, thin_vec};
18use visit::{Visitor, walk_expr};
19
20mod closure;
21
22use super::errors::{
23    AsyncCoroutinesNotSupported, AwaitOnlyInAsyncFnAndBlocks,
24    FunctionalRecordUpdateDestructuringAssignment, InclusiveRangeWithNoEnd, MatchArmWithNoBody,
25    MoveExprOnlyInPlainClosures, NeverPatternWithBody, NeverPatternWithGuard,
26    UnderscoreExprLhsAssign,
27};
28use super::{
29    GenericArgsMode, ImplTraitContext, LoweringContext, ParamMode, ResolverAstLoweringExt,
30};
31use crate::errors::{InvalidLegacyConstGenericArg, UseConstGenericArg, YieldInClosure};
32use crate::{AllowReturnTypeNotation, ImplTraitPosition, TryBlockScope};
33
/// AST visitor that breaks (returning the offending span) at the first node
/// that would create a fresh `DefId` when lowered: anonymous constants,
/// nested items, and `gen`/const-block/closure expressions.
pub(super) struct WillCreateDefIdsVisitor;
35
/// A `move(...)` expression found while looking up generated initializers.
struct MoveExprInitializer<'a> {
    /// The `NodeId` of the outer `move(...)` expression.
    id: NodeId,
    /// Span of the `move` token, used for the generated binding name.
    move_kw_span: Span,
    /// The expression inside `move(...)`; e.g. `foo.bar` in `move(foo.bar)`.
    expr: &'a Expr,
}
45
/// State for `move(...)` expressions found while lowering one plain closure body.
pub(super) struct MoveExprState<'hir> {
    // Maps the `NodeId` of each `move(...)` expression to the generated
    // `__move_expr_N` identifier and the `HirId` of its binding.
    pub(super) bindings: NodeMap<(Ident, HirId)>,
    // Every occurrence recorded for this body, in the order encountered.
    pub(super) occurrences: Vec<MoveExprOccurrence<'hir>>,
}
51
52impl<'hir> Default for MoveExprState<'hir> {
53    fn default() -> Self {
54        Self { bindings: NodeMap::default(), occurrences: Vec::new() }
55    }
56}
57
/// One recorded `move(...)` occurrence within a plain closure body.
pub(super) struct MoveExprOccurrence<'hir> {
    // `NodeId` of the original `move(...)` expression.
    id: NodeId,
    // The generated `__move_expr_N` identifier it was replaced with.
    ident: Ident,
    // Pattern binding the generated identifier (built via `pat_ident`).
    pat: &'hir hir::Pat<'hir>,
    // `HirId` of the generated binding.
    binding: HirId,
    // `true` for a `move(...)` written directly in the body; `false` for
    // occurrences discovered nested inside another initializer
    // (see the calls in the `ExprKind::Move` arm of `lower_expr_mut`).
    explicit_capture: bool,
}
65
/// Looks up the initializer expression for each `move(...)` occurrence.
struct MoveExprInitializerFinder<'a> {
    // Collected initializers, innermost-first (the visitor recurses before
    // pushing the enclosing `move(...)`).
    initializers: Vec<MoveExprInitializer<'a>>,
}
70
71impl<'a> MoveExprInitializerFinder<'a> {
72    fn collect(expr: &'a Expr) -> Vec<MoveExprInitializer<'a>> {
73        let mut this = Self { initializers: Vec::new() };
74        this.visit_expr(expr);
75        this.initializers
76    }
77}
78
impl<'a> Visitor<'a> for MoveExprInitializerFinder<'a> {
    fn visit_expr(&mut self, expr: &'a Expr) {
        match &expr.kind {
            ExprKind::Move(inner, move_kw_span) => {
                // Recurse first so nested `move(...)` expressions are pushed
                // before the enclosing one (innermost-first order).
                self.visit_expr(inner);
                self.initializers.push(MoveExprInitializer {
                    id: expr.id,
                    move_kw_span: *move_kw_span,
                    expr: inner,
                });
            }
            // Do not descend into nodes that form their own body: a
            // `move(...)` inside a closure, `gen` block or const block
            // belongs to that inner body, not to the one scanned here.
            ExprKind::Closure(..) | ExprKind::Gen(..) | ExprKind::ConstBlock(..) => {}
            _ => walk_expr(self, expr),
        }
    }

    // Nested items introduce separate bodies; skip them entirely.
    fn visit_item(&mut self, _: &'a Item) {}
}
97
impl<'v> rustc_ast::visit::Visitor<'v> for WillCreateDefIdsVisitor {
    // `Break(span)` carries the span of the first def-creating node found.
    type Result = ControlFlow<Span>;

    // Anonymous constants get their own `DefId` when lowered.
    fn visit_anon_const(&mut self, c: &'v AnonConst) -> Self::Result {
        ControlFlow::Break(c.value.span)
    }

    // Nested items likewise create defs.
    fn visit_item(&mut self, item: &'v Item) -> Self::Result {
        ControlFlow::Break(item.span)
    }

    fn visit_expr(&mut self, ex: &'v Expr) -> Self::Result {
        match ex.kind {
            // These expression kinds produce bodies with their own `DefId`s.
            ExprKind::Gen(..) | ExprKind::ConstBlock(..) | ExprKind::Closure(..) => {
                ControlFlow::Break(ex.span)
            }
            _ => walk_expr(self, ex),
        }
    }
}
118
119impl<'hir> LoweringContext<'_, 'hir> {
120    fn with_move_expr_bindings<T>(
121        &mut self,
122        state: Option<MoveExprState<'hir>>,
123        f: impl FnOnce(&mut Self) -> T,
124    ) -> (T, Option<MoveExprState<'hir>>) {
125        self.move_expr_bindings.push(state);
126        let result = f(self);
127        let state = self.move_expr_bindings.pop().unwrap_or_else(|| {
128            ::rustc_middle::util::bug::span_bug_fmt(DUMMY_SP,
    format_args!("`move_expr_bindings` stack was empty after lowering"))span_bug!(DUMMY_SP, "`move_expr_bindings` stack was empty after lowering")
129        });
130        (result, state)
131    }
132
133    fn record_move_expr(
134        &mut self,
135        id: NodeId,
136        inner: &Expr,
137        move_kw_span: Span,
138        explicit_capture: bool,
139    ) -> (Ident, HirId) {
140        let index = self
141            .move_expr_bindings
142            .last()
143            .and_then(|state| state.as_ref())
144            .map_or(0, |state| state.occurrences.len());
145        let ident = Ident::from_str_and_span(&::alloc::__export::must_use({
        ::alloc::fmt::format(format_args!("__move_expr_{0}", index))
    })format!("__move_expr_{index}"), move_kw_span);
146        let (pat, binding) = self.pat_ident(inner.span, ident);
147        let Some(state) = self.move_expr_bindings.last_mut().and_then(|state| state.as_mut())
148        else {
149            ::rustc_middle::util::bug::span_bug_fmt(move_kw_span,
    format_args!("`move(...)` lowered without a plain closure body state"));span_bug!(move_kw_span, "`move(...)` lowered without a plain closure body state");
150        };
151        state.bindings.insert(id, (ident, binding));
152        state.occurrences.push(MoveExprOccurrence { id, ident, pat, binding, explicit_capture });
153        (ident, binding)
154    }
155
    /// Lowers a slice of expressions into an arena-allocated HIR slice.
    fn lower_exprs(&mut self, exprs: &[Box<Expr>]) -> &'hir [hir::Expr<'hir>] {
        self.arena.alloc_from_iter(exprs.iter().map(|x| self.lower_expr_mut(x)))
    }
159
    /// Lowers a single expression and arena-allocates the result.
    pub(super) fn lower_expr(&mut self, e: &Expr) -> &'hir hir::Expr<'hir> {
        self.arena.alloc(self.lower_expr_mut(e))
    }
163
164    pub(super) fn lower_expr_mut(&mut self, e: &Expr) -> hir::Expr<'hir> {
165        ensure_sufficient_stack(|| {
166            let mut span = self.lower_span(e.span);
167            match &e.kind {
168                // Parenthesis expression does not have a HirId and is handled specially.
169                ExprKind::Paren(ex) => {
170                    let mut ex = self.lower_expr_mut(ex);
171                    // Include parens in span, but only if it is a super-span.
172                    if e.span.contains(ex.span) {
173                        ex.span = self.lower_span(e.span.with_ctxt(ex.span.ctxt()));
174                    }
175                    // Merge attributes into the inner expression.
176                    if !e.attrs.is_empty() {
177                        let old_attrs = self.attrs.get(&ex.hir_id.local_id).copied().unwrap_or(&[]);
178                        let new_attrs = self
179                            .lower_attrs_vec(&e.attrs, e.span, ex.hir_id, Target::from_expr(e))
180                            .into_iter()
181                            .chain(old_attrs.iter().cloned());
182                        let new_attrs = &*self.arena.alloc_from_iter(new_attrs);
183                        if new_attrs.is_empty() {
184                            return ex;
185                        }
186                        self.attrs.insert(ex.hir_id.local_id, new_attrs);
187                    }
188                    return ex;
189                }
190                // Desugar `ExprForLoop`
191                // from: `[opt_ident]: for await? <pat> in <iter> <body>`
192                //
193                // This also needs special handling because the HirId of the returned `hir::Expr` will not
194                // correspond to the `e.id`, so `lower_expr_for` handles attribute lowering itself.
195                ExprKind::ForLoop { pat, iter, body, label, kind } => {
196                    return self.lower_expr_for(e, pat, iter, body, *label, *kind);
197                }
198                ExprKind::Closure(closure) => return self.lower_expr_closure_expr(e, closure),
199                _ => (),
200            }
201
202            let expr_hir_id = self.lower_node_id(e.id);
203            self.lower_attrs(expr_hir_id, &e.attrs, e.span, Target::from_expr(e));
204
205            let kind = match &e.kind {
206                ExprKind::Array(exprs) => hir::ExprKind::Array(self.lower_exprs(exprs)),
207                ExprKind::ConstBlock(c) => hir::ExprKind::ConstBlock(self.lower_const_block(c)),
208                ExprKind::Repeat(expr, count) => {
209                    let expr = self.lower_expr(expr);
210                    let count = self.lower_array_length_to_const_arg(count);
211                    hir::ExprKind::Repeat(expr, count)
212                }
213                ExprKind::Tup(elts) => hir::ExprKind::Tup(self.lower_exprs(elts)),
214                ExprKind::Call(f, args) => {
215                    if let Some(legacy_args) = self.resolver.legacy_const_generic_args(f, self.tcx)
216                    {
217                        self.lower_legacy_const_generics((**f).clone(), args.clone(), &legacy_args)
218                    } else {
219                        let f = self.lower_expr(f);
220                        hir::ExprKind::Call(f, self.lower_exprs(args))
221                    }
222                }
223                ExprKind::MethodCall(MethodCall { seg, receiver, args, span }) => {
224                    let hir_seg = self.arena.alloc(self.lower_path_segment(
225                        e.span,
226                        seg,
227                        ParamMode::Optional,
228                        GenericArgsMode::Err,
229                        ImplTraitContext::Disallowed(ImplTraitPosition::Path),
230                        // Method calls can't have bound modifiers
231                        None,
232                    ));
233                    let receiver = self.lower_expr(receiver);
234                    let args =
235                        self.arena.alloc_from_iter(args.iter().map(|x| self.lower_expr_mut(x)));
236                    hir::ExprKind::MethodCall(hir_seg, receiver, args, self.lower_span(*span))
237                }
238                ExprKind::Binary(binop, lhs, rhs) => {
239                    let binop = self.lower_binop(*binop);
240                    let lhs = self.lower_expr(lhs);
241                    let rhs = self.lower_expr(rhs);
242                    hir::ExprKind::Binary(binop, lhs, rhs)
243                }
244                ExprKind::Unary(op, ohs) => {
245                    let op = self.lower_unop(*op);
246                    let ohs = self.lower_expr(ohs);
247                    hir::ExprKind::Unary(op, ohs)
248                }
249                ExprKind::Lit(token_lit) => hir::ExprKind::Lit(self.lower_lit(token_lit, e.span)),
250                ExprKind::IncludedBytes(byte_sym) => {
251                    let lit = respan(
252                        self.lower_span(e.span),
253                        LitKind::ByteStr(*byte_sym, StrStyle::Cooked),
254                    );
255                    hir::ExprKind::Lit(lit)
256                }
257                ExprKind::Cast(expr, ty) => {
258                    let expr = self.lower_expr(expr);
259                    let ty = self
260                        .lower_ty_alloc(ty, ImplTraitContext::Disallowed(ImplTraitPosition::Cast));
261                    hir::ExprKind::Cast(expr, ty)
262                }
263                ExprKind::Type(expr, ty) => {
264                    let expr = self.lower_expr(expr);
265                    let ty = self
266                        .lower_ty_alloc(ty, ImplTraitContext::Disallowed(ImplTraitPosition::Cast));
267                    hir::ExprKind::Type(expr, ty)
268                }
269                ExprKind::AddrOf(k, m, ohs) => {
270                    let ohs = self.lower_expr(ohs);
271                    hir::ExprKind::AddrOf(*k, *m, ohs)
272                }
273                ExprKind::Let(pat, scrutinee, span, recovered) => {
274                    hir::ExprKind::Let(self.arena.alloc(hir::LetExpr {
275                        span: self.lower_span(*span),
276                        pat: self.lower_pat(pat),
277                        ty: None,
278                        init: self.lower_expr(scrutinee),
279                        recovered: *recovered,
280                    }))
281                }
282                ExprKind::If(cond, then, else_opt) => {
283                    self.lower_expr_if(cond, then, else_opt.as_deref())
284                }
285                ExprKind::While(cond, body, opt_label) => {
286                    self.with_loop_scope(expr_hir_id, |this| {
287                        let span =
288                            this.mark_span_with_reason(DesugaringKind::WhileLoop, e.span, None);
289                        let opt_label = this.lower_label(*opt_label, e.id, expr_hir_id);
290                        this.lower_expr_while_in_loop_scope(span, cond, body, opt_label)
291                    })
292                }
293                ExprKind::Loop(body, opt_label, span) => {
294                    self.with_loop_scope(expr_hir_id, |this| {
295                        let opt_label = this.lower_label(*opt_label, e.id, expr_hir_id);
296                        hir::ExprKind::Loop(
297                            this.lower_block(body, false),
298                            opt_label,
299                            hir::LoopSource::Loop,
300                            this.lower_span(*span),
301                        )
302                    })
303                }
304                ExprKind::TryBlock(body, opt_ty) => {
305                    self.lower_expr_try_block(body, opt_ty.as_deref())
306                }
307                ExprKind::Match(expr, arms, kind) => hir::ExprKind::Match(
308                    self.lower_expr(expr),
309                    self.arena.alloc_from_iter(arms.iter().map(|x| self.lower_arm(x))),
310                    match kind {
311                        MatchKind::Prefix => hir::MatchSource::Normal,
312                        MatchKind::Postfix => hir::MatchSource::Postfix,
313                    },
314                ),
315                ExprKind::Await(expr, await_kw_span) => self.lower_expr_await(*await_kw_span, expr),
316                ExprKind::Move(inner, move_kw_span) => {
317                    if !self.tcx.features().move_expr() {
318                        return self.expr_err(*move_kw_span, self.dcx().has_errors().unwrap());
319                    }
320                    if let Some(state) = self.move_expr_bindings.last().and_then(Option::as_ref) {
321                        let existing = state.bindings.get(&e.id).copied();
322                        let (ident, binding) = existing.unwrap_or_else(|| {
323                            for nested in MoveExprInitializerFinder::collect(inner) {
324                                self.record_move_expr(
325                                    nested.id,
326                                    nested.expr,
327                                    nested.move_kw_span,
328                                    false,
329                                );
330                            }
331                            self.record_move_expr(e.id, inner, *move_kw_span, true)
332                        });
333                        hir::ExprKind::Path(hir::QPath::Resolved(
334                            None,
335                            self.arena.alloc(hir::Path {
336                                span: self.lower_span(e.span),
337                                res: Res::Local(binding),
338                                segments: self.arena.alloc_from_iter([hir::PathSegment::new(self.lower_ident(ident),
                self.next_id(), Res::Local(binding))])arena_vec![
339                                    self;
340                                    hir::PathSegment::new(
341                                        self.lower_ident(ident),
342                                        self.next_id(),
343                                        Res::Local(binding),
344                                    )
345                                ],
346                            }),
347                        ))
348                    } else {
349                        let guar = self
350                            .dcx()
351                            .emit_err(MoveExprOnlyInPlainClosures { span: *move_kw_span });
352                        hir::ExprKind::Err(guar)
353                    }
354                }
355                ExprKind::Use(expr, use_kw_span) => self.lower_expr_use(*use_kw_span, expr),
356                ExprKind::Gen(capture_clause, block, genblock_kind, decl_span) => {
357                    let desugaring_kind = match genblock_kind {
358                        GenBlockKind::Async => hir::CoroutineDesugaring::Async,
359                        GenBlockKind::Gen => hir::CoroutineDesugaring::Gen,
360                        GenBlockKind::AsyncGen => hir::CoroutineDesugaring::AsyncGen,
361                    };
362                    self.make_desugared_coroutine_expr(
363                        *capture_clause,
364                        e.id,
365                        None,
366                        *decl_span,
367                        e.span,
368                        desugaring_kind,
369                        hir::CoroutineSource::Block,
370                        |this| {
371                            this.with_new_scopes(e.span, |this| {
372                                let (expr, _) = this.with_move_expr_bindings(None, |this| {
373                                    this.lower_block_expr(block)
374                                });
375                                expr
376                            })
377                        },
378                    )
379                }
380                ExprKind::Block(blk, opt_label) => {
381                    // Different from loops, label of block resolves to block id rather than
382                    // expr node id.
383                    let block_hir_id = self.lower_node_id(blk.id);
384                    let opt_label = self.lower_label(*opt_label, blk.id, block_hir_id);
385                    let hir_block = self.arena.alloc(self.lower_block_noalloc(
386                        block_hir_id,
387                        blk,
388                        opt_label.is_some(),
389                    ));
390                    hir::ExprKind::Block(hir_block, opt_label)
391                }
392                ExprKind::Assign(el, er, span) => self.lower_expr_assign(el, er, *span, e.span),
393                ExprKind::AssignOp(op, el, er) => hir::ExprKind::AssignOp(
394                    self.lower_assign_op(*op),
395                    self.lower_expr(el),
396                    self.lower_expr(er),
397                ),
398                ExprKind::Field(el, ident) => {
399                    hir::ExprKind::Field(self.lower_expr(el), self.lower_ident(*ident))
400                }
401                ExprKind::Index(el, er, brackets_span) => hir::ExprKind::Index(
402                    self.lower_expr(el),
403                    self.lower_expr(er),
404                    self.lower_span(*brackets_span),
405                ),
406                ExprKind::Range(e1, e2, lims) => {
407                    span = self.mark_span_with_reason(DesugaringKind::RangeExpr, span, None);
408                    self.lower_expr_range(span, e1.as_deref(), e2.as_deref(), *lims)
409                }
410                ExprKind::Underscore => {
411                    let guar = self.dcx().emit_err(UnderscoreExprLhsAssign { span: e.span });
412                    hir::ExprKind::Err(guar)
413                }
414                ExprKind::Path(qself, path) => {
415                    let qpath = self.lower_qpath(
416                        e.id,
417                        qself,
418                        path,
419                        ParamMode::Optional,
420                        AllowReturnTypeNotation::No,
421                        ImplTraitContext::Disallowed(ImplTraitPosition::Path),
422                        None,
423                    );
424                    hir::ExprKind::Path(qpath)
425                }
426                ExprKind::Break(opt_label, opt_expr) => {
427                    let opt_expr = opt_expr.as_ref().map(|x| self.lower_expr(x));
428                    hir::ExprKind::Break(self.lower_jump_destination(e.id, *opt_label), opt_expr)
429                }
430                ExprKind::Continue(opt_label) => {
431                    hir::ExprKind::Continue(self.lower_jump_destination(e.id, *opt_label))
432                }
433                ExprKind::Ret(e) => {
434                    let expr = e.as_ref().map(|x| self.lower_expr(x));
435                    self.checked_return(expr)
436                }
437                ExprKind::Yeet(sub_expr) => self.lower_expr_yeet(e.span, sub_expr.as_deref()),
438                ExprKind::Become(sub_expr) => {
439                    let sub_expr = self.lower_expr(sub_expr);
440                    hir::ExprKind::Become(sub_expr)
441                }
442                ExprKind::InlineAsm(asm) => {
443                    hir::ExprKind::InlineAsm(self.lower_inline_asm(e.span, asm))
444                }
445                ExprKind::FormatArgs(fmt) => self.lower_format_args(e.span, fmt),
446                ExprKind::OffsetOf(container, fields) => hir::ExprKind::OffsetOf(
447                    self.lower_ty_alloc(
448                        container,
449                        ImplTraitContext::Disallowed(ImplTraitPosition::OffsetOf),
450                    ),
451                    self.arena.alloc_from_iter(fields.iter().map(|&ident| self.lower_ident(ident))),
452                ),
453                ExprKind::Struct(se) => {
454                    let rest = match se.rest {
455                        StructRest::Base(ref e) => hir::StructTailExpr::Base(self.lower_expr(e)),
456                        StructRest::Rest(sp) => {
457                            hir::StructTailExpr::DefaultFields(self.lower_span(sp))
458                        }
459                        StructRest::None => hir::StructTailExpr::None,
460                        StructRest::NoneWithError(guar) => hir::StructTailExpr::NoneWithError(guar),
461                    };
462                    hir::ExprKind::Struct(
463                        self.arena.alloc(self.lower_qpath(
464                            e.id,
465                            &se.qself,
466                            &se.path,
467                            ParamMode::Optional,
468                            AllowReturnTypeNotation::No,
469                            ImplTraitContext::Disallowed(ImplTraitPosition::Path),
470                            None,
471                        )),
472                        self.arena
473                            .alloc_from_iter(se.fields.iter().map(|x| self.lower_expr_field(x))),
474                        rest,
475                    )
476                }
477                ExprKind::Yield(kind) => self.lower_expr_yield(e.span, kind.expr().map(|x| &**x)),
478                ExprKind::Err(guar) => hir::ExprKind::Err(*guar),
479
480                ExprKind::UnsafeBinderCast(kind, expr, ty) => hir::ExprKind::UnsafeBinderCast(
481                    *kind,
482                    self.lower_expr(expr),
483                    ty.as_ref().map(|ty| {
484                        self.lower_ty_alloc(
485                            ty,
486                            ImplTraitContext::Disallowed(ImplTraitPosition::Cast),
487                        )
488                    }),
489                ),
490
491                ExprKind::Dummy => {
492                    ::rustc_middle::util::bug::span_bug_fmt(e.span,
    format_args!("lowered ExprKind::Dummy"))span_bug!(e.span, "lowered ExprKind::Dummy")
493                }
494
495                ExprKind::Try(sub_expr) => self.lower_expr_try(e.span, sub_expr),
496
497                ExprKind::Paren(_) | ExprKind::ForLoop { .. } | ExprKind::Closure(..) => {
498                    {
    ::core::panicking::panic_fmt(format_args!("internal error: entered unreachable code: {0}",
            format_args!("already handled")));
}unreachable!("already handled")
499                }
500
501                ExprKind::MacCall(_) => {
    ::core::panicking::panic_fmt(format_args!("{0:?} shouldn\'t exist here",
            e.span));
}panic!("{:?} shouldn't exist here", e.span),
502            };
503
504            hir::Expr { hir_id: expr_hir_id, kind, span }
505        })
506    }
507
    /// Lowers an inline `const { ... }` block to a `hir::ConstBlock`,
    /// creating a fresh scope (and def/body) for it. A `None` `move(...)`
    /// state is pushed so `move(...)` inside the block is rejected rather
    /// than captured into the surrounding closure.
    pub(crate) fn lower_const_block(&mut self, c: &AnonConst) -> hir::ConstBlock {
        self.with_new_scopes(c.value.span, |this| {
            let def_id = this.local_def_id(c.id);
            let hir_id = this.lower_node_id(c.id);
            let (body, _) = this.with_move_expr_bindings(None, |this| {
                this.lower_const_body(c.value.span, Some(&c.value))
            });
            hir::ConstBlock { def_id, hir_id, body }
        })
    }
518
519    pub(crate) fn lower_lit(&mut self, token_lit: &token::Lit, span: Span) -> hir::Lit {
520        let lit_kind = match LitKind::from_token_lit(*token_lit) {
521            Ok(lit_kind) => lit_kind,
522            Err(err) => {
523                let guar = report_lit_error(&self.tcx.sess.psess, err, *token_lit, span);
524                LitKind::Err(guar)
525            }
526        };
527        respan(self.lower_span(span), lit_kind)
528    }
529
    /// Maps an AST unary operator to its HIR counterpart (a 1:1 mapping).
    fn lower_unop(&mut self, u: UnOp) -> hir::UnOp {
        match u {
            UnOp::Deref => hir::UnOp::Deref,
            UnOp::Not => hir::UnOp::Not,
            UnOp::Neg => hir::UnOp::Neg,
        }
    }
537
538    fn lower_binop(&mut self, b: BinOp) -> BinOp {
539        Spanned { node: b.node, span: self.lower_span(b.span) }
540    }
541
542    fn lower_assign_op(&mut self, a: AssignOp) -> AssignOp {
543        Spanned { node: a.node, span: self.lower_span(a.span) }
544    }
545
546    fn lower_legacy_const_generics(
547        &mut self,
548        mut f: Expr,
549        args: ThinVec<Box<Expr>>,
550        legacy_args_idx: &[usize],
551    ) -> hir::ExprKind<'hir> {
552        let ExprKind::Path(None, path) = &mut f.kind else {
553            ::core::panicking::panic("internal error: entered unreachable code");unreachable!();
554        };
555
556        let mut error = None;
557        let mut invalid_expr_error = |tcx: TyCtxt<'_>, span| {
558            // Avoid emitting the error multiple times.
559            if error.is_none() {
560                let mut const_args = ::alloc::vec::Vec::new()vec![];
561                let mut other_args = ::alloc::vec::Vec::new()vec![];
562                for (idx, arg) in args.iter().enumerate() {
563                    if legacy_args_idx.contains(&idx) {
564                        const_args.push(::alloc::__export::must_use({
        ::alloc::fmt::format(format_args!("{{ {0} }}", expr_to_string(arg)))
    })format!("{{ {} }}", expr_to_string(arg)));
565                    } else {
566                        other_args.push(expr_to_string(arg));
567                    }
568                }
569                let suggestion = UseConstGenericArg {
570                    end_of_fn: f.span.shrink_to_hi(),
571                    const_args: const_args.join(", "),
572                    other_args: other_args.join(", "),
573                    call_args: args[0].span.to(args.last().unwrap().span),
574                };
575                error = Some(tcx.dcx().emit_err(InvalidLegacyConstGenericArg { span, suggestion }));
576            }
577            error.unwrap()
578        };
579
580        // Split the arguments into const generics and normal arguments
581        let mut real_args = ::alloc::vec::Vec::new()vec![];
582        let mut generic_args = ThinVec::new();
583        for (idx, arg) in args.iter().cloned().enumerate() {
584            if legacy_args_idx.contains(&idx) {
585                let node_id = self.next_node_id();
586                self.create_def(node_id, None, DefKind::AnonConst, f.span);
587                let const_value =
588                    if let ControlFlow::Break(span) = WillCreateDefIdsVisitor.visit_expr(&arg) {
589                        Box::new(Expr {
590                            id: self.next_node_id(),
591                            kind: ExprKind::Err(invalid_expr_error(self.tcx, span)),
592                            span: f.span,
593                            attrs: [].into(),
594                            tokens: None,
595                        })
596                    } else {
597                        arg
598                    };
599
600                let anon_const = AnonConst {
601                    id: node_id,
602                    value: const_value,
603                    mgca_disambiguation: MgcaDisambiguation::AnonConst,
604                };
605                generic_args.push(AngleBracketedArg::Arg(GenericArg::Const(anon_const)));
606            } else {
607                real_args.push(arg);
608            }
609        }
610
611        // Add generic args to the last element of the path.
612        let last_segment = path.segments.last_mut().unwrap();
613        if !last_segment.args.is_none() {
    ::core::panicking::panic("assertion failed: last_segment.args.is_none()")
};assert!(last_segment.args.is_none());
614        last_segment.args = Some(Box::new(GenericArgs::AngleBracketed(AngleBracketedArgs {
615            span: DUMMY_SP,
616            args: generic_args,
617        })));
618
619        // Now lower everything as normal.
620        let f = self.lower_expr(&f);
621        hir::ExprKind::Call(f, self.lower_exprs(&real_args))
622    }
623
624    fn lower_expr_if(
625        &mut self,
626        cond: &Expr,
627        then: &Block,
628        else_opt: Option<&Expr>,
629    ) -> hir::ExprKind<'hir> {
630        let lowered_cond = self.lower_expr(cond);
631        let then_expr = self.lower_block_expr(then);
632        if let Some(rslt) = else_opt {
633            hir::ExprKind::If(
634                lowered_cond,
635                self.arena.alloc(then_expr),
636                Some(self.lower_expr(rslt)),
637            )
638        } else {
639            hir::ExprKind::If(lowered_cond, self.arena.alloc(then_expr), None)
640        }
641    }
642
643    // We desugar: `'label: while $cond $body` into:
644    //
645    // ```
646    // 'label: loop {
647    //   if { let _t = $cond; _t } {
648    //     $body
649    //   }
650    //   else {
651    //     break;
652    //   }
653    // }
654    // ```
655    //
656    // Wrap in a construct equivalent to `{ let _t = $cond; _t }`
657    // to preserve drop semantics since `while $cond { ... }` does not
658    // let temporaries live outside of `cond`.
659    fn lower_expr_while_in_loop_scope(
660        &mut self,
661        span: Span,
662        cond: &Expr,
663        body: &Block,
664        opt_label: Option<Label>,
665    ) -> hir::ExprKind<'hir> {
666        let lowered_cond = self.with_loop_condition_scope(|t| t.lower_expr(cond));
667        let then = self.lower_block_expr(body);
668        let expr_break = self.expr_break(span);
669        let stmt_break = self.stmt_expr(span, expr_break);
670        let else_blk = self.block_all(span, self.arena.alloc_from_iter([stmt_break])arena_vec![self; stmt_break], None);
671        let else_expr = self.arena.alloc(self.expr_block(else_blk));
672        let if_kind = hir::ExprKind::If(lowered_cond, self.arena.alloc(then), Some(else_expr));
673        let if_expr = self.expr(span, if_kind);
674        let block = self.block_expr(self.arena.alloc(if_expr));
675        let span = self.lower_span(span.with_hi(cond.span.hi()));
676        hir::ExprKind::Loop(block, opt_label, hir::LoopSource::While, span)
677    }
678
    /// Desugar `try { <stmts>; <expr> }` into `{ <stmts>; ::std::ops::Try::from_output(<expr>) }`,
    /// `try { <stmts>; }` into `{ <stmts>; ::std::ops::Try::from_output(()) }`
    /// and save the block id to use it as a break target for desugaring of the `?` operator.
    fn lower_expr_try_block(&mut self, body: &Block, opt_ty: Option<&Ty>) -> hir::ExprKind<'hir> {
        let body_hir_id = self.lower_node_id(body.id);
        // An explicit ascribed type (`try { .. }: T`) selects the "heterogeneous"
        // scope variant; otherwise the scope is "homogeneous". Either way the
        // block's HirId is recorded as the `?` break target for this try block.
        let new_scope = if opt_ty.is_some() {
            TryBlockScope::Heterogeneous(body_hir_id)
        } else {
            TryBlockScope::Homogeneous(body_hir_id)
        };
        let whole_block = self.with_try_block_scope(new_scope, |this| {
            let mut block = this.lower_block_noalloc(body_hir_id, body, true);

            // Final expression of the block (if present) or `()` with span at the end of block
            let (try_span, tail_expr) = if let Some(expr) = block.expr.take() {
                (
                    this.mark_span_with_reason(
                        DesugaringKind::TryBlock,
                        expr.span,
                        Some(Arc::clone(&this.allow_try_trait)),
                    ),
                    expr,
                )
            } else {
                // No tail expression: desugar as if the block ended in `()`.
                let try_span = this.mark_span_with_reason(
                    DesugaringKind::TryBlock,
                    this.tcx.sess.source_map().end_point(body.span),
                    Some(Arc::clone(&this.allow_try_trait)),
                );

                (try_span, this.expr_unit(try_span))
            };

            let ok_wrapped_span =
                this.mark_span_with_reason(DesugaringKind::TryBlock, tail_expr.span, None);

            // `::std::ops::Try::from_output($tail_expr)`
            block.expr = Some(this.wrap_in_try_constructor(
                hir::LangItem::TryTraitFromOutput,
                try_span,
                tail_expr,
                ok_wrapped_span,
            ));

            this.arena.alloc(block)
        });

        // With an ascribed type, wrap the whole block in a type-ascription
        // expression; otherwise emit the block directly.
        if let Some(ty) = opt_ty {
            let ty = self.lower_ty_alloc(ty, ImplTraitContext::Disallowed(ImplTraitPosition::Path));
            let block_expr = self.arena.alloc(self.expr_block(whole_block));
            hir::ExprKind::Type(block_expr, ty)
        } else {
            hir::ExprKind::Block(whole_block, None)
        }
    }
734
735    fn wrap_in_try_constructor(
736        &mut self,
737        lang_item: hir::LangItem,
738        method_span: Span,
739        expr: &'hir hir::Expr<'hir>,
740        overall_span: Span,
741    ) -> &'hir hir::Expr<'hir> {
742        let constructor = self.arena.alloc(self.expr_lang_item_path(method_span, lang_item));
743        self.expr_call(overall_span, constructor, std::slice::from_ref(expr))
744    }
745
746    fn lower_arm(&mut self, arm: &Arm) -> hir::Arm<'hir> {
747        let pat = self.lower_pat(&arm.pat);
748        let guard = arm.guard.as_ref().map(|guard| self.lower_expr(&guard.cond));
749        let hir_id = self.next_id();
750        let span = self.lower_span(arm.span);
751        self.lower_attrs(hir_id, &arm.attrs, arm.span, Target::Arm);
752        let is_never_pattern = pat.is_never_pattern();
753        // We need to lower the body even if it's unneeded for never pattern in match,
754        // ensure that we can get HirId for DefId if need (issue #137708).
755        let body = arm.body.as_ref().map(|x| self.lower_expr(x));
756        let body = if let Some(body) = body
757            && !is_never_pattern
758        {
759            body
760        } else {
761            // Either `body.is_none()` or `is_never_pattern` here.
762            if !is_never_pattern {
763                if self.tcx.features().never_patterns() {
764                    // If the feature is off we already emitted the error after parsing.
765                    let suggestion = span.shrink_to_hi();
766                    self.dcx().emit_err(MatchArmWithNoBody { span, suggestion });
767                }
768            } else if let Some(body) = &arm.body {
769                self.dcx().emit_err(NeverPatternWithBody { span: body.span });
770            } else if let Some(g) = &arm.guard {
771                self.dcx().emit_err(NeverPatternWithGuard { span: g.span() });
772            }
773
774            // We add a fake `loop {}` arm body so that it typecks to `!`. The mir lowering of never
775            // patterns ensures this loop is not reachable.
776            let block = self.arena.alloc(hir::Block {
777                stmts: &[],
778                expr: None,
779                hir_id: self.next_id(),
780                rules: hir::BlockCheckMode::DefaultBlock,
781                span,
782                targeted_by_break: false,
783            });
784            self.arena.alloc(hir::Expr {
785                hir_id: self.next_id(),
786                kind: hir::ExprKind::Loop(block, None, hir::LoopSource::Loop, span),
787                span,
788            })
789        };
790        hir::Arm { hir_id, pat, guard, body, span }
791    }
792
793    fn lower_capture_clause(&mut self, capture_clause: CaptureBy) -> CaptureBy {
794        match capture_clause {
795            CaptureBy::Ref => CaptureBy::Ref,
796            CaptureBy::Use { use_kw } => CaptureBy::Use { use_kw: self.lower_span(use_kw) },
797            CaptureBy::Value { move_kw } => CaptureBy::Value { move_kw: self.lower_span(move_kw) },
798        }
799    }
800
801    /// Lower/desugar a coroutine construct.
802    ///
803    /// In particular, this creates the correct async resume argument and `_task_context`.
804    ///
805    /// This results in:
806    ///
807    /// ```text
808    /// static move? |<_task_context?>| -> <return_ty> {
809    ///     <body>
810    /// }
811    /// ```
812    pub(super) fn make_desugared_coroutine_expr(
813        &mut self,
814        capture_clause: CaptureBy,
815        closure_node_id: NodeId,
816        return_ty: Option<hir::FnRetTy<'hir>>,
817        fn_decl_span: Span,
818        span: Span,
819        desugaring_kind: hir::CoroutineDesugaring,
820        coroutine_source: hir::CoroutineSource,
821        body: impl FnOnce(&mut Self) -> hir::Expr<'hir>,
822    ) -> hir::ExprKind<'hir> {
823        let closure_def_id = self.local_def_id(closure_node_id);
824        let coroutine_kind = hir::CoroutineKind::Desugared(desugaring_kind, coroutine_source);
825
826        // The `async` desugaring takes a resume argument and maintains a `task_context`,
827        // whereas a generator does not.
828        let (inputs, params, task_context): (&[_], &[_], _) = match desugaring_kind {
829            hir::CoroutineDesugaring::Async | hir::CoroutineDesugaring::AsyncGen => {
830                // Resume argument type: `ResumeTy`
831                let unstable_span = self.mark_span_with_reason(
832                    DesugaringKind::Async,
833                    self.lower_span(span),
834                    Some(Arc::clone(&self.allow_gen_future)),
835                );
836                let resume_ty =
837                    self.make_lang_item_qpath(hir::LangItem::ResumeTy, unstable_span, None);
838                let input_ty = hir::Ty {
839                    hir_id: self.next_id(),
840                    kind: hir::TyKind::Path(resume_ty),
841                    span: unstable_span,
842                };
843                let inputs = self.arena.alloc_from_iter([input_ty])arena_vec![self; input_ty];
844
845                // Lower the argument pattern/ident. The ident is used again in the `.await` lowering.
846                let (pat, task_context_hid) = self.pat_ident_binding_mode(
847                    span,
848                    Ident::with_dummy_span(sym::_task_context),
849                    hir::BindingMode::MUT,
850                );
851                let param = hir::Param {
852                    hir_id: self.next_id(),
853                    pat,
854                    ty_span: self.lower_span(span),
855                    span: self.lower_span(span),
856                };
857                let params = self.arena.alloc_from_iter([param])arena_vec![self; param];
858
859                (inputs, params, Some(task_context_hid))
860            }
861            hir::CoroutineDesugaring::Gen => (&[], &[], None),
862        };
863
864        let output =
865            return_ty.unwrap_or_else(|| hir::FnRetTy::DefaultReturn(self.lower_span(span)));
866
867        let fn_decl = self.arena.alloc(hir::FnDecl {
868            inputs,
869            output,
870            fn_decl_kind: hir::FnDeclFlags::default(),
871        });
872
873        let body = self.lower_body(move |this| {
874            this.coroutine_kind = Some(coroutine_kind);
875
876            let old_ctx = this.task_context;
877            if task_context.is_some() {
878                this.task_context = task_context;
879            }
880            let res = body(this);
881            this.task_context = old_ctx;
882
883            (params, res)
884        });
885
886        // `static |<_task_context?>| -> <return_ty> { <body> }`:
887        hir::ExprKind::Closure(self.arena.alloc(hir::Closure {
888            def_id: closure_def_id,
889            binder: hir::ClosureBinder::Default,
890            capture_clause: self.lower_capture_clause(capture_clause),
891            bound_generic_params: &[],
892            fn_decl,
893            body,
894            fn_decl_span: self.lower_span(fn_decl_span),
895            fn_arg_span: None,
896            kind: hir::ClosureKind::Coroutine(coroutine_kind),
897            constness: hir::Constness::NotConst,
898            explicit_captures: &[],
899        }))
900    }
901
902    /// Forwards a possible `#[track_caller]` annotation from `outer_hir_id` to
903    /// `inner_hir_id` in case the `async_fn_track_caller` feature is enabled.
904    pub(super) fn maybe_forward_track_caller(
905        &mut self,
906        span: Span,
907        outer_hir_id: HirId,
908        inner_hir_id: HirId,
909    ) {
910        if self.tcx.features().async_fn_track_caller()
911            && let Some(attrs) = self.attrs.get(&outer_hir_id.local_id)
912            && {
    {
            'done:
                {
                for i in *attrs {
                    #[allow(unused_imports)]
                    use rustc_hir::attrs::AttributeKind::*;
                    let i: &rustc_hir::Attribute = i;
                    match i {
                        rustc_hir::Attribute::Parsed(TrackCaller(_)) => {
                            break 'done Some(());
                        }
                        rustc_hir::Attribute::Unparsed(..) =>
                            {}
                            #[deny(unreachable_patterns)]
                            _ => {}
                    }
                }
                None
            }
        }.is_some()
}find_attr!(*attrs, TrackCaller(_))
913        {
914            let unstable_span = self.mark_span_with_reason(
915                DesugaringKind::Async,
916                span,
917                Some(Arc::clone(&self.allow_gen_future)),
918            );
919            self.lower_attrs(
920                inner_hir_id,
921                &[Attribute {
922                    kind: AttrKind::Normal(Box::new(NormalAttr::from_ident(Ident::new(
923                        sym::track_caller,
924                        span,
925                    )))),
926                    id: self.tcx.sess.psess.attr_id_generator.mk_attr_id(),
927                    style: AttrStyle::Outer,
928                    span: unstable_span,
929                }],
930                span,
931                Target::Fn,
932            );
933        }
934    }
935
936    /// Desugar `<expr>.await` into:
937    /// ```ignore (pseudo-rust)
938    /// match ::std::future::IntoFuture::into_future(<expr>) {
939    ///     mut __awaitee => loop {
940    ///         match unsafe { ::std::future::Future::poll(
941    ///             <::std::pin::Pin>::new_unchecked(&mut __awaitee),
942    ///             ::std::future::get_context(task_context),
943    ///         ) } {
944    ///             ::std::task::Poll::Ready(result) => break result,
945    ///             ::std::task::Poll::Pending => {}
946    ///         }
947    ///         task_context = yield ();
948    ///     }
949    /// }
950    /// ```
951    fn lower_expr_await(&mut self, await_kw_span: Span, expr: &Expr) -> hir::ExprKind<'hir> {
952        let expr = self.arena.alloc(self.lower_expr_mut(expr));
953        self.make_lowered_await(await_kw_span, expr, FutureKind::Future)
954    }
955
956    /// Takes an expr that has already been lowered and generates a desugared await loop around it
957    fn make_lowered_await(
958        &mut self,
959        await_kw_span: Span,
960        expr: &'hir hir::Expr<'hir>,
961        await_kind: FutureKind,
962    ) -> hir::ExprKind<'hir> {
963        let full_span = expr.span.to(await_kw_span);
964
965        let is_async_gen = match self.coroutine_kind {
966            Some(hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Async, _)) => false,
967            Some(hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::AsyncGen, _)) => true,
968            Some(hir::CoroutineKind::Coroutine(_))
969            | Some(hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Gen, _))
970            | None => {
971                // Lower to a block `{ EXPR; <error> }` so that the awaited expr
972                // is not accidentally orphaned.
973                let stmt_id = self.next_id();
974                let expr_err = self.expr(
975                    expr.span,
976                    hir::ExprKind::Err(self.dcx().emit_err(AwaitOnlyInAsyncFnAndBlocks {
977                        await_kw_span,
978                        item_span: self.current_item,
979                    })),
980                );
981                return hir::ExprKind::Block(
982                    self.block_all(
983                        expr.span,
984                        self.arena.alloc_from_iter([hir::Stmt {
                hir_id: stmt_id,
                kind: hir::StmtKind::Semi(expr),
                span: expr.span,
            }])arena_vec![self; hir::Stmt {
985                            hir_id: stmt_id,
986                            kind: hir::StmtKind::Semi(expr),
987                            span: expr.span,
988                        }],
989                        Some(self.arena.alloc(expr_err)),
990                    ),
991                    None,
992                );
993            }
994        };
995
996        let features = match await_kind {
997            FutureKind::Future if is_async_gen => Some(Arc::clone(&self.allow_async_gen)),
998            FutureKind::Future => None,
999            FutureKind::AsyncIterator => Some(Arc::clone(&self.allow_for_await)),
1000        };
1001        let span = self.mark_span_with_reason(DesugaringKind::Await, await_kw_span, features);
1002        let gen_future_span = self.mark_span_with_reason(
1003            DesugaringKind::Await,
1004            full_span,
1005            Some(Arc::clone(&self.allow_gen_future)),
1006        );
1007        let expr_hir_id = expr.hir_id;
1008
1009        // Note that the name of this binding must not be changed to something else because
1010        // debuggers and debugger extensions expect it to be called `__awaitee`. They use
1011        // this name to identify what is being awaited by a suspended async functions.
1012        let awaitee_ident = Ident::with_dummy_span(sym::__awaitee);
1013        let (awaitee_pat, awaitee_pat_hid) =
1014            self.pat_ident_binding_mode(gen_future_span, awaitee_ident, hir::BindingMode::MUT);
1015
1016        let task_context_ident = Ident::with_dummy_span(sym::_task_context);
1017
1018        // unsafe {
1019        //     ::std::future::Future::poll(
1020        //         ::std::pin::Pin::new_unchecked(&mut __awaitee),
1021        //         ::std::future::get_context(task_context),
1022        //     )
1023        // }
1024        let poll_expr = {
1025            let awaitee = self.expr_ident(span, awaitee_ident, awaitee_pat_hid);
1026            let ref_mut_awaitee = self.expr_mut_addr_of(span, awaitee);
1027
1028            let Some(task_context_hid) = self.task_context else {
1029                {
    ::core::panicking::panic_fmt(format_args!("internal error: entered unreachable code: {0}",
            format_args!("use of `await` outside of an async context.")));
};unreachable!("use of `await` outside of an async context.");
1030            };
1031
1032            let task_context = self.expr_ident_mut(span, task_context_ident, task_context_hid);
1033
1034            let new_unchecked = self.expr_call_lang_item_fn_mut(
1035                span,
1036                hir::LangItem::PinNewUnchecked,
1037                self.arena.alloc_from_iter([ref_mut_awaitee])arena_vec![self; ref_mut_awaitee],
1038            );
1039            let get_context = self.expr_call_lang_item_fn_mut(
1040                gen_future_span,
1041                hir::LangItem::GetContext,
1042                self.arena.alloc_from_iter([task_context])arena_vec![self; task_context],
1043            );
1044            let call = match await_kind {
1045                FutureKind::Future => self.expr_call_lang_item_fn(
1046                    span,
1047                    hir::LangItem::FuturePoll,
1048                    self.arena.alloc_from_iter([new_unchecked, get_context])arena_vec![self; new_unchecked, get_context],
1049                ),
1050                FutureKind::AsyncIterator => self.expr_call_lang_item_fn(
1051                    span,
1052                    hir::LangItem::AsyncIteratorPollNext,
1053                    self.arena.alloc_from_iter([new_unchecked, get_context])arena_vec![self; new_unchecked, get_context],
1054                ),
1055            };
1056            self.arena.alloc(self.expr_unsafe(span, call))
1057        };
1058
1059        // `::std::task::Poll::Ready(result) => break result`
1060        let loop_node_id = self.next_node_id();
1061        let loop_hir_id = self.lower_node_id(loop_node_id);
1062        let ready_arm = {
1063            let x_ident = Ident::with_dummy_span(sym::result);
1064            let (x_pat, x_pat_hid) = self.pat_ident(gen_future_span, x_ident);
1065            let x_expr = self.expr_ident(gen_future_span, x_ident, x_pat_hid);
1066            let ready_field = self.single_pat_field(gen_future_span, x_pat);
1067            let ready_pat = self.pat_lang_item_variant(span, hir::LangItem::PollReady, ready_field);
1068            let break_x = self.with_loop_scope(loop_hir_id, move |this| {
1069                let expr_break =
1070                    hir::ExprKind::Break(this.lower_loop_destination(None), Some(x_expr));
1071                this.arena.alloc(this.expr(gen_future_span, expr_break))
1072            });
1073            self.arm(ready_pat, break_x, span)
1074        };
1075
1076        // `::std::task::Poll::Pending => {}`
1077        let pending_arm = {
1078            let pending_pat = self.pat_lang_item_variant(span, hir::LangItem::PollPending, &[]);
1079            let empty_block = self.expr_block_empty(span);
1080            self.arm(pending_pat, empty_block, span)
1081        };
1082
1083        let inner_match_stmt = {
1084            let match_expr = self.expr_match(
1085                span,
1086                poll_expr,
1087                self.arena.alloc_from_iter([ready_arm, pending_arm])arena_vec![self; ready_arm, pending_arm],
1088                hir::MatchSource::AwaitDesugar,
1089            );
1090            self.stmt_expr(span, match_expr)
1091        };
1092
1093        // Depending on `async` of `async gen`:
1094        // async     - task_context = yield ();
1095        // async gen - task_context = yield ASYNC_GEN_PENDING;
1096        let yield_stmt = {
1097            let yielded = if is_async_gen {
1098                self.arena.alloc(self.expr_lang_item_path(span, hir::LangItem::AsyncGenPending))
1099            } else {
1100                self.expr_unit(span)
1101            };
1102
1103            let yield_expr = self.expr(
1104                span,
1105                hir::ExprKind::Yield(yielded, hir::YieldSource::Await { expr: Some(expr_hir_id) }),
1106            );
1107            let yield_expr = self.arena.alloc(yield_expr);
1108
1109            let Some(task_context_hid) = self.task_context else {
1110                {
    ::core::panicking::panic_fmt(format_args!("internal error: entered unreachable code: {0}",
            format_args!("use of `await` outside of an async context.")));
};unreachable!("use of `await` outside of an async context.");
1111            };
1112
1113            let lhs = self.expr_ident(span, task_context_ident, task_context_hid);
1114            let assign =
1115                self.expr(span, hir::ExprKind::Assign(lhs, yield_expr, self.lower_span(span)));
1116            self.stmt_expr(span, assign)
1117        };
1118
1119        let loop_block = self.block_all(span, self.arena.alloc_from_iter([inner_match_stmt, yield_stmt])arena_vec![self; inner_match_stmt, yield_stmt], None);
1120
1121        // loop { .. }
1122        let loop_expr = self.arena.alloc(hir::Expr {
1123            hir_id: loop_hir_id,
1124            kind: hir::ExprKind::Loop(
1125                loop_block,
1126                None,
1127                hir::LoopSource::Loop,
1128                self.lower_span(span),
1129            ),
1130            span: self.lower_span(span),
1131        });
1132
1133        // mut __awaitee => loop { ... }
1134        let awaitee_arm = self.arm(awaitee_pat, loop_expr, span);
1135
1136        // `match ::std::future::IntoFuture::into_future(<expr>) { ... }`
1137        let into_future_expr = match await_kind {
1138            FutureKind::Future => self.expr_call_lang_item_fn(
1139                span,
1140                hir::LangItem::IntoFutureIntoFuture,
1141                self.arena.alloc_from_iter([*expr])arena_vec![self; *expr],
1142            ),
1143            // Not needed for `for await` because we expect to have already called
1144            // `IntoAsyncIterator::into_async_iter` on it.
1145            FutureKind::AsyncIterator => expr,
1146        };
1147
1148        // match <into_future_expr> {
1149        //     mut __awaitee => loop { .. }
1150        // }
1151        hir::ExprKind::Match(
1152            into_future_expr,
1153            self.arena.alloc_from_iter([awaitee_arm])arena_vec![self; awaitee_arm],
1154            hir::MatchSource::AwaitDesugar,
1155        )
1156    }
1157
1158    fn lower_expr_use(&mut self, use_kw_span: Span, expr: &Expr) -> hir::ExprKind<'hir> {
1159        hir::ExprKind::Use(self.lower_expr(expr), self.lower_span(use_kw_span))
1160    }
1161
1162    /// Destructure the LHS of complex assignments.
1163    /// For instance, lower `(a, b) = t` to `{ let (lhs1, lhs2) = t; a = lhs1; b = lhs2; }`.
1164    fn lower_expr_assign(
1165        &mut self,
1166        lhs: &Expr,
1167        rhs: &Expr,
1168        eq_sign_span: Span,
1169        whole_span: Span,
1170    ) -> hir::ExprKind<'hir> {
1171        // Return early in case of an ordinary assignment.
1172        fn is_ordinary(lower_ctx: &mut LoweringContext<'_, '_>, lhs: &Expr) -> bool {
1173            match &lhs.kind {
1174                ExprKind::Array(..)
1175                | ExprKind::Struct(..)
1176                | ExprKind::Tup(..)
1177                | ExprKind::Underscore => false,
1178                // Check for unit struct constructor.
1179                ExprKind::Path(..) => lower_ctx.extract_unit_struct_path(lhs).is_none(),
1180                // Check for tuple struct constructor.
1181                ExprKind::Call(callee, ..) => lower_ctx.extract_tuple_struct_path(callee).is_none(),
1182                ExprKind::Paren(e) => {
1183                    match e.kind {
1184                        // We special-case `(..)` for consistency with patterns.
1185                        ExprKind::Range(None, None, RangeLimits::HalfOpen) => false,
1186                        _ => is_ordinary(lower_ctx, e),
1187                    }
1188                }
1189                _ => true,
1190            }
1191        }
1192        if is_ordinary(self, lhs) {
1193            return hir::ExprKind::Assign(
1194                self.lower_expr(lhs),
1195                self.lower_expr(rhs),
1196                self.lower_span(eq_sign_span),
1197            );
1198        }
1199
1200        let mut assignments = ::alloc::vec::Vec::new()vec![];
1201
1202        // The LHS becomes a pattern: `(lhs1, lhs2)`.
1203        let pat = self.destructure_assign(lhs, eq_sign_span, &mut assignments);
1204        let rhs = self.lower_expr(rhs);
1205
1206        // Introduce a `let` for destructuring: `let (lhs1, lhs2) = t`.
1207        let destructure_let =
1208            self.stmt_let_pat(None, whole_span, Some(rhs), pat, hir::LocalSource::AssignDesugar);
1209
1210        // `a = lhs1; b = lhs2;`.
1211        let stmts = self.arena.alloc_from_iter(std::iter::once(destructure_let).chain(assignments));
1212
1213        // Wrap everything in a block.
1214        hir::ExprKind::Block(self.block_all(whole_span, stmts, None), None)
1215    }
1216
1217    /// If the given expression is a path to a tuple struct, returns that path.
1218    /// It is not a complete check, but just tries to reject most paths early
1219    /// if they are not tuple structs.
1220    /// Type checking will take care of the full validation later.
1221    fn extract_tuple_struct_path<'a>(
1222        &mut self,
1223        expr: &'a Expr,
1224    ) -> Option<(&'a Option<Box<QSelf>>, &'a Path)> {
1225        if let ExprKind::Path(qself, path) = &expr.kind {
1226            // Does the path resolve to something disallowed in a tuple struct/variant pattern?
1227            if let Some(partial_res) = self.get_partial_res(expr.id) {
1228                if let Some(res) = partial_res.full_res()
1229                    && !res.expected_in_tuple_struct_pat()
1230                {
1231                    return None;
1232                }
1233            }
1234            return Some((qself, path));
1235        }
1236        None
1237    }
1238
1239    /// If the given expression is a path to a unit struct, returns that path.
1240    /// It is not a complete check, but just tries to reject most paths early
1241    /// if they are not unit structs.
1242    /// Type checking will take care of the full validation later.
1243    fn extract_unit_struct_path<'a>(
1244        &mut self,
1245        expr: &'a Expr,
1246    ) -> Option<(&'a Option<Box<QSelf>>, &'a Path)> {
1247        if let ExprKind::Path(qself, path) = &expr.kind {
1248            // Does the path resolve to something disallowed in a unit struct/variant pattern?
1249            if let Some(partial_res) = self.get_partial_res(expr.id) {
1250                if let Some(res) = partial_res.full_res()
1251                    && !res.expected_in_unit_struct_pat()
1252                {
1253                    return None;
1254                }
1255            }
1256            return Some((qself, path));
1257        }
1258        None
1259    }
1260
1261    /// Convert the LHS of a destructuring assignment to a pattern.
1262    /// Each sub-assignment is recorded in `assignments`.
1263    fn destructure_assign(
1264        &mut self,
1265        lhs: &Expr,
1266        eq_sign_span: Span,
1267        assignments: &mut Vec<hir::Stmt<'hir>>,
1268    ) -> &'hir hir::Pat<'hir> {
1269        self.arena.alloc(self.destructure_assign_mut(lhs, eq_sign_span, assignments))
1270    }
1271
1272    fn destructure_assign_mut(
1273        &mut self,
1274        lhs: &Expr,
1275        eq_sign_span: Span,
1276        assignments: &mut Vec<hir::Stmt<'hir>>,
1277    ) -> hir::Pat<'hir> {
1278        match &lhs.kind {
1279            // Underscore pattern.
1280            ExprKind::Underscore => {
1281                return self.pat_without_dbm(lhs.span, hir::PatKind::Wild);
1282            }
1283            // Slice patterns.
1284            ExprKind::Array(elements) => {
1285                let (pats, rest) =
1286                    self.destructure_sequence(elements, "slice", eq_sign_span, assignments);
1287                let slice_pat = if let Some((i, span)) = rest {
1288                    let (before, after) = pats.split_at(i);
1289                    hir::PatKind::Slice(
1290                        before,
1291                        Some(self.arena.alloc(self.pat_without_dbm(span, hir::PatKind::Wild))),
1292                        after,
1293                    )
1294                } else {
1295                    hir::PatKind::Slice(pats, None, &[])
1296                };
1297                return self.pat_without_dbm(lhs.span, slice_pat);
1298            }
1299            // Tuple structs.
1300            ExprKind::Call(callee, args) => {
1301                if let Some((qself, path)) = self.extract_tuple_struct_path(callee) {
1302                    let (pats, rest) = self.destructure_sequence(
1303                        args,
1304                        "tuple struct or variant",
1305                        eq_sign_span,
1306                        assignments,
1307                    );
1308                    let qpath = self.lower_qpath(
1309                        callee.id,
1310                        qself,
1311                        path,
1312                        ParamMode::Optional,
1313                        AllowReturnTypeNotation::No,
1314                        ImplTraitContext::Disallowed(ImplTraitPosition::Path),
1315                        None,
1316                    );
1317                    // Destructure like a tuple struct.
1318                    let tuple_struct_pat = hir::PatKind::TupleStruct(
1319                        qpath,
1320                        pats,
1321                        hir::DotDotPos::new(rest.map(|r| r.0)),
1322                    );
1323                    return self.pat_without_dbm(lhs.span, tuple_struct_pat);
1324                }
1325            }
1326            // Unit structs and enum variants.
1327            ExprKind::Path(..) => {
1328                if let Some((qself, path)) = self.extract_unit_struct_path(lhs) {
1329                    let qpath = self.lower_qpath(
1330                        lhs.id,
1331                        qself,
1332                        path,
1333                        ParamMode::Optional,
1334                        AllowReturnTypeNotation::No,
1335                        ImplTraitContext::Disallowed(ImplTraitPosition::Path),
1336                        None,
1337                    );
1338                    // Destructure like a unit struct.
1339                    let unit_struct_pat = hir::PatKind::Expr(self.arena.alloc(hir::PatExpr {
1340                        kind: hir::PatExprKind::Path(qpath),
1341                        hir_id: self.next_id(),
1342                        span: self.lower_span(lhs.span),
1343                    }));
1344                    return self.pat_without_dbm(lhs.span, unit_struct_pat);
1345                }
1346            }
1347            // Structs.
1348            ExprKind::Struct(se) => {
1349                let field_pats = self.arena.alloc_from_iter(se.fields.iter().map(|f| {
1350                    let pat = self.destructure_assign(&f.expr, eq_sign_span, assignments);
1351                    hir::PatField {
1352                        hir_id: self.next_id(),
1353                        ident: self.lower_ident(f.ident),
1354                        pat,
1355                        is_shorthand: f.is_shorthand,
1356                        span: self.lower_span(f.span),
1357                    }
1358                }));
1359                let qpath = self.lower_qpath(
1360                    lhs.id,
1361                    &se.qself,
1362                    &se.path,
1363                    ParamMode::Optional,
1364                    AllowReturnTypeNotation::No,
1365                    ImplTraitContext::Disallowed(ImplTraitPosition::Path),
1366                    None,
1367                );
1368                let fields_omitted = match &se.rest {
1369                    StructRest::Base(e) => {
1370                        self.dcx().emit_err(FunctionalRecordUpdateDestructuringAssignment {
1371                            span: e.span,
1372                        });
1373                        Some(self.lower_span(e.span))
1374                    }
1375                    StructRest::Rest(span) => Some(self.lower_span(*span)),
1376                    StructRest::None | StructRest::NoneWithError(_) => None,
1377                };
1378                let struct_pat = hir::PatKind::Struct(qpath, field_pats, fields_omitted);
1379                return self.pat_without_dbm(lhs.span, struct_pat);
1380            }
1381            // Tuples.
1382            ExprKind::Tup(elements) => {
1383                let (pats, rest) =
1384                    self.destructure_sequence(elements, "tuple", eq_sign_span, assignments);
1385                let tuple_pat = hir::PatKind::Tuple(pats, hir::DotDotPos::new(rest.map(|r| r.0)));
1386                return self.pat_without_dbm(lhs.span, tuple_pat);
1387            }
1388            ExprKind::Paren(e) => {
1389                // We special-case `(..)` for consistency with patterns.
1390                if let ExprKind::Range(None, None, RangeLimits::HalfOpen) = e.kind {
1391                    let tuple_pat = hir::PatKind::Tuple(&[], hir::DotDotPos::new(Some(0)));
1392                    return self.pat_without_dbm(lhs.span, tuple_pat);
1393                } else {
1394                    return self.destructure_assign_mut(e, eq_sign_span, assignments);
1395                }
1396            }
1397            _ => {}
1398        }
1399        // Treat all other cases as normal lvalue.
1400        let ident = Ident::new(sym::lhs, self.lower_span(lhs.span));
1401        let (pat, binding) = self.pat_ident_mut(lhs.span, ident);
1402        let ident = self.expr_ident(lhs.span, ident, binding);
1403        let assign =
1404            hir::ExprKind::Assign(self.lower_expr(lhs), ident, self.lower_span(eq_sign_span));
1405        let expr = self.expr(lhs.span, assign);
1406        assignments.push(self.stmt_expr(lhs.span, expr));
1407        pat
1408    }
1409
    /// Destructure a sequence of expressions occurring on the LHS of an assignment.
    /// Such a sequence occurs in a tuple (struct)/slice.
    /// Return a sequence of corresponding patterns, and the index and the span of `..` if it
    /// exists.
    /// Each sub-assignment is recorded in `assignments`.
    fn destructure_sequence(
        &mut self,
        elements: &[Box<Expr>],
        ctx: &str,
        eq_sign_span: Span,
        assignments: &mut Vec<hir::Stmt<'hir>>,
    ) -> (&'hir [hir::Pat<'hir>], Option<(usize, Span)>) {
        // Index and span of the `..` element, if one has been seen so far.
        let mut rest = None;
        let elements =
            self.arena.alloc_from_iter(elements.iter().enumerate().filter_map(|(i, e)| {
                // Check for `..` pattern.
                if let ExprKind::Range(None, None, RangeLimits::HalfOpen) = e.kind {
                    if let Some((_, prev_span)) = rest {
                        // At most one `..` is allowed per sequence; report duplicates.
                        self.ban_extra_rest_pat(e.span, prev_span, ctx);
                    } else {
                        rest = Some((i, e.span));
                    }
                    // `..` contributes no pattern of its own; it is encoded via `DotDotPos`.
                    None
                } else {
                    Some(self.destructure_assign_mut(e, eq_sign_span, assignments))
                }
            }));
        (elements, rest)
    }
1439
1440    /// Desugar `<start>..=<end>` into `std::ops::RangeInclusive::new(<start>, <end>)`.
1441    fn lower_expr_range_closed(&mut self, span: Span, e1: &Expr, e2: &Expr) -> hir::ExprKind<'hir> {
1442        let e1 = self.lower_expr_mut(e1);
1443        let e2 = self.lower_expr_mut(e2);
1444        let fn_path = self.make_lang_item_qpath(hir::LangItem::RangeInclusiveNew, span, None);
1445        let fn_expr = self.arena.alloc(self.expr(span, hir::ExprKind::Path(fn_path)));
1446        hir::ExprKind::Call(fn_expr, self.arena.alloc_from_iter([e1, e2])arena_vec![self; e1, e2])
1447    }
1448
1449    fn lower_expr_range(
1450        &mut self,
1451        span: Span,
1452        e1: Option<&Expr>,
1453        e2: Option<&Expr>,
1454        lims: RangeLimits,
1455    ) -> hir::ExprKind<'hir> {
1456        use rustc_ast::RangeLimits::*;
1457
1458        let lang_item = match (e1, e2, lims) {
1459            (None, None, HalfOpen) => hir::LangItem::RangeFull,
1460            (Some(..), None, HalfOpen) => {
1461                if self.tcx.features().new_range() {
1462                    hir::LangItem::RangeFromCopy
1463                } else {
1464                    hir::LangItem::RangeFrom
1465                }
1466            }
1467            (None, Some(..), HalfOpen) => hir::LangItem::RangeTo,
1468            (Some(..), Some(..), HalfOpen) => {
1469                if self.tcx.features().new_range() {
1470                    hir::LangItem::RangeCopy
1471                } else {
1472                    hir::LangItem::Range
1473                }
1474            }
1475            (None, Some(..), Closed) => {
1476                if self.tcx.features().new_range() {
1477                    hir::LangItem::RangeToInclusiveCopy
1478                } else {
1479                    hir::LangItem::RangeToInclusive
1480                }
1481            }
1482            (Some(e1), Some(e2), Closed) => {
1483                if self.tcx.features().new_range() {
1484                    hir::LangItem::RangeInclusiveCopy
1485                } else {
1486                    return self.lower_expr_range_closed(span, e1, e2);
1487                }
1488            }
1489            (start, None, Closed) => {
1490                self.dcx().emit_err(InclusiveRangeWithNoEnd { span });
1491                match start {
1492                    Some(..) => {
1493                        if self.tcx.features().new_range() {
1494                            hir::LangItem::RangeFromCopy
1495                        } else {
1496                            hir::LangItem::RangeFrom
1497                        }
1498                    }
1499                    None => hir::LangItem::RangeFull,
1500                }
1501            }
1502        };
1503
1504        let fields = self.arena.alloc_from_iter(
1505            e1.iter()
1506                .map(|e| (sym::start, e))
1507                .chain(e2.iter().map(|e| {
1508                    (
1509                        if #[allow(non_exhaustive_omitted_patterns)] match lang_item {
    hir::LangItem::RangeInclusiveCopy | hir::LangItem::RangeToInclusiveCopy =>
        true,
    _ => false,
}matches!(
1510                            lang_item,
1511                            hir::LangItem::RangeInclusiveCopy | hir::LangItem::RangeToInclusiveCopy
1512                        ) {
1513                            sym::last
1514                        } else {
1515                            sym::end
1516                        },
1517                        e,
1518                    )
1519                }))
1520                .map(|(s, e)| {
1521                    let span = self.lower_span(e.span);
1522                    let span = self.mark_span_with_reason(DesugaringKind::RangeExpr, span, None);
1523                    let expr = self.lower_expr(e);
1524                    let ident = Ident::new(s, span);
1525                    self.expr_field(ident, expr, span)
1526                }),
1527        );
1528
1529        hir::ExprKind::Struct(
1530            self.arena.alloc(self.make_lang_item_qpath(lang_item, span, None)),
1531            fields,
1532            hir::StructTailExpr::None,
1533        )
1534    }
1535
1536    // Record labelled expr's HirId so that we can retrieve it in `lower_jump_destination` without
1537    // lowering node id again.
1538    fn lower_label(
1539        &mut self,
1540        opt_label: Option<Label>,
1541        dest_id: NodeId,
1542        dest_hir_id: hir::HirId,
1543    ) -> Option<Label> {
1544        let label = opt_label?;
1545        self.ident_and_label_to_local_id.insert(dest_id, dest_hir_id.local_id);
1546        Some(Label { ident: self.lower_ident(label.ident) })
1547    }
1548
1549    fn lower_loop_destination(&mut self, destination: Option<(NodeId, Label)>) -> hir::Destination {
1550        let target_id = match destination {
1551            Some((id, _)) => {
1552                if let Some(loop_id) = self.resolver.get_label_res(id) {
1553                    let local_id = self.ident_and_label_to_local_id[&loop_id];
1554                    let loop_hir_id = HirId { owner: self.current_hir_id_owner, local_id };
1555                    Ok(loop_hir_id)
1556                } else {
1557                    Err(hir::LoopIdError::UnresolvedLabel)
1558                }
1559            }
1560            None => {
1561                self.loop_scope.map(|id| Ok(id)).unwrap_or(Err(hir::LoopIdError::OutsideLoopScope))
1562            }
1563        };
1564        let label = destination
1565            .map(|(_, label)| label)
1566            .map(|label| Label { ident: self.lower_ident(label.ident) });
1567        hir::Destination { label, target_id }
1568    }
1569
1570    fn lower_jump_destination(&mut self, id: NodeId, opt_label: Option<Label>) -> hir::Destination {
1571        if self.is_in_loop_condition && opt_label.is_none() {
1572            hir::Destination {
1573                label: None,
1574                target_id: Err(hir::LoopIdError::UnlabeledCfInWhileCondition),
1575            }
1576        } else {
1577            self.lower_loop_destination(opt_label.map(|label| (id, label)))
1578        }
1579    }
1580
1581    fn with_try_block_scope<T>(
1582        &mut self,
1583        scope: TryBlockScope,
1584        f: impl FnOnce(&mut Self) -> T,
1585    ) -> T {
1586        let old_scope = mem::replace(&mut self.try_block_scope, scope);
1587        let result = f(self);
1588        self.try_block_scope = old_scope;
1589        result
1590    }
1591
1592    fn with_loop_scope<T>(&mut self, loop_id: hir::HirId, f: impl FnOnce(&mut Self) -> T) -> T {
1593        // We're no longer in the base loop's condition; we're in another loop.
1594        let was_in_loop_condition = self.is_in_loop_condition;
1595        self.is_in_loop_condition = false;
1596
1597        let old_scope = self.loop_scope.replace(loop_id);
1598        let result = f(self);
1599        self.loop_scope = old_scope;
1600
1601        self.is_in_loop_condition = was_in_loop_condition;
1602
1603        result
1604    }
1605
1606    fn with_loop_condition_scope<T>(&mut self, f: impl FnOnce(&mut Self) -> T) -> T {
1607        let was_in_loop_condition = self.is_in_loop_condition;
1608        self.is_in_loop_condition = true;
1609
1610        let result = f(self);
1611
1612        self.is_in_loop_condition = was_in_loop_condition;
1613
1614        result
1615    }
1616
1617    fn lower_expr_field(&mut self, f: &ExprField) -> hir::ExprField<'hir> {
1618        let hir_id = self.lower_node_id(f.id);
1619        self.lower_attrs(hir_id, &f.attrs, f.span, Target::ExprField);
1620        hir::ExprField {
1621            hir_id,
1622            ident: self.lower_ident(f.ident),
1623            expr: self.lower_expr(&f.expr),
1624            span: self.lower_span(f.span),
1625            is_shorthand: f.is_shorthand,
1626        }
1627    }
1628
1629    fn lower_expr_yield(&mut self, span: Span, opt_expr: Option<&Expr>) -> hir::ExprKind<'hir> {
1630        let yielded =
1631            opt_expr.as_ref().map(|x| self.lower_expr(x)).unwrap_or_else(|| self.expr_unit(span));
1632
1633        if !self.tcx.features().yield_expr()
1634            && !self.tcx.features().coroutines()
1635            && !self.tcx.features().gen_blocks()
1636        {
1637            rustc_session::errors::feature_err(
1638                &self.tcx.sess,
1639                sym::yield_expr,
1640                span,
1641                rustc_errors::DiagMessage::Inline(std::borrow::Cow::Borrowed("yield syntax is experimental"))msg!("yield syntax is experimental"),
1642            )
1643            .emit();
1644        }
1645
1646        let is_async_gen = match self.coroutine_kind {
1647            Some(hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Gen, _)) => false,
1648            Some(hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::AsyncGen, _)) => true,
1649            Some(hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Async, _)) => {
1650                // Lower to a block `{ EXPR; <error> }` so that the awaited expr
1651                // is not accidentally orphaned.
1652                let stmt_id = self.next_id();
1653                let expr_err = self.expr(
1654                    yielded.span,
1655                    hir::ExprKind::Err(self.dcx().emit_err(AsyncCoroutinesNotSupported { span })),
1656                );
1657                return hir::ExprKind::Block(
1658                    self.block_all(
1659                        yielded.span,
1660                        self.arena.alloc_from_iter([hir::Stmt {
                hir_id: stmt_id,
                kind: hir::StmtKind::Semi(yielded),
                span: yielded.span,
            }])arena_vec![self; hir::Stmt {
1661                            hir_id: stmt_id,
1662                            kind: hir::StmtKind::Semi(yielded),
1663                            span: yielded.span,
1664                        }],
1665                        Some(self.arena.alloc(expr_err)),
1666                    ),
1667                    None,
1668                );
1669            }
1670            Some(hir::CoroutineKind::Coroutine(_)) => false,
1671            None => {
1672                let suggestion = self.current_item.map(|s| s.shrink_to_lo());
1673                self.dcx().emit_err(YieldInClosure { span, suggestion });
1674                self.coroutine_kind = Some(hir::CoroutineKind::Coroutine(Movability::Movable));
1675
1676                false
1677            }
1678        };
1679
1680        if is_async_gen {
1681            // `yield $expr` is transformed into `task_context = yield async_gen_ready($expr)`.
1682            // This ensures that we store our resumed `ResumeContext` correctly, and also that
1683            // the apparent value of the `yield` expression is `()`.
1684            let desugar_span = self.mark_span_with_reason(
1685                DesugaringKind::Async,
1686                span,
1687                Some(Arc::clone(&self.allow_async_gen)),
1688            );
1689            let wrapped_yielded = self.expr_call_lang_item_fn(
1690                desugar_span,
1691                hir::LangItem::AsyncGenReady,
1692                std::slice::from_ref(yielded),
1693            );
1694            let yield_expr = self.arena.alloc(
1695                self.expr(span, hir::ExprKind::Yield(wrapped_yielded, hir::YieldSource::Yield)),
1696            );
1697
1698            let Some(task_context_hid) = self.task_context else {
1699                {
    ::core::panicking::panic_fmt(format_args!("internal error: entered unreachable code: {0}",
            format_args!("use of `await` outside of an async context.")));
};unreachable!("use of `await` outside of an async context.");
1700            };
1701            let task_context_ident = Ident::with_dummy_span(sym::_task_context);
1702            let lhs = self.expr_ident(desugar_span, task_context_ident, task_context_hid);
1703
1704            hir::ExprKind::Assign(lhs, yield_expr, self.lower_span(span))
1705        } else {
1706            hir::ExprKind::Yield(yielded, hir::YieldSource::Yield)
1707        }
1708    }
1709
1710    /// Desugar `ExprForLoop` from: `[opt_ident]: for <pat> in <head> <body>` into:
1711    /// ```ignore (pseudo-rust)
1712    /// {
1713    ///     let result = match IntoIterator::into_iter(<head>) {
1714    ///         mut iter => {
1715    ///             [opt_ident]: loop {
1716    ///                 match Iterator::next(&mut iter) {
1717    ///                     None => break,
1718    ///                     Some(<pat>) => <body>,
1719    ///                 };
1720    ///             }
1721    ///         }
1722    ///     };
1723    ///     result
1724    /// }
1725    /// ```
1726    fn lower_expr_for(
1727        &mut self,
1728        e: &Expr,
1729        pat: &Pat,
1730        head: &Expr,
1731        body: &Block,
1732        opt_label: Option<Label>,
1733        loop_kind: ForLoopKind,
1734    ) -> hir::Expr<'hir> {
1735        let head = self.lower_expr_mut(head);
1736        let pat = self.lower_pat(pat);
1737        let for_span =
1738            self.mark_span_with_reason(DesugaringKind::ForLoop, self.lower_span(e.span), None);
1739        let for_ctxt = for_span.ctxt();
1740
1741        // Try to point both the head and pat spans to their position in the for loop
1742        // rather than inside a macro.
1743        let head_span =
1744            head.span.find_ancestor_in_same_ctxt(e.span).unwrap_or(head.span).with_ctxt(for_ctxt);
1745        let pat_span =
1746            pat.span.find_ancestor_in_same_ctxt(e.span).unwrap_or(pat.span).with_ctxt(for_ctxt);
1747
1748        let loop_hir_id = self.lower_node_id(e.id);
1749        let label = self.lower_label(opt_label, e.id, loop_hir_id);
1750
1751        // `None => break`
1752        let none_arm = {
1753            let break_expr =
1754                self.with_loop_scope(loop_hir_id, |this| this.expr_break_alloc(for_span));
1755            let pat = self.pat_none(for_span);
1756            self.arm(pat, break_expr, for_span)
1757        };
1758
1759        // Some(<pat>) => <body>,
1760        let some_arm = {
1761            let some_pat = self.pat_some(pat_span, pat);
1762            let body_block =
1763                self.with_loop_scope(loop_hir_id, |this| this.lower_block(body, false));
1764            let body_expr = self.arena.alloc(self.expr_block(body_block));
1765            self.arm(some_pat, body_expr, for_span)
1766        };
1767
1768        // `mut iter`
1769        let iter = Ident::with_dummy_span(sym::iter);
1770        let (iter_pat, iter_pat_nid) =
1771            self.pat_ident_binding_mode(head_span, iter, hir::BindingMode::MUT);
1772
1773        let match_expr = {
1774            let iter = self.expr_ident(head_span, iter, iter_pat_nid);
1775            let next_expr = match loop_kind {
1776                ForLoopKind::For => {
1777                    // `Iterator::next(&mut iter)`
1778                    let ref_mut_iter = self.expr_mut_addr_of(head_span, iter);
1779                    self.expr_call_lang_item_fn(
1780                        head_span,
1781                        hir::LangItem::IteratorNext,
1782                        self.arena.alloc_from_iter([ref_mut_iter])arena_vec![self; ref_mut_iter],
1783                    )
1784                }
1785                ForLoopKind::ForAwait => {
1786                    // we'll generate `unsafe { Pin::new_unchecked(&mut iter) })` and then pass this
1787                    // to make_lowered_await with `FutureKind::AsyncIterator` which will generator
1788                    // calls to `poll_next`. In user code, this would probably be a call to
1789                    // `Pin::as_mut` but here it's easy enough to do `new_unchecked`.
1790
1791                    // `&mut iter`
1792                    let iter = self.expr_mut_addr_of(head_span, iter);
1793                    // `Pin::new_unchecked(...)`
1794                    let iter = self.arena.alloc(self.expr_call_lang_item_fn_mut(
1795                        head_span,
1796                        hir::LangItem::PinNewUnchecked,
1797                        self.arena.alloc_from_iter([iter])arena_vec![self; iter],
1798                    ));
1799                    // `unsafe { ... }`
1800                    let iter = self.arena.alloc(self.expr_unsafe(head_span, iter));
1801                    let kind = self.make_lowered_await(head_span, iter, FutureKind::AsyncIterator);
1802                    self.arena.alloc(hir::Expr { hir_id: self.next_id(), kind, span: head_span })
1803                }
1804            };
1805            let arms = self.arena.alloc_from_iter([none_arm, some_arm])arena_vec![self; none_arm, some_arm];
1806
1807            // `match $next_expr { ... }`
1808            self.expr_match(head_span, next_expr, arms, hir::MatchSource::ForLoopDesugar)
1809        };
1810        let match_stmt = self.stmt_expr(for_span, match_expr);
1811
1812        let loop_block = self.block_all(for_span, self.arena.alloc_from_iter([match_stmt])arena_vec![self; match_stmt], None);
1813
1814        // `[opt_ident]: loop { ... }`
1815        let kind = hir::ExprKind::Loop(
1816            loop_block,
1817            label,
1818            hir::LoopSource::ForLoop,
1819            self.lower_span(for_span.with_hi(head.span.hi())),
1820        );
1821        let loop_expr = self.arena.alloc(hir::Expr { hir_id: loop_hir_id, kind, span: for_span });
1822
1823        // `mut iter => { ... }`
1824        let iter_arm = self.arm(iter_pat, loop_expr, for_span);
1825
1826        let match_expr = match loop_kind {
1827            ForLoopKind::For => {
1828                // `::std::iter::IntoIterator::into_iter(<head>)`
1829                let into_iter_expr = self.expr_call_lang_item_fn(
1830                    head_span,
1831                    hir::LangItem::IntoIterIntoIter,
1832                    self.arena.alloc_from_iter([head])arena_vec![self; head],
1833                );
1834
1835                self.arena.alloc(self.expr_match(
1836                    for_span,
1837                    into_iter_expr,
1838                    self.arena.alloc_from_iter([iter_arm])arena_vec![self; iter_arm],
1839                    hir::MatchSource::ForLoopDesugar,
1840                ))
1841            }
1842            // `match into_async_iter(<head>) { ref mut iter => match unsafe { Pin::new_unchecked(iter) } { ... } }`
1843            ForLoopKind::ForAwait => {
1844                let iter_ident = iter;
1845                let (async_iter_pat, async_iter_pat_id) =
1846                    self.pat_ident_binding_mode(head_span, iter_ident, hir::BindingMode::REF_MUT);
1847                let iter = self.expr_ident_mut(head_span, iter_ident, async_iter_pat_id);
1848                // `Pin::new_unchecked(...)`
1849                let iter = self.arena.alloc(self.expr_call_lang_item_fn_mut(
1850                    head_span,
1851                    hir::LangItem::PinNewUnchecked,
1852                    self.arena.alloc_from_iter([iter])arena_vec![self; iter],
1853                ));
1854                // `unsafe { ... }`
1855                let iter = self.arena.alloc(self.expr_unsafe(head_span, iter));
1856                let inner_match_expr = self.arena.alloc(self.expr_match(
1857                    for_span,
1858                    iter,
1859                    self.arena.alloc_from_iter([iter_arm])arena_vec![self; iter_arm],
1860                    hir::MatchSource::ForLoopDesugar,
1861                ));
1862
1863                // `::core::async_iter::IntoAsyncIterator::into_async_iter(<head>)`
1864                let iter = self.expr_call_lang_item_fn(
1865                    head_span,
1866                    hir::LangItem::IntoAsyncIterIntoIter,
1867                    self.arena.alloc_from_iter([head])arena_vec![self; head],
1868                );
1869                let iter_arm = self.arm(async_iter_pat, inner_match_expr, for_span);
1870                self.arena.alloc(self.expr_match(
1871                    for_span,
1872                    iter,
1873                    self.arena.alloc_from_iter([iter_arm])arena_vec![self; iter_arm],
1874                    hir::MatchSource::ForLoopDesugar,
1875                ))
1876            }
1877        };
1878
1879        // This is effectively `{ let _result = ...; _result }`.
1880        // The construct was introduced in #21984 and is necessary to make sure that
1881        // temporaries in the `head` expression are dropped and do not leak to the
1882        // surrounding scope of the `match` since the `match` is not a terminating scope.
1883        //
1884        // Also, add the attributes to the outer returned expr node.
1885        let expr = self.expr_drop_temps_mut(for_span, match_expr);
1886        self.lower_attrs(expr.hir_id, &e.attrs, e.span, Target::from_expr(e));
1887        expr
1888    }
1889
1890    /// Desugar `ExprKind::Try` from: `<expr>?` into:
1891    /// ```ignore (pseudo-rust)
1892    /// match Try::branch(<expr>) {
1893    ///     ControlFlow::Continue(val) => #[allow(unreachable_code)] val,,
1894    ///     ControlFlow::Break(residual) =>
1895    ///         #[allow(unreachable_code)]
1896    ///         // If there is an enclosing `try {...}`:
1897    ///         break 'catch_target Residual::into_try_type(residual),
1898    ///         // Otherwise:
1899    ///         return Try::from_residual(residual),
1900    /// }
1901    /// ```
1902    fn lower_expr_try(&mut self, span: Span, sub_expr: &Expr) -> hir::ExprKind<'hir> {
1903        let unstable_span = self.mark_span_with_reason(
1904            DesugaringKind::QuestionMark,
1905            span,
1906            Some(Arc::clone(&self.allow_try_trait)),
1907        );
1908        let try_span = self.tcx.sess.source_map().end_point(span);
1909        let try_span = self.mark_span_with_reason(
1910            DesugaringKind::QuestionMark,
1911            try_span,
1912            Some(Arc::clone(&self.allow_try_trait)),
1913        );
1914
1915        // `Try::branch(<expr>)`
1916        let scrutinee = {
1917            // expand <expr>
1918            let sub_expr = self.lower_expr_mut(sub_expr);
1919
1920            self.expr_call_lang_item_fn(
1921                unstable_span,
1922                hir::LangItem::TryTraitBranch,
1923                self.arena.alloc_from_iter([sub_expr])arena_vec![self; sub_expr],
1924            )
1925        };
1926
1927        let attrs: AttrVec = {
    let len = [()].len();
    let mut vec = ::thin_vec::ThinVec::with_capacity(len);
    vec.push(self.unreachable_code_attr(try_span));
    vec
}thin_vec![self.unreachable_code_attr(try_span)];
1928
1929        // `ControlFlow::Continue(val) => #[allow(unreachable_code)] val,`
1930        let continue_arm = {
1931            let val_ident = Ident::with_dummy_span(sym::val);
1932            let (val_pat, val_pat_nid) = self.pat_ident(span, val_ident);
1933            let val_expr = self.expr_ident(span, val_ident, val_pat_nid);
1934            self.lower_attrs(val_expr.hir_id, &attrs, span, Target::Expression);
1935            let continue_pat = self.pat_cf_continue(unstable_span, val_pat);
1936            self.arm(continue_pat, val_expr, try_span)
1937        };
1938
1939        // `ControlFlow::Break(residual) =>
1940        //     #[allow(unreachable_code)]
1941        //     return Try::from_residual(residual),`
1942        let break_arm = {
1943            let residual_ident = Ident::with_dummy_span(sym::residual);
1944            let (residual_local, residual_local_nid) = self.pat_ident(try_span, residual_ident);
1945            let residual_expr = self.expr_ident_mut(try_span, residual_ident, residual_local_nid);
1946
1947            let (constructor_item, target_id) = match self.try_block_scope {
1948                TryBlockScope::Function => {
1949                    (hir::LangItem::TryTraitFromResidual, Err(hir::LoopIdError::OutsideLoopScope))
1950                }
1951                TryBlockScope::Homogeneous(block_id) => {
1952                    (hir::LangItem::ResidualIntoTryType, Ok(block_id))
1953                }
1954                TryBlockScope::Heterogeneous(block_id) => {
1955                    (hir::LangItem::TryTraitFromResidual, Ok(block_id))
1956                }
1957            };
1958            let from_residual_expr = self.wrap_in_try_constructor(
1959                constructor_item,
1960                try_span,
1961                self.arena.alloc(residual_expr),
1962                unstable_span,
1963            );
1964            let ret_expr = if target_id.is_ok() {
1965                self.arena.alloc(self.expr(
1966                    try_span,
1967                    hir::ExprKind::Break(
1968                        hir::Destination { label: None, target_id },
1969                        Some(from_residual_expr),
1970                    ),
1971                ))
1972            } else {
1973                let ret_expr = self.checked_return(Some(from_residual_expr));
1974                self.arena.alloc(self.expr(try_span, ret_expr))
1975            };
1976            self.lower_attrs(ret_expr.hir_id, &attrs, span, Target::Expression);
1977
1978            let break_pat = self.pat_cf_break(try_span, residual_local);
1979            self.arm(break_pat, ret_expr, try_span)
1980        };
1981
1982        hir::ExprKind::Match(
1983            scrutinee,
1984            self.arena.alloc_from_iter([break_arm, continue_arm])arena_vec![self; break_arm, continue_arm],
1985            hir::MatchSource::TryDesugar(scrutinee.hir_id),
1986        )
1987    }
1988
1989    /// Desugar `ExprKind::Yeet` from: `do yeet <expr>` into:
1990    /// ```ignore(illustrative)
1991    /// // If there is an enclosing `try {...}`:
1992    /// break 'catch_target FromResidual::from_residual(Yeet(residual));
1993    /// // Otherwise:
1994    /// return FromResidual::from_residual(Yeet(residual));
1995    /// ```
1996    /// But to simplify this, there's a `from_yeet` lang item function which
1997    /// handles the combined `FromResidual::from_residual(Yeet(residual))`.
1998    fn lower_expr_yeet(&mut self, span: Span, sub_expr: Option<&Expr>) -> hir::ExprKind<'hir> {
1999        // The expression (if present) or `()` otherwise.
2000        let (yeeted_span, yeeted_expr) = if let Some(sub_expr) = sub_expr {
2001            (sub_expr.span, self.lower_expr(sub_expr))
2002        } else {
2003            (self.mark_span_with_reason(DesugaringKind::YeetExpr, span, None), self.expr_unit(span))
2004        };
2005
2006        let unstable_span = self.mark_span_with_reason(
2007            DesugaringKind::YeetExpr,
2008            span,
2009            Some(Arc::clone(&self.allow_try_trait)),
2010        );
2011
2012        let from_yeet_expr = self.wrap_in_try_constructor(
2013            hir::LangItem::TryTraitFromYeet,
2014            unstable_span,
2015            yeeted_expr,
2016            yeeted_span,
2017        );
2018
2019        match self.try_block_scope {
2020            TryBlockScope::Homogeneous(block_id) | TryBlockScope::Heterogeneous(block_id) => {
2021                hir::ExprKind::Break(
2022                    hir::Destination { label: None, target_id: Ok(block_id) },
2023                    Some(from_yeet_expr),
2024                )
2025            }
2026            TryBlockScope::Function => self.checked_return(Some(from_yeet_expr)),
2027        }
2028    }
2029
2030    // =========================================================================
2031    // Helper methods for building HIR.
2032    // =========================================================================
2033
2034    /// Wrap the given `expr` in a terminating scope using `hir::ExprKind::DropTemps`.
2035    ///
2036    /// In terms of drop order, it has the same effect as wrapping `expr` in
2037    /// `{ let _t = $expr; _t }` but should provide better compile-time performance.
2038    ///
2039    /// The drop order can be important, e.g. to drop temporaries from an `async fn`
2040    /// body before its parameters.
2041    pub(super) fn expr_drop_temps(
2042        &mut self,
2043        span: Span,
2044        expr: &'hir hir::Expr<'hir>,
2045    ) -> &'hir hir::Expr<'hir> {
2046        self.arena.alloc(self.expr_drop_temps_mut(span, expr))
2047    }
2048
2049    pub(super) fn expr_drop_temps_mut(
2050        &mut self,
2051        span: Span,
2052        expr: &'hir hir::Expr<'hir>,
2053    ) -> hir::Expr<'hir> {
2054        self.expr(span, hir::ExprKind::DropTemps(expr))
2055    }
2056
2057    pub(super) fn expr_match(
2058        &mut self,
2059        span: Span,
2060        arg: &'hir hir::Expr<'hir>,
2061        arms: &'hir [hir::Arm<'hir>],
2062        source: hir::MatchSource,
2063    ) -> hir::Expr<'hir> {
2064        self.expr(span, hir::ExprKind::Match(arg, arms, source))
2065    }
2066
2067    fn expr_break(&mut self, span: Span) -> hir::Expr<'hir> {
2068        let expr_break = hir::ExprKind::Break(self.lower_loop_destination(None), None);
2069        self.expr(span, expr_break)
2070    }
2071
2072    fn expr_break_alloc(&mut self, span: Span) -> &'hir hir::Expr<'hir> {
2073        let expr_break = self.expr_break(span);
2074        self.arena.alloc(expr_break)
2075    }
2076
2077    fn expr_mut_addr_of(&mut self, span: Span, e: &'hir hir::Expr<'hir>) -> hir::Expr<'hir> {
2078        self.expr(span, hir::ExprKind::AddrOf(hir::BorrowKind::Ref, hir::Mutability::Mut, e))
2079    }
2080
2081    pub(super) fn expr_unit(&mut self, sp: Span) -> &'hir hir::Expr<'hir> {
2082        self.arena.alloc(self.expr(sp, hir::ExprKind::Tup(&[])))
2083    }
2084
2085    pub(super) fn expr_str(&mut self, sp: Span, value: Symbol) -> hir::Expr<'hir> {
2086        let lit = hir::Lit {
2087            span: self.lower_span(sp),
2088            node: ast::LitKind::Str(value, ast::StrStyle::Cooked),
2089        };
2090        self.expr(sp, hir::ExprKind::Lit(lit))
2091    }
2092
2093    pub(super) fn expr_byte_str(&mut self, sp: Span, value: ByteSymbol) -> hir::Expr<'hir> {
2094        let lit = hir::Lit {
2095            span: self.lower_span(sp),
2096            node: ast::LitKind::ByteStr(value, ast::StrStyle::Cooked),
2097        };
2098        self.expr(sp, hir::ExprKind::Lit(lit))
2099    }
2100
2101    pub(super) fn expr_call_mut(
2102        &mut self,
2103        span: Span,
2104        e: &'hir hir::Expr<'hir>,
2105        args: &'hir [hir::Expr<'hir>],
2106    ) -> hir::Expr<'hir> {
2107        self.expr(span, hir::ExprKind::Call(e, args))
2108    }
2109
2110    pub(super) fn expr_struct(
2111        &mut self,
2112        span: Span,
2113        path: &'hir hir::QPath<'hir>,
2114        fields: &'hir [hir::ExprField<'hir>],
2115    ) -> hir::Expr<'hir> {
2116        self.expr(span, hir::ExprKind::Struct(path, fields, rustc_hir::StructTailExpr::None))
2117    }
2118
2119    pub(super) fn expr_enum_variant(
2120        &mut self,
2121        span: Span,
2122        path: &'hir hir::QPath<'hir>,
2123        fields: &'hir [hir::Expr<'hir>],
2124    ) -> hir::Expr<'hir> {
2125        let fields = self.arena.alloc_from_iter(fields.into_iter().enumerate().map(|(i, f)| {
2126            hir::ExprField {
2127                hir_id: self.next_id(),
2128                ident: Ident::from_str(&i.to_string()),
2129                expr: f,
2130                span: f.span,
2131                is_shorthand: false,
2132            }
2133        }));
2134        self.expr_struct(span, path, fields)
2135    }
2136
2137    pub(super) fn expr_enum_variant_lang_item(
2138        &mut self,
2139        span: Span,
2140        lang_item: hir::LangItem,
2141        fields: &'hir [hir::Expr<'hir>],
2142    ) -> hir::Expr<'hir> {
2143        let path = self.arena.alloc(self.make_lang_item_qpath(lang_item, span, None));
2144        self.expr_enum_variant(span, path, fields)
2145    }
2146
2147    pub(super) fn expr_call(
2148        &mut self,
2149        span: Span,
2150        e: &'hir hir::Expr<'hir>,
2151        args: &'hir [hir::Expr<'hir>],
2152    ) -> &'hir hir::Expr<'hir> {
2153        self.arena.alloc(self.expr_call_mut(span, e, args))
2154    }
2155
2156    pub(super) fn expr_call_lang_item_fn_mut(
2157        &mut self,
2158        span: Span,
2159        lang_item: hir::LangItem,
2160        args: &'hir [hir::Expr<'hir>],
2161    ) -> hir::Expr<'hir> {
2162        let path = self.arena.alloc(self.expr_lang_item_path(span, lang_item));
2163        self.expr_call_mut(span, path, args)
2164    }
2165
2166    pub(super) fn expr_call_lang_item_fn(
2167        &mut self,
2168        span: Span,
2169        lang_item: hir::LangItem,
2170        args: &'hir [hir::Expr<'hir>],
2171    ) -> &'hir hir::Expr<'hir> {
2172        self.arena.alloc(self.expr_call_lang_item_fn_mut(span, lang_item, args))
2173    }
2174
2175    pub(super) fn expr_lang_item_path(
2176        &mut self,
2177        span: Span,
2178        lang_item: hir::LangItem,
2179    ) -> hir::Expr<'hir> {
2180        let qpath = self.make_lang_item_qpath(lang_item, self.lower_span(span), None);
2181        self.expr(span, hir::ExprKind::Path(qpath))
2182    }
2183
2184    /// `<LangItem>::name`
2185    pub(super) fn expr_lang_item_type_relative(
2186        &mut self,
2187        span: Span,
2188        lang_item: hir::LangItem,
2189        name: Symbol,
2190    ) -> hir::Expr<'hir> {
2191        let qpath = self.make_lang_item_qpath(lang_item, self.lower_span(span), None);
2192        let path = hir::ExprKind::Path(hir::QPath::TypeRelative(
2193            self.arena.alloc(self.ty(span, hir::TyKind::Path(qpath))),
2194            self.arena.alloc(hir::PathSegment::new(
2195                Ident::new(name, self.lower_span(span)),
2196                self.next_id(),
2197                Res::Err,
2198            )),
2199        ));
2200        self.expr(span, path)
2201    }
2202
2203    pub(super) fn expr_ident(
2204        &mut self,
2205        sp: Span,
2206        ident: Ident,
2207        binding: HirId,
2208    ) -> &'hir hir::Expr<'hir> {
2209        self.arena.alloc(self.expr_ident_mut(sp, ident, binding))
2210    }
2211
2212    pub(super) fn expr_ident_mut(
2213        &mut self,
2214        span: Span,
2215        ident: Ident,
2216        binding: HirId,
2217    ) -> hir::Expr<'hir> {
2218        let hir_id = self.next_id();
2219        let res = Res::Local(binding);
2220        let expr_path = hir::ExprKind::Path(hir::QPath::Resolved(
2221            None,
2222            self.arena.alloc(hir::Path {
2223                span: self.lower_span(span),
2224                res,
2225                segments: self.arena.alloc_from_iter([hir::PathSegment::new(self.lower_ident(ident),
                hir_id, res)])arena_vec![self; hir::PathSegment::new(self.lower_ident(ident), hir_id, res)],
2226            }),
2227        ));
2228
2229        self.expr(span, expr_path)
2230    }
2231
2232    pub(super) fn expr_unsafe(
2233        &mut self,
2234        span: Span,
2235        expr: &'hir hir::Expr<'hir>,
2236    ) -> hir::Expr<'hir> {
2237        let hir_id = self.next_id();
2238        self.expr(
2239            span,
2240            hir::ExprKind::Block(
2241                self.arena.alloc(hir::Block {
2242                    stmts: &[],
2243                    expr: Some(expr),
2244                    hir_id,
2245                    rules: hir::BlockCheckMode::UnsafeBlock(hir::UnsafeSource::CompilerGenerated),
2246                    span: self.lower_span(span),
2247                    targeted_by_break: false,
2248                }),
2249                None,
2250            ),
2251        )
2252    }
2253
2254    fn expr_block_empty(&mut self, span: Span) -> &'hir hir::Expr<'hir> {
2255        let blk = self.block_all(span, &[], None);
2256        let expr = self.expr_block(blk);
2257        self.arena.alloc(expr)
2258    }
2259
2260    pub(super) fn expr_block(&mut self, b: &'hir hir::Block<'hir>) -> hir::Expr<'hir> {
2261        self.expr(b.span, hir::ExprKind::Block(b, None))
2262    }
2263
2264    /// Wrap an expression in a block, and wrap that block in an expression again.
2265    /// Useful for constructing if-expressions, which require expressions of
2266    /// kind block.
2267    pub(super) fn block_expr_block(
2268        &mut self,
2269        expr: &'hir hir::Expr<'hir>,
2270    ) -> &'hir hir::Expr<'hir> {
2271        let b = self.block_expr(expr);
2272        self.arena.alloc(self.expr_block(b))
2273    }
2274
2275    pub(super) fn expr_ref(&mut self, span: Span, expr: &'hir hir::Expr<'hir>) -> hir::Expr<'hir> {
2276        self.expr(span, hir::ExprKind::AddrOf(hir::BorrowKind::Ref, hir::Mutability::Not, expr))
2277    }
2278
2279    pub(super) fn expr_bool_literal(&mut self, span: Span, val: bool) -> hir::Expr<'hir> {
2280        self.expr(span, hir::ExprKind::Lit(Spanned { node: LitKind::Bool(val), span }))
2281    }
2282
2283    pub(super) fn expr(&mut self, span: Span, kind: hir::ExprKind<'hir>) -> hir::Expr<'hir> {
2284        let hir_id = self.next_id();
2285        hir::Expr { hir_id, kind, span: self.lower_span(span) }
2286    }
2287
2288    pub(super) fn expr_field(
2289        &mut self,
2290        ident: Ident,
2291        expr: &'hir hir::Expr<'hir>,
2292        span: Span,
2293    ) -> hir::ExprField<'hir> {
2294        hir::ExprField {
2295            hir_id: self.next_id(),
2296            ident,
2297            span: self.lower_span(span),
2298            expr,
2299            is_shorthand: false,
2300        }
2301    }
2302
2303    pub(super) fn arm(
2304        &mut self,
2305        pat: &'hir hir::Pat<'hir>,
2306        expr: &'hir hir::Expr<'hir>,
2307        span: Span,
2308    ) -> hir::Arm<'hir> {
2309        hir::Arm {
2310            hir_id: self.next_id(),
2311            pat,
2312            guard: None,
2313            span: self.lower_span(span),
2314            body: expr,
2315        }
2316    }
2317
2318    /// `#[allow(unreachable_code)]`
2319    pub(super) fn unreachable_code_attr(&mut self, span: Span) -> Attribute {
2320        let attr = attr::mk_attr_nested_word(
2321            &self.tcx.sess.psess.attr_id_generator,
2322            AttrStyle::Outer,
2323            Safety::Default,
2324            sym::allow,
2325            sym::unreachable_code,
2326            span,
2327        );
2328        attr
2329    }
2330}
2331
/// Used by [`LoweringContext::make_lowered_await`] to customize the desugaring based on what kind
/// of future we are awaiting.
//
// NOTE: the original `#[derive(...)]` attribute was corrupted (the expanded
// derive impls were fused into the attribute text); this restores the plain
// derive list that those expansions correspond to.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum FutureKind {
    /// We are awaiting a normal future
    Future,
    /// We are awaiting something that's known to be an AsyncIterator (i.e. we are in the header of
    /// a `for await` loop)
    AsyncIterator,
}