// rustc_hir_analysis/check/region.rs

//! This file builds up the `ScopeTree`, which describes
//! the parent links in the region hierarchy.
//!
//! For more information about how MIR-based region-checking works,
//! see the [rustc dev guide].
//!
//! [rustc dev guide]: https://rustc-dev-guide.rust-lang.org/borrow_check.html

use std::mem;

use rustc_data_structures::fx::FxHashSet;
use rustc_hir as hir;
use rustc_hir::def_id::DefId;
use rustc_hir::intravisit::{self, Visitor};
use rustc_hir::{Arm, Block, Expr, LetStmt, Pat, PatKind, Stmt};
use rustc_index::Idx;
use rustc_middle::bug;
use rustc_middle::middle::region::*;
use rustc_middle::ty::TyCtxt;
use rustc_session::lint;
use rustc_span::source_map;
use tracing::debug;

#[derive(Debug, Copy, Clone)]
struct Context {
    /// The scope that contains any new variables declared, plus its depth in
    /// the scope tree.
    var_parent: Option<(Scope, ScopeDepth)>,

    /// Region parent of expressions, etc., plus its depth in the scope tree.
    parent: Option<(Scope, ScopeDepth)>,
}

struct ScopeResolutionVisitor<'tcx> {
    tcx: TyCtxt<'tcx>,

    // The number of expressions and patterns visited in the current body.
    expr_and_pat_count: usize,
    // When this is `true`, we record the `Scope`s we encounter
    // when processing a `Yield` expression. This allows us to fix
    // up their `expr_and_pat_count` values afterwards.
    pessimistic_yield: bool,
    // Stores scopes when `pessimistic_yield` is `true`.
    fixup_scopes: Vec<Scope>,
    // The generated scope tree.
    scope_tree: ScopeTree,

    cx: Context,

    /// `terminating_scopes` is a set containing the ids of each
    /// statement and each conditional/repeating expression. These scopes
    /// are called "terminating scopes" because, when attempting to
    /// find the scope of a temporary, by default we search up the
    /// enclosing scopes until we encounter the terminating scope. A
    /// conditional/repeating expression is one which is not
    /// guaranteed to execute exactly once upon entering the parent
    /// scope. This could be because the expression only executes
    /// conditionally, such as the expression `b` in `a && b`, or
    /// because the expression may execute many times, such as a loop
    /// body. The reason that we distinguish such expressions is that,
    /// upon exiting the parent scope, we cannot statically know how
    /// many times the expression executed, and thus if the expression
    /// creates temporaries we cannot know statically how many such
    /// temporaries we would have to clean up. Therefore, we ensure that
    /// the temporaries never outlast the conditional/repeating
    /// expression, preventing the need for dynamic checks and/or
    /// arbitrary amounts of stack space. Terminating scopes end
    /// up being contained in a DestructionScope that contains the
    /// destructor's execution.
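    ///
    /// As an illustrative sketch of the user-visible effect (not part of
    /// the analysis itself): a loop body is a terminating scope, so a
    /// temporary created inside it is dropped before the next iteration
    /// begins and never accumulates across iterations:
    ///
    /// ```ignore (illustrative)
    /// let mut total = 0;
    /// for name in ["a", "b"] {
    ///     // The `String` temporary is dropped within this iteration; it
    ///     // cannot outlast the loop body's terminating scope.
    ///     total += String::from(name).len();
    /// }
    /// ```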
    terminating_scopes: FxHashSet<hir::ItemLocalId>,
}

/// Records the lifetime of a local variable as `cx.var_parent`
fn record_var_lifetime(visitor: &mut ScopeResolutionVisitor<'_>, var_id: hir::ItemLocalId) {
    match visitor.cx.var_parent {
        None => {
            // this can happen in extern fn declarations like
            //
            // extern fn isalnum(c: c_int) -> c_int
        }
        Some((parent_scope, _)) => visitor.scope_tree.record_var_scope(var_id, parent_scope),
    }
}

fn resolve_block<'tcx>(visitor: &mut ScopeResolutionVisitor<'tcx>, blk: &'tcx hir::Block<'tcx>) {
    debug!("resolve_block(blk.hir_id={:?})", blk.hir_id);

    let prev_cx = visitor.cx;

    // We treat the tail expression in the block (if any) somewhat
    // differently from the statements. The issue has to do with
    // temporary lifetimes. Consider the following:
    //
    //    quux({
    //        let inner = ... (&bar()) ...;
    //
    //        (... (&foo()) ...) // (the tail expression)
    //    }, other_argument());
    //
    // Each of the statements within the block is a terminating
    // scope, and thus a temporary (e.g., the result of calling
    // `bar()` in the initializer expression for `let inner = ...;`)
    // will be cleaned up immediately after its corresponding
    // statement (i.e., `let inner = ...;`) executes.
    //
    // On the other hand, temporaries associated with evaluating the
    // tail expression for the block are assigned lifetimes so that
    // they will be cleaned up as part of the terminating scope
    // *surrounding* the block expression. Here, the terminating
    // scope for the block expression is the `quux(..)` call; so
    // those temporaries will only be cleaned up *after* both
    // `other_argument()` has run and also the call to `quux(..)`
    // itself has returned.
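    //
    // As a concrete sketch of the same rule (with a hypothetical `take`
    // helper), under the pre-2024 temporary-scope rules (see the Rust 2024
    // handling below):
    //
    //    fn take(_: &str, _: ()) {}
    //    take({ let _n = String::from("stmt").len(); &String::from("tail") }, ());
    //
    // The `String` built in the statement is dropped as soon as that
    // statement finishes, while the `String` borrowed in the tail
    // expression lives until the `take(..)` call returns.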

    visitor.enter_node_scope_with_dtor(blk.hir_id.local_id);
    visitor.cx.var_parent = visitor.cx.parent;

    {
        // This block should be kept approximately in sync with
        // `intravisit::walk_block`. (We manually walk the block, rather
        // than call `walk_block`, in order to maintain precise
        // index information.)

        for (i, statement) in blk.stmts.iter().enumerate() {
            match statement.kind {
                hir::StmtKind::Let(LetStmt { els: Some(els), .. }) => {
                    // Let-else has a special lexical structure for variables.
                    // First we take a checkpoint of the current scope context here.
                    let mut prev_cx = visitor.cx;

                    visitor.enter_scope(Scope {
                        local_id: blk.hir_id.local_id,
                        data: ScopeData::Remainder(FirstStatementIndex::new(i)),
                    });
                    visitor.cx.var_parent = visitor.cx.parent;
                    visitor.visit_stmt(statement);
                    // We need to back out temporarily to the last enclosing scope
                    // for the `else` block, so that even temporaries receiving
                    // extended lifetimes are dropped inside that block.
                    // We visit the `else` block in this order so that
                    // the sequence of visits agrees with the order in the default
                    // `hir::intravisit` visitor.
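                    // As a sketch of the behavior this models (with a
                    // hypothetical `build_opt() -> Option<String>`):
                    //
                    //     let Some(ref x) = build_opt() else {
                    //         // the extended `Option` temporary from
                    //         // `build_opt()` is dropped in here, before the
                    //         // `else` body diverges
                    //         return;
                    //     };
                    //     // on success it instead lives to the end of the
                    //     // enclosing block, keeping `x` valid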
                    mem::swap(&mut prev_cx, &mut visitor.cx);
                    visitor.terminating_scopes.insert(els.hir_id.local_id);
                    visitor.visit_block(els);
                    // From now on, we continue normally.
                    visitor.cx = prev_cx;
                }
                hir::StmtKind::Let(..) => {
                    // Each declaration introduces a subscope for bindings
                    // introduced by the declaration; this subscope covers a
                    // suffix of the block. Each subscope in a block has the
                    // previous subscope in the block as a parent, except for
                    // the first such subscope, which has the block itself as a
                    // parent.
                    visitor.enter_scope(Scope {
                        local_id: blk.hir_id.local_id,
                        data: ScopeData::Remainder(FirstStatementIndex::new(i)),
                    });
                    visitor.cx.var_parent = visitor.cx.parent;
                    visitor.visit_stmt(statement)
                }
                hir::StmtKind::Item(..) => {
                    // Don't create scopes for items, since they won't be
                    // lowered to THIR and MIR.
                }
                hir::StmtKind::Expr(..) | hir::StmtKind::Semi(..) => visitor.visit_stmt(statement),
            }
        }
        if let Some(tail_expr) = blk.expr {
            let local_id = tail_expr.hir_id.local_id;
            let edition = blk.span.edition();
            if edition.at_least_rust_2024() {
                visitor.terminating_scopes.insert(local_id);
            } else if !visitor
                .tcx
                .lints_that_dont_need_to_run(())
                .contains(&lint::LintId::of(lint::builtin::TAIL_EXPR_DROP_ORDER))
            {
                // If this temporary scope would change once the code migrates to
                // Rust 2024, and we are linting about the possible semantic
                // changes that would result, then record this node-id in the
                // field `backwards_incompatible_scope` for future reference.
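                //
                // As an illustration of the rule change itself: under the
                // Rust 2024 handling above,
                //
                //     let len = {
                //         let c = std::cell::RefCell::new(vec![1, 2, 3]);
                //         c.borrow().len()
                //     };
                //
                // compiles because the `Ref` temporary in the tail expression
                // is dropped before `c`, whereas on earlier editions it
                // outlives `c` and the borrow is rejected. The lint flags code
                // whose drop order would change in this way.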
                visitor
                    .scope_tree
                    .backwards_incompatible_scope
                    .insert(local_id, Scope { local_id, data: ScopeData::Node });
            }
            visitor.visit_expr(tail_expr);
        }
    }

    visitor.cx = prev_cx;
}

fn resolve_arm<'tcx>(visitor: &mut ScopeResolutionVisitor<'tcx>, arm: &'tcx hir::Arm<'tcx>) {
    fn has_let_expr(expr: &Expr<'_>) -> bool {
        match &expr.kind {
            hir::ExprKind::Binary(_, lhs, rhs) => has_let_expr(lhs) || has_let_expr(rhs),
            hir::ExprKind::Let(..) => true,
            _ => false,
        }
    }

    let prev_cx = visitor.cx;

    visitor.terminating_scopes.insert(arm.hir_id.local_id);

    visitor.enter_node_scope_with_dtor(arm.hir_id.local_id);
    visitor.cx.var_parent = visitor.cx.parent;

    if let Some(expr) = arm.guard
        && !has_let_expr(expr)
    {
        visitor.terminating_scopes.insert(expr.hir_id.local_id);
    }

    intravisit::walk_arm(visitor, arm);

    visitor.cx = prev_cx;
}

fn resolve_pat<'tcx>(visitor: &mut ScopeResolutionVisitor<'tcx>, pat: &'tcx hir::Pat<'tcx>) {
    visitor.record_child_scope(Scope { local_id: pat.hir_id.local_id, data: ScopeData::Node });

    // If this is a binding then record the lifetime of that binding.
    if let PatKind::Binding(..) = pat.kind {
        record_var_lifetime(visitor, pat.hir_id.local_id);
    }

    debug!("resolve_pat - pre-increment {} pat = {:?}", visitor.expr_and_pat_count, pat);

    intravisit::walk_pat(visitor, pat);

    visitor.expr_and_pat_count += 1;

    debug!("resolve_pat - post-increment {} pat = {:?}", visitor.expr_and_pat_count, pat);
}

fn resolve_stmt<'tcx>(visitor: &mut ScopeResolutionVisitor<'tcx>, stmt: &'tcx hir::Stmt<'tcx>) {
    let stmt_id = stmt.hir_id.local_id;
    debug!("resolve_stmt(stmt.id={:?})", stmt_id);

    // Every statement will clean up the temporaries created during
    // execution of that statement. Therefore each statement has an
    // associated destruction scope that represents the scope of the
    // statement plus its destructors, and thus the scope for which
    // regions referenced by the destructors need to survive.
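    //
    // For illustration (with hypothetical `process` and `make_string`
    // functions), in
    //
    //     process(&make_string());
    //
    // the `String` temporary from `make_string()` is dropped when the
    // statement's destruction scope is exited, right after the call returns.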
    visitor.terminating_scopes.insert(stmt_id);

    let prev_parent = visitor.cx.parent;
    visitor.enter_node_scope_with_dtor(stmt_id);

    intravisit::walk_stmt(visitor, stmt);

    visitor.cx.parent = prev_parent;
}

fn resolve_expr<'tcx>(visitor: &mut ScopeResolutionVisitor<'tcx>, expr: &'tcx hir::Expr<'tcx>) {
    debug!("resolve_expr - pre-increment {} expr = {:?}", visitor.expr_and_pat_count, expr);

    let prev_cx = visitor.cx;
    visitor.enter_node_scope_with_dtor(expr.hir_id.local_id);

    {
        let terminating_scopes = &mut visitor.terminating_scopes;
        let mut terminating = |id: hir::ItemLocalId| {
            terminating_scopes.insert(id);
        };
        match expr.kind {
            // Conditional or repeating scopes are always terminating
            // scopes, meaning that temporaries cannot outlive them.
            // This ensures fixed-size stacks.
            hir::ExprKind::Binary(
                source_map::Spanned { node: hir::BinOpKind::And | hir::BinOpKind::Or, .. },
                l,
                r,
            ) => {
                // expr is a short-circuiting operator (|| or &&). As its
                // functionality can't be overridden by traits, it always
                // processes bool sub-expressions. bools are Copy and thus we
                // can drop any temporaries in evaluation (read) order
                // (with the exception of potentially failing let expressions).
                // We achieve this by enclosing both the LHS and the RHS
                // operands in terminating scopes.

                // We optimize this a little in the presence of chains.
                // Chains like a && b && c get lowered to AND(AND(a, b), c).
                // In here, b and c are RHS, while a is the only LHS operand in
                // that chain. This holds true for longer chains as well: the
                // leading operand is always the only LHS operand that is not a
                // binop itself. Putting a binop like AND(a, b) into a
                // terminating scope is not useful, thus we only put the LHS
                // into a terminating scope if it is not a binop.

                let terminate_lhs = match l.kind {
                    // let expressions can create temporaries that live on
                    hir::ExprKind::Let(_) => false,
                    // binops already drop their temporaries, so there is no
                    // need to put them into a terminating scope.
                    // This is purely an optimization to reduce the number of
                    // terminating scopes.
                    hir::ExprKind::Binary(
                        source_map::Spanned {
                            node: hir::BinOpKind::And | hir::BinOpKind::Or, ..
                        },
                        ..,
                    ) => false,
                    // otherwise: mark it as terminating
                    _ => true,
                };
                if terminate_lhs {
                    terminating(l.hir_id.local_id);
                }

                // `Let` expressions (in a let-chain) shouldn't be terminating, as their temporaries
                // should live beyond the immediate expression
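                //
                // For illustration (a sketch, with a hypothetical
                // `make_opt() -> Option<String>`): in the chain
                //
                //     if let Some(s) = make_opt().as_ref() && s.len() > 2 { .. }
                //
                // the `Option` temporary backing `s` has to stay alive while
                // the RHS (and the `then` block) runs, so the `Let` operand
                // must not be a terminating scope.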
                if !matches!(r.kind, hir::ExprKind::Let(_)) {
                    terminating(r.hir_id.local_id);
                }
            }
            hir::ExprKind::If(_, then, Some(otherwise)) => {
                terminating(then.hir_id.local_id);
                terminating(otherwise.hir_id.local_id);
            }

            hir::ExprKind::If(_, then, None) => {
                terminating(then.hir_id.local_id);
            }

            hir::ExprKind::Loop(body, _, _, _) => {
                terminating(body.hir_id.local_id);
            }

            hir::ExprKind::DropTemps(expr) => {
                // `DropTemps(expr)` does not denote a conditional scope.
                // Rather, we want to achieve the same behavior as `{ let _t = expr; _t }`.
                terminating(expr.hir_id.local_id);
            }

            hir::ExprKind::AssignOp(..)
            | hir::ExprKind::Index(..)
            | hir::ExprKind::Unary(..)
            | hir::ExprKind::Call(..)
            | hir::ExprKind::MethodCall(..) => {
                // FIXME(https://github.com/rust-lang/rfcs/issues/811) Nested method calls
                //
                // The lifetimes for a call or method call look as follows:
                //
                // call.id
                // - arg0.id
                // - ...
                // - argN.id
                // - call.callee_id
                //
                // The idea is that call.callee_id represents *the time when
                // the invoked function is actually running* and call.id
                // represents *the time to prepare the arguments and make the
                // call*. See the section "Borrows in Calls" in borrowck/README.md
                // for an extended explanation of why this distinction is
                // important.
                //
                // record_superlifetime(new_cx, expr.callee_id);
            }

            _ => {}
        }
    }

    let prev_pessimistic = visitor.pessimistic_yield;

    // Ordinarily, we can rely on the visit order of HIR intravisit
    // to correspond to the actual execution order of statements.
    // However, there's a weird corner case with compound assignment
    // operators (e.g. `a += b`). The evaluation order depends on whether
    // or not the operator is overloaded (e.g. whether or not a trait
    // like AddAssign is implemented).

    // For primitive types (which, despite having a trait impl, don't actually
    // end up calling it), the evaluation order is right-to-left. For example,
    // the following code snippet:
    //
    //    let y = &mut 0;
    //    *{println!("LHS!"); y} += {println!("RHS!"); 1};
    //
    // will print:
    //
    // RHS!
    // LHS!
    //
    // However, if the operator is used on a non-primitive type,
    // the evaluation order will be left-to-right, since the operator
    // actually gets desugared to a method call. For example, this
    // nearly identical code snippet:
    //
    //    let y = &mut String::new();
    //    *{println!("LHS String"); y} += {println!("RHS String"); "hi"};
    //
    // will print:
    //
    // LHS String
    // RHS String
    //
    // To determine the actual execution order, we need to perform
    // trait resolution. Unfortunately, we need to be able to compute
    // yield_in_scope before type checking is even done, as it gets
    // used by AST borrowcheck.
    //
    // Fortunately, we don't need to know the actual execution order.
    // It suffices to know the 'worst case' order with respect to yields.
    // Specifically, we need to know the highest 'expr_and_pat_count'
    // that we could assign to the yield expression. To do this,
    // we pick the greater of the two values from the left-hand
    // and right-hand expressions. This makes us overly conservative
    // about what types could possibly live across yield points,
    // but we will never fail to detect that a type does actually
    // live across a yield point. The latter part is critical -
    // we're already overly conservative about what types will live
    // across yield points, as the generated MIR will determine
    // when things are actually live. However, for typecheck to work
    // properly, we can't miss any types.

    match expr.kind {
        // Manually recurse over closures, because they are nested bodies
        // that share the parent environment. We handle const blocks in
        // `visit_inline_const`.
        hir::ExprKind::Closure(&hir::Closure { body, .. }) => {
            let body = visitor.tcx.hir().body(body);
            visitor.visit_body(body);
        }
        hir::ExprKind::AssignOp(_, left_expr, right_expr) => {
            debug!(
                "resolve_expr - enabling pessimistic_yield, was previously {}",
                prev_pessimistic
            );

            let start_point = visitor.fixup_scopes.len();
            visitor.pessimistic_yield = true;

            // If the actual execution order turns out to be right-to-left,
            // then we're fine. However, if the actual execution order is left-to-right,
            // then we'll assign too low a count to any `yield` expressions
            // we encounter in `right_expr` - they should really occur after all of the
            // expressions in `left_expr`.
            visitor.visit_expr(right_expr);
            visitor.pessimistic_yield = prev_pessimistic;

            debug!("resolve_expr - restoring pessimistic_yield to {}", prev_pessimistic);
            visitor.visit_expr(left_expr);
            debug!("resolve_expr - fixing up counts to {}", visitor.expr_and_pat_count);

            // Remove and process any scopes pushed by the visitor
            let target_scopes = visitor.fixup_scopes.drain(start_point..);

            for scope in target_scopes {
                let yield_data =
                    visitor.scope_tree.yield_in_scope.get_mut(&scope).unwrap().last_mut().unwrap();
                let count = yield_data.expr_and_pat_count;
                let span = yield_data.span;

                // expr_and_pat_count never decreases. Since we recorded counts in yield_in_scope
                // before walking the left-hand side, it should be impossible for the recorded
                // count to be greater than the left-hand side count.
                if count > visitor.expr_and_pat_count {
                    bug!(
                        "Encountered greater count {} at span {:?} - expected no greater than {}",
                        count,
                        span,
                        visitor.expr_and_pat_count
                    );
                }
                let new_count = visitor.expr_and_pat_count;
                debug!(
                    "resolve_expr - increasing count for scope {:?} from {} to {} at span {:?}",
                    scope, count, new_count, span
                );

                yield_data.expr_and_pat_count = new_count;
            }
        }

        hir::ExprKind::If(cond, then, Some(otherwise)) => {
            let expr_cx = visitor.cx;
            let data = if expr.span.at_least_rust_2024() {
                ScopeData::IfThenRescope
            } else {
                ScopeData::IfThen
            };
            visitor.enter_scope(Scope { local_id: then.hir_id.local_id, data });
            visitor.cx.var_parent = visitor.cx.parent;
            visitor.visit_expr(cond);
            visitor.visit_expr(then);
            visitor.cx = expr_cx;
            visitor.visit_expr(otherwise);
        }

        hir::ExprKind::If(cond, then, None) => {
            let expr_cx = visitor.cx;
            let data = if expr.span.at_least_rust_2024() {
                ScopeData::IfThenRescope
            } else {
                ScopeData::IfThen
            };
            visitor.enter_scope(Scope { local_id: then.hir_id.local_id, data });
            visitor.cx.var_parent = visitor.cx.parent;
            visitor.visit_expr(cond);
            visitor.visit_expr(then);
            visitor.cx = expr_cx;
        }

        _ => intravisit::walk_expr(visitor, expr),
    }

    visitor.expr_and_pat_count += 1;

    debug!("resolve_expr post-increment {}, expr = {:?}", visitor.expr_and_pat_count, expr);

    if let hir::ExprKind::Yield(_, source) = &expr.kind {
        // Mark this expr's scope and all parent scopes as containing `yield`.
        let mut scope = Scope { local_id: expr.hir_id.local_id, data: ScopeData::Node };
        loop {
            let span = match expr.kind {
                hir::ExprKind::Yield(expr, hir::YieldSource::Await { .. }) => {
                    expr.span.shrink_to_hi().to(expr.span)
                }
                _ => expr.span,
            };
            let data =
                YieldData { span, expr_and_pat_count: visitor.expr_and_pat_count, source: *source };
            match visitor.scope_tree.yield_in_scope.get_mut(&scope) {
                Some(yields) => yields.push(data),
                None => {
                    visitor.scope_tree.yield_in_scope.insert(scope, vec![data]);
                }
            }

            if visitor.pessimistic_yield {
                debug!("resolve_expr in pessimistic_yield - marking scope {:?} for fixup", scope);
                visitor.fixup_scopes.push(scope);
            }

            // Keep traversing up while we can.
            match visitor.scope_tree.parent_map.get(&scope) {
                // Don't cross from closure bodies to their parent.
                Some(&(superscope, _)) => match superscope.data {
                    ScopeData::CallSite => break,
                    _ => scope = superscope,
                },
                None => break,
            }
        }
    }

    visitor.cx = prev_cx;
}

fn resolve_local<'tcx>(
    visitor: &mut ScopeResolutionVisitor<'tcx>,
    pat: Option<&'tcx hir::Pat<'tcx>>,
    init: Option<&'tcx hir::Expr<'tcx>>,
) {
    debug!("resolve_local(pat={:?}, init={:?})", pat, init);

    let blk_scope = visitor.cx.var_parent.map(|(p, _)| p);

    // As an exception to the normal rules governing temporary
    // lifetimes, initializers in a let have a temporary lifetime
    // of the enclosing block. This means that, e.g., a program
    // like the following is legal:
    //
    //     let ref x = HashMap::new();
    //
    // This works because the hash map is not freed until the end of
    // the enclosing block.
    //
    // We express the rules more formally based on 3 grammars (defined
    // fully in the helpers below that implement them):
    //
    // 1. `E&`, which matches expressions like `&<rvalue>` that
    //    own a pointer into the stack.
    //
    // 2. `P&`, which matches patterns like `ref x` or `(ref x, ref
    //    y)` that produce ref bindings into the value they are
    //    matched against or something (at least partially) owned by
    //    the value they are matched against. (By partially owned,
    //    I mean that creating a binding into a ref-counted or managed value
    //    would still count.)
    //
    // 3. `ET`, which matches both rvalues like `foo()` as well as places
    //    based on rvalues like `foo().x[2].y`.
    //
    // A subexpression `<rvalue>` that appears in a let initializer
    // `let pat [: ty] = expr` has an extended temporary lifetime if
    // any of the following conditions are met:
    //
    // A. `pat` matches `P&` and `expr` matches `ET`
    //    (covers cases where `pat` creates ref bindings into an rvalue
    //     produced by `expr`)
    // B. `ty` is a borrowed pointer and `expr` matches `ET`
    //    (covers cases where coercion creates a borrow)
    // C. `expr` matches `E&`
    //    (covers cases where `expr` borrows an rvalue that is then assigned
    //     to memory (at least partially) owned by the binding)
    //
    // Here are some examples hopefully giving an intuition for where each
    // rule comes into play and why:
    //
    // Rule A. `let (ref x, ref y) = (foo().x, 44)`. The rvalue `(foo().x, 44)`
    // would have an extended lifetime, but not `foo()`.
    //
    // Rule B. `let x = &foo().x`. The rvalue `foo()` would have extended
    // lifetime.
    //
    // In some cases, multiple rules may apply (though not to the same
    // rvalue). For example:
    //
    //     let ref x = [&a(), &b()];
    //
    // Here, the expression `[...]` has an extended lifetime due to rule
    // A, but the inner rvalues `a()` and `b()` have an extended lifetime
    // due to rule C.
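    //
    // Rule C on its own: in `let x = &foo();`, the rvalue `foo()` is
    // extended so that `x` remains a valid reference for the rest of the
    // enclosing block.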

    if let Some(expr) = init {
        record_rvalue_scope_if_borrow_expr(visitor, expr, blk_scope);

        if let Some(pat) = pat {
            if is_binding_pat(pat) {
                visitor.scope_tree.record_rvalue_candidate(
                    expr.hir_id,
                    RvalueCandidateType::Pattern {
                        target: expr.hir_id.local_id,
                        lifetime: blk_scope,
                    },
                );
            }
        }
    }

    // Make sure we visit the initializer first, so expr_and_pat_count remains correct.
    // The correct order, as shared between coroutine_interior, drop_ranges and intravisitor,
    // is to walk the initializer, followed by the pattern bindings, and finally the `else` block.
    if let Some(expr) = init {
        visitor.visit_expr(expr);
    }
    if let Some(pat) = pat {
        visitor.visit_pat(pat);
    }

    /// Returns `true` if `pat` matches the `P&` non-terminal.
    ///
    /// ```text
    ///     P& = ref X
    ///        | StructName { ..., P&, ... }
    ///        | VariantName(..., P&, ...)
    ///        | [ ..., P&, ... ]
    ///        | ( ..., P&, ... )
    ///        | ... "|" P& "|" ...
    ///        | box P&
    ///        | P& if ...
    /// ```
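    ///
    /// For example (illustrative): `(ref x, _)` and `Some(ref x)` match `P&`,
    /// while a plain `x` or `&x` does not.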
    fn is_binding_pat(pat: &hir::Pat<'_>) -> bool {
        // Note that the code below looks for *explicit* refs only, that is, it won't
        // know about *implicit* refs as introduced in #42640.
        //
        // This is not a problem. For example, consider
        //
        //      let (ref x, ref y) = (Foo { .. }, Bar { .. });
        //
        // Due to the explicit refs on the left hand side, the below code would signal
        // that the temporary value on the right hand side should live until the end of
        // the enclosing block (as opposed to being dropped after the let is complete).
        //
        // To create an implicit ref, however, you must have a borrowed value on the RHS
        // already, as in this example (which won't compile before #42640):
        //
        //      let Foo { x, .. } = &Foo { x: ..., ... };
        //
        // in place of
        //
        //      let Foo { ref x, .. } = Foo { ... };
        //
        // In the former case (the implicit ref version), the temporary is created by the
        // & expression, and its lifetime would be extended to the end of the block (due
        // to a different rule, not the below code).
        match pat.kind {
            PatKind::Binding(hir::BindingMode(hir::ByRef::Yes(_), _), ..) => true,

            PatKind::Struct(_, field_pats, _) => field_pats.iter().any(|fp| is_binding_pat(fp.pat)),

            PatKind::Slice(pats1, pats2, pats3) => {
                pats1.iter().any(|p| is_binding_pat(p))
                    || pats2.iter().any(|p| is_binding_pat(p))
                    || pats3.iter().any(|p| is_binding_pat(p))
            }

            PatKind::Or(subpats)
            | PatKind::TupleStruct(_, subpats, _)
            | PatKind::Tuple(subpats, _) => subpats.iter().any(|p| is_binding_pat(p)),

            PatKind::Box(subpat) | PatKind::Deref(subpat) | PatKind::Guard(subpat, _) => {
                is_binding_pat(subpat)
            }

            PatKind::Ref(_, _)
            | PatKind::Binding(hir::BindingMode(hir::ByRef::No, _), ..)
            | PatKind::Wild
            | PatKind::Never
            | PatKind::Expr(_)
            | PatKind::Range(_, _, _)
            | PatKind::Err(_) => false,
        }
    }

    /// If `expr` matches the `E&` grammar, records an extended rvalue scope as appropriate:
    ///
    /// ```text
    ///     E& = & ET
    ///        | StructName { ..., f: E&, ... }
    ///        | [ ..., E&, ... ]
    ///        | ( ..., E&, ... )
    ///        | {...; E&}
    ///        | if _ { ...; E& } else { ...; E& }
    ///        | match _ { ..., _ => E&, ... }
    ///        | box E&
    ///        | E& as ...
    ///        | ( E& )
    /// ```
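    ///
    /// For example (illustrative, with a hypothetical `Config` struct and
    /// `compute()` function): in `let cfg = Config { limit: &compute() };`,
    /// the struct literal matches `StructName { ..., f: E&, ... }`, so the
    /// `compute()` temporary is recorded as an rvalue candidate and lives
    /// until the end of the enclosing block.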
    fn record_rvalue_scope_if_borrow_expr<'tcx>(
        visitor: &mut ScopeResolutionVisitor<'tcx>,
        expr: &hir::Expr<'_>,
        blk_id: Option<Scope>,
    ) {
        match expr.kind {
            hir::ExprKind::AddrOf(_, _, subexpr) => {
                record_rvalue_scope_if_borrow_expr(visitor, subexpr, blk_id);
                visitor.scope_tree.record_rvalue_candidate(
                    subexpr.hir_id,
                    RvalueCandidateType::Borrow {
                        target: subexpr.hir_id.local_id,
                        lifetime: blk_id,
                    },
                );
            }
            hir::ExprKind::Struct(_, fields, _) => {
                for field in fields {
                    record_rvalue_scope_if_borrow_expr(visitor, field.expr, blk_id);
                }
            }
            hir::ExprKind::Array(subexprs) | hir::ExprKind::Tup(subexprs) => {
                for subexpr in subexprs {
                    record_rvalue_scope_if_borrow_expr(visitor, subexpr, blk_id);
                }
            }
            hir::ExprKind::Cast(subexpr, _) => {
                record_rvalue_scope_if_borrow_expr(visitor, subexpr, blk_id)
            }
            hir::ExprKind::Block(block, _) => {
                if let Some(subexpr) = block.expr {
                    record_rvalue_scope_if_borrow_expr(visitor, subexpr, blk_id);
                }
            }
            hir::ExprKind::If(_, then_block, else_block) => {
                record_rvalue_scope_if_borrow_expr(visitor, then_block, blk_id);
                if let Some(else_block) = else_block {
                    record_rvalue_scope_if_borrow_expr(visitor, else_block, blk_id);
                }
            }
            hir::ExprKind::Match(_, arms, _) => {
                for arm in arms {
                    record_rvalue_scope_if_borrow_expr(visitor, arm.body, blk_id);
                }
            }
            hir::ExprKind::Call(..) | hir::ExprKind::MethodCall(..) => {
                // FIXME(@dingxiangfei2009): choose call arguments here
                // for candidacy for extended parameter rule application
            }
            hir::ExprKind::Index(..) => {
                // FIXME(@dingxiangfei2009): select the indices
                // as candidate for rvalue scope rules
            }
            _ => {}
        }
    }
}

impl<'tcx> ScopeResolutionVisitor<'tcx> {
    /// Records the current parent (if any) as the parent of `child_scope`.
    /// Returns the depth of `child_scope`.
    fn record_child_scope(&mut self, child_scope: Scope) -> ScopeDepth {
        let parent = self.cx.parent;
        self.scope_tree.record_scope_parent(child_scope, parent);
        // If `child_scope` has no parent, it must be the root node, and so has
        // a depth of 1. Otherwise, its depth is one more than its parent's.
        parent.map_or(1, |(_p, d)| d + 1)
    }

    /// Records the current parent (if any) as the parent of `child_scope`,
    /// and sets `child_scope` as the new current parent.
    fn enter_scope(&mut self, child_scope: Scope) {
        let child_depth = self.record_child_scope(child_scope);
        self.cx.parent = Some((child_scope, child_depth));
    }

    fn enter_node_scope_with_dtor(&mut self, id: hir::ItemLocalId) {
        // If the node was previously marked as a terminating scope during the
        // recursive visit of its parent node in the HIR, then we need to
        // account for the destruction scope representing the scope of
        // the destructors that run immediately after it completes.
        if self.terminating_scopes.contains(&id) {
            self.enter_scope(Scope { local_id: id, data: ScopeData::Destruction });
        }
        self.enter_scope(Scope { local_id: id, data: ScopeData::Node });
    }

    fn enter_body(&mut self, hir_id: hir::HirId, f: impl FnOnce(&mut Self)) {
        // Save all state that is specific to the outer function
        // body. These will be restored down below, once we've
        // visited the body.
        let outer_ec = mem::replace(&mut self.expr_and_pat_count, 0);
        let outer_cx = self.cx;
        let outer_ts = mem::take(&mut self.terminating_scopes);
        // The 'pessimistic yield' flag is set to true when we are
        // processing a `+=` statement and have to make pessimistic
        // control flow assumptions. This doesn't apply to nested
        // bodies within the `+=` statement. See #69307.
        let outer_pessimistic_yield = mem::replace(&mut self.pessimistic_yield, false);
        self.terminating_scopes.insert(hir_id.local_id);

        self.enter_scope(Scope { local_id: hir_id.local_id, data: ScopeData::CallSite });
        self.enter_scope(Scope { local_id: hir_id.local_id, data: ScopeData::Arguments });

        f(self);

        // Restore context we had at the start.
        self.expr_and_pat_count = outer_ec;
        self.cx = outer_cx;
        self.terminating_scopes = outer_ts;
        self.pessimistic_yield = outer_pessimistic_yield;
    }
}

impl<'tcx> Visitor<'tcx> for ScopeResolutionVisitor<'tcx> {
    fn visit_block(&mut self, b: &'tcx Block<'tcx>) {
        resolve_block(self, b);
    }

    fn visit_body(&mut self, body: &hir::Body<'tcx>) {
        let body_id = body.id();
        let owner_id = self.tcx.hir().body_owner_def_id(body_id);

        debug!(
            "visit_body(id={:?}, span={:?}, body.id={:?}, cx.parent={:?})",
            owner_id,
            self.tcx.sess.source_map().span_to_diagnostic_string(body.value.span),
            body_id,
            self.cx.parent
        );

        self.enter_body(body.value.hir_id, |this| {
            if this.tcx.hir().body_owner_kind(owner_id).is_fn_or_closure() {
                // The arguments and `self` are parented to the fn.
                this.cx.var_parent = this.cx.parent.take();
                for param in body.params {
                    this.visit_pat(param.pat);
                }

                // The body of every fn is a root scope.
                this.cx.parent = this.cx.var_parent;
                this.visit_expr(body.value)
            } else {
                // Only functions have an outer terminating (drop) scope, while
                // temporaries in constant initializers may be 'static, but only
                // according to rvalue lifetime semantics, using the same
                // syntactical rules used for let initializers.
                //
                // e.g., in `let x = &f();`, the temporary holding the result from
                // the `f()` call lives for the entirety of the surrounding block.
                //
                // Similarly, `const X: ... = &f();` would have the result of `f()`
                // live for `'static`, implying (if Drop restrictions on constants
                // ever get lifted) that the value *could* have a destructor, but
                // it'd get leaked instead of the destructor running during the
                // evaluation of `X` (if at all allowed by CTFE).
                //
                // However, `const Y: ... = g(&f());`, like `let y = g(&f());`,
                // would *not* let the `f()` temporary escape into an outer scope
                // (i.e., `'static`), which means that after `g` returns, it drops,
                // and all the associated destruction scope rules apply.
                this.cx.var_parent = None;
                resolve_local(this, None, Some(body.value));
            }
        })
    }

    fn visit_arm(&mut self, a: &'tcx Arm<'tcx>) {
        resolve_arm(self, a);
    }
    fn visit_pat(&mut self, p: &'tcx Pat<'tcx>) {
        resolve_pat(self, p);
    }
    fn visit_stmt(&mut self, s: &'tcx Stmt<'tcx>) {
        resolve_stmt(self, s);
    }
    fn visit_expr(&mut self, ex: &'tcx Expr<'tcx>) {
        resolve_expr(self, ex);
    }
    fn visit_local(&mut self, l: &'tcx LetStmt<'tcx>) {
        resolve_local(self, Some(l.pat), l.init)
    }
    fn visit_inline_const(&mut self, c: &'tcx hir::ConstBlock) {
        let body = self.tcx.hir().body(c.body);
        self.visit_body(body);
    }
}

/// Per-body `region::ScopeTree`. The `DefId` should be the owner `DefId` for the body;
/// in the case of closures, this will be redirected to the enclosing function.
///
/// Performance: This is a query rather than a simple function to enable
/// re-use in incremental scenarios. We may sometimes need to rerun the
/// type checker even when the HIR hasn't changed, and in those cases
/// we can avoid reconstructing the region scope tree.
pub(crate) fn region_scope_tree(tcx: TyCtxt<'_>, def_id: DefId) -> &ScopeTree {
    let typeck_root_def_id = tcx.typeck_root_def_id(def_id);
    if typeck_root_def_id != def_id {
        return tcx.region_scope_tree(typeck_root_def_id);
    }

    let scope_tree = if let Some(body) = tcx.hir().maybe_body_owned_by(def_id.expect_local()) {
        let mut visitor = ScopeResolutionVisitor {
            tcx,
            scope_tree: ScopeTree::default(),
            expr_and_pat_count: 0,
            cx: Context { parent: None, var_parent: None },
            terminating_scopes: Default::default(),
            pessimistic_yield: false,
            fixup_scopes: vec![],
        };

        visitor.scope_tree.root_body = Some(body.value.hir_id);
        visitor.visit_body(&body);
        visitor.scope_tree
    } else {
        ScopeTree::default()
    };

    tcx.arena.alloc(scope_tree)
}