// compiler/rustc_hir_typeck/src/writeback.rs

1// Type resolution: the phase that finds all the types in the AST with
2// unresolved type variables and replaces "ty_var" types with their
3// generic parameters.
4
5use std::mem;
6
7use rustc_data_structures::unord::ExtendUnord;
8use rustc_errors::ErrorGuaranteed;
9use rustc_hir::intravisit::{self, InferKind, Visitor};
10use rustc_hir::{self as hir, AmbigArg, HirId};
11use rustc_middle::span_bug;
12use rustc_middle::traits::ObligationCause;
13use rustc_middle::ty::adjustment::{Adjust, Adjustment, PointerCoercion};
14use rustc_middle::ty::fold::{TypeFoldable, TypeFolder, fold_regions};
15use rustc_middle::ty::visit::TypeVisitableExt;
16use rustc_middle::ty::{self, Ty, TyCtxt, TypeSuperFoldable};
17use rustc_span::{Span, sym};
18use rustc_trait_selection::error_reporting::infer::need_type_info::TypeAnnotationNeeded;
19use rustc_trait_selection::solve;
20use tracing::{debug, instrument};
21
22use crate::FnCtxt;
23
24///////////////////////////////////////////////////////////////////////////
25// Entry point
26
27// During type inference, partially inferred types are
28// represented using Type variables (ty::Infer). These don't appear in
29// the final TypeckResults since all of the types should have been
30// inferred once typeck is done.
// When type inference is running however, having to update the
// typeck results every time a new type is inferred would be unreasonably slow,
33// so instead all of the replacement happens at the end in
34// resolve_type_vars_in_body, which creates a new TypeTables which
35// doesn't contain any inference types.
impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
    /// Entry point for writeback: resolves every remaining inference variable
    /// recorded for `body` and produces the final, inference-free
    /// `TypeckResults`, allocated in the tcx arena.
    ///
    /// The individual `visit_*` calls each copy one table out of the
    /// in-progress `self.typeck_results` into the fresh results; a few tables
    /// at the end are moved out with `mem::take` instead of being re-visited.
    pub(crate) fn resolve_type_vars_in_body(
        &self,
        body: &'tcx hir::Body<'tcx>,
    ) -> &'tcx ty::TypeckResults<'tcx> {
        let item_def_id = self.tcx.hir().body_owner_def_id(body.id());

        // This attribute causes us to dump some writeback information
        // in the form of errors, which is used for unit tests.
        let rustc_dump_user_args = self.tcx.has_attr(item_def_id, sym::rustc_dump_user_args);

        let mut wbcx = WritebackCx::new(self, body, rustc_dump_user_args);
        // Parameters are not visited by `visit_body` below, so handle them first.
        for param in body.params {
            wbcx.visit_node_id(param.pat.span, param.hir_id);
        }
        // Type only exists for constants and statics, not functions.
        match self.tcx.hir().body_owner_kind(item_def_id) {
            hir::BodyOwnerKind::Const { .. } | hir::BodyOwnerKind::Static(_) => {
                let item_hir_id = self.tcx.local_def_id_to_hir_id(item_def_id);
                wbcx.visit_node_id(body.value.span, item_hir_id);
            }
            hir::BodyOwnerKind::Closure | hir::BodyOwnerKind::Fn => (),
        }
        wbcx.visit_body(body);
        wbcx.visit_min_capture_map();
        wbcx.eval_closure_size();
        wbcx.visit_fake_reads_map();
        wbcx.visit_closures();
        wbcx.visit_liberated_fn_sigs();
        wbcx.visit_fru_field_types();
        wbcx.visit_opaque_types();
        wbcx.visit_coercion_casts();
        wbcx.visit_user_provided_tys();
        wbcx.visit_user_provided_sigs();
        wbcx.visit_coroutine_interior();
        wbcx.visit_offset_of_container_types();

        // These tables are moved (not copied) into the final results.
        wbcx.typeck_results.rvalue_scopes =
            mem::take(&mut self.typeck_results.borrow_mut().rvalue_scopes);

        let used_trait_imports =
            mem::take(&mut self.typeck_results.borrow_mut().used_trait_imports);
        debug!("used_trait_imports({:?}) = {:?}", item_def_id, used_trait_imports);
        wbcx.typeck_results.used_trait_imports = used_trait_imports;

        wbcx.typeck_results.treat_byte_string_as_slice =
            mem::take(&mut self.typeck_results.borrow_mut().treat_byte_string_as_slice);

        debug!("writeback: typeck results for {:?} are {:#?}", item_def_id, wbcx.typeck_results);

        self.tcx.arena.alloc(wbcx.typeck_results)
    }
}
89
90///////////////////////////////////////////////////////////////////////////
91// The Writeback context. This visitor walks the HIR, checking the
92// fn-specific typeck results to find references to types or regions. It
93// resolves those regions to remove inference variables and writes the
94// final result back into the master typeck results in the tcx. Here and
95// there, it applies a few ad-hoc checks that were not convenient to
96// do elsewhere.
97
/// The writeback context: holds the in-progress inference results (via `fcx`)
/// and the fresh, fully-resolved `TypeckResults` being built from them.
struct WritebackCx<'cx, 'tcx> {
    /// The function context whose inference results are being written back.
    fcx: &'cx FnCtxt<'cx, 'tcx>,

    /// The final results; arena-allocated at the end of
    /// `resolve_type_vars_in_body`.
    typeck_results: ty::TypeckResults<'tcx>,

    /// The body being checked; used by `Resolver` when reporting errors.
    body: &'tcx hir::Body<'tcx>,

    /// Set when the body owner carries `#[rustc_dump_user_args]`; makes
    /// `visit_user_provided_tys` dump user args as errors (a unit-test hook).
    rustc_dump_user_args: bool,
}
107
impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> {
    /// Creates a new writeback context for `body`, pre-tainting the fresh
    /// results if type-checking already reported errors.
    fn new(
        fcx: &'cx FnCtxt<'cx, 'tcx>,
        body: &'tcx hir::Body<'tcx>,
        rustc_dump_user_args: bool,
    ) -> WritebackCx<'cx, 'tcx> {
        let owner = body.id().hir_id.owner;

        let mut wbcx = WritebackCx {
            fcx,
            typeck_results: ty::TypeckResults::new(owner),
            body,
            rustc_dump_user_args,
        };

        // HACK: We specifically don't want the (opaque) error from tainting our
        // inference context. That'll prevent us from doing opaque type inference
        // later on in borrowck, which affects diagnostic spans pretty negatively.
        if let Some(e) = fcx.tainted_by_errors() {
            wbcx.typeck_results.tainted_by_errors = Some(e);
        }

        wbcx
    }

    fn tcx(&self) -> TyCtxt<'tcx> {
        self.fcx.tcx
    }

    /// Records the fully-resolved type of `hir_id`. Asserts the type contains
    /// no inference variables, placeholders, or free regions — nothing of that
    /// sort may survive into the final `TypeckResults`.
    fn write_ty_to_typeck_results(&mut self, hir_id: HirId, ty: Ty<'tcx>) {
        debug!("write_ty_to_typeck_results({:?}, {:?})", hir_id, ty);
        assert!(
            !ty.has_infer() && !ty.has_placeholders() && !ty.has_free_regions(),
            "{ty} can't be put into typeck results"
        );
        self.typeck_results.node_types_mut().insert(hir_id, ty);
    }

    // Hacky hack: During type-checking, we treat *all* operators
    // as potentially overloaded. But then, during writeback, if
    // we observe that something like `a+b` is (known to be)
    // operating on scalars, we clear the overload.
    fn fix_scalar_builtin_expr(&mut self, e: &hir::Expr<'_>) {
        match e.kind {
            hir::ExprKind::Unary(hir::UnOp::Neg | hir::UnOp::Not, inner) => {
                let inner_ty = self.typeck_results.node_type(inner.hir_id);

                if inner_ty.is_scalar() {
                    // Drop the method-call record; the op is builtin.
                    self.typeck_results.type_dependent_defs_mut().remove(e.hir_id);
                    self.typeck_results.node_args_mut().remove(e.hir_id);
                }
            }
            hir::ExprKind::Binary(ref op, lhs, rhs) | hir::ExprKind::AssignOp(ref op, lhs, rhs) => {
                let lhs_ty = self.typeck_results.node_type(lhs.hir_id);
                let rhs_ty = self.typeck_results.node_type(rhs.hir_id);

                if lhs_ty.is_scalar() && rhs_ty.is_scalar() {
                    self.typeck_results.type_dependent_defs_mut().remove(e.hir_id);
                    self.typeck_results.node_args_mut().remove(e.hir_id);

                    match e.kind {
                        hir::ExprKind::Binary(..) => {
                            // By-ref overloaded operators auto-ref their
                            // operands; builtin scalar ops take them by value,
                            // so pop that autoref adjustment off each side.
                            if !op.node.is_by_value() {
                                let mut adjustments = self.typeck_results.adjustments_mut();
                                if let Some(a) = adjustments.get_mut(lhs.hir_id) {
                                    a.pop();
                                }
                                if let Some(a) = adjustments.get_mut(rhs.hir_id) {
                                    a.pop();
                                }
                            }
                        }
                        // `a += b` auto-mut-refs `a` when overloaded; drop that too.
                        hir::ExprKind::AssignOp(..)
                            if let Some(a) =
                                self.typeck_results.adjustments_mut().get_mut(lhs.hir_id) =>
                        {
                            a.pop();
                        }
                        _ => {}
                    }
                }
            }
            _ => {}
        }
    }

    // (ouz-a 1005988): Normally `[T] : std::ops::Index<usize>` should be normalized
    // into [T] but currently `Where` clause stops the normalization process for it,
    // here we compare types of expr and base in a code without `Where` clause they would be equal
    // if they are not we don't modify the expr, hence we bypass the ICE
    fn is_builtin_index(
        &mut self,
        e: &hir::Expr<'_>,
        base_ty: Ty<'tcx>,
        index_ty: Ty<'tcx>,
    ) -> bool {
        // Builtin indexing requires: the base has a builtin element type, the
        // expression's type matches that element type, and the index is `usize`.
        if let Some(elem_ty) = base_ty.builtin_index()
            && let Some(exp_ty) = self.typeck_results.expr_ty_opt(e)
        {
            elem_ty == exp_ty && index_ty == self.fcx.tcx.types.usize
        } else {
            false
        }
    }

    // Similar to operators, indexing is always assumed to be overloaded
    // Here, correct cases where an indexing expression can be simplified
    // to use builtin indexing because the index type is known to be
    // usize-ish
    fn fix_index_builtin_expr(&mut self, e: &hir::Expr<'_>) {
        if let hir::ExprKind::Index(ref base, ref index, _) = e.kind {
            // All valid indexing looks like this; might encounter non-valid indexes at this point.
            let base_ty = self.typeck_results.expr_ty_adjusted(base);
            if let ty::Ref(_, base_ty_inner, _) = *base_ty.kind() {
                let index_ty = self.typeck_results.expr_ty_adjusted(index);
                if self.is_builtin_index(e, base_ty_inner, index_ty) {
                    // Remove the method call record
                    self.typeck_results.type_dependent_defs_mut().remove(e.hir_id);
                    self.typeck_results.node_args_mut().remove(e.hir_id);

                    if let Some(a) = self.typeck_results.adjustments_mut().get_mut(base.hir_id) {
                        // Discard the need for a mutable borrow

                        // Extra adjustment made when indexing causes a drop
                        // of size information - we need to get rid of it
                        // Since this is "after" the other adjustment to be
                        // discarded, we do an extra `pop()`
                        if let Some(Adjustment {
                            kind: Adjust::Pointer(PointerCoercion::Unsize),
                            ..
                        }) = a.pop()
                        {
                            // So the borrow discard actually happens here
                            a.pop();
                        }
                    }
                }
            }
        }
    }

    /// Writes back the const block's own node and then recurses into its body.
    fn visit_const_block(&mut self, span: Span, anon_const: &hir::ConstBlock) {
        self.visit_node_id(span, anon_const.hir_id);

        let body = self.tcx().hir().body(anon_const.body);
        self.visit_body(body);
    }
}
256
257///////////////////////////////////////////////////////////////////////////
258// Impl of Visitor for Resolver
259//
260// This is the master code which walks the AST. It delegates most of
261// the heavy lifting to the generic visit and resolve functions
262// below. In general, a function is made into a `visitor` if it must
263// traffic in node-ids or update typeck results in the type context etc.
264
impl<'cx, 'tcx> Visitor<'tcx> for WritebackCx<'cx, 'tcx> {
    fn visit_expr(&mut self, e: &'tcx hir::Expr<'tcx>) {
        // Handle expression kinds that own extra state beyond their node id.
        match e.kind {
            hir::ExprKind::Closure(&hir::Closure { body, .. }) => {
                // Closure bodies are separate `Body`s; visit their params and
                // contents here since `walk_expr` does not descend into them.
                let body = self.fcx.tcx.hir().body(body);
                for param in body.params {
                    self.visit_node_id(e.span, param.hir_id);
                }

                self.visit_body(body);
            }
            hir::ExprKind::Struct(_, fields, _) => {
                for field in fields {
                    self.visit_field_id(field.hir_id);
                }
            }
            hir::ExprKind::Field(..) | hir::ExprKind::OffsetOf(..) => {
                self.visit_field_id(e.hir_id);
            }
            hir::ExprKind::ConstBlock(ref anon_const) => {
                self.visit_const_block(e.span, anon_const);
            }
            _ => {}
        }

        self.visit_node_id(e.span, e.hir_id);
        intravisit::walk_expr(self, e);

        // The builtin-operator fixups need operand types, so they run after
        // the node ids (and thus types) above have been written back.
        self.fix_scalar_builtin_expr(e);
        self.fix_index_builtin_expr(e);
    }

    fn visit_generic_param(&mut self, p: &'tcx hir::GenericParam<'tcx>) {
        match &p.kind {
            hir::GenericParamKind::Lifetime { .. } => {
                // Nothing to write back here
            }
            hir::GenericParamKind::Type { .. } | hir::GenericParamKind::Const { .. } => {
                // Bodies should not contain type/const generic params at this
                // point; delay a bug rather than ICE immediately.
                self.tcx()
                    .dcx()
                    .span_delayed_bug(p.span, format!("unexpected generic param: {p:?}"));
            }
        }
    }

    fn visit_block(&mut self, b: &'tcx hir::Block<'tcx>) {
        self.visit_node_id(b.span, b.hir_id);
        intravisit::walk_block(self, b);
    }

    fn visit_pat(&mut self, p: &'tcx hir::Pat<'tcx>) {
        match p.kind {
            hir::PatKind::Binding(..) => {
                // Copy over the resolved binding mode (by-value vs. by-ref).
                let typeck_results = self.fcx.typeck_results.borrow();
                if let Some(bm) =
                    typeck_results.extract_binding_mode(self.tcx().sess, p.hir_id, p.span)
                {
                    self.typeck_results.pat_binding_modes_mut().insert(p.hir_id, bm);
                }
            }
            hir::PatKind::Struct(_, fields, _) => {
                for field in fields {
                    self.visit_field_id(field.hir_id);
                }
            }
            _ => {}
        };

        self.visit_rust_2024_migration_desugared_pats(p.hir_id);
        self.visit_skipped_ref_pats(p.hir_id);
        self.visit_pat_adjustments(p.span, p.hir_id);

        self.visit_node_id(p.span, p.hir_id);
        intravisit::walk_pat(self, p);
    }

    fn visit_pat_expr(&mut self, expr: &'tcx hir::PatExpr<'tcx>) {
        self.visit_node_id(expr.span, expr.hir_id);
        if let hir::PatExprKind::ConstBlock(c) = &expr.kind {
            self.visit_const_block(expr.span, c);
        }
        intravisit::walk_pat_expr(self, expr);
    }

    fn visit_local(&mut self, l: &'tcx hir::LetStmt<'tcx>) {
        // Walk first so the pattern/initializer are written back before we
        // record the local's own (resolved) type.
        intravisit::walk_local(self, l);
        let var_ty = self.fcx.local_ty(l.span, l.hir_id);
        let var_ty = self.resolve(var_ty, &l.span);
        self.write_ty_to_typeck_results(l.hir_id, var_ty);
    }

    fn visit_ty(&mut self, hir_ty: &'tcx hir::Ty<'tcx, AmbigArg>) {
        intravisit::walk_ty(self, hir_ty);
        // If there are type checking errors, Type privacy pass will stop,
        // so we may not get the type from hid_id, see #104513
        if let Some(ty) = self.fcx.node_ty_opt(hir_ty.hir_id) {
            let ty = self.resolve(ty, &hir_ty.span);
            self.write_ty_to_typeck_results(hir_ty.hir_id, ty);
        }
    }

    fn visit_infer(
        &mut self,
        inf_id: HirId,
        inf_span: Span,
        _kind: InferKind<'cx>,
    ) -> Self::Result {
        self.visit_id(inf_id);

        // We don't currently write inference results of const infer vars to
        // the typeck results as there is not yet any part of the compiler that
        // needs this information.
        if let Some(ty) = self.fcx.node_ty_opt(inf_id) {
            let ty = self.resolve(ty, &inf_span);
            self.write_ty_to_typeck_results(inf_id, ty);
        }
    }
}
383
impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> {
    /// Writes back the `closure_size_eval` table, resolving each entry.
    /// Sorted via stable hashing so the output order is deterministic.
    fn eval_closure_size(&mut self) {
        self.tcx().with_stable_hashing_context(|ref hcx| {
            let fcx_typeck_results = self.fcx.typeck_results.borrow();

            self.typeck_results.closure_size_eval = fcx_typeck_results
                .closure_size_eval
                .to_sorted(hcx, false)
                .into_iter()
                .map(|(&closure_def_id, data)| {
                    let closure_hir_id = self.tcx().local_def_id_to_hir_id(closure_def_id);
                    let data = self.resolve(*data, &closure_hir_id);
                    (closure_def_id, data)
                })
                .collect();
        })
    }

    /// Writes back the per-closure minimum-capture map, resolving every
    /// captured place.
    fn visit_min_capture_map(&mut self) {
        self.tcx().with_stable_hashing_context(|ref hcx| {
            let fcx_typeck_results = self.fcx.typeck_results.borrow();

            self.typeck_results.closure_min_captures = fcx_typeck_results
                .closure_min_captures
                .to_sorted(hcx, false)
                .into_iter()
                .map(|(&closure_def_id, root_min_captures)| {
                    let root_var_map_wb = root_min_captures
                        .iter()
                        .map(|(var_hir_id, min_list)| {
                            let min_list_wb = min_list
                                .iter()
                                .map(|captured_place| {
                                    // Prefer the capture's own path expression
                                    // for error locations; fall back to the
                                    // closure itself.
                                    let locatable =
                                        captured_place.info.path_expr_id.unwrap_or_else(|| {
                                            self.tcx().local_def_id_to_hir_id(closure_def_id)
                                        });
                                    self.resolve(captured_place.clone(), &locatable)
                                })
                                .collect();
                            (*var_hir_id, min_list_wb)
                        })
                        .collect();
                    (closure_def_id, root_var_map_wb)
                })
                .collect();
        })
    }

    /// Writes back the fake reads recorded for closures, resolving each place.
    fn visit_fake_reads_map(&mut self) {
        self.tcx().with_stable_hashing_context(move |ref hcx| {
            let fcx_typeck_results = self.fcx.typeck_results.borrow();

            self.typeck_results.closure_fake_reads = fcx_typeck_results
                .closure_fake_reads
                .to_sorted(hcx, true)
                .into_iter()
                .map(|(&closure_def_id, fake_reads)| {
                    let resolved_fake_reads = fake_reads
                        .iter()
                        .map(|(place, cause, hir_id)| {
                            let locatable = self.tcx().local_def_id_to_hir_id(closure_def_id);
                            let resolved_fake_read = self.resolve(place.clone(), &locatable);
                            (resolved_fake_read, *cause, *hir_id)
                        })
                        .collect();

                    (closure_def_id, resolved_fake_reads)
                })
                .collect();
        });
    }

    /// Writes back `closure_kind_origins`: the place whose use determined
    /// each closure's kind.
    fn visit_closures(&mut self) {
        let fcx_typeck_results = self.fcx.typeck_results.borrow();
        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
        let common_hir_owner = fcx_typeck_results.hir_owner;

        let fcx_closure_kind_origins =
            fcx_typeck_results.closure_kind_origins().items_in_stable_order();

        for (local_id, origin) in fcx_closure_kind_origins {
            let hir_id = HirId { owner: common_hir_owner, local_id };
            let place_span = origin.0;
            let place = self.resolve(origin.1.clone(), &place_span);
            self.typeck_results.closure_kind_origins_mut().insert(hir_id, (place_span, place));
        }
    }

    /// Copies over the set of casts that are actually coercions.
    fn visit_coercion_casts(&mut self) {
        let fcx_typeck_results = self.fcx.typeck_results.borrow();

        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);

        let fcx_coercion_casts = fcx_typeck_results.coercion_casts().to_sorted_stable_ord();
        for &local_id in fcx_coercion_casts {
            self.typeck_results.set_coercion_cast(local_id);
        }
    }

    /// Copies over user-provided types, optionally dumping them as errors
    /// when `#[rustc_dump_user_args]` is set (unit-test hook).
    fn visit_user_provided_tys(&mut self) {
        let fcx_typeck_results = self.fcx.typeck_results.borrow();
        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
        let common_hir_owner = fcx_typeck_results.hir_owner;

        if self.rustc_dump_user_args {
            let sorted_user_provided_types =
                fcx_typeck_results.user_provided_types().items_in_stable_order();

            let mut errors_buffer = Vec::new();
            for (local_id, c_ty) in sorted_user_provided_types {
                let hir_id = HirId { owner: common_hir_owner, local_id };

                if let ty::UserTypeKind::TypeOf(_, user_args) = c_ty.value.kind {
                    // This is a unit-testing mechanism.
                    let span = self.tcx().hir().span(hir_id);
                    // We need to buffer the errors in order to guarantee a consistent
                    // order when emitting them.
                    let err =
                        self.tcx().dcx().struct_span_err(span, format!("user args: {user_args:?}"));
                    errors_buffer.push(err);
                }
            }

            if !errors_buffer.is_empty() {
                errors_buffer.sort_by_key(|diag| diag.span.primary_span());
                for err in errors_buffer {
                    err.emit();
                }
            }
        }

        self.typeck_results.user_provided_types_mut().extend(
            fcx_typeck_results.user_provided_types().items().map(|(local_id, c_ty)| {
                let hir_id = HirId { owner: common_hir_owner, local_id };

                // User-provided types are written as given; they should never
                // contain inference variables.
                if cfg!(debug_assertions) && c_ty.has_infer() {
                    span_bug!(
                        hir_id.to_span(self.fcx.tcx),
                        "writeback: `{:?}` has inference variables",
                        c_ty
                    );
                };

                (hir_id, *c_ty)
            }),
        );
    }

    /// Copies over user-provided signatures (e.g. for closures), asserting
    /// they contain no inference variables.
    fn visit_user_provided_sigs(&mut self) {
        let fcx_typeck_results = self.fcx.typeck_results.borrow();
        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);

        self.typeck_results.user_provided_sigs.extend_unord(
            fcx_typeck_results.user_provided_sigs.items().map(|(&def_id, c_sig)| {
                if cfg!(debug_assertions) && c_sig.has_infer() {
                    span_bug!(
                        self.fcx.tcx.def_span(def_id),
                        "writeback: `{:?}` has inference variables",
                        c_sig
                    );
                };

                (def_id, *c_sig)
            }),
        );
    }

    /// Resolves and copies over the predicates that stalled on coroutine
    /// witness types during type-checking.
    fn visit_coroutine_interior(&mut self) {
        let fcx_typeck_results = self.fcx.typeck_results.borrow();
        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
        for (predicate, cause) in &fcx_typeck_results.coroutine_stalled_predicates {
            let (predicate, cause) = self.resolve((*predicate, cause.clone()), &cause.span);
            self.typeck_results.coroutine_stalled_predicates.insert((predicate, cause));
        }
    }

    #[instrument(skip(self), level = "debug")]
    fn visit_opaque_types(&mut self) {
        // We clone the opaques instead of stealing them here as they are still used for
        // normalization in the next generation trait solver.
        //
        // FIXME(-Znext-solver): Opaque types defined after this would simply get dropped
        // at the end of typeck. While this seems unlikely to happen in practice this
        // should still get fixed. Either by preventing writeback from defining new opaque
        // types or by using this function at the end of writeback and running it as a
        // fixpoint.
        let opaque_types = self.fcx.infcx.clone_opaque_types();
        for (opaque_type_key, hidden_type) in opaque_types {
            let hidden_type = self.resolve(hidden_type, &hidden_type.span);
            let opaque_type_key = self.resolve(opaque_type_key, &hidden_type.span);

            // With the old solver, skip "self-referential" entries where the
            // hidden type is just the opaque itself with the same args.
            if !self.fcx.next_trait_solver() {
                if let ty::Alias(ty::Opaque, alias_ty) = hidden_type.ty.kind()
                    && alias_ty.def_id == opaque_type_key.def_id.to_def_id()
                    && alias_ty.args == opaque_type_key.args
                {
                    continue;
                }
            }

            // Here we only detect impl trait definition conflicts when they
            // are equal modulo regions.
            if let Some(last_opaque_ty) =
                self.typeck_results.concrete_opaque_types.insert(opaque_type_key, hidden_type)
                && last_opaque_ty.ty != hidden_type.ty
            {
                assert!(!self.fcx.next_trait_solver());
                if let Ok(d) = hidden_type.build_mismatch_error(&last_opaque_ty, self.tcx()) {
                    d.emit();
                }
            }
        }
    }

    /// Moves over the resolved field index for a field access/init node,
    /// if one was recorded.
    fn visit_field_id(&mut self, hir_id: HirId) {
        if let Some(index) = self.fcx.typeck_results.borrow_mut().field_indices_mut().remove(hir_id)
        {
            self.typeck_results.field_indices_mut().insert(hir_id, index);
        }
    }

    /// Writes back everything attached to a single HIR node: type-dependent
    /// defs, adjustments, the node type, and node args.
    #[instrument(skip(self, span), level = "debug")]
    fn visit_node_id(&mut self, span: Span, hir_id: HirId) {
        // Export associated path extensions and method resolutions.
        if let Some(def) =
            self.fcx.typeck_results.borrow_mut().type_dependent_defs_mut().remove(hir_id)
        {
            self.typeck_results.type_dependent_defs_mut().insert(hir_id, def);
        }

        // Resolve any borrowings for the node with id `node_id`
        self.visit_adjustments(span, hir_id);

        // Resolve the type of the node with id `node_id`
        let n_ty = self.fcx.node_ty(hir_id);
        let n_ty = self.resolve(n_ty, &span);
        self.write_ty_to_typeck_results(hir_id, n_ty);
        debug!(?n_ty);

        // Resolve any generic parameters
        if let Some(args) = self.fcx.typeck_results.borrow().node_args_opt(hir_id) {
            let args = self.resolve(args, &span);
            debug!("write_args_to_tcx({:?}, {:?})", hir_id, args);
            assert!(!args.has_infer() && !args.has_placeholders());
            self.typeck_results.node_args_mut().insert(hir_id, args);
        }
    }

    /// Moves over and resolves any adjustments (coercions, autorefs, ...)
    /// recorded for this node.
    #[instrument(skip(self, span), level = "debug")]
    fn visit_adjustments(&mut self, span: Span, hir_id: HirId) {
        let adjustment = self.fcx.typeck_results.borrow_mut().adjustments_mut().remove(hir_id);
        match adjustment {
            None => {
                debug!("no adjustments for node");
            }

            Some(adjustment) => {
                let resolved_adjustment = self.resolve(adjustment, &span);
                debug!(?resolved_adjustment);
                self.typeck_results.adjustments_mut().insert(hir_id, resolved_adjustment);
            }
        }
    }

    /// Moves over the Rust-2024 match-ergonomics-migration marker for this
    /// pattern, if any.
    #[instrument(skip(self), level = "debug")]
    fn visit_rust_2024_migration_desugared_pats(&mut self, hir_id: hir::HirId) {
        if let Some(is_hard_error) = self
            .fcx
            .typeck_results
            .borrow_mut()
            .rust_2024_migration_desugared_pats_mut()
            .remove(hir_id)
        {
            debug!(
                "node is a pat whose match ergonomics are desugared by the Rust 2024 migration lint"
            );
            self.typeck_results
                .rust_2024_migration_desugared_pats_mut()
                .insert(hir_id, is_hard_error);
        }
    }

    /// Moves over and resolves any implicit pattern adjustments (derefs
    /// inserted by match ergonomics) for this pattern.
    #[instrument(skip(self, span), level = "debug")]
    fn visit_pat_adjustments(&mut self, span: Span, hir_id: HirId) {
        let adjustment = self.fcx.typeck_results.borrow_mut().pat_adjustments_mut().remove(hir_id);
        match adjustment {
            None => {
                debug!("no pat_adjustments for node");
            }

            Some(adjustment) => {
                let resolved_adjustment = self.resolve(adjustment, &span);
                debug!(?resolved_adjustment);
                self.typeck_results.pat_adjustments_mut().insert(hir_id, resolved_adjustment);
            }
        }
    }

    /// Moves over the "skipped ref pattern" marker for this pattern, if set.
    #[instrument(skip(self), level = "debug")]
    fn visit_skipped_ref_pats(&mut self, hir_id: hir::HirId) {
        if self.fcx.typeck_results.borrow_mut().skipped_ref_pats_mut().remove(hir_id) {
            debug!("node is a skipped ref pat");
            self.typeck_results.skipped_ref_pats_mut().insert(hir_id);
        }
    }

    /// Resolves and copies over the liberated (late-bound-region-freed)
    /// function signatures.
    fn visit_liberated_fn_sigs(&mut self) {
        let fcx_typeck_results = self.fcx.typeck_results.borrow();
        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
        let common_hir_owner = fcx_typeck_results.hir_owner;

        let fcx_liberated_fn_sigs = fcx_typeck_results.liberated_fn_sigs().items_in_stable_order();

        for (local_id, &fn_sig) in fcx_liberated_fn_sigs {
            let hir_id = HirId { owner: common_hir_owner, local_id };
            let fn_sig = self.resolve(fn_sig, &hir_id);
            self.typeck_results.liberated_fn_sigs_mut().insert(hir_id, fn_sig);
        }
    }

    /// Resolves and copies over the field types recorded for functional
    /// record update (`..base`) expressions.
    fn visit_fru_field_types(&mut self) {
        let fcx_typeck_results = self.fcx.typeck_results.borrow();
        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
        let common_hir_owner = fcx_typeck_results.hir_owner;

        let fcx_fru_field_types = fcx_typeck_results.fru_field_types().items_in_stable_order();

        for (local_id, ftys) in fcx_fru_field_types {
            let hir_id = HirId { owner: common_hir_owner, local_id };
            let ftys = self.resolve(ftys.clone(), &hir_id);
            self.typeck_results.fru_field_types_mut().insert(hir_id, ftys);
        }
    }

    /// Resolves and copies over the container types recorded for
    /// `offset_of!` expressions.
    fn visit_offset_of_container_types(&mut self) {
        let fcx_typeck_results = self.fcx.typeck_results.borrow();
        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
        let common_hir_owner = fcx_typeck_results.hir_owner;

        for (local_id, &(container, ref indices)) in
            fcx_typeck_results.offset_of_data().items_in_stable_order()
        {
            let hir_id = HirId { owner: common_hir_owner, local_id };
            let container = self.resolve(container, &hir_id);
            self.typeck_results.offset_of_data_mut().insert(hir_id, (container, indices.clone()));
        }
    }

    /// Fully resolves `value` (inference vars, then region/error folding via
    /// `Resolver`). `span` locates any inference-failure diagnostics.
    fn resolve<T>(&mut self, value: T, span: &dyn Locatable) -> T
    where
        T: TypeFoldable<TyCtxt<'tcx>>,
    {
        let value = self.fcx.resolve_vars_if_possible(value);
        let value = value.fold_with(&mut Resolver::new(self.fcx, span, self.body));
        assert!(!value.has_infer());

        // We may have introduced e.g. `ty::Error`, if inference failed, make sure
        // to mark the `TypeckResults` as tainted in that case, so that downstream
        // users of the typeck results don't produce extra errors, or worse, ICEs.
        if let Err(guar) = value.error_reported() {
            self.typeck_results.tainted_by_errors = Some(guar);
        }

        value
    }
}
751
/// Something that can produce the `Span` it corresponds to, given access to
/// the `TyCtxt`. Used by `Resolver` to point inference-failure diagnostics at
/// the relevant location.
pub(crate) trait Locatable {
    fn to_span(&self, tcx: TyCtxt<'_>) -> Span;
}
755
756impl Locatable for Span {
757    fn to_span(&self, _: TyCtxt<'_>) -> Span {
758        *self
759    }
760}
761
762impl Locatable for HirId {
763    fn to_span(&self, tcx: TyCtxt<'_>) -> Span {
764        tcx.hir().span(*self)
765    }
766}
767
/// The writeback folder: resolves inference variables in a value, erases all
/// regions, and replaces any remaining non-region inference variables with
/// `ty::Error` after reporting an inference-failure diagnostic.
struct Resolver<'cx, 'tcx> {
    fcx: &'cx FnCtxt<'cx, 'tcx>,
    /// Location used when reporting "type annotations needed" errors.
    span: &'cx dyn Locatable,
    /// The body being written back; its owner provides the context for
    /// diagnostics and normalization obligations.
    body: &'tcx hir::Body<'tcx>,
    /// Whether we should normalize using the new solver, disabled
    /// both when using the old solver and when resolving predicates.
    should_normalize: bool,
}
776
impl<'cx, 'tcx> Resolver<'cx, 'tcx> {
    /// Creates a resolver for `body`. Deep normalization is only enabled when
    /// the next-generation trait solver is in use.
    fn new(
        fcx: &'cx FnCtxt<'cx, 'tcx>,
        span: &'cx dyn Locatable,
        body: &'tcx hir::Body<'tcx>,
    ) -> Resolver<'cx, 'tcx> {
        Resolver { fcx, span, body, should_normalize: fcx.next_trait_solver() }
    }

    /// Reports a "type annotations needed" error (E0282) for the unresolved
    /// inference variable/argument `p`. If typeck is already tainted by an
    /// earlier error, that guarantee is reused instead of emitting a duplicate
    /// diagnostic.
    fn report_error(&self, p: impl Into<ty::GenericArg<'tcx>>) -> ErrorGuaranteed {
        if let Some(guar) = self.fcx.tainted_by_errors() {
            guar
        } else {
            self.fcx
                .err_ctxt()
                .emit_inference_failure_err(
                    self.fcx.tcx.hir().body_owner_def_id(self.body.id()),
                    self.span.to_span(self.fcx.tcx),
                    p.into(),
                    TypeAnnotationNeeded::E0282,
                    false,
                )
                .emit()
        }
    }

    /// Shared writeback logic for types and consts:
    /// 1. deeply normalize `value` when `should_normalize` is set (new solver);
    /// 2. replace any remaining non-region inference variables with an error
    ///    term (via `new_err`) after reporting E0282;
    /// 3. erase all regions;
    /// 4. eagerly normalize consts under `generic_const_exprs`.
    fn handle_term<T>(
        &mut self,
        value: T,
        // Computes the outermost binder depth of `value`, used to size the
        // universe list for normalization under binders.
        outer_exclusive_binder: impl FnOnce(T) -> ty::DebruijnIndex,
        // Constructs the error term (`Ty::new_error` / `Const::new_error`)
        // used when inference or normalization fails.
        new_err: impl Fn(TyCtxt<'tcx>, ErrorGuaranteed) -> T,
    ) -> T
    where
        T: Into<ty::GenericArg<'tcx>> + TypeSuperFoldable<TyCtxt<'tcx>> + Copy,
    {
        let tcx = self.fcx.tcx;
        // We must deeply normalize in the new solver, since later lints
        // expect that types that show up in the typeck are fully
        // normalized.
        let mut value = if self.should_normalize {
            let body_id = tcx.hir().body_owner_def_id(self.body.id());
            let cause = ObligationCause::misc(self.span.to_span(tcx), body_id);
            let at = self.fcx.at(&cause, self.fcx.param_env);
            // One universe slot per binder level enclosing `value`.
            let universes = vec![None; outer_exclusive_binder(value).as_usize()];
            solve::deeply_normalize_with_skipped_universes(at, value, universes).unwrap_or_else(
                |errors| {
                    let guar = self.fcx.err_ctxt().report_fulfillment_errors(errors);
                    new_err(tcx, guar)
                },
            )
        } else {
            value
        };

        // Bail if there are any non-region infer.
        if value.has_non_region_infer() {
            let guar = self.report_error(value);
            value = new_err(tcx, guar);
        }

        // Erase the regions from the ty, since it's not really meaningful what
        // these region values are; there's not a trivial correspondence between
        // regions in the HIR and MIR, so when we turn the body into MIR, there's
        // no reason to keep regions around. They will be repopulated during MIR
        // borrowck, and specifically region constraints will be populated during
        // MIR typeck which is run on the new body.
        //
        // We're not using `tcx.erase_regions` as that also anonymizes bound variables,
        // regressing borrowck diagnostics.
        value = fold_regions(tcx, value, |_, _| tcx.lifetimes.re_erased);

        // Normalize consts in writeback, because GCE doesn't normalize eagerly.
        if tcx.features().generic_const_exprs() {
            value = value.fold_with(&mut EagerlyNormalizeConsts::new(self.fcx));
        }

        value
    }
}
856
impl<'cx, 'tcx> TypeFolder<TyCtxt<'tcx>> for Resolver<'cx, 'tcx> {
    fn cx(&self) -> TyCtxt<'tcx> {
        self.fcx.tcx
    }

    // All free regions are erased during writeback; see the comment in
    // `handle_term` for why this is sound.
    fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
        debug_assert!(!r.is_bound(), "Should not be resolving bound region.");
        self.fcx.tcx.lifetimes.re_erased
    }

    fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> {
        self.handle_term(ty, Ty::outer_exclusive_binder, Ty::new_error)
    }

    fn fold_const(&mut self, ct: ty::Const<'tcx>) -> ty::Const<'tcx> {
        self.handle_term(ct, ty::Const::outer_exclusive_binder, |tcx, guar| {
            ty::Const::new_error(tcx, guar)
        })
        // Also fold the const's interior (e.g. nested types/consts), which
        // `handle_term` itself does not recurse into.
        .super_fold_with(self)
    }

    fn fold_predicate(&mut self, predicate: ty::Predicate<'tcx>) -> ty::Predicate<'tcx> {
        // Do not normalize predicates in the new solver. The new solver is
        // supposed to handle unnormalized predicates and incorrectly normalizing
        // them can be unsound, e.g. for `WellFormed` predicates.
        // Temporarily disable normalization, then restore the previous setting
        // once the predicate's contents have been folded.
        let prev = mem::replace(&mut self.should_normalize, false);
        let predicate = predicate.super_fold_with(self);
        self.should_normalize = prev;
        predicate
    }
}
888
/// Folder that eagerly normalizes every const it encounters. Applied during
/// writeback under `generic_const_exprs`, which doesn't normalize consts
/// eagerly during typeck (see `Resolver::handle_term`).
struct EagerlyNormalizeConsts<'tcx> {
    tcx: TyCtxt<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
}
893impl<'tcx> EagerlyNormalizeConsts<'tcx> {
894    fn new(fcx: &FnCtxt<'_, 'tcx>) -> Self {
895        // FIXME(#132279, generic_const_exprs): Using `try_normalize_erasing_regions` here
896        // means we can't handle opaque types in their defining scope.
897        EagerlyNormalizeConsts { tcx: fcx.tcx, typing_env: fcx.typing_env(fcx.param_env) }
898    }
899}
900
901impl<'tcx> TypeFolder<TyCtxt<'tcx>> for EagerlyNormalizeConsts<'tcx> {
902    fn cx(&self) -> TyCtxt<'tcx> {
903        self.tcx
904    }
905
906    fn fold_const(&mut self, ct: ty::Const<'tcx>) -> ty::Const<'tcx> {
907        self.tcx.try_normalize_erasing_regions(self.typing_env, ct).unwrap_or(ct)
908    }
909}