rustc_hir_typeck/
writeback.rs

1//! During type inference, partially inferred terms are
2//! represented using inference variables (ty::Infer). These don't appear in
3//! the final [`ty::TypeckResults`] since all of the types should have been
4//! inferred once typeck is done.
5//!
6//! When type inference is running however, having to update the typeck results
7//! every time a new type is inferred would be unreasonably slow, so instead all
8//! of the replacement happens at the end in [`FnCtxt::resolve_type_vars_in_body`],
9//! which creates a new `TypeckResults` which doesn't contain any inference variables.
10
11use std::mem;
12use std::ops::ControlFlow;
13
14use rustc_data_structures::fx::{FxHashSet, FxIndexMap};
15use rustc_data_structures::unord::ExtendUnord;
16use rustc_errors::{E0720, ErrorGuaranteed};
17use rustc_hir::def_id::LocalDefId;
18use rustc_hir::intravisit::{self, InferKind, Visitor};
19use rustc_hir::{self as hir, AmbigArg, HirId};
20use rustc_infer::traits::solve::Goal;
21use rustc_middle::traits::ObligationCause;
22use rustc_middle::ty::adjustment::{Adjust, Adjustment, PointerCoercion};
23use rustc_middle::ty::{
24    self, DefiningScopeKind, DefinitionSiteHiddenType, Ty, TyCtxt, TypeFoldable, TypeFolder,
25    TypeSuperFoldable, TypeSuperVisitable, TypeVisitable, TypeVisitableExt, TypeVisitor,
26    fold_regions,
27};
28use rustc_span::{Span, sym};
29use rustc_trait_selection::error_reporting::infer::need_type_info::TypeAnnotationNeeded;
30use rustc_trait_selection::opaque_types::opaque_type_has_defining_use_args;
31use rustc_trait_selection::solve;
32use tracing::{debug, instrument};
33
34use crate::FnCtxt;
35
36///////////////////////////////////////////////////////////////////////////
37// Entry point
38
impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
    /// Entry point for writeback. Resolves all inference variables remaining
    /// in the in-progress typeck results for `body` and returns a fresh,
    /// arena-allocated [`ty::TypeckResults`] containing only fully resolved
    /// types.
    pub(crate) fn resolve_type_vars_in_body(
        &self,
        body: &'tcx hir::Body<'tcx>,
    ) -> &'tcx ty::TypeckResults<'tcx> {
        let item_def_id = self.tcx.hir_body_owner_def_id(body.id());

        // This attribute causes us to dump some writeback information
        // in the form of errors, which is used for unit tests.
        let rustc_dump_user_args =
            self.has_rustc_attrs && self.tcx.has_attr(item_def_id, sym::rustc_dump_user_args);

        let mut wbcx = WritebackCx::new(self, body, rustc_dump_user_args);
        for param in body.params {
            wbcx.visit_node_id(param.pat.span, param.hir_id);
        }
        match self.tcx.hir_body_owner_kind(item_def_id) {
            // Visit the type of a const or static, which is used during THIR building.
            hir::BodyOwnerKind::Const { .. }
            | hir::BodyOwnerKind::Static(_)
            | hir::BodyOwnerKind::GlobalAsm => {
                let item_hir_id = self.tcx.local_def_id_to_hir_id(item_def_id);
                wbcx.visit_node_id(body.value.span, item_hir_id);
            }
            // For closures and fns, we already plan to visit liberated signatures
            // (see `visit_liberated_fn_sigs` below), so there is nothing to do here.
            hir::BodyOwnerKind::Closure | hir::BodyOwnerKind::Fn => {}
        }
        // Walk the body's HIR, then write back each side table collected
        // during type checking.
        wbcx.visit_body(body);
        wbcx.visit_min_capture_map();
        wbcx.eval_closure_size();
        wbcx.visit_fake_reads_map();
        wbcx.visit_closures();
        wbcx.visit_liberated_fn_sigs();
        wbcx.visit_fru_field_types();
        wbcx.visit_opaque_types();
        wbcx.visit_coercion_casts();
        wbcx.visit_user_provided_tys();
        wbcx.visit_user_provided_sigs();
        wbcx.visit_coroutine_interior();
        wbcx.visit_transmutes();
        wbcx.visit_offset_of_container_types();
        wbcx.visit_potentially_region_dependent_goals();

        // `used_trait_imports` needs no resolution; move it over wholesale.
        let used_trait_imports =
            mem::take(&mut self.typeck_results.borrow_mut().used_trait_imports);
        debug!("used_trait_imports({:?}) = {:?}", item_def_id, used_trait_imports);
        wbcx.typeck_results.used_trait_imports = used_trait_imports;

        debug!("writeback: typeck results for {:?} are {:#?}", item_def_id, wbcx.typeck_results);

        self.tcx.arena.alloc(wbcx.typeck_results)
    }
}
92
/// The Writeback context. This visitor walks the HIR, checking the
/// fn-specific typeck results to find inference variables. It resolves
/// those inference variables and writes the final result into the
/// `TypeckResults`. It also applies a few ad-hoc checks that were not
/// convenient to do elsewhere.
struct WritebackCx<'cx, 'tcx> {
    /// The function context whose in-progress (inference-variable-laden)
    /// typeck results we read from.
    fcx: &'cx FnCtxt<'cx, 'tcx>,

    /// The fully-resolved results being built; returned at the end of writeback.
    typeck_results: ty::TypeckResults<'tcx>,

    /// The body being written back.
    body: &'tcx hir::Body<'tcx>,

    /// Whether `#[rustc_dump_user_args]` is present on the item — a
    /// unit-testing mechanism that dumps user-provided generic args as errors.
    rustc_dump_user_args: bool,
}
107
impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> {
    /// Creates a writeback context for `body`, starting from empty
    /// `TypeckResults` for the body's owner.
    fn new(
        fcx: &'cx FnCtxt<'cx, 'tcx>,
        body: &'tcx hir::Body<'tcx>,
        rustc_dump_user_args: bool,
    ) -> WritebackCx<'cx, 'tcx> {
        let owner = body.id().hir_id.owner;

        let mut wbcx = WritebackCx {
            fcx,
            typeck_results: ty::TypeckResults::new(owner),
            body,
            rustc_dump_user_args,
        };

        // HACK: We specifically don't want the (opaque) error from tainting our
        // inference context. That'll prevent us from doing opaque type inference
        // later on in borrowck, which affects diagnostic spans pretty negatively.
        if let Some(e) = fcx.tainted_by_errors() {
            wbcx.typeck_results.tainted_by_errors = Some(e);
        }

        wbcx
    }

    fn tcx(&self) -> TyCtxt<'tcx> {
        self.fcx.tcx
    }

    /// Records the fully resolved type of `hir_id`, asserting that no
    /// inference variables, placeholders, or free regions remain.
    fn write_ty_to_typeck_results(&mut self, hir_id: HirId, ty: Ty<'tcx>) {
        debug!("write_ty_to_typeck_results({:?}, {:?})", hir_id, ty);
        assert!(
            !ty.has_infer() && !ty.has_placeholders() && !ty.has_free_regions(),
            "{ty} can't be put into typeck results"
        );
        self.typeck_results.node_types_mut().insert(hir_id, ty);
    }

    // Hacky hack: During type-checking, we treat *all* operators
    // as potentially overloaded. But then, during writeback, if
    // we observe that something like `a+b` is (known to be)
    // operating on scalars, we clear the overload.
    fn fix_scalar_builtin_expr(&mut self, e: &hir::Expr<'_>) {
        match e.kind {
            hir::ExprKind::Unary(hir::UnOp::Neg | hir::UnOp::Not, inner) => {
                let inner_ty = self.typeck_results.node_type(inner.hir_id);

                if inner_ty.is_scalar() {
                    self.typeck_results.type_dependent_defs_mut().remove(e.hir_id);
                    self.typeck_results.node_args_mut().remove(e.hir_id);
                }
            }
            hir::ExprKind::Binary(ref op, lhs, rhs) => {
                let lhs_ty = self.typeck_results.node_type(lhs.hir_id);
                let rhs_ty = self.typeck_results.node_type(rhs.hir_id);

                if lhs_ty.is_scalar() && rhs_ty.is_scalar() {
                    self.typeck_results.type_dependent_defs_mut().remove(e.hir_id);
                    self.typeck_results.node_args_mut().remove(e.hir_id);

                    // By-ref binops were checked with an extra adjustment on
                    // each operand; the builtin scalar form takes operands by
                    // value, so drop the final adjustment from both sides.
                    if !op.node.is_by_value() {
                        let mut adjustments = self.typeck_results.adjustments_mut();
                        if let Some(a) = adjustments.get_mut(lhs.hir_id) {
                            a.pop();
                        }
                        if let Some(a) = adjustments.get_mut(rhs.hir_id) {
                            a.pop();
                        }
                    }
                }
            }
            hir::ExprKind::AssignOp(_, lhs, rhs) => {
                let lhs_ty = self.typeck_results.node_type(lhs.hir_id);
                let rhs_ty = self.typeck_results.node_type(rhs.hir_id);

                if lhs_ty.is_scalar() && rhs_ty.is_scalar() {
                    self.typeck_results.type_dependent_defs_mut().remove(e.hir_id);
                    self.typeck_results.node_args_mut().remove(e.hir_id);

                    // Only the LHS carried an extra adjustment for the
                    // overloaded op-assign form; drop it for the builtin form.
                    if let Some(a) = self.typeck_results.adjustments_mut().get_mut(lhs.hir_id) {
                        a.pop();
                    }
                }
            }
            _ => {}
        }
    }

    // (ouz-a 1005988): Normally `[T] : std::ops::Index<usize>` should be normalized
    // into `[T]`, but currently a `where` clause stops the normalization process for it.
    // Here we compare the types of the expression and the base element; in code without
    // such a `where` clause they would be equal. If they are not, we don't modify the
    // expr, hence we bypass the ICE.
    fn is_builtin_index(
        &mut self,
        e: &hir::Expr<'_>,
        base_ty: Ty<'tcx>,
        index_ty: Ty<'tcx>,
    ) -> bool {
        if let Some(elem_ty) = base_ty.builtin_index()
            && let Some(exp_ty) = self.typeck_results.expr_ty_opt(e)
        {
            elem_ty == exp_ty && index_ty == self.fcx.tcx.types.usize
        } else {
            false
        }
    }

    // Similar to operators, indexing is always assumed to be overloaded
    // Here, correct cases where an indexing expression can be simplified
    // to use builtin indexing because the index type is known to be
    // usize-ish
    fn fix_index_builtin_expr(&mut self, e: &hir::Expr<'_>) {
        if let hir::ExprKind::Index(base, index, _) = e.kind {
            // All valid indexing looks like this; might encounter non-valid indexes at this point.
            let base_ty = self.typeck_results.expr_ty_adjusted(base);
            if let ty::Ref(_, base_ty_inner, _) = *base_ty.kind() {
                let index_ty = self.typeck_results.expr_ty_adjusted(index);
                if self.is_builtin_index(e, base_ty_inner, index_ty) {
                    // Remove the method call record
                    self.typeck_results.type_dependent_defs_mut().remove(e.hir_id);
                    self.typeck_results.node_args_mut().remove(e.hir_id);

                    if let Some(a) = self.typeck_results.adjustments_mut().get_mut(base.hir_id)
                        // Discard the need for a mutable borrow
                        // Extra adjustment made when indexing causes a drop
                        // of size information - we need to get rid of it
                        // Since this is "after" the other adjustment to be
                        // discarded, we do an extra `pop()`
                        && let Some(Adjustment {
                            kind: Adjust::Pointer(PointerCoercion::Unsize),
                            ..
                        }) = a.pop()
                    {
                        // So the borrow discard actually happens here
                        a.pop();
                    }
                }
            }
        }
    }
}
249
250///////////////////////////////////////////////////////////////////////////
251// Impl of Visitor for Resolver
252//
253// This is the master code which walks the AST. It delegates most of
254// the heavy lifting to the generic visit and resolve functions
255// below. In general, a function is made into a `visitor` if it must
256// traffic in node-ids or update typeck results in the type context etc.
257
impl<'cx, 'tcx> Visitor<'tcx> for WritebackCx<'cx, 'tcx> {
    fn visit_expr(&mut self, e: &'tcx hir::Expr<'tcx>) {
        match e.kind {
            // A closure's body is a separate `hir::Body`; visit its params
            // and body explicitly here.
            hir::ExprKind::Closure(&hir::Closure { body, .. }) => {
                let body = self.fcx.tcx.hir_body(body);
                for param in body.params {
                    self.visit_node_id(e.span, param.hir_id);
                }

                self.visit_body(body);
            }
            // Struct literals, field accesses, and `offset_of!` record
            // resolved field indices that must be written back.
            hir::ExprKind::Struct(_, fields, _) => {
                for field in fields {
                    self.visit_field_id(field.hir_id);
                }
            }
            hir::ExprKind::Field(..) | hir::ExprKind::OffsetOf(..) => {
                self.visit_field_id(e.hir_id);
            }
            _ => {}
        }

        self.visit_node_id(e.span, e.hir_id);
        intravisit::walk_expr(self, e);

        // After the node and its children are resolved, de-overload scalar
        // operators and builtin indexing (see the `fix_*` methods).
        self.fix_scalar_builtin_expr(e);
        self.fix_index_builtin_expr(e);
    }

    fn visit_inline_const(&mut self, anon_const: &hir::ConstBlock) {
        let span = self.tcx().def_span(anon_const.def_id);
        self.visit_node_id(span, anon_const.hir_id);

        let body = self.tcx().hir_body(anon_const.body);
        self.visit_body(body);
    }

    fn visit_generic_param(&mut self, p: &'tcx hir::GenericParam<'tcx>) {
        match &p.kind {
            hir::GenericParamKind::Lifetime { .. } => {
                // Nothing to write back here
            }
            // Type and const params are not expected while writing back a
            // body; report via a delayed bug rather than ICE-ing immediately.
            hir::GenericParamKind::Type { .. } | hir::GenericParamKind::Const { .. } => {
                self.tcx()
                    .dcx()
                    .span_delayed_bug(p.span, format!("unexpected generic param: {p:?}"));
            }
        }
    }

    fn visit_block(&mut self, b: &'tcx hir::Block<'tcx>) {
        self.visit_node_id(b.span, b.hir_id);
        intravisit::walk_block(self, b);
    }

    fn visit_pat(&mut self, p: &'tcx hir::Pat<'tcx>) {
        match p.kind {
            // Copy the binding mode computed during type checking.
            hir::PatKind::Binding(..) => {
                let typeck_results = self.fcx.typeck_results.borrow();
                let bm = typeck_results.extract_binding_mode(self.tcx().sess, p.hir_id, p.span);
                self.typeck_results.pat_binding_modes_mut().insert(p.hir_id, bm);
            }
            hir::PatKind::Struct(_, fields, _) => {
                for field in fields {
                    self.visit_field_id(field.hir_id);
                }
            }
            _ => {}
        };

        self.visit_rust_2024_migration_desugared_pats(p.hir_id);
        self.visit_skipped_ref_pats(p.hir_id);
        self.visit_pat_adjustments(p.span, p.hir_id);

        self.visit_node_id(p.span, p.hir_id);
        intravisit::walk_pat(self, p);
    }

    fn visit_pat_expr(&mut self, expr: &'tcx hir::PatExpr<'tcx>) {
        self.visit_node_id(expr.span, expr.hir_id);
        intravisit::walk_pat_expr(self, expr);
    }

    fn visit_local(&mut self, l: &'tcx hir::LetStmt<'tcx>) {
        // Walk the initializer and pattern first, then record the local's
        // resolved type.
        intravisit::walk_local(self, l);
        let var_ty = self.fcx.local_ty(l.span, l.hir_id);
        let var_ty = self.resolve(var_ty, &l.span);
        self.write_ty_to_typeck_results(l.hir_id, var_ty);
    }

    fn visit_ty(&mut self, hir_ty: &'tcx hir::Ty<'tcx, AmbigArg>) {
        intravisit::walk_ty(self, hir_ty);
        // If there are type checking errors, the type privacy pass will stop,
        // so we may not get the type from `hir_id`; see #104513.
        if let Some(ty) = self.fcx.node_ty_opt(hir_ty.hir_id) {
            let ty = self.resolve(ty, &hir_ty.span);
            self.write_ty_to_typeck_results(hir_ty.hir_id, ty);
        }
    }

    fn visit_infer(
        &mut self,
        inf_id: HirId,
        inf_span: Span,
        _kind: InferKind<'cx>,
    ) -> Self::Result {
        self.visit_id(inf_id);

        // We don't currently write inference results of const infer vars to
        // the typeck results as there is not yet any part of the compiler that
        // needs this information.
        if let Some(ty) = self.fcx.node_ty_opt(inf_id) {
            let ty = self.resolve(ty, &inf_span);
            self.write_ty_to_typeck_results(inf_id, ty);
        }
    }
}
375
376impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> {
    /// Resolves and copies the per-closure size-evaluation data
    /// (`closure_size_eval`) into the final typeck results, iterating in a
    /// stable order via the stable hashing context.
    fn eval_closure_size(&mut self) {
        self.tcx().with_stable_hashing_context(|ref hcx| {
            let fcx_typeck_results = self.fcx.typeck_results.borrow();

            self.typeck_results.closure_size_eval = fcx_typeck_results
                .closure_size_eval
                .to_sorted(hcx, false)
                .into_iter()
                .map(|(&closure_def_id, data)| {
                    // The closure's own HIR node serves as the location passed
                    // to `resolve` for diagnostics.
                    let closure_hir_id = self.tcx().local_def_id_to_hir_id(closure_def_id);
                    let data = self.resolve(*data, &closure_hir_id);
                    (closure_def_id, data)
                })
                .collect();
        })
    }
393
    /// Resolves and writes back the minimal capture sets computed for each
    /// closure (`closure_min_captures`): a map from closure to root variable
    /// to the list of captured places.
    fn visit_min_capture_map(&mut self) {
        self.tcx().with_stable_hashing_context(|ref hcx| {
            let fcx_typeck_results = self.fcx.typeck_results.borrow();

            self.typeck_results.closure_min_captures = fcx_typeck_results
                .closure_min_captures
                .to_sorted(hcx, false)
                .into_iter()
                .map(|(&closure_def_id, root_min_captures)| {
                    let root_var_map_wb = root_min_captures
                        .iter()
                        .map(|(var_hir_id, min_list)| {
                            let min_list_wb = min_list
                                .iter()
                                .map(|captured_place| {
                                    // Use the capture's path expression as the
                                    // diagnostic location when available, else
                                    // fall back to the closure itself.
                                    let locatable =
                                        captured_place.info.path_expr_id.unwrap_or_else(|| {
                                            self.tcx().local_def_id_to_hir_id(closure_def_id)
                                        });
                                    self.resolve(captured_place.clone(), &locatable)
                                })
                                .collect();
                            (*var_hir_id, min_list_wb)
                        })
                        .collect();
                    (closure_def_id, root_var_map_wb)
                })
                .collect();
        })
    }
424
    /// Resolves and writes back the fake-read places recorded for closures
    /// (`closure_fake_reads`), keeping each read's cause and HIR id intact.
    fn visit_fake_reads_map(&mut self) {
        self.tcx().with_stable_hashing_context(move |ref hcx| {
            let fcx_typeck_results = self.fcx.typeck_results.borrow();

            self.typeck_results.closure_fake_reads = fcx_typeck_results
                .closure_fake_reads
                .to_sorted(hcx, true)
                .into_iter()
                .map(|(&closure_def_id, fake_reads)| {
                    let resolved_fake_reads = fake_reads
                        .iter()
                        .map(|(place, cause, hir_id)| {
                            // Only the place needs resolution; the closure
                            // itself provides the diagnostic location.
                            let locatable = self.tcx().local_def_id_to_hir_id(closure_def_id);
                            let resolved_fake_read = self.resolve(place.clone(), &locatable);
                            (resolved_fake_read, *cause, *hir_id)
                        })
                        .collect();

                    (closure_def_id, resolved_fake_reads)
                })
                .collect();
        });
    }
448
449    fn visit_closures(&mut self) {
450        let fcx_typeck_results = self.fcx.typeck_results.borrow();
451        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
452        let common_hir_owner = fcx_typeck_results.hir_owner;
453
454        let fcx_closure_kind_origins =
455            fcx_typeck_results.closure_kind_origins().items_in_stable_order();
456
457        for (local_id, origin) in fcx_closure_kind_origins {
458            let hir_id = HirId { owner: common_hir_owner, local_id };
459            let place_span = origin.0;
460            let place = self.resolve(origin.1.clone(), &place_span);
461            self.typeck_results.closure_kind_origins_mut().insert(hir_id, (place_span, place));
462        }
463    }
464
465    fn visit_coercion_casts(&mut self) {
466        let fcx_typeck_results = self.fcx.typeck_results.borrow();
467
468        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
469
470        let fcx_coercion_casts = fcx_typeck_results.coercion_casts().to_sorted_stable_ord();
471        for &local_id in fcx_coercion_casts {
472            self.typeck_results.set_coercion_cast(local_id);
473        }
474    }
475
    /// Copies user-provided type annotations into the final results. Under
    /// `#[rustc_dump_user_args]`, first dumps the generic args of each
    /// recorded `TypeOf` entry as buffered errors (a unit-testing mechanism).
    fn visit_user_provided_tys(&mut self) {
        let fcx_typeck_results = self.fcx.typeck_results.borrow();
        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
        let common_hir_owner = fcx_typeck_results.hir_owner;

        if self.rustc_dump_user_args {
            let sorted_user_provided_types =
                fcx_typeck_results.user_provided_types().items_in_stable_order();

            let mut errors_buffer = Vec::new();
            for (local_id, c_ty) in sorted_user_provided_types {
                let hir_id = HirId { owner: common_hir_owner, local_id };

                if let ty::UserTypeKind::TypeOf(_, user_args) = c_ty.value.kind {
                    // This is a unit-testing mechanism.
                    let span = self.tcx().hir_span(hir_id);
                    // We need to buffer the errors in order to guarantee a consistent
                    // order when emitting them.
                    let err =
                        self.tcx().dcx().struct_span_err(span, format!("user args: {user_args:?}"));
                    errors_buffer.push(err);
                }
            }

            if !errors_buffer.is_empty() {
                // Emit in span order for deterministic test output.
                errors_buffer.sort_by_key(|diag| diag.span.primary_span());
                for err in errors_buffer {
                    err.emit();
                }
            }
        }

        // The annotations themselves need no resolution; copy them over.
        self.typeck_results.user_provided_types_mut().extend(
            fcx_typeck_results.user_provided_types().items().map(|(local_id, c_ty)| {
                let hir_id = HirId { owner: common_hir_owner, local_id };
                (hir_id, *c_ty)
            }),
        );
    }
515
516    fn visit_user_provided_sigs(&mut self) {
517        let fcx_typeck_results = self.fcx.typeck_results.borrow();
518        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
519
520        self.typeck_results.user_provided_sigs.extend_unord(
521            fcx_typeck_results.user_provided_sigs.items().map(|(def_id, c_sig)| (*def_id, *c_sig)),
522        );
523    }
524
    /// Resolves the predicates that stalled on coroutine interiors during
    /// type checking and records them (with their causes) for later checking.
    fn visit_coroutine_interior(&mut self) {
        let fcx_typeck_results = self.fcx.typeck_results.borrow();
        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
        for (predicate, cause) in &fcx_typeck_results.coroutine_stalled_predicates {
            let (predicate, cause) =
                self.resolve_coroutine_predicate((*predicate, cause.clone()), &cause.span);
            self.typeck_results.coroutine_stalled_predicates.insert((predicate, cause));
        }
    }
534
535    fn visit_transmutes(&mut self) {
536        let tcx = self.tcx();
537        let fcx_typeck_results = self.fcx.typeck_results.borrow();
538        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
539        for &(from, to, hir_id) in self.fcx.deferred_transmute_checks.borrow().iter() {
540            let span = tcx.hir_span(hir_id);
541            let from = self.resolve(from, &span);
542            let to = self.resolve(to, &span);
543            self.typeck_results.transmutes_to_check.push((from, to, hir_id));
544        }
545    }
546
    /// Opaque-type writeback for the next-generation trait solver: the hidden
    /// types were already stored in the in-progress results, so just move
    /// them over.
    fn visit_opaque_types_next(&mut self) {
        let mut fcx_typeck_results = self.fcx.typeck_results.borrow_mut();
        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
        // Nothing should have been written to the destination table yet.
        assert_eq!(self.typeck_results.hidden_types.len(), 0);
        self.typeck_results.hidden_types = mem::take(&mut fcx_typeck_results.hidden_types);
    }
553
    /// Resolves the opaque types defined by this body and records their
    /// hidden types, reporting non-defining uses, conflicting definitions,
    /// and recursive opaque types along the way.
    #[instrument(skip(self), level = "debug")]
    fn visit_opaque_types(&mut self) {
        if self.fcx.next_trait_solver() {
            return self.visit_opaque_types_next();
        }

        let tcx = self.tcx();
        // We clone the opaques instead of stealing them here as they are still used for
        // normalization in the next generation trait solver.
        let opaque_types = self.fcx.infcx.clone_opaque_types();
        let num_entries = self.fcx.inner.borrow_mut().opaque_types().num_entries();
        let prev = self.fcx.checked_opaque_types_storage_entries.replace(Some(num_entries));
        debug_assert_eq!(prev, None);
        for (opaque_type_key, hidden_type) in opaque_types {
            let hidden_type = self.resolve(hidden_type, &hidden_type.span);
            let opaque_type_key = self.resolve(opaque_type_key, &hidden_type.span);
            // Skip entries where the hidden type is the opaque itself with
            // identical args — these carry no defining information.
            if let ty::Alias(ty::Opaque, alias_ty) = hidden_type.ty.kind()
                && alias_ty.def_id == opaque_type_key.def_id.to_def_id()
                && alias_ty.args == opaque_type_key.args
            {
                continue;
            }

            if let Err(err) = opaque_type_has_defining_use_args(
                self.fcx,
                opaque_type_key,
                hidden_type.span,
                DefiningScopeKind::HirTypeck,
            ) {
                // NOTE(review): this error entry is later replaced by the
                // unconditional insert below; the `prev` mismatch handling
                // there re-installs an error type — confirm this is intended.
                self.typeck_results.hidden_types.insert(
                    opaque_type_key.def_id,
                    ty::DefinitionSiteHiddenType::new_error(tcx, err.report(self.fcx)),
                );
            }

            let hidden_type = hidden_type.remap_generic_params_to_declaration_params(
                opaque_type_key,
                tcx,
                DefiningScopeKind::HirTypeck,
            );

            // If the same opaque already has a recorded hidden type, the two
            // definitions must agree; otherwise record an error type.
            if let Some(prev) =
                self.typeck_results.hidden_types.insert(opaque_type_key.def_id, hidden_type)
            {
                let entry =
                    self.typeck_results.hidden_types.get_mut(&opaque_type_key.def_id).unwrap();
                if prev.ty != hidden_type.ty {
                    let guar = if let Some(guar) = self.typeck_results.tainted_by_errors {
                        guar
                    } else {
                        let (Ok(guar) | Err(guar)) =
                            prev.build_mismatch_error(&hidden_type, tcx).map(|d| d.emit());
                        guar
                    };
                    *entry = DefinitionSiteHiddenType::new_error(tcx, guar);
                }

                // Pick a better span if there is one.
                // FIXME(oli-obk): collect multiple spans for better diagnostics down the road.
                entry.span = prev.span.substitute_dummy(hidden_type.span);
            }
        }

        // A hidden type that (transitively) mentions its own opaque cannot be
        // resolved; report E0720 and replace the entry with an error type.
        let recursive_opaques: Vec<_> = self
            .typeck_results
            .hidden_types
            .iter()
            .filter(|&(&def_id, hidden_ty)| {
                hidden_ty
                    .ty
                    .instantiate_identity()
                    .visit_with(&mut HasRecursiveOpaque {
                        def_id,
                        seen: Default::default(),
                        opaques: &self.typeck_results.hidden_types,
                        tcx,
                    })
                    .is_break()
            })
            .map(|(def_id, hidden_ty)| (*def_id, hidden_ty.span))
            .collect();
        for (def_id, span) in recursive_opaques {
            let guar = self
                .fcx
                .dcx()
                .struct_span_err(span, "cannot resolve opaque type")
                .with_code(E0720)
                .emit();
            self.typeck_results
                .hidden_types
                .insert(def_id, DefinitionSiteHiddenType::new_error(tcx, guar));
        }
    }
647
648    fn visit_field_id(&mut self, hir_id: HirId) {
649        if let Some(index) = self.fcx.typeck_results.borrow_mut().field_indices_mut().remove(hir_id)
650        {
651            self.typeck_results.field_indices_mut().insert(hir_id, index);
652        }
653    }
654
    /// Writes back everything recorded for a single HIR node: its
    /// type-dependent resolution, adjustments, node type, and generic args.
    #[instrument(skip(self, span), level = "debug")]
    fn visit_node_id(&mut self, span: Span, hir_id: HirId) {
        // Export associated path extensions and method resolutions.
        if let Some(def) =
            self.fcx.typeck_results.borrow_mut().type_dependent_defs_mut().remove(hir_id)
        {
            self.typeck_results.type_dependent_defs_mut().insert(hir_id, def);
        }

        // Resolve any borrowings for the node with id `node_id`
        self.visit_adjustments(span, hir_id);

        // Resolve the type of the node with id `node_id`
        let n_ty = self.fcx.node_ty(hir_id);
        let n_ty = self.resolve(n_ty, &span);
        self.write_ty_to_typeck_results(hir_id, n_ty);
        debug!(?n_ty);

        // Resolve any generic parameters
        if let Some(args) = self.fcx.typeck_results.borrow().node_args_opt(hir_id) {
            let args = self.resolve(args, &span);
            debug!("write_args_to_tcx({:?}, {:?})", hir_id, args);
            assert!(!args.has_infer() && !args.has_placeholders());
            self.typeck_results.node_args_mut().insert(hir_id, args);
        }
    }
681
682    #[instrument(skip(self, span), level = "debug")]
683    fn visit_adjustments(&mut self, span: Span, hir_id: HirId) {
684        let adjustment = self.fcx.typeck_results.borrow_mut().adjustments_mut().remove(hir_id);
685        match adjustment {
686            None => {
687                debug!("no adjustments for node");
688            }
689
690            Some(adjustment) => {
691                let resolved_adjustment = self.resolve(adjustment, &span);
692                debug!(?resolved_adjustment);
693                self.typeck_results.adjustments_mut().insert(hir_id, resolved_adjustment);
694            }
695        }
696    }
697
698    #[instrument(skip(self), level = "debug")]
699    fn visit_rust_2024_migration_desugared_pats(&mut self, hir_id: hir::HirId) {
700        if let Some(is_hard_error) = self
701            .fcx
702            .typeck_results
703            .borrow_mut()
704            .rust_2024_migration_desugared_pats_mut()
705            .remove(hir_id)
706        {
707            debug!(
708                "node is a pat whose match ergonomics are desugared by the Rust 2024 migration lint"
709            );
710            self.typeck_results
711                .rust_2024_migration_desugared_pats_mut()
712                .insert(hir_id, is_hard_error);
713        }
714    }
715
716    #[instrument(skip(self, span), level = "debug")]
717    fn visit_pat_adjustments(&mut self, span: Span, hir_id: HirId) {
718        let adjustment = self.fcx.typeck_results.borrow_mut().pat_adjustments_mut().remove(hir_id);
719        match adjustment {
720            None => {
721                debug!("no pat_adjustments for node");
722            }
723
724            Some(adjustment) => {
725                let resolved_adjustment = self.resolve(adjustment, &span);
726                debug!(?resolved_adjustment);
727                self.typeck_results.pat_adjustments_mut().insert(hir_id, resolved_adjustment);
728            }
729        }
730    }
731
732    #[instrument(skip(self), level = "debug")]
733    fn visit_skipped_ref_pats(&mut self, hir_id: hir::HirId) {
734        if self.fcx.typeck_results.borrow_mut().skipped_ref_pats_mut().remove(hir_id) {
735            debug!("node is a skipped ref pat");
736            self.typeck_results.skipped_ref_pats_mut().insert(hir_id);
737        }
738    }
739
740    fn visit_liberated_fn_sigs(&mut self) {
741        let fcx_typeck_results = self.fcx.typeck_results.borrow();
742        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
743        let common_hir_owner = fcx_typeck_results.hir_owner;
744
745        let fcx_liberated_fn_sigs = fcx_typeck_results.liberated_fn_sigs().items_in_stable_order();
746
747        for (local_id, &fn_sig) in fcx_liberated_fn_sigs {
748            let hir_id = HirId { owner: common_hir_owner, local_id };
749            let fn_sig = self.resolve(fn_sig, &hir_id);
750            self.typeck_results.liberated_fn_sigs_mut().insert(hir_id, fn_sig);
751        }
752    }
753
754    fn visit_fru_field_types(&mut self) {
755        let fcx_typeck_results = self.fcx.typeck_results.borrow();
756        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
757        let common_hir_owner = fcx_typeck_results.hir_owner;
758
759        let fcx_fru_field_types = fcx_typeck_results.fru_field_types().items_in_stable_order();
760
761        for (local_id, ftys) in fcx_fru_field_types {
762            let hir_id = HirId { owner: common_hir_owner, local_id };
763            let ftys = self.resolve(ftys.clone(), &hir_id);
764            self.typeck_results.fru_field_types_mut().insert(hir_id, ftys);
765        }
766    }
767
768    fn visit_offset_of_container_types(&mut self) {
769        let fcx_typeck_results = self.fcx.typeck_results.borrow();
770        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
771        let common_hir_owner = fcx_typeck_results.hir_owner;
772
773        for (local_id, indices) in fcx_typeck_results.offset_of_data().items_in_stable_order() {
774            let hir_id = HirId { owner: common_hir_owner, local_id };
775            let indices = indices
776                .iter()
777                .map(|&(ty, variant, field)| (self.resolve(ty, &hir_id), variant, field))
778                .collect();
779            self.typeck_results.offset_of_data_mut().insert(hir_id, indices);
780        }
781    }
782
783    fn visit_potentially_region_dependent_goals(&mut self) {
784        let obligations = self.fcx.take_hir_typeck_potentially_region_dependent_goals();
785        if self.fcx.tainted_by_errors().is_none() {
786            for obligation in obligations {
787                let (predicate, mut cause) =
788                    self.fcx.resolve_vars_if_possible((obligation.predicate, obligation.cause));
789                if predicate.has_non_region_infer() {
790                    self.fcx.dcx().span_delayed_bug(
791                        cause.span,
792                        format!("unexpected inference variable after writeback: {predicate:?}"),
793                    );
794                } else {
795                    let predicate = self.tcx().erase_and_anonymize_regions(predicate);
796                    if cause.has_infer() || cause.has_placeholders() {
797                        // We can't use the obligation cause as it references
798                        // information local to this query.
799                        cause = self.fcx.misc(cause.span);
800                    }
801                    self.typeck_results
802                        .potentially_region_dependent_goals
803                        .insert((predicate, cause));
804                }
805            }
806        }
807    }
808
    /// Fully resolves all inference variables in `value` (normalizing it in
    /// the new solver) and records any goals that stalled on coroutine
    /// interiors into `coroutine_stalled_predicates`.
    fn resolve<T>(&mut self, value: T, span: &dyn Locatable) -> T
    where
        T: TypeFoldable<TyCtxt<'tcx>>,
    {
        // Resolve inference variables that already have a known value before
        // handing the value to the deep `Resolver` fold below.
        let value = self.fcx.resolve_vars_if_possible(value);

        // `should_normalize = true`: normalization may produce goals that are
        // ambiguous due to unresolved coroutine interiors; they get collected
        // into `goals`.
        let mut goals = vec![];
        let value =
            value.fold_with(&mut Resolver::new(self.fcx, span, self.body, true, &mut goals));

        // Ensure that we resolve goals we get from normalizing coroutine interiors,
        // but we shouldn't expect those goals to need normalizing (or else we'd get
        // into a somewhat awkward fixpoint situation, and we don't need it anyways).
        let mut unexpected_goals = vec![];
        self.typeck_results.coroutine_stalled_predicates.extend(
            goals
                .into_iter()
                .map(|pred| {
                    // `should_normalize = false` here, so this resolution pass
                    // cannot itself emit further nested goals.
                    self.fcx.resolve_vars_if_possible(pred).fold_with(&mut Resolver::new(
                        self.fcx,
                        span,
                        self.body,
                        false,
                        &mut unexpected_goals,
                    ))
                })
                // FIXME: throwing away the param-env :(
                .map(|goal| (goal.predicate, self.fcx.misc(span.to_span(self.fcx.tcx)))),
        );
        assert_eq!(unexpected_goals, vec![]);

        assert!(!value.has_infer());

        // We may have introduced e.g. `ty::Error`, if inference failed, make sure
        // to mark the `TypeckResults` as tainted in that case, so that downstream
        // users of the typeck results don't produce extra errors, or worse, ICEs.
        if let Err(guar) = value.error_reported() {
            self.typeck_results.tainted_by_errors = Some(guar);
        }

        value
    }
851
852    fn resolve_coroutine_predicate<T>(&mut self, value: T, span: &dyn Locatable) -> T
853    where
854        T: TypeFoldable<TyCtxt<'tcx>>,
855    {
856        let value = self.fcx.resolve_vars_if_possible(value);
857
858        let mut goals = vec![];
859        let value =
860            value.fold_with(&mut Resolver::new(self.fcx, span, self.body, false, &mut goals));
861        assert_eq!(goals, vec![]);
862
863        assert!(!value.has_infer());
864
865        // We may have introduced e.g. `ty::Error`, if inference failed, make sure
866        // to mark the `TypeckResults` as tainted in that case, so that downstream
867        // users of the typeck results don't produce extra errors, or worse, ICEs.
868        if let Err(guar) = value.error_reported() {
869            self.typeck_results.tainted_by_errors = Some(guar);
870        }
871
872        value
873    }
874}
875
/// Something that can be turned into a [`Span`] for diagnostics. This lets
/// `resolve` accept either a `Span` or a `HirId` as its location argument.
pub(crate) trait Locatable {
    fn to_span(&self, tcx: TyCtxt<'_>) -> Span;
}
879
impl Locatable for Span {
    // A span is already a span; the `tcx` is unused.
    fn to_span(&self, _: TyCtxt<'_>) -> Span {
        *self
    }
}
885
impl Locatable for HirId {
    // Look the span up from the HIR node the id refers to.
    fn to_span(&self, tcx: TyCtxt<'_>) -> Span {
        tcx.hir_span(*self)
    }
}
891
/// The folder which does the actual work of writeback: it replaces inference
/// variables with their resolved values (normalizing in the new solver) and
/// erases non-bound regions.
struct Resolver<'cx, 'tcx> {
    fcx: &'cx FnCtxt<'cx, 'tcx>,
    /// Location used when reporting inference failures (`report_error`).
    span: &'cx dyn Locatable,
    /// The body being written back; used to find the body owner for
    /// diagnostics.
    body: &'tcx hir::Body<'tcx>,
    /// Whether we should normalize using the new solver, disabled
    /// both when using the old solver and when resolving predicates.
    should_normalize: bool,
    /// Collects goals that were ambiguous due to unresolved coroutine
    /// interiors while normalizing in `handle_term`.
    nested_goals: &'cx mut Vec<Goal<'tcx, ty::Predicate<'tcx>>>,
}
901
impl<'cx, 'tcx> Resolver<'cx, 'tcx> {
    fn new(
        fcx: &'cx FnCtxt<'cx, 'tcx>,
        span: &'cx dyn Locatable,
        body: &'tcx hir::Body<'tcx>,
        should_normalize: bool,
        nested_goals: &'cx mut Vec<Goal<'tcx, ty::Predicate<'tcx>>>,
    ) -> Resolver<'cx, 'tcx> {
        Resolver { fcx, span, body, nested_goals, should_normalize }
    }

    /// Reports a "type annotations needed" (E0282) error for an unresolved
    /// term — unless typeck is already tainted by errors, in which case the
    /// existing guarantee is reused to avoid duplicate diagnostics.
    fn report_error(&self, p: impl Into<ty::Term<'tcx>>) -> ErrorGuaranteed {
        if let Some(guar) = self.fcx.tainted_by_errors() {
            guar
        } else {
            self.fcx
                .err_ctxt()
                .emit_inference_failure_err(
                    self.fcx.tcx.hir_body_owner_def_id(self.body.id()),
                    self.span.to_span(self.fcx.tcx),
                    p.into(),
                    TypeAnnotationNeeded::E0282,
                    false,
                )
                .emit()
        }
    }

    /// Shared logic for folding types and consts: normalize (new solver
    /// only), replace unresolved inference variables with errors, erase
    /// regions, and (under GCE) normalize consts.
    #[instrument(level = "debug", skip(self, outer_exclusive_binder, new_err))]
    fn handle_term<T>(
        &mut self,
        value: T,
        outer_exclusive_binder: impl FnOnce(T) -> ty::DebruijnIndex,
        new_err: impl Fn(TyCtxt<'tcx>, ErrorGuaranteed) -> T,
    ) -> T
    where
        T: Into<ty::Term<'tcx>> + TypeSuperFoldable<TyCtxt<'tcx>> + Copy,
    {
        let tcx = self.fcx.tcx;
        // We must deeply normalize in the new solver, since later lints expect
        // that types that show up in the typeck are fully normalized.
        let mut value = if self.should_normalize && self.fcx.next_trait_solver() {
            let body_id = tcx.hir_body_owner_def_id(self.body.id());
            let cause = ObligationCause::misc(self.span.to_span(tcx), body_id);
            let at = self.fcx.at(&cause, self.fcx.param_env);
            // One entry per binder the value is nested in, so that
            // normalization can skip the corresponding bound variables.
            let universes = vec![None; outer_exclusive_binder(value).as_usize()];
            match solve::deeply_normalize_with_skipped_universes_and_ambiguous_coroutine_goals(
                at, value, universes,
            ) {
                Ok((value, goals)) => {
                    // Stash ambiguous coroutine goals for `resolve` to handle.
                    self.nested_goals.extend(goals);
                    value
                }
                Err(errors) => {
                    let guar = self.fcx.err_ctxt().report_fulfillment_errors(errors);
                    new_err(tcx, guar)
                }
            }
        } else {
            value
        };

        // Bail if there are any non-region infer.
        if value.has_non_region_infer() {
            let guar = self.report_error(value);
            value = new_err(tcx, guar);
        }

        // Erase the regions from the ty, since it's not really meaningful what
        // these region values are; there's not a trivial correspondence between
        // regions in the HIR and MIR, so when we turn the body into MIR, there's
        // no reason to keep regions around. They will be repopulated during MIR
        // borrowck, and specifically region constraints will be populated during
        // MIR typeck which is run on the new body.
        //
        // We're not using `tcx.erase_and_anonymize_regions` as that also
        // anonymizes bound variables, regressing borrowck diagnostics.
        value = fold_regions(tcx, value, |_, _| tcx.lifetimes.re_erased);

        // Normalize consts in writeback, because GCE doesn't normalize eagerly.
        if tcx.features().generic_const_exprs() {
            value = value.fold_with(&mut EagerlyNormalizeConsts::new(self.fcx));
        }

        value
    }
}
989
990impl<'cx, 'tcx> TypeFolder<TyCtxt<'tcx>> for Resolver<'cx, 'tcx> {
991    fn cx(&self) -> TyCtxt<'tcx> {
992        self.fcx.tcx
993    }
994
995    fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
996        match r.kind() {
997            ty::ReBound(..) => r,
998            _ => self.fcx.tcx.lifetimes.re_erased,
999        }
1000    }
1001
1002    fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> {
1003        self.handle_term(ty, Ty::outer_exclusive_binder, Ty::new_error)
1004    }
1005
1006    fn fold_const(&mut self, ct: ty::Const<'tcx>) -> ty::Const<'tcx> {
1007        self.handle_term(ct, ty::Const::outer_exclusive_binder, ty::Const::new_error)
1008    }
1009
1010    fn fold_predicate(&mut self, predicate: ty::Predicate<'tcx>) -> ty::Predicate<'tcx> {
1011        assert!(
1012            !self.should_normalize,
1013            "normalizing predicates in writeback is not generally sound"
1014        );
1015        predicate.super_fold_with(self)
1016    }
1017}
1018
/// Folder that normalizes constants during writeback. Only used when the
/// `generic_const_exprs` feature is enabled, because GCE doesn't normalize
/// consts eagerly.
struct EagerlyNormalizeConsts<'tcx> {
    tcx: TyCtxt<'tcx>,
    // Typing environment derived from the function context's param-env.
    typing_env: ty::TypingEnv<'tcx>,
}
impl<'tcx> EagerlyNormalizeConsts<'tcx> {
    /// Builds the folder from the function context's `TyCtxt` and param-env.
    fn new(fcx: &FnCtxt<'_, 'tcx>) -> Self {
        // FIXME(#132279, generic_const_exprs): Using `try_normalize_erasing_regions` here
        // means we can't handle opaque types in their defining scope.
        EagerlyNormalizeConsts { tcx: fcx.tcx, typing_env: fcx.typing_env(fcx.param_env) }
    }
}
1030
1031impl<'tcx> TypeFolder<TyCtxt<'tcx>> for EagerlyNormalizeConsts<'tcx> {
1032    fn cx(&self) -> TyCtxt<'tcx> {
1033        self.tcx
1034    }
1035
1036    fn fold_const(&mut self, ct: ty::Const<'tcx>) -> ty::Const<'tcx> {
1037        self.tcx.try_normalize_erasing_regions(self.typing_env, ct).unwrap_or(ct)
1038    }
1039}
1040
/// Visitor that checks whether a type (transitively, through the hidden types
/// of other opaques) mentions the opaque type `def_id` itself.
struct HasRecursiveOpaque<'a, 'tcx> {
    /// The opaque we are checking for a recursive occurrence of.
    def_id: LocalDefId,
    /// Opaques whose hidden types were already walked, so each is visited at
    /// most once.
    seen: FxHashSet<LocalDefId>,
    /// Hidden types keyed by opaque def-id; recursion is followed through
    /// these entries.
    opaques: &'a FxIndexMap<LocalDefId, ty::DefinitionSiteHiddenType<'tcx>>,
    tcx: TyCtxt<'tcx>,
}
1047
impl<'tcx> TypeVisitor<TyCtxt<'tcx>> for HasRecursiveOpaque<'_, 'tcx> {
    type Result = ControlFlow<()>;

    fn visit_ty(&mut self, t: Ty<'tcx>) -> Self::Result {
        if let ty::Alias(ty::Opaque, alias_ty) = *t.kind()
            && let Some(def_id) = alias_ty.def_id.as_local()
        {
            // We've reached the opaque we started from: the type is recursive.
            if self.def_id == def_id {
                return ControlFlow::Break(());
            }

            // For any *other* local opaque not walked yet, follow its hidden
            // type (instantiated with this use's args), since the cycle may
            // run through it. The `seen` set prevents revisiting.
            if self.seen.insert(def_id)
                && let Some(hidden_ty) = self.opaques.get(&def_id)
            {
                hidden_ty.ty.instantiate(self.tcx, alias_ty.args).visit_with(self)?;
            }
        }

        // Also walk nested types (generic args, fields of compound types).
        t.super_visit_with(self)
    }
}