//! Writeback: the final phase of type checking for a body. It walks the HIR,
//! resolves all remaining inference variables recorded in the `FnCtxt`'s
//! typeck results, and writes the fully resolved values into the per-body
//! `ty::TypeckResults` consumed by the rest of the compiler.

use std::mem;
use std::ops::ControlFlow;

use rustc_data_structures::fx::{FxHashSet, FxIndexMap};
use rustc_data_structures::unord::ExtendUnord;
use rustc_errors::{E0720, ErrorGuaranteed};
use rustc_hir::def_id::LocalDefId;
use rustc_hir::intravisit::{self, InferKind, Visitor};
use rustc_hir::{self as hir, AmbigArg, HirId};
use rustc_infer::traits::solve::Goal;
use rustc_middle::traits::ObligationCause;
use rustc_middle::ty::adjustment::{Adjust, Adjustment, PointerCoercion};
use rustc_middle::ty::{
    self, DefiningScopeKind, OpaqueHiddenType, Ty, TyCtxt, TypeFoldable, TypeFolder,
    TypeSuperFoldable, TypeSuperVisitable, TypeVisitable, TypeVisitableExt, TypeVisitor,
    fold_regions,
};
use rustc_span::{Span, sym};
use rustc_trait_selection::error_reporting::infer::need_type_info::TypeAnnotationNeeded;
use rustc_trait_selection::opaque_types::opaque_type_has_defining_use_args;
use rustc_trait_selection::solve;
use tracing::{debug, instrument};

use crate::FnCtxt;

impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
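    /// Resolves all inference variables in the just-checked `body` and
    /// produces the final `TypeckResults` for it, allocated in the `tcx`
    /// arena.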
    pub(crate) fn resolve_type_vars_in_body(
        &self,
        body: &'tcx hir::Body<'tcx>,
    ) -> &'tcx ty::TypeckResults<'tcx> {
        let item_def_id = self.tcx.hir_body_owner_def_id(body.id());

        // `#[rustc_dump_user_args]` makes `visit_user_provided_tys` report the
        // recorded user-provided args as diagnostics (used for internal testing).
        let rustc_dump_user_args =
            self.has_rustc_attrs && self.tcx.has_attr(item_def_id, sym::rustc_dump_user_args);

        let mut wbcx = WritebackCx::new(self, body, rustc_dump_user_args);
        for param in body.params {
            wbcx.visit_node_id(param.pat.span, param.hir_id);
        }
        match self.tcx.hir_body_owner_kind(item_def_id) {
            hir::BodyOwnerKind::Const { .. }
            | hir::BodyOwnerKind::Static(_)
            | hir::BodyOwnerKind::GlobalAsm => {
                let item_hir_id = self.tcx.local_def_id_to_hir_id(item_def_id);
                wbcx.visit_node_id(body.value.span, item_hir_id);
            }
            hir::BodyOwnerKind::Closure | hir::BodyOwnerKind::Fn => {}
        }
        wbcx.visit_body(body);
        wbcx.visit_min_capture_map();
        wbcx.eval_closure_size();
        wbcx.visit_fake_reads_map();
        wbcx.visit_closures();
        wbcx.visit_liberated_fn_sigs();
        wbcx.visit_fru_field_types();
        wbcx.visit_opaque_types();
        wbcx.visit_coercion_casts();
        wbcx.visit_user_provided_tys();
        wbcx.visit_user_provided_sigs();
        wbcx.visit_coroutine_interior();
        wbcx.visit_transmutes();
        wbcx.visit_offset_of_container_types();
        wbcx.visit_potentially_region_dependent_goals();

        wbcx.typeck_results.rvalue_scopes =
            mem::take(&mut self.typeck_results.borrow_mut().rvalue_scopes);

        let used_trait_imports =
            mem::take(&mut self.typeck_results.borrow_mut().used_trait_imports);
        debug!("used_trait_imports({:?}) = {:?}", item_def_id, used_trait_imports);
        wbcx.typeck_results.used_trait_imports = used_trait_imports;

        debug!("writeback: typeck results for {:?} are {:#?}", item_def_id, wbcx.typeck_results);

        self.tcx.arena.alloc(wbcx.typeck_results)
    }
}
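
/// The writeback context. This visitor walks the HIR of a single body,
/// resolving the inference variables recorded in the `FnCtxt`'s
/// `typeck_results` and writing the fully resolved values into a fresh
/// `ty::TypeckResults` that no longer contains any inference variables.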
struct WritebackCx<'cx, 'tcx> {
    fcx: &'cx FnCtxt<'cx, 'tcx>,

    typeck_results: ty::TypeckResults<'tcx>,

    body: &'tcx hir::Body<'tcx>,

    rustc_dump_user_args: bool,
}

impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> {
    fn new(
        fcx: &'cx FnCtxt<'cx, 'tcx>,
        body: &'tcx hir::Body<'tcx>,
        rustc_dump_user_args: bool,
    ) -> WritebackCx<'cx, 'tcx> {
        let owner = body.id().hir_id.owner;

        let mut wbcx = WritebackCx {
            fcx,
            typeck_results: ty::TypeckResults::new(owner),
            body,
            rustc_dump_user_args,
        };

        // If type checking this body already failed, carry the error over so
        // that consumers of the final typeck results can see it.
        if let Some(e) = fcx.tainted_by_errors() {
            wbcx.typeck_results.tainted_by_errors = Some(e);
        }

        wbcx
    }

    fn tcx(&self) -> TyCtxt<'tcx> {
        self.fcx.tcx
    }

    fn write_ty_to_typeck_results(&mut self, hir_id: HirId, ty: Ty<'tcx>) {
        debug!("write_ty_to_typeck_results({:?}, {:?})", hir_id, ty);
        assert!(
            !ty.has_infer() && !ty.has_placeholders() && !ty.has_free_regions(),
            "{ty} can't be put into typeck results"
        );
        self.typeck_results.node_types_mut().insert(hir_id, ty);
    }
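
    /// During type checking, scalar builtin operations like `a + b` or `!a`
    /// may have been treated as overloaded operators, recording type-dependent
    /// defs, node args, and autoref adjustments. Once both operands are known
    /// to be scalars, the operation is builtin after all, so strip that
    /// method-call bookkeeping back out of the typeck results.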
    fn fix_scalar_builtin_expr(&mut self, e: &hir::Expr<'_>) {
        match e.kind {
            hir::ExprKind::Unary(hir::UnOp::Neg | hir::UnOp::Not, inner) => {
                let inner_ty = self.typeck_results.node_type(inner.hir_id);

                if inner_ty.is_scalar() {
                    self.typeck_results.type_dependent_defs_mut().remove(e.hir_id);
                    self.typeck_results.node_args_mut().remove(e.hir_id);
                }
            }
            hir::ExprKind::Binary(ref op, lhs, rhs) => {
                let lhs_ty = self.typeck_results.node_type(lhs.hir_id);
                let rhs_ty = self.typeck_results.node_type(rhs.hir_id);

                if lhs_ty.is_scalar() && rhs_ty.is_scalar() {
                    self.typeck_results.type_dependent_defs_mut().remove(e.hir_id);
                    self.typeck_results.node_args_mut().remove(e.hir_id);

                    if !op.node.is_by_value() {
                        let mut adjustments = self.typeck_results.adjustments_mut();
                        if let Some(a) = adjustments.get_mut(lhs.hir_id) {
                            a.pop();
                        }
                        if let Some(a) = adjustments.get_mut(rhs.hir_id) {
                            a.pop();
                        }
                    }
                }
            }
            hir::ExprKind::AssignOp(_, lhs, rhs) => {
                let lhs_ty = self.typeck_results.node_type(lhs.hir_id);
                let rhs_ty = self.typeck_results.node_type(rhs.hir_id);

                if lhs_ty.is_scalar() && rhs_ty.is_scalar() {
                    self.typeck_results.type_dependent_defs_mut().remove(e.hir_id);
                    self.typeck_results.node_args_mut().remove(e.hir_id);

                    if let Some(a) = self.typeck_results.adjustments_mut().get_mut(lhs.hir_id) {
                        a.pop();
                    }
                }
            }
            _ => {}
        }
    }
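
    /// Returns `true` if `e` is an index expression whose base has a builtin
    /// index type (array or slice), whose element type matches the type
    /// recorded for `e`, and whose index is a `usize` — i.e. the indexing
    /// does not actually go through the `Index` trait.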
    fn is_builtin_index(
        &mut self,
        e: &hir::Expr<'_>,
        base_ty: Ty<'tcx>,
        index_ty: Ty<'tcx>,
    ) -> bool {
        if let Some(elem_ty) = base_ty.builtin_index()
            && let Some(exp_ty) = self.typeck_results.expr_ty_opt(e)
        {
            elem_ty == exp_ty && index_ty == self.fcx.tcx.types.usize
        } else {
            false
        }
    }
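
    /// Analogous to `fix_scalar_builtin_expr`: if an index expression turns
    /// out to be builtin indexing of an array or slice rather than a use of
    /// the `Index` trait, remove the type-dependent def, the node args, and
    /// the adjustments that were recorded on the base for the overloaded call.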
    fn fix_index_builtin_expr(&mut self, e: &hir::Expr<'_>) {
        if let hir::ExprKind::Index(base, index, _) = e.kind {
            let base_ty = self.typeck_results.expr_ty_adjusted(base);
            if let ty::Ref(_, base_ty_inner, _) = *base_ty.kind() {
                let index_ty = self.typeck_results.expr_ty_adjusted(index);
                if self.is_builtin_index(e, base_ty_inner, index_ty) {
                    self.typeck_results.type_dependent_defs_mut().remove(e.hir_id);
                    self.typeck_results.node_args_mut().remove(e.hir_id);

                    // Discard the last adjustment added for the overloaded
                    // call; if it was an unsizing coercion, discard the
                    // adjustment underneath it as well.
                    if let Some(a) = self.typeck_results.adjustments_mut().get_mut(base.hir_id)
                        && let Some(Adjustment {
                            kind: Adjust::Pointer(PointerCoercion::Unsize),
                            ..
                        }) = a.pop()
                    {
                        a.pop();
                    }
                }
            }
        }
    }
}
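
// The visitor pass: walks every node in the body, resolving the recorded
// types, args, and adjustments for each HIR node and copying them into the
// new `TypeckResults`, together with the expression fixups above that can
// only be decided once all types are known.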
impl<'cx, 'tcx> Visitor<'tcx> for WritebackCx<'cx, 'tcx> {
    fn visit_expr(&mut self, e: &'tcx hir::Expr<'tcx>) {
        match e.kind {
            hir::ExprKind::Closure(&hir::Closure { body, .. }) => {
                let body = self.fcx.tcx.hir_body(body);
                for param in body.params {
                    self.visit_node_id(e.span, param.hir_id);
                }

                self.visit_body(body);
            }
            hir::ExprKind::Struct(_, fields, _) => {
                for field in fields {
                    self.visit_field_id(field.hir_id);
                }
            }
            hir::ExprKind::Field(..) | hir::ExprKind::OffsetOf(..) => {
                self.visit_field_id(e.hir_id);
            }
            _ => {}
        }

        self.visit_node_id(e.span, e.hir_id);
        intravisit::walk_expr(self, e);

        self.fix_scalar_builtin_expr(e);
        self.fix_index_builtin_expr(e);
    }

    fn visit_inline_const(&mut self, anon_const: &hir::ConstBlock) {
        let span = self.tcx().def_span(anon_const.def_id);
        self.visit_node_id(span, anon_const.hir_id);

        let body = self.tcx().hir_body(anon_const.body);
        self.visit_body(body);
    }

    fn visit_generic_param(&mut self, p: &'tcx hir::GenericParam<'tcx>) {
        match &p.kind {
            // Nothing to write back for lifetime parameters.
            hir::GenericParamKind::Lifetime { .. } => {}
            hir::GenericParamKind::Type { .. } | hir::GenericParamKind::Const { .. } => {
                self.tcx()
                    .dcx()
                    .span_delayed_bug(p.span, format!("unexpected generic param: {p:?}"));
            }
        }
    }

    fn visit_block(&mut self, b: &'tcx hir::Block<'tcx>) {
        self.visit_node_id(b.span, b.hir_id);
        intravisit::walk_block(self, b);
    }

    fn visit_pat(&mut self, p: &'tcx hir::Pat<'tcx>) {
        match p.kind {
            hir::PatKind::Binding(..) => {
                let typeck_results = self.fcx.typeck_results.borrow();
                let bm = typeck_results.extract_binding_mode(self.tcx().sess, p.hir_id, p.span);
                self.typeck_results.pat_binding_modes_mut().insert(p.hir_id, bm);
            }
            hir::PatKind::Struct(_, fields, _) => {
                for field in fields {
                    self.visit_field_id(field.hir_id);
                }
            }
            _ => {}
        };

        self.visit_rust_2024_migration_desugared_pats(p.hir_id);
        self.visit_skipped_ref_pats(p.hir_id);
        self.visit_pat_adjustments(p.span, p.hir_id);

        self.visit_node_id(p.span, p.hir_id);
        intravisit::walk_pat(self, p);
    }

    fn visit_pat_expr(&mut self, expr: &'tcx hir::PatExpr<'tcx>) {
        self.visit_node_id(expr.span, expr.hir_id);
        intravisit::walk_pat_expr(self, expr);
    }

    fn visit_local(&mut self, l: &'tcx hir::LetStmt<'tcx>) {
        intravisit::walk_local(self, l);
        let var_ty = self.fcx.local_ty(l.span, l.hir_id);
        let var_ty = self.resolve(var_ty, &l.span);
        self.write_ty_to_typeck_results(l.hir_id, var_ty);
    }

    fn visit_ty(&mut self, hir_ty: &'tcx hir::Ty<'tcx, AmbigArg>) {
        intravisit::walk_ty(self, hir_ty);
        if let Some(ty) = self.fcx.node_ty_opt(hir_ty.hir_id) {
            let ty = self.resolve(ty, &hir_ty.span);
            self.write_ty_to_typeck_results(hir_ty.hir_id, ty);
        }
    }

    fn visit_infer(
        &mut self,
        inf_id: HirId,
        inf_span: Span,
        _kind: InferKind<'cx>,
    ) -> Self::Result {
        self.visit_id(inf_id);

        if let Some(ty) = self.fcx.node_ty_opt(inf_id) {
            let ty = self.resolve(ty, &inf_span);
            self.write_ty_to_typeck_results(inf_id, ty);
        }
    }
}
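
// Resolution of the per-body side tables: each `visit_*` method below copies
// one table from the `FnCtxt`'s in-progress typeck results into the final
// `TypeckResults`, resolving any remaining inference variables on the way.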
impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> {
    fn eval_closure_size(&mut self) {
        self.tcx().with_stable_hashing_context(|ref hcx| {
            let fcx_typeck_results = self.fcx.typeck_results.borrow();

            self.typeck_results.closure_size_eval = fcx_typeck_results
                .closure_size_eval
                .to_sorted(hcx, false)
                .into_iter()
                .map(|(&closure_def_id, data)| {
                    let closure_hir_id = self.tcx().local_def_id_to_hir_id(closure_def_id);
                    let data = self.resolve(*data, &closure_hir_id);
                    (closure_def_id, data)
                })
                .collect();
        })
    }

    fn visit_min_capture_map(&mut self) {
        self.tcx().with_stable_hashing_context(|ref hcx| {
            let fcx_typeck_results = self.fcx.typeck_results.borrow();

            self.typeck_results.closure_min_captures = fcx_typeck_results
                .closure_min_captures
                .to_sorted(hcx, false)
                .into_iter()
                .map(|(&closure_def_id, root_min_captures)| {
                    let root_var_map_wb = root_min_captures
                        .iter()
                        .map(|(var_hir_id, min_list)| {
                            let min_list_wb = min_list
                                .iter()
                                .map(|captured_place| {
                                    let locatable =
                                        captured_place.info.path_expr_id.unwrap_or_else(|| {
                                            self.tcx().local_def_id_to_hir_id(closure_def_id)
                                        });
                                    self.resolve(captured_place.clone(), &locatable)
                                })
                                .collect();
                            (*var_hir_id, min_list_wb)
                        })
                        .collect();
                    (closure_def_id, root_var_map_wb)
                })
                .collect();
        })
    }

    fn visit_fake_reads_map(&mut self) {
        self.tcx().with_stable_hashing_context(move |ref hcx| {
            let fcx_typeck_results = self.fcx.typeck_results.borrow();

            self.typeck_results.closure_fake_reads = fcx_typeck_results
                .closure_fake_reads
                .to_sorted(hcx, true)
                .into_iter()
                .map(|(&closure_def_id, fake_reads)| {
                    let resolved_fake_reads = fake_reads
                        .iter()
                        .map(|(place, cause, hir_id)| {
                            let locatable = self.tcx().local_def_id_to_hir_id(closure_def_id);
                            let resolved_fake_read = self.resolve(place.clone(), &locatable);
                            (resolved_fake_read, *cause, *hir_id)
                        })
                        .collect();

                    (closure_def_id, resolved_fake_reads)
                })
                .collect();
        });
    }

    fn visit_closures(&mut self) {
        let fcx_typeck_results = self.fcx.typeck_results.borrow();
        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
        let common_hir_owner = fcx_typeck_results.hir_owner;

        let fcx_closure_kind_origins =
            fcx_typeck_results.closure_kind_origins().items_in_stable_order();

        for (local_id, origin) in fcx_closure_kind_origins {
            let hir_id = HirId { owner: common_hir_owner, local_id };
            let place_span = origin.0;
            let place = self.resolve(origin.1.clone(), &place_span);
            self.typeck_results.closure_kind_origins_mut().insert(hir_id, (place_span, place));
        }
    }

    fn visit_coercion_casts(&mut self) {
        let fcx_typeck_results = self.fcx.typeck_results.borrow();

        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);

        let fcx_coercion_casts = fcx_typeck_results.coercion_casts().to_sorted_stable_ord();
        for &local_id in fcx_coercion_casts {
            self.typeck_results.set_coercion_cast(local_id);
        }
    }

    fn visit_user_provided_tys(&mut self) {
        let fcx_typeck_results = self.fcx.typeck_results.borrow();
        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
        let common_hir_owner = fcx_typeck_results.hir_owner;

        if self.rustc_dump_user_args {
            let sorted_user_provided_types =
                fcx_typeck_results.user_provided_types().items_in_stable_order();

            let mut errors_buffer = Vec::new();
            for (local_id, c_ty) in sorted_user_provided_types {
                let hir_id = HirId { owner: common_hir_owner, local_id };

                if let ty::UserTypeKind::TypeOf(_, user_args) = c_ty.value.kind {
                    let span = self.tcx().hir_span(hir_id);
                    let err =
                        self.tcx().dcx().struct_span_err(span, format!("user args: {user_args:?}"));
                    errors_buffer.push(err);
                }
            }

            if !errors_buffer.is_empty() {
                errors_buffer.sort_by_key(|diag| diag.span.primary_span());
                for err in errors_buffer {
                    err.emit();
                }
            }
        }

        self.typeck_results.user_provided_types_mut().extend(
            fcx_typeck_results.user_provided_types().items().map(|(local_id, c_ty)| {
                let hir_id = HirId { owner: common_hir_owner, local_id };
                (hir_id, *c_ty)
            }),
        );
    }

    fn visit_user_provided_sigs(&mut self) {
        let fcx_typeck_results = self.fcx.typeck_results.borrow();
        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);

        self.typeck_results.user_provided_sigs.extend_unord(
            fcx_typeck_results.user_provided_sigs.items().map(|(def_id, c_sig)| (*def_id, *c_sig)),
        );
    }

    fn visit_coroutine_interior(&mut self) {
        let fcx_typeck_results = self.fcx.typeck_results.borrow();
        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
        for (predicate, cause) in &fcx_typeck_results.coroutine_stalled_predicates {
            let (predicate, cause) =
                self.resolve_coroutine_predicate((*predicate, cause.clone()), &cause.span);
            self.typeck_results.coroutine_stalled_predicates.insert((predicate, cause));
        }
    }

    fn visit_transmutes(&mut self) {
        let tcx = self.tcx();
        let fcx_typeck_results = self.fcx.typeck_results.borrow();
        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
        for &(from, to, hir_id) in self.fcx.deferred_transmute_checks.borrow().iter() {
            let span = tcx.hir_span(hir_id);
            let from = self.resolve(from, &span);
            let to = self.resolve(to, &span);
            self.typeck_results.transmutes_to_check.push((from, to, hir_id));
        }
    }
    fn visit_opaque_types_next(&mut self) {
        let mut fcx_typeck_results = self.fcx.typeck_results.borrow_mut();
        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
        for hidden_ty in fcx_typeck_results.concrete_opaque_types.values() {
            assert!(!hidden_ty.has_infer());
        }

        assert_eq!(self.typeck_results.concrete_opaque_types.len(), 0);
        self.typeck_results.concrete_opaque_types =
            mem::take(&mut fcx_typeck_results.concrete_opaque_types);
    }
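
    /// Old-solver path for opaque types: resolves each registered hidden type,
    /// skips entries that merely equate the opaque with itself, checks that
    /// each use has defining args, remaps the hidden type to the opaque's own
    /// generic params, reports mismatches between multiple defining uses, and
    /// rejects recursive opaque types with E0720.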
    #[instrument(skip(self), level = "debug")]
    fn visit_opaque_types(&mut self) {
        if self.fcx.next_trait_solver() {
            return self.visit_opaque_types_next();
        }

        let tcx = self.tcx();
        let opaque_types = self.fcx.infcx.clone_opaque_types();
        let num_entries = self.fcx.inner.borrow_mut().opaque_types().num_entries();
        let prev = self.fcx.checked_opaque_types_storage_entries.replace(Some(num_entries));
        debug_assert_eq!(prev, None);
        for (opaque_type_key, hidden_type) in opaque_types {
            let hidden_type = self.resolve(hidden_type, &hidden_type.span);
            let opaque_type_key = self.resolve(opaque_type_key, &hidden_type.span);
            if let ty::Alias(ty::Opaque, alias_ty) = hidden_type.ty.kind()
                && alias_ty.def_id == opaque_type_key.def_id.to_def_id()
                && alias_ty.args == opaque_type_key.args
            {
                continue;
            }

            if let Err(err) = opaque_type_has_defining_use_args(
                self.fcx,
                opaque_type_key,
                hidden_type.span,
                DefiningScopeKind::HirTypeck,
            ) {
                self.typeck_results.concrete_opaque_types.insert(
                    opaque_type_key.def_id,
                    ty::OpaqueHiddenType::new_error(tcx, err.report(self.fcx)),
                );
            }

            let hidden_type = hidden_type.remap_generic_params_to_declaration_params(
                opaque_type_key,
                tcx,
                DefiningScopeKind::HirTypeck,
            );

            if let Some(prev) = self
                .typeck_results
                .concrete_opaque_types
                .insert(opaque_type_key.def_id, hidden_type)
            {
                let entry = &mut self
                    .typeck_results
                    .concrete_opaque_types
                    .get_mut(&opaque_type_key.def_id)
                    .unwrap();
                if prev.ty != hidden_type.ty {
                    if let Some(guar) = self.typeck_results.tainted_by_errors {
                        entry.ty = Ty::new_error(tcx, guar);
                    } else {
                        let (Ok(guar) | Err(guar)) =
                            prev.build_mismatch_error(&hidden_type, tcx).map(|d| d.emit());
                        entry.ty = Ty::new_error(tcx, guar);
                    }
                }

                entry.span = prev.span.substitute_dummy(hidden_type.span);
            }
        }

        let recursive_opaques: Vec<_> = self
            .typeck_results
            .concrete_opaque_types
            .iter()
            .filter(|&(&def_id, hidden_ty)| {
                hidden_ty
                    .ty
                    .visit_with(&mut HasRecursiveOpaque {
                        def_id,
                        seen: Default::default(),
                        opaques: &self.typeck_results.concrete_opaque_types,
                        tcx,
                    })
                    .is_break()
            })
            .map(|(def_id, hidden_ty)| (*def_id, hidden_ty.span))
            .collect();
        for (def_id, span) in recursive_opaques {
            let guar = self
                .fcx
                .dcx()
                .struct_span_err(span, "cannot resolve opaque type")
                .with_code(E0720)
                .emit();
            self.typeck_results
                .concrete_opaque_types
                .insert(def_id, OpaqueHiddenType { span, ty: Ty::new_error(tcx, guar) });
        }
    }

    fn visit_field_id(&mut self, hir_id: HirId) {
        if let Some(index) = self.fcx.typeck_results.borrow_mut().field_indices_mut().remove(hir_id)
        {
            self.typeck_results.field_indices_mut().insert(hir_id, index);
        }
    }
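
    /// Resolves and writes back everything attached to a single HIR node: its
    /// type-dependent def, its adjustments, its node type, and its node args.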
    #[instrument(skip(self, span), level = "debug")]
    fn visit_node_id(&mut self, span: Span, hir_id: HirId) {
        if let Some(def) =
            self.fcx.typeck_results.borrow_mut().type_dependent_defs_mut().remove(hir_id)
        {
            self.typeck_results.type_dependent_defs_mut().insert(hir_id, def);
        }

        self.visit_adjustments(span, hir_id);

        let n_ty = self.fcx.node_ty(hir_id);
        let n_ty = self.resolve(n_ty, &span);
        self.write_ty_to_typeck_results(hir_id, n_ty);
        debug!(?n_ty);

        if let Some(args) = self.fcx.typeck_results.borrow().node_args_opt(hir_id) {
            let args = self.resolve(args, &span);
            debug!("write_args_to_tcx({:?}, {:?})", hir_id, args);
            assert!(!args.has_infer() && !args.has_placeholders());
            self.typeck_results.node_args_mut().insert(hir_id, args);
        }
    }

    #[instrument(skip(self, span), level = "debug")]
    fn visit_adjustments(&mut self, span: Span, hir_id: HirId) {
        let adjustment = self.fcx.typeck_results.borrow_mut().adjustments_mut().remove(hir_id);
        match adjustment {
            None => {
                debug!("no adjustments for node");
            }

            Some(adjustment) => {
                let resolved_adjustment = self.resolve(adjustment, &span);
                debug!(?resolved_adjustment);
                self.typeck_results.adjustments_mut().insert(hir_id, resolved_adjustment);
            }
        }
    }

    #[instrument(skip(self), level = "debug")]
    fn visit_rust_2024_migration_desugared_pats(&mut self, hir_id: hir::HirId) {
        if let Some(is_hard_error) = self
            .fcx
            .typeck_results
            .borrow_mut()
            .rust_2024_migration_desugared_pats_mut()
            .remove(hir_id)
        {
            debug!(
                "node is a pat whose match ergonomics are desugared by the Rust 2024 migration lint"
            );
            self.typeck_results
                .rust_2024_migration_desugared_pats_mut()
                .insert(hir_id, is_hard_error);
        }
    }

    #[instrument(skip(self, span), level = "debug")]
    fn visit_pat_adjustments(&mut self, span: Span, hir_id: HirId) {
        let adjustment = self.fcx.typeck_results.borrow_mut().pat_adjustments_mut().remove(hir_id);
        match adjustment {
            None => {
                debug!("no pat_adjustments for node");
            }

            Some(adjustment) => {
                let resolved_adjustment = self.resolve(adjustment, &span);
                debug!(?resolved_adjustment);
                self.typeck_results.pat_adjustments_mut().insert(hir_id, resolved_adjustment);
            }
        }
    }

    #[instrument(skip(self), level = "debug")]
    fn visit_skipped_ref_pats(&mut self, hir_id: hir::HirId) {
        if self.fcx.typeck_results.borrow_mut().skipped_ref_pats_mut().remove(hir_id) {
            debug!("node is a skipped ref pat");
            self.typeck_results.skipped_ref_pats_mut().insert(hir_id);
        }
    }

    fn visit_liberated_fn_sigs(&mut self) {
        let fcx_typeck_results = self.fcx.typeck_results.borrow();
        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
        let common_hir_owner = fcx_typeck_results.hir_owner;

        let fcx_liberated_fn_sigs = fcx_typeck_results.liberated_fn_sigs().items_in_stable_order();

        for (local_id, &fn_sig) in fcx_liberated_fn_sigs {
            let hir_id = HirId { owner: common_hir_owner, local_id };
            let fn_sig = self.resolve(fn_sig, &hir_id);
            self.typeck_results.liberated_fn_sigs_mut().insert(hir_id, fn_sig);
        }
    }

    fn visit_fru_field_types(&mut self) {
        let fcx_typeck_results = self.fcx.typeck_results.borrow();
        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
        let common_hir_owner = fcx_typeck_results.hir_owner;

        let fcx_fru_field_types = fcx_typeck_results.fru_field_types().items_in_stable_order();

        for (local_id, ftys) in fcx_fru_field_types {
            let hir_id = HirId { owner: common_hir_owner, local_id };
            let ftys = self.resolve(ftys.clone(), &hir_id);
            self.typeck_results.fru_field_types_mut().insert(hir_id, ftys);
        }
    }

    fn visit_offset_of_container_types(&mut self) {
        let fcx_typeck_results = self.fcx.typeck_results.borrow();
        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
        let common_hir_owner = fcx_typeck_results.hir_owner;

        for (local_id, &(container, ref indices)) in
            fcx_typeck_results.offset_of_data().items_in_stable_order()
        {
            let hir_id = HirId { owner: common_hir_owner, local_id };
            let container = self.resolve(container, &hir_id);
            self.typeck_results.offset_of_data_mut().insert(hir_id, (container, indices.clone()));
        }
    }
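
    /// Goals whose success may depend on region constraints are resolved,
    /// have their regions erased and anonymized, and are stored in the typeck
    /// results for later re-checking. This is skipped when the body is already
    /// tainted by errors.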
    fn visit_potentially_region_dependent_goals(&mut self) {
        let obligations = self.fcx.take_hir_typeck_potentially_region_dependent_goals();
        if self.fcx.tainted_by_errors().is_none() {
            for obligation in obligations {
                let (predicate, mut cause) =
                    self.fcx.resolve_vars_if_possible((obligation.predicate, obligation.cause));
                if predicate.has_non_region_infer() {
                    self.fcx.dcx().span_delayed_bug(
                        cause.span,
                        format!("unexpected inference variable after writeback: {predicate:?}"),
                    );
                } else {
                    let predicate = self.tcx().erase_and_anonymize_regions(predicate);
                    if cause.has_infer() || cause.has_placeholders() {
                        cause = self.fcx.misc(cause.span);
                    }
                    self.typeck_results
                        .potentially_region_dependent_goals
                        .insert((predicate, cause));
                }
            }
        }
    }
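
    /// Fully resolves `value`: replaces inference variables (normalizing with
    /// the next solver where needed), erases regions, and records any nested
    /// coroutine goals produced by normalization as stalled predicates. Errors
    /// encountered while resolving taint the typeck results.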
    fn resolve<T>(&mut self, value: T, span: &dyn Locatable) -> T
    where
        T: TypeFoldable<TyCtxt<'tcx>>,
    {
        let value = self.fcx.resolve_vars_if_possible(value);

        let mut goals = vec![];
        let value =
            value.fold_with(&mut Resolver::new(self.fcx, span, self.body, true, &mut goals));

        // Nested goals returned by normalization are resolved without further
        // normalization and stored as stalled coroutine predicates.
        let mut unexpected_goals = vec![];
        self.typeck_results.coroutine_stalled_predicates.extend(
            goals
                .into_iter()
                .map(|pred| {
                    self.fcx.resolve_vars_if_possible(pred).fold_with(&mut Resolver::new(
                        self.fcx,
                        span,
                        self.body,
                        false,
                        &mut unexpected_goals,
                    ))
                })
                .map(|goal| (goal.predicate, self.fcx.misc(span.to_span(self.fcx.tcx)))),
        );
        assert_eq!(unexpected_goals, vec![]);

        assert!(!value.has_infer());

        if let Err(guar) = value.error_reported() {
            self.typeck_results.tainted_by_errors = Some(guar);
        }

        value
    }

    fn resolve_coroutine_predicate<T>(&mut self, value: T, span: &dyn Locatable) -> T
    where
        T: TypeFoldable<TyCtxt<'tcx>>,
    {
        let value = self.fcx.resolve_vars_if_possible(value);

        let mut goals = vec![];
        let value =
            value.fold_with(&mut Resolver::new(self.fcx, span, self.body, false, &mut goals));
        assert_eq!(goals, vec![]);

        assert!(!value.has_infer());

        if let Err(guar) = value.error_reported() {
            self.typeck_results.tainted_by_errors = Some(guar);
        }

        value
    }
}
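
/// Something that can produce a `Span` for diagnostics: either a `Span`
/// itself or a `HirId` that can be looked up in the HIR map.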
pub(crate) trait Locatable {
    fn to_span(&self, tcx: TyCtxt<'_>) -> Span;
}

impl Locatable for Span {
    fn to_span(&self, _: TyCtxt<'_>) -> Span {
        *self
    }
}

impl Locatable for HirId {
    fn to_span(&self, tcx: TyCtxt<'_>) -> Span {
        tcx.hir_span(*self)
    }
}
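
/// The folder that does the actual resolution: it replaces inference variables
/// with their resolved values (reporting an error for any that remain
/// unresolved), erases regions, and optionally normalizes via the next solver.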
struct Resolver<'cx, 'tcx> {
    fcx: &'cx FnCtxt<'cx, 'tcx>,
    span: &'cx dyn Locatable,
    body: &'tcx hir::Body<'tcx>,
    /// Whether to deeply normalize terms using the next trait solver. This
    /// must be `false` when resolving predicates (see `fold_predicate`) and
    /// has no effect with the old solver.
    should_normalize: bool,
    nested_goals: &'cx mut Vec<Goal<'tcx, ty::Predicate<'tcx>>>,
}

impl<'cx, 'tcx> Resolver<'cx, 'tcx> {
    fn new(
        fcx: &'cx FnCtxt<'cx, 'tcx>,
        span: &'cx dyn Locatable,
        body: &'tcx hir::Body<'tcx>,
        should_normalize: bool,
        nested_goals: &'cx mut Vec<Goal<'tcx, ty::Predicate<'tcx>>>,
    ) -> Resolver<'cx, 'tcx> {
        Resolver { fcx, span, body, nested_goals, should_normalize }
    }

    fn report_error(&self, p: impl Into<ty::Term<'tcx>>) -> ErrorGuaranteed {
        if let Some(guar) = self.fcx.tainted_by_errors() {
            guar
        } else {
            self.fcx
                .err_ctxt()
                .emit_inference_failure_err(
                    self.fcx.tcx.hir_body_owner_def_id(self.body.id()),
                    self.span.to_span(self.fcx.tcx),
                    p.into(),
                    TypeAnnotationNeeded::E0282,
                    false,
                )
                .emit()
        }
    }
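
    /// Shared logic for folding types and consts: optionally deeply normalizes
    /// the term with the next solver (collecting nested goals), replaces any
    /// remaining non-region inference variables with an error, erases regions,
    /// and, under `generic_const_exprs`, eagerly normalizes constants.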
    #[instrument(level = "debug", skip(self, outer_exclusive_binder, new_err))]
    fn handle_term<T>(
        &mut self,
        value: T,
        outer_exclusive_binder: impl FnOnce(T) -> ty::DebruijnIndex,
        new_err: impl Fn(TyCtxt<'tcx>, ErrorGuaranteed) -> T,
    ) -> T
    where
        T: Into<ty::Term<'tcx>> + TypeSuperFoldable<TyCtxt<'tcx>> + Copy,
    {
        let tcx = self.fcx.tcx;
        let mut value = if self.should_normalize && self.fcx.next_trait_solver() {
            let body_id = tcx.hir_body_owner_def_id(self.body.id());
            let cause = ObligationCause::misc(self.span.to_span(tcx), body_id);
            let at = self.fcx.at(&cause, self.fcx.param_env);
            let universes = vec![None; outer_exclusive_binder(value).as_usize()];
            match solve::deeply_normalize_with_skipped_universes_and_ambiguous_coroutine_goals(
                at, value, universes,
            ) {
                Ok((value, goals)) => {
                    self.nested_goals.extend(goals);
                    value
                }
                Err(errors) => {
                    let guar = self.fcx.err_ctxt().report_fulfillment_errors(errors);
                    new_err(tcx, guar)
                }
            }
        } else {
            value
        };

        if value.has_non_region_infer() {
            let guar = self.report_error(value);
            value = new_err(tcx, guar);
        }

        // Regions in the typeck results are not meaningful after this point;
        // erase them all.
        value = fold_regions(tcx, value, |_, _| tcx.lifetimes.re_erased);

        if tcx.features().generic_const_exprs() {
            value = value.fold_with(&mut EagerlyNormalizeConsts::new(self.fcx));
        }

        value
    }
}

impl<'cx, 'tcx> TypeFolder<TyCtxt<'tcx>> for Resolver<'cx, 'tcx> {
    fn cx(&self) -> TyCtxt<'tcx> {
        self.fcx.tcx
    }

    fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
        match r.kind() {
            ty::ReBound(..) => r,
            _ => self.fcx.tcx.lifetimes.re_erased,
        }
    }

    fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> {
        self.handle_term(ty, Ty::outer_exclusive_binder, Ty::new_error)
    }

    fn fold_const(&mut self, ct: ty::Const<'tcx>) -> ty::Const<'tcx> {
        self.handle_term(ct, ty::Const::outer_exclusive_binder, ty::Const::new_error)
    }

    fn fold_predicate(&mut self, predicate: ty::Predicate<'tcx>) -> ty::Predicate<'tcx> {
        assert!(
            !self.should_normalize,
            "normalizing predicates in writeback is not generally sound"
        );
        predicate.super_fold_with(self)
    }
}
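
/// Folder used under `generic_const_exprs` to eagerly normalize constants in
/// the written-back values, falling back to the unnormalized constant when
/// normalization fails.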
struct EagerlyNormalizeConsts<'tcx> {
    tcx: TyCtxt<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
}
impl<'tcx> EagerlyNormalizeConsts<'tcx> {
    fn new(fcx: &FnCtxt<'_, 'tcx>) -> Self {
        EagerlyNormalizeConsts { tcx: fcx.tcx, typing_env: fcx.typing_env(fcx.param_env) }
    }
}

impl<'tcx> TypeFolder<TyCtxt<'tcx>> for EagerlyNormalizeConsts<'tcx> {
    fn cx(&self) -> TyCtxt<'tcx> {
        self.tcx
    }

    fn fold_const(&mut self, ct: ty::Const<'tcx>) -> ty::Const<'tcx> {
        self.tcx.try_normalize_erasing_regions(self.typing_env, ct).unwrap_or(ct)
    }
}
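
/// Visitor that checks whether the hidden type of the opaque `def_id`
/// mentions `def_id` itself, either directly or transitively through the
/// hidden types of other locally defined opaques, which would make the
/// opaque type recursive.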
struct HasRecursiveOpaque<'a, 'tcx> {
    def_id: LocalDefId,
    seen: FxHashSet<LocalDefId>,
    opaques: &'a FxIndexMap<LocalDefId, ty::OpaqueHiddenType<'tcx>>,
    tcx: TyCtxt<'tcx>,
}

impl<'tcx> TypeVisitor<TyCtxt<'tcx>> for HasRecursiveOpaque<'_, 'tcx> {
    type Result = ControlFlow<()>;

    fn visit_ty(&mut self, t: Ty<'tcx>) -> Self::Result {
        if let ty::Alias(ty::Opaque, alias_ty) = *t.kind()
            && let Some(def_id) = alias_ty.def_id.as_local()
        {
            if self.def_id == def_id {
                return ControlFlow::Break(());
            }

            if self.seen.insert(def_id)
                && let Some(hidden_ty) = self.opaques.get(&def_id)
            {
                ty::EarlyBinder::bind(hidden_ty.ty)
                    .instantiate(self.tcx, alias_ty.args)
                    .visit_with(self)?;
            }
        }

        t.super_visit_with(self)
    }
}