use std::mem;
use std::ops::ControlFlow;

use rustc_data_structures::fx::{FxHashSet, FxIndexMap};
use rustc_data_structures::unord::ExtendUnord;
use rustc_errors::{E0720, ErrorGuaranteed};
use rustc_hir::def_id::LocalDefId;
use rustc_hir::intravisit::{self, InferKind, Visitor};
use rustc_hir::{self as hir, AmbigArg, HirId};
use rustc_infer::traits::solve::Goal;
use rustc_middle::traits::ObligationCause;
use rustc_middle::ty::adjustment::{Adjust, Adjustment, PointerCoercion};
use rustc_middle::ty::{
    self, DefiningScopeKind, OpaqueHiddenType, Ty, TyCtxt, TypeFoldable, TypeFolder,
    TypeSuperFoldable, TypeSuperVisitable, TypeVisitable, TypeVisitableExt, TypeVisitor,
    fold_regions,
};
use rustc_span::{Span, sym};
use rustc_trait_selection::error_reporting::infer::need_type_info::TypeAnnotationNeeded;
use rustc_trait_selection::opaque_types::check_opaque_type_parameter_valid;
use rustc_trait_selection::solve;
use tracing::{debug, instrument};

use crate::FnCtxt;
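// During type checking, partially inferred terms are represented with
// inference variables (`ty::Infer`). They must not appear in the final
// `TypeckResults`: writeback resolves every remaining variable, erases
// regions, and copies the per-body results out of the inference context
// into a fresh, arena-allocated `TypeckResults`.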
impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
    pub(crate) fn resolve_type_vars_in_body(
        &self,
        body: &'tcx hir::Body<'tcx>,
    ) -> &'tcx ty::TypeckResults<'tcx> {
        let item_def_id = self.tcx.hir_body_owner_def_id(body.id());

        let rustc_dump_user_args = self.tcx.has_attr(item_def_id, sym::rustc_dump_user_args);

        let mut wbcx = WritebackCx::new(self, body, rustc_dump_user_args);
        for param in body.params {
            wbcx.visit_node_id(param.pat.span, param.hir_id);
        }
        match self.tcx.hir_body_owner_kind(item_def_id) {
            hir::BodyOwnerKind::Const { .. }
            | hir::BodyOwnerKind::Static(_)
            | hir::BodyOwnerKind::GlobalAsm => {
                let item_hir_id = self.tcx.local_def_id_to_hir_id(item_def_id);
                wbcx.visit_node_id(body.value.span, item_hir_id);
            }
            hir::BodyOwnerKind::Closure | hir::BodyOwnerKind::Fn => {}
        }
        wbcx.visit_body(body);
        wbcx.visit_min_capture_map();
        wbcx.eval_closure_size();
        wbcx.visit_fake_reads_map();
        wbcx.visit_closures();
        wbcx.visit_liberated_fn_sigs();
        wbcx.visit_fru_field_types();
        wbcx.visit_opaque_types();
        wbcx.visit_coercion_casts();
        wbcx.visit_user_provided_tys();
        wbcx.visit_user_provided_sigs();
        wbcx.visit_coroutine_interior();
        wbcx.visit_offset_of_container_types();

        wbcx.typeck_results.rvalue_scopes =
            mem::take(&mut self.typeck_results.borrow_mut().rvalue_scopes);

        let used_trait_imports =
            mem::take(&mut self.typeck_results.borrow_mut().used_trait_imports);
        debug!("used_trait_imports({:?}) = {:?}", item_def_id, used_trait_imports);
        wbcx.typeck_results.used_trait_imports = used_trait_imports;

        debug!("writeback: typeck results for {:?} are {:#?}", item_def_id, wbcx.typeck_results);

        self.tcx.arena.alloc(wbcx.typeck_results)
    }
}
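// The writeback context. It walks the HIR of the body, resolving every type,
// region, and const it finds, and stores the fully resolved copies into its
// own `typeck_results`, which becomes the final result of the typeck query.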
struct WritebackCx<'cx, 'tcx> {
    fcx: &'cx FnCtxt<'cx, 'tcx>,

    typeck_results: ty::TypeckResults<'tcx>,

    body: &'tcx hir::Body<'tcx>,

    rustc_dump_user_args: bool,
}
impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> {
    fn new(
        fcx: &'cx FnCtxt<'cx, 'tcx>,
        body: &'tcx hir::Body<'tcx>,
        rustc_dump_user_args: bool,
    ) -> WritebackCx<'cx, 'tcx> {
        let owner = body.id().hir_id.owner;

        let mut wbcx = WritebackCx {
            fcx,
            typeck_results: ty::TypeckResults::new(owner),
            body,
            rustc_dump_user_args,
        };

        if let Some(e) = fcx.tainted_by_errors() {
            wbcx.typeck_results.tainted_by_errors = Some(e);
        }

        wbcx
    }

    fn tcx(&self) -> TyCtxt<'tcx> {
        self.fcx.tcx
    }

    fn write_ty_to_typeck_results(&mut self, hir_id: HirId, ty: Ty<'tcx>) {
        debug!("write_ty_to_typeck_results({:?}, {:?})", hir_id, ty);
        assert!(
            !ty.has_infer() && !ty.has_placeholders() && !ty.has_free_regions(),
            "{ty} can't be put into typeck results"
        );
        self.typeck_results.node_types_mut().insert(hir_id, ty);
    }
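    // Unary and binary operators are initially type checked as overloaded
    // method calls on the corresponding `std::ops` traits. If the operands
    // turn out to be scalars, switch back to the builtin operation here: drop
    // the method resolution and node args and, for operators that take their
    // arguments by reference, pop the autoref adjustment on the operands.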
    fn fix_scalar_builtin_expr(&mut self, e: &hir::Expr<'_>) {
        match e.kind {
            hir::ExprKind::Unary(hir::UnOp::Neg | hir::UnOp::Not, inner) => {
                let inner_ty = self.typeck_results.node_type(inner.hir_id);

                if inner_ty.is_scalar() {
                    self.typeck_results.type_dependent_defs_mut().remove(e.hir_id);
                    self.typeck_results.node_args_mut().remove(e.hir_id);
                }
            }
            hir::ExprKind::Binary(ref op, lhs, rhs) => {
                let lhs_ty = self.typeck_results.node_type(lhs.hir_id);
                let rhs_ty = self.typeck_results.node_type(rhs.hir_id);

                if lhs_ty.is_scalar() && rhs_ty.is_scalar() {
                    self.typeck_results.type_dependent_defs_mut().remove(e.hir_id);
                    self.typeck_results.node_args_mut().remove(e.hir_id);

                    if !op.node.is_by_value() {
                        let mut adjustments = self.typeck_results.adjustments_mut();
                        if let Some(a) = adjustments.get_mut(lhs.hir_id) {
                            a.pop();
                        }
                        if let Some(a) = adjustments.get_mut(rhs.hir_id) {
                            a.pop();
                        }
                    }
                }
            }
            hir::ExprKind::AssignOp(_, lhs, rhs) => {
                let lhs_ty = self.typeck_results.node_type(lhs.hir_id);
                let rhs_ty = self.typeck_results.node_type(rhs.hir_id);

                if lhs_ty.is_scalar() && rhs_ty.is_scalar() {
                    self.typeck_results.type_dependent_defs_mut().remove(e.hir_id);
                    self.typeck_results.node_args_mut().remove(e.hir_id);

                    if let Some(a) = self.typeck_results.adjustments_mut().get_mut(lhs.hir_id) {
                        a.pop();
                    }
                }
            }
            _ => {}
        }
    }
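    // Returns whether this index expression uses builtin indexing: the base
    // has a builtin element type (`[T]` or `[T; N]`), the expression's type is
    // that element type, and the index is a `usize`.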
    fn is_builtin_index(
        &mut self,
        e: &hir::Expr<'_>,
        base_ty: Ty<'tcx>,
        index_ty: Ty<'tcx>,
    ) -> bool {
        if let Some(elem_ty) = base_ty.builtin_index()
            && let Some(exp_ty) = self.typeck_results.expr_ty_opt(e)
        {
            elem_ty == exp_ty && index_ty == self.fcx.tcx.types.usize
        } else {
            false
        }
    }
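    // Analogous to `fix_scalar_builtin_expr`: if an index expression ends up
    // using builtin indexing, remove the method resolution and the implicit
    // autoref/unsize adjustments that were recorded for the overloaded
    // `Index::index` call.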
    fn fix_index_builtin_expr(&mut self, e: &hir::Expr<'_>) {
        if let hir::ExprKind::Index(ref base, ref index, _) = e.kind {
            let base_ty = self.typeck_results.expr_ty_adjusted(base);
            if let ty::Ref(_, base_ty_inner, _) = *base_ty.kind() {
                let index_ty = self.typeck_results.expr_ty_adjusted(index);
                if self.is_builtin_index(e, base_ty_inner, index_ty) {
                    self.typeck_results.type_dependent_defs_mut().remove(e.hir_id);
                    self.typeck_results.node_args_mut().remove(e.hir_id);

                    if let Some(a) = self.typeck_results.adjustments_mut().get_mut(base.hir_id) {
                        if let Some(Adjustment {
                            kind: Adjust::Pointer(PointerCoercion::Unsize),
                            ..
                        }) = a.pop()
                        {
                            a.pop();
                        }
                    }
                }
            }
        }
    }
}
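// The main writeback traversal: visit every node in the body and copy its
// resolved type, generic args, and adjustments into the new `TypeckResults`.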
impl<'cx, 'tcx> Visitor<'tcx> for WritebackCx<'cx, 'tcx> {
    fn visit_expr(&mut self, e: &'tcx hir::Expr<'tcx>) {
        match e.kind {
            hir::ExprKind::Closure(&hir::Closure { body, .. }) => {
                let body = self.fcx.tcx.hir_body(body);
                for param in body.params {
                    self.visit_node_id(e.span, param.hir_id);
                }

                self.visit_body(body);
            }
            hir::ExprKind::Struct(_, fields, _) => {
                for field in fields {
                    self.visit_field_id(field.hir_id);
                }
            }
            hir::ExprKind::Field(..) | hir::ExprKind::OffsetOf(..) => {
                self.visit_field_id(e.hir_id);
            }
            _ => {}
        }

        self.visit_node_id(e.span, e.hir_id);
        intravisit::walk_expr(self, e);

        self.fix_scalar_builtin_expr(e);
        self.fix_index_builtin_expr(e);
    }

    fn visit_inline_const(&mut self, anon_const: &hir::ConstBlock) {
        let span = self.tcx().def_span(anon_const.def_id);
        self.visit_node_id(span, anon_const.hir_id);

        let body = self.tcx().hir_body(anon_const.body);
        self.visit_body(body);
    }

    fn visit_generic_param(&mut self, p: &'tcx hir::GenericParam<'tcx>) {
        match &p.kind {
            hir::GenericParamKind::Lifetime { .. } => {
                // Nothing to write back for lifetime parameters.
            }
            hir::GenericParamKind::Type { .. } | hir::GenericParamKind::Const { .. } => {
                self.tcx()
                    .dcx()
                    .span_delayed_bug(p.span, format!("unexpected generic param: {p:?}"));
            }
        }
    }

    fn visit_block(&mut self, b: &'tcx hir::Block<'tcx>) {
        self.visit_node_id(b.span, b.hir_id);
        intravisit::walk_block(self, b);
    }

    fn visit_pat(&mut self, p: &'tcx hir::Pat<'tcx>) {
        match p.kind {
            hir::PatKind::Binding(..) => {
                let typeck_results = self.fcx.typeck_results.borrow();
                let bm = typeck_results.extract_binding_mode(self.tcx().sess, p.hir_id, p.span);
                self.typeck_results.pat_binding_modes_mut().insert(p.hir_id, bm);
            }
            hir::PatKind::Struct(_, fields, _) => {
                for field in fields {
                    self.visit_field_id(field.hir_id);
                }
            }
            _ => {}
        };

        self.visit_rust_2024_migration_desugared_pats(p.hir_id);
        self.visit_skipped_ref_pats(p.hir_id);
        self.visit_pat_adjustments(p.span, p.hir_id);

        self.visit_node_id(p.span, p.hir_id);
        intravisit::walk_pat(self, p);
    }

    fn visit_pat_expr(&mut self, expr: &'tcx hir::PatExpr<'tcx>) {
        self.visit_node_id(expr.span, expr.hir_id);
        intravisit::walk_pat_expr(self, expr);
    }

    fn visit_local(&mut self, l: &'tcx hir::LetStmt<'tcx>) {
        intravisit::walk_local(self, l);
        let var_ty = self.fcx.local_ty(l.span, l.hir_id);
        let var_ty = self.resolve(var_ty, &l.span);
        self.write_ty_to_typeck_results(l.hir_id, var_ty);
    }

    fn visit_ty(&mut self, hir_ty: &'tcx hir::Ty<'tcx, AmbigArg>) {
        intravisit::walk_ty(self, hir_ty);
        if let Some(ty) = self.fcx.node_ty_opt(hir_ty.hir_id) {
            let ty = self.resolve(ty, &hir_ty.span);
            self.write_ty_to_typeck_results(hir_ty.hir_id, ty);
        }
    }

    fn visit_infer(
        &mut self,
        inf_id: HirId,
        inf_span: Span,
        _kind: InferKind<'cx>,
    ) -> Self::Result {
        self.visit_id(inf_id);

        if let Some(ty) = self.fcx.node_ty_opt(inf_id) {
            let ty = self.resolve(ty, &inf_span);
            self.write_ty_to_typeck_results(inf_id, ty);
        }
    }
}
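// Writeback for the per-body side tables: each method below copies one table
// out of the inference context's `TypeckResults`, resolving any remaining
// inference variables and erasing regions along the way.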
impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> {
    fn eval_closure_size(&mut self) {
        self.tcx().with_stable_hashing_context(|ref hcx| {
            let fcx_typeck_results = self.fcx.typeck_results.borrow();

            self.typeck_results.closure_size_eval = fcx_typeck_results
                .closure_size_eval
                .to_sorted(hcx, false)
                .into_iter()
                .map(|(&closure_def_id, data)| {
                    let closure_hir_id = self.tcx().local_def_id_to_hir_id(closure_def_id);
                    let data = self.resolve(*data, &closure_hir_id);
                    (closure_def_id, data)
                })
                .collect();
        })
    }

    fn visit_min_capture_map(&mut self) {
        self.tcx().with_stable_hashing_context(|ref hcx| {
            let fcx_typeck_results = self.fcx.typeck_results.borrow();

            self.typeck_results.closure_min_captures = fcx_typeck_results
                .closure_min_captures
                .to_sorted(hcx, false)
                .into_iter()
                .map(|(&closure_def_id, root_min_captures)| {
                    let root_var_map_wb = root_min_captures
                        .iter()
                        .map(|(var_hir_id, min_list)| {
                            let min_list_wb = min_list
                                .iter()
                                .map(|captured_place| {
                                    let locatable =
                                        captured_place.info.path_expr_id.unwrap_or_else(|| {
                                            self.tcx().local_def_id_to_hir_id(closure_def_id)
                                        });
                                    self.resolve(captured_place.clone(), &locatable)
                                })
                                .collect();
                            (*var_hir_id, min_list_wb)
                        })
                        .collect();
                    (closure_def_id, root_var_map_wb)
                })
                .collect();
        })
    }

    fn visit_fake_reads_map(&mut self) {
        self.tcx().with_stable_hashing_context(move |ref hcx| {
            let fcx_typeck_results = self.fcx.typeck_results.borrow();

            self.typeck_results.closure_fake_reads = fcx_typeck_results
                .closure_fake_reads
                .to_sorted(hcx, true)
                .into_iter()
                .map(|(&closure_def_id, fake_reads)| {
                    let resolved_fake_reads = fake_reads
                        .iter()
                        .map(|(place, cause, hir_id)| {
                            let locatable = self.tcx().local_def_id_to_hir_id(closure_def_id);
                            let resolved_fake_read = self.resolve(place.clone(), &locatable);
                            (resolved_fake_read, *cause, *hir_id)
                        })
                        .collect();

                    (closure_def_id, resolved_fake_reads)
                })
                .collect();
        });
    }

    fn visit_closures(&mut self) {
        let fcx_typeck_results = self.fcx.typeck_results.borrow();
        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
        let common_hir_owner = fcx_typeck_results.hir_owner;

        let fcx_closure_kind_origins =
            fcx_typeck_results.closure_kind_origins().items_in_stable_order();

        for (local_id, origin) in fcx_closure_kind_origins {
            let hir_id = HirId { owner: common_hir_owner, local_id };
            let place_span = origin.0;
            let place = self.resolve(origin.1.clone(), &place_span);
            self.typeck_results.closure_kind_origins_mut().insert(hir_id, (place_span, place));
        }
    }

    fn visit_coercion_casts(&mut self) {
        let fcx_typeck_results = self.fcx.typeck_results.borrow();

        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);

        let fcx_coercion_casts = fcx_typeck_results.coercion_casts().to_sorted_stable_ord();
        for &local_id in fcx_coercion_casts {
            self.typeck_results.set_coercion_cast(local_id);
        }
    }

    fn visit_user_provided_tys(&mut self) {
        let fcx_typeck_results = self.fcx.typeck_results.borrow();
        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
        let common_hir_owner = fcx_typeck_results.hir_owner;

        if self.rustc_dump_user_args {
            let sorted_user_provided_types =
                fcx_typeck_results.user_provided_types().items_in_stable_order();

            let mut errors_buffer = Vec::new();
            for (local_id, c_ty) in sorted_user_provided_types {
                let hir_id = HirId { owner: common_hir_owner, local_id };

                if let ty::UserTypeKind::TypeOf(_, user_args) = c_ty.value.kind {
                    let span = self.tcx().hir_span(hir_id);
                    let err =
                        self.tcx().dcx().struct_span_err(span, format!("user args: {user_args:?}"));
                    errors_buffer.push(err);
                }
            }

            if !errors_buffer.is_empty() {
                errors_buffer.sort_by_key(|diag| diag.span.primary_span());
                for err in errors_buffer {
                    err.emit();
                }
            }
        }

        self.typeck_results.user_provided_types_mut().extend(
            fcx_typeck_results.user_provided_types().items().map(|(local_id, c_ty)| {
                let hir_id = HirId { owner: common_hir_owner, local_id };
                (hir_id, *c_ty)
            }),
        );
    }

    fn visit_user_provided_sigs(&mut self) {
        let fcx_typeck_results = self.fcx.typeck_results.borrow();
        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);

        self.typeck_results.user_provided_sigs.extend_unord(
            fcx_typeck_results.user_provided_sigs.items().map(|(def_id, c_sig)| (*def_id, *c_sig)),
        );
    }

    fn visit_coroutine_interior(&mut self) {
        let fcx_typeck_results = self.fcx.typeck_results.borrow();
        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
        for (predicate, cause) in &fcx_typeck_results.coroutine_stalled_predicates {
            let (predicate, cause) =
                self.resolve_coroutine_predicate((*predicate, cause.clone()), &cause.span);
            self.typeck_results.coroutine_stalled_predicates.insert((predicate, cause));
        }
    }
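    // Write back the hidden types of opaque types defined by this body:
    // resolve each hidden type, check that the opaque is used with valid
    // generic arguments, remap the hidden type onto the opaque's own generics,
    // and report mismatches between multiple defining uses as well as
    // recursively defined opaques (E0720).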
    #[instrument(skip(self), level = "debug")]
    fn visit_opaque_types(&mut self) {
        let tcx = self.tcx();
        let opaque_types = self.fcx.infcx.clone_opaque_types();
        let num_entries = self.fcx.inner.borrow_mut().opaque_types().num_entries();
        let prev = self.fcx.checked_opaque_types_storage_entries.replace(Some(num_entries));
        debug_assert_eq!(prev, None);
        for (opaque_type_key, hidden_type) in opaque_types {
            let hidden_type = self.resolve(hidden_type, &hidden_type.span);
            let opaque_type_key = self.resolve(opaque_type_key, &hidden_type.span);

            if !self.fcx.next_trait_solver() {
                if let ty::Alias(ty::Opaque, alias_ty) = hidden_type.ty.kind()
                    && alias_ty.def_id == opaque_type_key.def_id.to_def_id()
                    && alias_ty.args == opaque_type_key.args
                {
                    continue;
                }
            }

            if let Err(err) = check_opaque_type_parameter_valid(
                &self.fcx,
                opaque_type_key,
                hidden_type.span,
                DefiningScopeKind::HirTypeck,
            ) {
                self.typeck_results.concrete_opaque_types.insert(
                    opaque_type_key.def_id,
                    ty::OpaqueHiddenType::new_error(tcx, err.report(self.fcx)),
                );
            }

            let hidden_type = hidden_type.remap_generic_params_to_declaration_params(
                opaque_type_key,
                tcx,
                DefiningScopeKind::HirTypeck,
            );

            if let Some(prev) = self
                .typeck_results
                .concrete_opaque_types
                .insert(opaque_type_key.def_id, hidden_type)
            {
                let entry = &mut self
                    .typeck_results
                    .concrete_opaque_types
                    .get_mut(&opaque_type_key.def_id)
                    .unwrap();
                if prev.ty != hidden_type.ty {
                    if let Some(guar) = self.typeck_results.tainted_by_errors {
                        entry.ty = Ty::new_error(tcx, guar);
                    } else {
                        let (Ok(guar) | Err(guar)) =
                            prev.build_mismatch_error(&hidden_type, tcx).map(|d| d.emit());
                        entry.ty = Ty::new_error(tcx, guar);
                    }
                }

                entry.span = prev.span.substitute_dummy(hidden_type.span);
            }
        }

        let recursive_opaques: Vec<_> = self
            .typeck_results
            .concrete_opaque_types
            .iter()
            .filter(|&(&def_id, hidden_ty)| {
                hidden_ty
                    .ty
                    .visit_with(&mut HasRecursiveOpaque {
                        def_id,
                        seen: Default::default(),
                        opaques: &self.typeck_results.concrete_opaque_types,
                        tcx,
                    })
                    .is_break()
            })
            .map(|(def_id, hidden_ty)| (*def_id, hidden_ty.span))
            .collect();
        for (def_id, span) in recursive_opaques {
            let guar = self
                .fcx
                .dcx()
                .struct_span_err(span, "cannot resolve opaque type")
                .with_code(E0720)
                .emit();
            self.typeck_results
                .concrete_opaque_types
                .insert(def_id, OpaqueHiddenType { span, ty: Ty::new_error(tcx, guar) });
        }
    }
    fn visit_field_id(&mut self, hir_id: HirId) {
        if let Some(index) = self.fcx.typeck_results.borrow_mut().field_indices_mut().remove(hir_id)
        {
            self.typeck_results.field_indices_mut().insert(hir_id, index);
        }
    }

    #[instrument(skip(self, span), level = "debug")]
    fn visit_node_id(&mut self, span: Span, hir_id: HirId) {
        if let Some(def) =
            self.fcx.typeck_results.borrow_mut().type_dependent_defs_mut().remove(hir_id)
        {
            self.typeck_results.type_dependent_defs_mut().insert(hir_id, def);
        }

        self.visit_adjustments(span, hir_id);

        let n_ty = self.fcx.node_ty(hir_id);
        let n_ty = self.resolve(n_ty, &span);
        self.write_ty_to_typeck_results(hir_id, n_ty);
        debug!(?n_ty);

        if let Some(args) = self.fcx.typeck_results.borrow().node_args_opt(hir_id) {
            let args = self.resolve(args, &span);
            debug!("write_args_to_tcx({:?}, {:?})", hir_id, args);
            assert!(!args.has_infer() && !args.has_placeholders());
            self.typeck_results.node_args_mut().insert(hir_id, args);
        }
    }

    #[instrument(skip(self, span), level = "debug")]
    fn visit_adjustments(&mut self, span: Span, hir_id: HirId) {
        let adjustment = self.fcx.typeck_results.borrow_mut().adjustments_mut().remove(hir_id);
        match adjustment {
            None => {
                debug!("no adjustments for node");
            }

            Some(adjustment) => {
                let resolved_adjustment = self.resolve(adjustment, &span);
                debug!(?resolved_adjustment);
                self.typeck_results.adjustments_mut().insert(hir_id, resolved_adjustment);
            }
        }
    }

    #[instrument(skip(self), level = "debug")]
    fn visit_rust_2024_migration_desugared_pats(&mut self, hir_id: hir::HirId) {
        if let Some(is_hard_error) = self
            .fcx
            .typeck_results
            .borrow_mut()
            .rust_2024_migration_desugared_pats_mut()
            .remove(hir_id)
        {
            debug!(
                "node is a pat whose match ergonomics are desugared by the Rust 2024 migration lint"
            );
            self.typeck_results
                .rust_2024_migration_desugared_pats_mut()
                .insert(hir_id, is_hard_error);
        }
    }

    #[instrument(skip(self, span), level = "debug")]
    fn visit_pat_adjustments(&mut self, span: Span, hir_id: HirId) {
        let adjustment = self.fcx.typeck_results.borrow_mut().pat_adjustments_mut().remove(hir_id);
        match adjustment {
            None => {
                debug!("no pat_adjustments for node");
            }

            Some(adjustment) => {
                let resolved_adjustment = self.resolve(adjustment, &span);
                debug!(?resolved_adjustment);
                self.typeck_results.pat_adjustments_mut().insert(hir_id, resolved_adjustment);
            }
        }
    }

    #[instrument(skip(self), level = "debug")]
    fn visit_skipped_ref_pats(&mut self, hir_id: hir::HirId) {
        if self.fcx.typeck_results.borrow_mut().skipped_ref_pats_mut().remove(hir_id) {
            debug!("node is a skipped ref pat");
            self.typeck_results.skipped_ref_pats_mut().insert(hir_id);
        }
    }
    fn visit_liberated_fn_sigs(&mut self) {
        let fcx_typeck_results = self.fcx.typeck_results.borrow();
        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
        let common_hir_owner = fcx_typeck_results.hir_owner;

        let fcx_liberated_fn_sigs = fcx_typeck_results.liberated_fn_sigs().items_in_stable_order();

        for (local_id, &fn_sig) in fcx_liberated_fn_sigs {
            let hir_id = HirId { owner: common_hir_owner, local_id };
            let fn_sig = self.resolve(fn_sig, &hir_id);
            self.typeck_results.liberated_fn_sigs_mut().insert(hir_id, fn_sig);
        }
    }

    fn visit_fru_field_types(&mut self) {
        let fcx_typeck_results = self.fcx.typeck_results.borrow();
        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
        let common_hir_owner = fcx_typeck_results.hir_owner;

        let fcx_fru_field_types = fcx_typeck_results.fru_field_types().items_in_stable_order();

        for (local_id, ftys) in fcx_fru_field_types {
            let hir_id = HirId { owner: common_hir_owner, local_id };
            let ftys = self.resolve(ftys.clone(), &hir_id);
            self.typeck_results.fru_field_types_mut().insert(hir_id, ftys);
        }
    }

    fn visit_offset_of_container_types(&mut self) {
        let fcx_typeck_results = self.fcx.typeck_results.borrow();
        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
        let common_hir_owner = fcx_typeck_results.hir_owner;

        for (local_id, &(container, ref indices)) in
            fcx_typeck_results.offset_of_data().items_in_stable_order()
        {
            let hir_id = HirId { owner: common_hir_owner, local_id };
            let container = self.resolve(container, &hir_id);
            self.typeck_results.offset_of_data_mut().insert(hir_id, (container, indices.clone()));
        }
    }
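    // Resolve all inference variables in `value` and erase its regions via the
    // `Resolver` folder. Under the next trait solver this also deeply
    // normalizes the value; normalization goals that stay ambiguous because of
    // not-yet-computed coroutine types are recorded as
    // `coroutine_stalled_predicates` instead of being forced to hold here.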
    fn resolve<T>(&mut self, value: T, span: &dyn Locatable) -> T
    where
        T: TypeFoldable<TyCtxt<'tcx>>,
    {
        let value = self.fcx.resolve_vars_if_possible(value);

        let mut goals = vec![];
        let value =
            value.fold_with(&mut Resolver::new(self.fcx, span, self.body, true, &mut goals));

        let mut unexpected_goals = vec![];
        self.typeck_results.coroutine_stalled_predicates.extend(
            goals
                .into_iter()
                .map(|pred| {
                    self.fcx.resolve_vars_if_possible(pred).fold_with(&mut Resolver::new(
                        self.fcx,
                        span,
                        self.body,
                        false,
                        &mut unexpected_goals,
                    ))
                })
                .map(|goal| (goal.predicate, self.fcx.misc(span.to_span(self.fcx.tcx)))),
        );
        assert_eq!(unexpected_goals, vec![]);

        assert!(!value.has_infer());

        if let Err(guar) = value.error_reported() {
            self.typeck_results.tainted_by_errors = Some(guar);
        }

        value
    }

    fn resolve_coroutine_predicate<T>(&mut self, value: T, span: &dyn Locatable) -> T
    where
        T: TypeFoldable<TyCtxt<'tcx>>,
    {
        let value = self.fcx.resolve_vars_if_possible(value);

        let mut goals = vec![];
        let value =
            value.fold_with(&mut Resolver::new(self.fcx, span, self.body, false, &mut goals));
        assert_eq!(goals, vec![]);

        assert!(!value.has_infer());

        if let Err(guar) = value.error_reported() {
            self.typeck_results.tainted_by_errors = Some(guar);
        }

        value
    }
}
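// Something that can be turned into a `Span` for diagnostics during writeback:
// either a `Span` itself or the `HirId` of the relevant node.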
pub(crate) trait Locatable {
    fn to_span(&self, tcx: TyCtxt<'_>) -> Span;
}

impl Locatable for Span {
    fn to_span(&self, _: TyCtxt<'_>) -> Span {
        *self
    }
}

impl Locatable for HirId {
    fn to_span(&self, tcx: TyCtxt<'_>) -> Span {
        tcx.hir_span(*self)
    }
}
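// The `Resolver` folder does the actual substitution: it replaces resolved
// inference variables with their concrete values, erases all regions, and,
// when `should_normalize` is set (only used together with the next trait
// solver), deeply normalizes the value first. Terms that still contain
// unresolved inference variables are reported as `E0282` and replaced with
// error types/consts.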
struct Resolver<'cx, 'tcx> {
    fcx: &'cx FnCtxt<'cx, 'tcx>,
    span: &'cx dyn Locatable,
    body: &'tcx hir::Body<'tcx>,
    should_normalize: bool,
    nested_goals: &'cx mut Vec<Goal<'tcx, ty::Predicate<'tcx>>>,
}

impl<'cx, 'tcx> Resolver<'cx, 'tcx> {
    fn new(
        fcx: &'cx FnCtxt<'cx, 'tcx>,
        span: &'cx dyn Locatable,
        body: &'tcx hir::Body<'tcx>,
        should_normalize: bool,
        nested_goals: &'cx mut Vec<Goal<'tcx, ty::Predicate<'tcx>>>,
    ) -> Resolver<'cx, 'tcx> {
        Resolver { fcx, span, body, nested_goals, should_normalize }
    }

    fn report_error(&self, p: impl Into<ty::Term<'tcx>>) -> ErrorGuaranteed {
        if let Some(guar) = self.fcx.tainted_by_errors() {
            guar
        } else {
            self.fcx
                .err_ctxt()
                .emit_inference_failure_err(
                    self.fcx.tcx.hir_body_owner_def_id(self.body.id()),
                    self.span.to_span(self.fcx.tcx),
                    p.into(),
                    TypeAnnotationNeeded::E0282,
                    false,
                )
                .emit()
        }
    }
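    // Common path for types and consts: optionally deeply normalize the term
    // (collecting ambiguous coroutine goals into `nested_goals`), replace any
    // term that still contains non-region inference variables with an error,
    // and erase all regions. With `generic_const_exprs`, unevaluated consts
    // are additionally normalized eagerly.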
    fn handle_term<T>(
        &mut self,
        value: T,
        outer_exclusive_binder: impl FnOnce(T) -> ty::DebruijnIndex,
        new_err: impl Fn(TyCtxt<'tcx>, ErrorGuaranteed) -> T,
    ) -> T
    where
        T: Into<ty::Term<'tcx>> + TypeSuperFoldable<TyCtxt<'tcx>> + Copy,
    {
        let tcx = self.fcx.tcx;
        let mut value = if self.should_normalize && self.fcx.next_trait_solver() {
            let body_id = tcx.hir_body_owner_def_id(self.body.id());
            let cause = ObligationCause::misc(self.span.to_span(tcx), body_id);
            let at = self.fcx.at(&cause, self.fcx.param_env);
            let universes = vec![None; outer_exclusive_binder(value).as_usize()];
            match solve::deeply_normalize_with_skipped_universes_and_ambiguous_coroutine_goals(
                at, value, universes,
            ) {
                Ok((value, goals)) => {
                    self.nested_goals.extend(goals);
                    value
                }
                Err(errors) => {
                    let guar = self.fcx.err_ctxt().report_fulfillment_errors(errors);
                    new_err(tcx, guar)
                }
            }
        } else {
            value
        };

        if value.has_non_region_infer() {
            let guar = self.report_error(value);
            value = new_err(tcx, guar);
        }

        value = fold_regions(tcx, value, |_, _| tcx.lifetimes.re_erased);

        if tcx.features().generic_const_exprs() {
            value = value.fold_with(&mut EagerlyNormalizeConsts::new(self.fcx));
        }

        value
    }
}
impl<'cx, 'tcx> TypeFolder<TyCtxt<'tcx>> for Resolver<'cx, 'tcx> {
    fn cx(&self) -> TyCtxt<'tcx> {
        self.fcx.tcx
    }

    fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
        debug_assert!(!r.is_bound(), "Should not be resolving bound region.");
        self.fcx.tcx.lifetimes.re_erased
    }

    fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> {
        self.handle_term(ty, Ty::outer_exclusive_binder, Ty::new_error)
    }

    fn fold_const(&mut self, ct: ty::Const<'tcx>) -> ty::Const<'tcx> {
        self.handle_term(ct, ty::Const::outer_exclusive_binder, ty::Const::new_error)
    }

    fn fold_predicate(&mut self, predicate: ty::Predicate<'tcx>) -> ty::Predicate<'tcx> {
        assert!(
            !self.should_normalize,
            "normalizing predicates in writeback is not generally sound"
        );
        predicate.super_fold_with(self)
    }
}
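// Used when `generic_const_exprs` is enabled: try to evaluate constants during
// writeback, leaving them untouched if normalization fails.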
struct EagerlyNormalizeConsts<'tcx> {
    tcx: TyCtxt<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
}
impl<'tcx> EagerlyNormalizeConsts<'tcx> {
    fn new(fcx: &FnCtxt<'_, 'tcx>) -> Self {
        EagerlyNormalizeConsts { tcx: fcx.tcx, typing_env: fcx.typing_env(fcx.param_env) }
    }
}

impl<'tcx> TypeFolder<TyCtxt<'tcx>> for EagerlyNormalizeConsts<'tcx> {
    fn cx(&self) -> TyCtxt<'tcx> {
        self.tcx
    }

    fn fold_const(&mut self, ct: ty::Const<'tcx>) -> ty::Const<'tcx> {
        self.tcx.try_normalize_erasing_regions(self.typing_env, ct).unwrap_or(ct)
    }
}
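// Type visitor used by `visit_opaque_types` to detect whether an opaque's
// hidden type (transitively, through other opaques defined by this body)
// mentions the opaque itself, which would make it impossible to resolve.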
struct HasRecursiveOpaque<'a, 'tcx> {
    def_id: LocalDefId,
    seen: FxHashSet<LocalDefId>,
    opaques: &'a FxIndexMap<LocalDefId, ty::OpaqueHiddenType<'tcx>>,
    tcx: TyCtxt<'tcx>,
}

impl<'tcx> TypeVisitor<TyCtxt<'tcx>> for HasRecursiveOpaque<'_, 'tcx> {
    type Result = ControlFlow<()>;

    fn visit_ty(&mut self, t: Ty<'tcx>) -> Self::Result {
        if let ty::Alias(ty::Opaque, alias_ty) = *t.kind()
            && let Some(def_id) = alias_ty.def_id.as_local()
        {
            if self.def_id == def_id {
                return ControlFlow::Break(());
            }

            if self.seen.insert(def_id)
                && let Some(hidden_ty) = self.opaques.get(&def_id)
            {
                ty::EarlyBinder::bind(hidden_ty.ty)
                    .instantiate(self.tcx, alias_ty.args)
                    .visit_with(self)?;
            }
        }

        t.super_visit_with(self)
    }
}