1use std::iter;
40
41use ast::visit::Visitor;
42use hir::def::{DefKind, PartialRes, Res};
43use hir::{BodyId, HirId};
44use rustc_abi::ExternAbi;
45use rustc_ast as ast;
46use rustc_ast::*;
47use rustc_attr_parsing::{AttributeParser, ShouldEmit};
48use rustc_data_structures::fx::FxHashSet;
49use rustc_errors::ErrorGuaranteed;
50use rustc_hir as hir;
51use rustc_hir::attrs::{AttributeKind, InlineAttr};
52use rustc_hir::def_id::{DefId, LocalDefId};
53use rustc_middle::span_bug;
54use rustc_middle::ty::{Asyncness, DelegationAttrs, DelegationFnSigAttrs, ResolverAstLowering};
55use rustc_span::symbol::kw;
56use rustc_span::{DUMMY_SP, Ident, Span, Symbol};
57use smallvec::SmallVec;
58
59use crate::delegation::generics::{GenericsGenerationResult, GenericsGenerationResults};
60use crate::errors::{CycleInDelegationSignatureResolution, UnresolvedDelegationCallee};
61use crate::{
62 AllowReturnTypeNotation, GenericArgsMode, ImplTraitContext, ImplTraitPosition, LoweringContext,
63 ParamMode, ResolverAstLoweringExt,
64};
65
66mod generics;
67
/// The lowered HIR artifacts produced for a single delegation item:
/// the generated body, the function signature, the item's identifier,
/// and its (possibly synthesized) generics.
pub(crate) struct DelegationResults<'hir> {
    pub body_id: hir::BodyId,
    pub sig: hir::FnSig<'hir>,
    pub ident: Ident,
    pub generics: &'hir hir::Generics<'hir>,
}
74
/// Describes one attribute that may be added to a lowered delegation item.
struct AttrAdditionInfo {
    // Predicate recognizing an already-present attribute of this kind,
    // so the same attribute is never added twice.
    pub equals: fn(&hir::Attribute) -> bool,
    pub kind: AttrAdditionKind,
}
79
/// How a missing attribute is materialized on the delegation item.
enum AttrAdditionKind {
    /// Unconditionally add the attribute, built from the item's span alone.
    Default { factory: fn(Span) -> hir::Attribute },
    /// Add the attribute only when a function in the delegation chain
    /// carries `flag`; `factory` copies details from that function's
    /// original attribute.
    Inherit { flag: DelegationFnSigAttrs, factory: fn(Span, &hir::Attribute) -> hir::Attribute },
}
84
// Attributes of the item being lowered live under the owner's zeroth
// `ItemLocalId`.
const PARENT_ID: hir::ItemLocalId = hir::ItemLocalId::ZERO;
86
87static ATTRS_ADDITIONS: &[AttrAdditionInfo] = &[
88 AttrAdditionInfo {
89 equals: |a| #[allow(non_exhaustive_omitted_patterns)] match a {
hir::Attribute::Parsed(AttributeKind::MustUse { .. }) => true,
_ => false,
}matches!(a, hir::Attribute::Parsed(AttributeKind::MustUse { .. })),
90 kind: AttrAdditionKind::Inherit {
91 factory: |span, original_attr| {
92 let reason = match original_attr {
93 hir::Attribute::Parsed(AttributeKind::MustUse { reason, .. }) => *reason,
94 _ => None,
95 };
96
97 hir::Attribute::Parsed(AttributeKind::MustUse { span, reason })
98 },
99 flag: DelegationFnSigAttrs::MUST_USE,
100 },
101 },
102 AttrAdditionInfo {
103 equals: |a| #[allow(non_exhaustive_omitted_patterns)] match a {
hir::Attribute::Parsed(AttributeKind::Inline(..)) => true,
_ => false,
}matches!(a, hir::Attribute::Parsed(AttributeKind::Inline(..))),
104 kind: AttrAdditionKind::Default {
105 factory: |span| hir::Attribute::Parsed(AttributeKind::Inline(InlineAttr::Hint, span)),
106 },
107 },
108];
109
// Delegation chains are almost always a single hop, so one id is inline.
type DelegationIdsVec = SmallVec<[DefId; 1]>;
111
/// The chain of `DefId`s discovered while resolving a delegation: the
/// first element is the immediate delegee, the last is the root
/// (non-delegation) function the chain bottoms out at. Never empty.
struct DelegationIds {
    path: DelegationIdsVec,
}
118
119impl DelegationIds {
120 fn new(path: DelegationIdsVec) -> Self {
121 if !!path.is_empty() {
::core::panicking::panic("assertion failed: !path.is_empty()")
};assert!(!path.is_empty());
122 Self { path }
123 }
124
125 fn root_function_id(&self) -> DefId {
127 *self.path.last().expect("Ids vector can't be empty")
128 }
129
130 fn delegee_id(&self) -> DefId {
133 *self.path.first().expect("Ids vector can't be empty")
134 }
135}
136
137impl<'hir> LoweringContext<'_, 'hir> {
138 fn is_method(&self, def_id: DefId, span: Span) -> bool {
139 match self.tcx.def_kind(def_id) {
140 DefKind::Fn => false,
141 DefKind::AssocFn => match def_id.as_local() {
142 Some(local_def_id) => self
143 .resolver
144 .delegation_fn_sigs
145 .get(&local_def_id)
146 .is_some_and(|sig| sig.has_self),
147 None => self.tcx.associated_item(def_id).is_method(),
148 },
149 _ => ::rustc_middle::util::bug::span_bug_fmt(span,
format_args!("unexpected DefKind for delegation item"))span_bug!(span, "unexpected DefKind for delegation item"),
150 }
151 }
152
153 pub(crate) fn lower_delegation(
154 &mut self,
155 delegation: &Delegation,
156 item_id: NodeId,
157 ) -> DelegationResults<'hir> {
158 let span = self.lower_span(delegation.path.segments.last().unwrap().ident.span);
159
160 let ids = if let Some(delegation_info) =
162 self.resolver.delegation_infos.get(&self.local_def_id(item_id))
163 {
164 self.get_delegation_ids(delegation_info.resolution_node, span)
165 } else {
166 return self.generate_delegation_error(
167 self.dcx().span_delayed_bug(
168 span,
169 ::alloc::__export::must_use({
::alloc::fmt::format(format_args!("LoweringContext: the delegation {0:?} is unresolved",
item_id))
})format!("LoweringContext: the delegation {:?} is unresolved", item_id),
170 ),
171 span,
172 delegation,
173 );
174 };
175
176 match ids {
177 Ok(ids) => {
178 self.add_attrs_if_needed(span, &ids);
179
180 let delegee_id = ids.delegee_id();
181 let root_function_id = ids.root_function_id();
182
183 let is_method = self.is_method(root_function_id, span);
187
188 let (param_count, c_variadic) = self.param_count(root_function_id);
191
192 let mut generics = self.lower_delegation_generics(
193 delegation,
194 ids.root_function_id(),
195 item_id,
196 span,
197 );
198
199 let body_id = self.lower_delegation_body(
200 delegation,
201 item_id,
202 is_method,
203 param_count,
204 &mut generics,
205 span,
206 );
207
208 let decl = self.lower_delegation_decl(
212 delegee_id,
213 param_count,
214 c_variadic,
215 span,
216 &generics,
217 );
218
219 let sig = self.lower_delegation_sig(root_function_id, decl, span);
222 let ident = self.lower_ident(delegation.ident);
223
224 let generics = self.arena.alloc(hir::Generics {
225 has_where_clause_predicates: false,
226 params: self.arena.alloc_from_iter(generics.all_params(item_id, span, self)),
227 predicates: self
228 .arena
229 .alloc_from_iter(generics.all_predicates(item_id, span, self)),
230 span,
231 where_clause_span: span,
232 });
233
234 DelegationResults { body_id, sig, ident, generics }
235 }
236 Err(err) => self.generate_delegation_error(err, span, delegation),
237 }
238 }
239
240 fn add_attrs_if_needed(&mut self, span: Span, ids: &DelegationIds) {
241 let new_attrs =
242 self.create_new_attrs(ATTRS_ADDITIONS, span, ids, self.attrs.get(&PARENT_ID));
243
244 if new_attrs.is_empty() {
245 return;
246 }
247
248 let new_arena_allocated_attrs = match self.attrs.get(&PARENT_ID) {
249 Some(existing_attrs) => self.arena.alloc_from_iter(
250 existing_attrs.iter().map(|a| a.clone()).chain(new_attrs.into_iter()),
251 ),
252 None => self.arena.alloc_from_iter(new_attrs.into_iter()),
253 };
254
255 self.attrs.insert(PARENT_ID, new_arena_allocated_attrs);
256 }
257
    /// Computes which of `candidate_additions` should actually be added.
    ///
    /// A candidate is skipped when an equal attribute already exists on the
    /// item. `Default` additions are always produced; `Inherit` additions
    /// walk the delegation chain and are produced only when some function
    /// in the chain carries the corresponding flag, copying that function's
    /// original attribute.
    fn create_new_attrs(
        &self,
        candidate_additions: &[AttrAdditionInfo],
        span: Span,
        ids: &DelegationIds,
        existing_attrs: Option<&&[hir::Attribute]>,
    ) -> Vec<hir::Attribute> {
        // Pre-parse each (local) chain member's inheritable attributes once
        // so every candidate can search them.
        let defs_orig_attrs = ids
            .path
            .iter()
            .map(|def_id| (*def_id, self.parse_local_original_attrs(*def_id)))
            .collect::<Vec<_>>();

        candidate_additions
            .iter()
            .filter_map(|addition_info| {
                // Never duplicate an attribute the user already wrote.
                if let Some(existing_attrs) = existing_attrs
                    && existing_attrs
                        .iter()
                        .any(|existing_attr| (addition_info.equals)(existing_attr))
                {
                    return None;
                }

                match addition_info.kind {
                    AttrAdditionKind::Default { factory } => Some(factory(span)),
                    AttrAdditionKind::Inherit { flag, factory } => {
                        // Walk the chain delegee-first; the first function
                        // carrying the flag supplies the inherited attribute.
                        for (def_id, orig_attrs) in &defs_orig_attrs {
                            let original_attr = match def_id.as_local() {
                                // Local: only inherit when the resolver
                                // recorded the flag, then locate the matching
                                // parsed attribute.
                                Some(local_id) => self
                                    .get_attrs(local_id)
                                    .flags
                                    .contains(flag)
                                    .then(|| {
                                        orig_attrs
                                            .as_ref()
                                            .map(|attrs| {
                                                attrs.iter().find(|base_attr| {
                                                    (addition_info.equals)(base_attr)
                                                })
                                            })
                                            .flatten()
                                    })
                                    .flatten(),
                                // Non-local: search the item's attributes
                                // directly from the crate metadata.
                                None =>
                                {
                                    #[allow(deprecated)]
                                    self.tcx
                                        .get_all_attrs(*def_id)
                                        .iter()
                                        .find(|base_attr| (addition_info.equals)(base_attr))
                                }
                            };

                            if let Some(original_attr) = original_attr {
                                return Some(factory(span, original_attr));
                            }
                        }

                        None
                    }
                }
            })
            .collect::<Vec<_>>()
    }
323
324 fn parse_local_original_attrs(&self, def_id: DefId) -> Option<Vec<hir::Attribute>> {
325 if let Some(local_id) = def_id.as_local() {
326 let attrs = &self.get_attrs(local_id).to_inherit;
327
328 if !attrs.is_empty() {
329 return Some(AttributeParser::parse_limited_all(
330 self.tcx.sess,
331 attrs,
332 None,
333 hir::Target::Fn,
334 DUMMY_SP,
335 DUMMY_NODE_ID,
336 Some(self.tcx.features()),
337 ShouldEmit::Nothing,
338 ));
339 }
340 }
341
342 None
343 }
344
345 fn get_attrs(&self, local_id: LocalDefId) -> &DelegationAttrs {
346 if let Some(fn_sig) = self.resolver.delegation_fn_sigs.get(&local_id) {
348 &fn_sig.attrs
349 } else {
350 &self.resolver.delegation_infos[&local_id].attrs
351 }
352 }
353
354 fn get_delegation_ids(
355 &self,
356 mut node_id: NodeId,
357 span: Span,
358 ) -> Result<DelegationIds, ErrorGuaranteed> {
359 let mut visited: FxHashSet<NodeId> = Default::default();
360 let mut path: DelegationIdsVec = Default::default();
361
362 loop {
363 visited.insert(node_id);
364
365 let Some(def_id) = self.get_resolution_id(node_id) else {
366 return Err(self.tcx.dcx().span_delayed_bug(
367 span,
368 ::alloc::__export::must_use({
::alloc::fmt::format(format_args!("LoweringContext: couldn\'t resolve node {0:?} in delegation item",
node_id))
})format!(
369 "LoweringContext: couldn't resolve node {:?} in delegation item",
370 node_id
371 ),
372 ));
373 };
374
375 path.push(def_id);
376
377 if let Some(local_id) = def_id.as_local()
381 && let Some(delegation_info) = self.resolver.delegation_infos.get(&local_id)
382 {
383 node_id = delegation_info.resolution_node;
384 if visited.contains(&node_id) {
385 return Err(match visited.len() {
388 1 => self.dcx().emit_err(UnresolvedDelegationCallee { span }),
389 _ => self.dcx().emit_err(CycleInDelegationSignatureResolution { span }),
390 });
391 }
392 } else {
393 return Ok(DelegationIds::new(path));
394 }
395 }
396 }
397
398 fn get_resolution_id(&self, node_id: NodeId) -> Option<DefId> {
399 self.resolver.get_partial_res(node_id).and_then(|r| r.expect_full_res().opt_def_id())
400 }
401
402 fn param_count(&self, def_id: DefId) -> (usize, bool ) {
404 if let Some(local_sig_id) = def_id.as_local() {
405 match self.resolver.delegation_fn_sigs.get(&local_sig_id) {
406 Some(sig) => (sig.param_count, sig.c_variadic),
407 None => (0, false),
408 }
409 } else {
410 let sig = self.tcx.fn_sig(def_id).skip_binder().skip_binder();
411 (sig.inputs().len() + usize::from(sig.c_variadic), sig.c_variadic)
412 }
413 }
414
    /// Builds the `FnDecl` for the delegation item. Each input and the
    /// output use `InferDelegation` placeholder types, deferring the real
    /// types to inference from the delegee's signature (`sig_id`).
    fn lower_delegation_decl(
        &mut self,
        sig_id: DefId,
        param_count: usize,
        c_variadic: bool,
        span: Span,
        generics: &GenericsGenerationResults<'hir>,
    ) -> &'hir hir::FnDecl<'hir> {
        // The variadic `...` slot is counted in `param_count` but is not a
        // declared parameter.
        let decl_param_count = param_count - c_variadic as usize;
        let inputs = self.arena.alloc_from_iter((0..decl_param_count).map(|arg| hir::Ty {
            hir_id: self.next_id(),
            kind: hir::TyKind::InferDelegation(sig_id, hir::InferDelegationKind::Input(arg)),
            span,
        }));

        // The output placeholder carries the generated generic-args segment
        // ids so inference can relate callee generics to this item's.
        let output = self.arena.alloc(hir::Ty {
            hir_id: self.next_id(),
            kind: hir::TyKind::InferDelegation(
                sig_id,
                hir::InferDelegationKind::Output(self.arena.alloc(hir::DelegationGenerics {
                    child_args_segment_id: generics.child.args_segment_id,
                    parent_args_segment_id: generics.parent.args_segment_id,
                })),
            ),
            span,
        });

        self.arena.alloc(hir::FnDecl {
            inputs,
            output: hir::FnRetTy::Return(output),
            c_variadic,
            lifetime_elision_allowed: true,
            implicit_self: hir::ImplicitSelfKind::None,
        })
    }
452
    /// Builds the delegation item's `FnSig`, inheriting the header
    /// (safety, constness, asyncness, ABI) from the delegee `sig_id`.
    fn lower_delegation_sig(
        &mut self,
        sig_id: DefId,
        decl: &'hir hir::FnDecl<'hir>,
        span: Span,
    ) -> hir::FnSig<'hir> {
        let header = if let Some(local_sig_id) = sig_id.as_local() {
            match self.resolver.delegation_fn_sigs.get(&local_sig_id) {
                Some(sig) => {
                    let parent = self.tcx.parent(sig_id);
                    // `#[target_feature]` functions and foreign-mod items
                    // default to unsafe-to-call.
                    let default_safety =
                        if sig.attrs.flags.contains(DelegationFnSigAttrs::TARGET_FEATURE)
                            || self.tcx.def_kind(parent) == DefKind::ForeignMod
                        {
                            hir::Safety::Unsafe
                        } else {
                            hir::Safety::Safe
                        };
                    self.lower_fn_header(sig.header, default_safety, &[])
                }
                // No recorded signature: resolution already failed, so
                // fall back to a neutral header.
                None => self.generate_header_error(),
            }
        } else {
            // Non-local callee: query the type-checked signature.
            let sig = self.tcx.fn_sig(sig_id).skip_binder().skip_binder();
            let asyncness = match self.tcx.asyncness(sig_id) {
                Asyncness::Yes => hir::IsAsync::Async(span),
                Asyncness::No => hir::IsAsync::NotAsync,
            };
            hir::FnHeader {
                safety: if self.tcx.codegen_fn_attrs(sig_id).safe_target_features {
                    hir::HeaderSafety::SafeTargetFeatures
                } else {
                    hir::HeaderSafety::Normal(sig.safety)
                },
                constness: self.tcx.constness(sig_id),
                asyncness,
                abi: sig.abi,
            }
        };

        hir::FnSig { decl, header, span }
    }
498
499 fn generate_param(
500 &mut self,
501 is_method: bool,
502 idx: usize,
503 span: Span,
504 ) -> (hir::Param<'hir>, NodeId) {
505 let pat_node_id = self.next_node_id();
506 let pat_id = self.lower_node_id(pat_node_id);
507 let name = if is_method && idx == 0 {
509 kw::SelfLower
510 } else {
511 Symbol::intern(&::alloc::__export::must_use({
::alloc::fmt::format(format_args!("arg{0}", idx))
})format!("arg{idx}"))
512 };
513 let ident = Ident::with_dummy_span(name);
514 let pat = self.arena.alloc(hir::Pat {
515 hir_id: pat_id,
516 kind: hir::PatKind::Binding(hir::BindingMode::NONE, pat_id, ident, None),
517 span,
518 default_binding_modes: false,
519 });
520
521 (hir::Param { hir_id: self.next_id(), pat, ty_span: span, span }, pat_node_id)
522 }
523
524 fn generate_arg(
525 &mut self,
526 is_method: bool,
527 idx: usize,
528 param_id: HirId,
529 span: Span,
530 ) -> hir::Expr<'hir> {
531 let name = if is_method && idx == 0 {
533 kw::SelfLower
534 } else {
535 Symbol::intern(&::alloc::__export::must_use({
::alloc::fmt::format(format_args!("arg{0}", idx))
})format!("arg{idx}"))
536 };
537
538 let segments = self.arena.alloc_from_iter(iter::once(hir::PathSegment {
539 ident: Ident::with_dummy_span(name),
540 hir_id: self.next_id(),
541 res: Res::Local(param_id),
542 args: None,
543 infer_args: false,
544 }));
545
546 let path = self.arena.alloc(hir::Path { span, res: Res::Local(param_id), segments });
547 self.mk_expr(hir::ExprKind::Path(hir::QPath::Resolved(None, path)), span)
548 }
549
    /// Generates the delegation item's body: one synthesized parameter per
    /// callee parameter, plus the forwarding call expression produced by
    /// `finalize_body_lowering`.
    ///
    /// When the delegation has an explicit body block, that block supplies
    /// the first (receiver) argument, with references to the delegation
    /// path inside it re-resolved to the generated first parameter.
    fn lower_delegation_body(
        &mut self,
        delegation: &Delegation,
        item_id: NodeId,
        is_method: bool,
        param_count: usize,
        generics: &mut GenericsGenerationResults<'hir>,
        span: Span,
    ) -> BodyId {
        let block = delegation.body.as_deref();

        self.lower_body(|this| {
            let mut parameters: Vec<hir::Param<'_>> = Vec::with_capacity(param_count);
            let mut args: Vec<hir::Expr<'_>> = Vec::with_capacity(param_count);

            for idx in 0..param_count {
                let (param, pat_node_id) = this.generate_param(is_method, idx, span);
                parameters.push(param);

                let arg = if let Some(block) = block
                    && idx == 0
                {
                    // Rewrite uses of the delegation path in the user block
                    // to refer to the generated first parameter instead.
                    let mut self_resolver = SelfResolver {
                        resolver: this.resolver,
                        path_id: delegation.id,
                        self_param_id: pat_node_id,
                    };
                    self_resolver.visit_block(block);
                    this.ident_and_label_to_local_id.insert(pat_node_id, param.pat.hir_id.local_id);
                    this.lower_target_expr(&block)
                } else {
                    this.generate_arg(is_method, idx, param.pat.hir_id, span)
                };
                args.push(arg);
            }

            let final_expr = this.finalize_body_lowering(delegation, item_id, args, generics, span);

            (this.arena.alloc_from_iter(parameters), final_expr)
        })
    }
592
593 fn lower_target_expr(&mut self, block: &Block) -> hir::Expr<'hir> {
596 if let [stmt] = block.stmts.as_slice()
597 && let StmtKind::Expr(expr) = &stmt.kind
598 {
599 return self.lower_expr_mut(expr);
600 }
601
602 let block = self.lower_block(block, false);
603 self.mk_expr(hir::ExprKind::Block(block, None), block.span)
604 }
605
    /// Builds the forwarding expression that ends the generated body.
    ///
    /// When the target resolves to a method called without a qualified
    /// self, without generic args on intermediate path segments, and with
    /// at least one argument, a method call (`args[0].name(args[1..])`)
    /// is emitted; otherwise a plain call through a (possibly qualified)
    /// path. Either way, the call is wrapped in a block expression.
    fn finalize_body_lowering(
        &mut self,
        delegation: &Delegation,
        item_id: NodeId,
        args: Vec<hir::Expr<'hir>>,
        generics: &mut GenericsGenerationResults<'hir>,
        span: Span,
    ) -> hir::Expr<'hir> {
        let args = self.arena.alloc_from_iter(args);

        // Generic args on any segment other than the last force the
        // path-call form (method-call syntax cannot express them).
        let has_generic_args =
            delegation.path.segments.iter().rev().skip(1).any(|segment| segment.args.is_some());

        let call = if self
            .get_resolution_id(delegation.id)
            .map(|def_id| self.is_method(def_id, span))
            .unwrap_or_default()
            && delegation.qself.is_none()
            && !has_generic_args
            && !args.is_empty()
        {
            let ast_segment = delegation.path.segments.last().unwrap();
            let segment = self.lower_path_segment(
                delegation.path.span,
                ast_segment,
                ParamMode::Optional,
                GenericArgsMode::Err,
                ImplTraitContext::Disallowed(ImplTraitPosition::Path),
                None,
            );

            let segment = self.process_segment(item_id, span, &segment, &mut generics.child, false);
            let segment = self.arena.alloc(segment);

            self.arena.alloc(hir::Expr {
                hir_id: self.next_id(),
                kind: hir::ExprKind::MethodCall(segment, &args[0], &args[1..], span),
                span,
            })
        } else {
            let path = self.lower_qpath(
                delegation.id,
                &delegation.qself,
                &delegation.path,
                ParamMode::Optional,
                AllowReturnTypeNotation::No,
                ImplTraitContext::Disallowed(ImplTraitPosition::Path),
                None,
            );

            let new_path = match path {
                hir::QPath::Resolved(ty, path) => {
                    let mut new_path = path.clone();
                    let len = new_path.segments.len();

                    // Re-process the last two segments: the second-to-last
                    // (parent/type) segment gains propagated lifetimes, the
                    // last (function) segment gets generated generic args.
                    new_path.segments = self.arena.alloc_from_iter(
                        new_path.segments.iter().enumerate().map(|(idx, segment)| {
                            let mut process_segment = |result, add_lifetimes| {
                                self.process_segment(item_id, span, segment, result, add_lifetimes)
                            };

                            if idx + 2 == len {
                                process_segment(&mut generics.parent, true)
                            } else if idx + 1 == len {
                                process_segment(&mut generics.child, false)
                            } else {
                                segment.clone()
                            }
                        }),
                    );

                    hir::QPath::Resolved(ty, self.arena.alloc(new_path))
                }
                hir::QPath::TypeRelative(ty, segment) => {
                    let segment =
                        self.process_segment(item_id, span, segment, &mut generics.child, false);

                    hir::QPath::TypeRelative(ty, self.arena.alloc(segment))
                }
            };

            let callee_path = self.arena.alloc(self.mk_expr(hir::ExprKind::Path(new_path), span));
            self.arena.alloc(self.mk_expr(hir::ExprKind::Call(callee_path, args), span))
        };

        let block = self.arena.alloc(hir::Block {
            stmts: &[],
            expr: Some(call),
            hir_id: self.next_id(),
            rules: hir::BlockCheckMode::DefaultBlock,
            span,
            targeted_by_break: false,
        });

        self.mk_expr(hir::ExprKind::Block(block, None), span)
    }
719
    /// Attaches generated generic arguments to a lowered path segment when
    /// propagation is required, and records the segment's `HirId` in
    /// `result` when it is needed for signature inheritance.
    fn process_segment(
        &mut self,
        item_id: NodeId,
        span: Span,
        segment: &hir::PathSegment<'hir>,
        result: &mut GenericsGenerationResult<'hir>,
        add_lifetimes: bool,
    ) -> hir::PathSegment<'hir> {
        let details = result.generics.args_propagation_details();

        let segment = if details.should_propagate
            && let Some(args) = result
                .generics
                .into_hir_generics(self, item_id, span)
                .into_generic_args(self, add_lifetimes, span)
        {
            // Replace the segment's args with the generated ones.
            hir::PathSegment { args: Some(args), ..segment.clone() }
        } else {
            segment.clone()
        };

        if details.use_args_in_sig_inheritance {
            result.args_segment_id = Some(segment.hir_id);
        }

        segment
    }
749
    /// Produces placeholder `DelegationResults` after a resolution error:
    /// a signature with no inputs and a default return, plus a body that
    /// still lowers any user-written block (so diagnostics inside it are
    /// emitted) and otherwise carries `ExprKind::Err`.
    fn generate_delegation_error(
        &mut self,
        err: ErrorGuaranteed,
        span: Span,
        delegation: &Delegation,
    ) -> DelegationResults<'hir> {
        let decl = self.arena.alloc(hir::FnDecl {
            inputs: &[],
            output: hir::FnRetTy::DefaultReturn(span),
            c_variadic: false,
            lifetime_elision_allowed: true,
            implicit_self: hir::ImplicitSelfKind::None,
        });

        let header = self.generate_header_error();
        let sig = hir::FnSig { decl, header, span };

        let ident = self.lower_ident(delegation.ident);

        let body_id = self.lower_body(|this| {
            let body_expr = match delegation.body.as_ref() {
                Some(box block) => {
                    // Lower the user block as a discarded statement so its
                    // own errors surface despite the delegation failure.
                    let stmts = this.arena.alloc_from_iter([hir::Stmt {
                        hir_id: this.next_id(),
                        kind: rustc_hir::StmtKind::Semi(
                            this.arena.alloc(this.lower_target_expr(block)),
                        ),
                        span,
                    }]);

                    let block = this.arena.alloc(hir::Block {
                        stmts,
                        expr: None,
                        hir_id: this.next_id(),
                        rules: hir::BlockCheckMode::DefaultBlock,
                        span,
                        targeted_by_break: false,
                    });

                    hir::ExprKind::Block(block, None)
                }
                None => hir::ExprKind::Err(err),
            };

            (&[], this.mk_expr(body_expr, span))
        });

        let generics = hir::Generics::empty();
        DelegationResults { ident, generics, body_id, sig }
    }
805
806 fn generate_header_error(&self) -> hir::FnHeader {
807 hir::FnHeader {
808 safety: hir::Safety::Safe.into(),
809 constness: hir::Constness::NotConst,
810 asyncness: hir::IsAsync::NotAsync,
811 abi: ExternAbi::Rust,
812 }
813 }
814
815 #[inline]
816 fn mk_expr(&mut self, kind: hir::ExprKind<'hir>, span: Span) -> hir::Expr<'hir> {
817 hir::Expr { hir_id: self.next_id(), kind, span }
818 }
819}
820
/// AST visitor that rebinds references to a delegation's own path node
/// (`path_id`) so they resolve to the generated `self` parameter
/// (`self_param_id`) instead.
struct SelfResolver<'a, 'tcx> {
    resolver: &'a mut ResolverAstLowering<'tcx>,
    path_id: NodeId,
    self_param_id: NodeId,
}
826
827impl SelfResolver<'_, '_> {
828 fn try_replace_id(&mut self, id: NodeId) {
829 if let Some(res) = self.resolver.partial_res_map.get(&id)
830 && let Some(Res::Local(sig_id)) = res.full_res()
831 && sig_id == self.path_id
832 {
833 let new_res = PartialRes::new(Res::Local(self.self_param_id));
834 self.resolver.partial_res_map.insert(id, new_res);
835 }
836 }
837}
838
// Visit every node id in the traversed AST, rewriting matching resolutions.
impl<'ast, 'a> Visitor<'ast> for SelfResolver<'a, '_> {
    fn visit_id(&mut self, id: NodeId) {
        self.try_replace_id(id);
    }
}