// rustc_ast_lowering/delegation.rs
//! This module implements expansion of delegation items with early resolved paths.
//! It includes delegation to free functions:
//!
//! ```ignore (illustrative)
//! reuse module::name { target_expr_template }
//! ```
//!
//! And delegation to trait methods:
//!
//! ```ignore (illustrative)
//! reuse <Type as Trait>::name { target_expr_template }
//! ```
//!
//! After expansion for both cases we get:
//!
//! ```ignore (illustrative)
//! fn name(
//!     arg0: InferDelegation(sig_id, Input(0)),
//!     arg1: InferDelegation(sig_id, Input(1)),
//!     ...,
//!     argN: InferDelegation(sig_id, Input(N)),
//! ) -> InferDelegation(sig_id, Output) {
//!     callee_path(target_expr_template(arg0), arg1, ..., argN)
//! }
//! ```
//!
//! Where `callee_path` is a path in the delegation item, e.g. `<Type as Trait>::name`.
//! `sig_id` is the id of the item from which the signature is inherited. It may be the
//! delegation item id (`item_id`) in case of impl trait, or the path resolution id
//! (`path_id`) otherwise.
//!
//! Since we do not have a proper way to obtain function type information by path resolution
//! in AST, we mark each function parameter type as `InferDelegation` and inherit it during
//! HIR ty lowering.
//!
//! Similarly, generics, predicates and the header are set to the "default" values.
//! In case of discrepancy with the callee function, the `UnsupportedDelegation` error will
//! also be emitted during HIR ty lowering.

use std::iter;

use ast::visit::Visitor;
use hir::def::{DefKind, PartialRes, Res};
use hir::{BodyId, HirId};
use rustc_abi::ExternAbi;
use rustc_ast as ast;
use rustc_ast::*;
use rustc_data_structures::fx::FxHashSet;
use rustc_errors::ErrorGuaranteed;
use rustc_hir as hir;
use rustc_hir::attrs::{AttributeKind, InlineAttr};
use rustc_hir::def_id::DefId;
use rustc_middle::span_bug;
use rustc_middle::ty::Asyncness;
use rustc_span::symbol::kw;
use rustc_span::{Ident, Span, Symbol};
use smallvec::SmallVec;

use crate::delegation::generics::{GenericsGenerationResult, GenericsGenerationResults};
use crate::errors::{CycleInDelegationSignatureResolution, UnresolvedDelegationCallee};
use crate::{
    AllowReturnTypeNotation, GenericArgsMode, ImplTraitContext, ImplTraitPosition, LoweringContext,
    ParamMode, ResolverAstLoweringExt,
};

65mod generics;
66
67pub(crate) struct DelegationResults<'hir> {
68    pub body_id: hir::BodyId,
69    pub sig: hir::FnSig<'hir>,
70    pub ident: Ident,
71    pub generics: &'hir hir::Generics<'hir>,
72}
73
74struct AttrAdditionInfo {
75    pub equals: fn(&hir::Attribute) -> bool,
76    pub kind: AttrAdditionKind,
77}
78
79enum AttrAdditionKind {
80    Default { factory: fn(Span) -> hir::Attribute },
81    Inherit { factory: fn(Span, &hir::Attribute) -> hir::Attribute },
82}
83
84const PARENT_ID: hir::ItemLocalId = hir::ItemLocalId::ZERO;
85
86static ATTRS_ADDITIONS: &[AttrAdditionInfo] = &[
87    AttrAdditionInfo {
88        equals: |a| #[allow(non_exhaustive_omitted_patterns)] match a {
    hir::Attribute::Parsed(AttributeKind::MustUse { .. }) => true,
    _ => false,
}matches!(a, hir::Attribute::Parsed(AttributeKind::MustUse { .. })),
89        kind: AttrAdditionKind::Inherit {
90            factory: |span, original_attr| {
91                let reason = match original_attr {
92                    hir::Attribute::Parsed(AttributeKind::MustUse { reason, .. }) => *reason,
93                    _ => None,
94                };
95
96                hir::Attribute::Parsed(AttributeKind::MustUse { span, reason })
97            },
98        },
99    },
100    AttrAdditionInfo {
101        equals: |a| #[allow(non_exhaustive_omitted_patterns)] match a {
    hir::Attribute::Parsed(AttributeKind::Inline(..)) => true,
    _ => false,
}matches!(a, hir::Attribute::Parsed(AttributeKind::Inline(..))),
102        kind: AttrAdditionKind::Default {
103            factory: |span| hir::Attribute::Parsed(AttributeKind::Inline(InlineAttr::Hint, span)),
104        },
105    },
106];
107
108impl<'hir, R: ResolverAstLoweringExt<'hir>> LoweringContext<'_, 'hir, R> {
109    fn is_method(&self, def_id: DefId, span: Span) -> bool {
110        match self.tcx.def_kind(def_id) {
111            DefKind::Fn => false,
112            DefKind::AssocFn => self.tcx.associated_item(def_id).is_method(),
113            _ => ::rustc_middle::util::bug::span_bug_fmt(span,
    format_args!("unexpected DefKind for delegation item"))span_bug!(span, "unexpected DefKind for delegation item"),
114        }
115    }
116
117    pub(crate) fn lower_delegation(
118        &mut self,
119        delegation: &Delegation,
120        item_id: NodeId,
121    ) -> DelegationResults<'hir> {
122        let span = self.lower_span(delegation.path.segments.last().unwrap().ident.span);
123
124        // Delegation can be unresolved in illegal places such as function bodies in extern blocks (see #151356)
125        let sig_id = if let Some(delegation_info) =
126            self.resolver.delegation_info(self.local_def_id(item_id))
127        {
128            self.get_sig_id(delegation_info.resolution_node, span)
129        } else {
130            self.dcx().span_delayed_bug(
131                span,
132                ::alloc::__export::must_use({
        ::alloc::fmt::format(format_args!("LoweringContext: the delegation {0:?} is unresolved",
                item_id))
    })format!("LoweringContext: the delegation {:?} is unresolved", item_id),
133            );
134
135            return self.generate_delegation_error(span, delegation);
136        };
137
138        match sig_id {
139            Ok(sig_id) => {
140                self.add_attrs_if_needed(span, sig_id);
141
142                let is_method = self.is_method(sig_id, span);
143
144                let (param_count, c_variadic) = self.param_count(sig_id);
145
146                let mut generics = self.uplift_delegation_generics(delegation, sig_id, item_id);
147
148                let body_id = self.lower_delegation_body(
149                    delegation,
150                    is_method,
151                    param_count,
152                    &mut generics,
153                    span,
154                );
155
156                let decl =
157                    self.lower_delegation_decl(sig_id, param_count, c_variadic, span, &generics);
158
159                let sig = self.lower_delegation_sig(sig_id, decl, span);
160                let ident = self.lower_ident(delegation.ident);
161
162                let generics = self.arena.alloc(hir::Generics {
163                    has_where_clause_predicates: false,
164                    params: self.arena.alloc_from_iter(generics.all_params(span, self)),
165                    predicates: self.arena.alloc_from_iter(generics.all_predicates(span, self)),
166                    span,
167                    where_clause_span: span,
168                });
169
170                DelegationResults { body_id, sig, ident, generics }
171            }
172            Err(_) => self.generate_delegation_error(span, delegation),
173        }
174    }
175
176    fn add_attrs_if_needed(&mut self, span: Span, sig_id: DefId) {
177        let new_attrs =
178            self.create_new_attrs(ATTRS_ADDITIONS, span, sig_id, self.attrs.get(&PARENT_ID));
179
180        if new_attrs.is_empty() {
181            return;
182        }
183
184        let new_arena_allocated_attrs = match self.attrs.get(&PARENT_ID) {
185            Some(existing_attrs) => self.arena.alloc_from_iter(
186                existing_attrs.iter().map(|a| a.clone()).chain(new_attrs.into_iter()),
187            ),
188            None => self.arena.alloc_from_iter(new_attrs.into_iter()),
189        };
190
191        self.attrs.insert(PARENT_ID, new_arena_allocated_attrs);
192    }
193
194    fn create_new_attrs(
195        &self,
196        candidate_additions: &[AttrAdditionInfo],
197        span: Span,
198        sig_id: DefId,
199        existing_attrs: Option<&&[hir::Attribute]>,
200    ) -> Vec<hir::Attribute> {
201        candidate_additions
202            .iter()
203            .filter_map(|addition_info| {
204                if let Some(existing_attrs) = existing_attrs
205                    && existing_attrs
206                        .iter()
207                        .any(|existing_attr| (addition_info.equals)(existing_attr))
208                {
209                    return None;
210                }
211
212                match addition_info.kind {
213                    AttrAdditionKind::Default { factory } => Some(factory(span)),
214                    AttrAdditionKind::Inherit { factory, .. } =>
215                    {
216                        #[allow(deprecated)]
217                        self.tcx
218                            .get_all_attrs(sig_id)
219                            .iter()
220                            .find_map(|a| (addition_info.equals)(a).then(|| factory(span, a)))
221                    }
222                }
223            })
224            .collect::<Vec<_>>()
225    }
226
227    fn get_sig_id(&self, mut node_id: NodeId, span: Span) -> Result<DefId, ErrorGuaranteed> {
228        let mut visited: FxHashSet<NodeId> = Default::default();
229        let mut path: SmallVec<[DefId; 1]> = Default::default();
230
231        loop {
232            visited.insert(node_id);
233
234            let Some(def_id) = self.get_resolution_id(node_id) else {
235                return Err(self.tcx.dcx().span_delayed_bug(
236                    span,
237                    ::alloc::__export::must_use({
        ::alloc::fmt::format(format_args!("LoweringContext: couldn\'t resolve node {0:?} in delegation item",
                node_id))
    })format!(
238                        "LoweringContext: couldn't resolve node {:?} in delegation item",
239                        node_id
240                    ),
241                ));
242            };
243
244            path.push(def_id);
245
246            // If def_id is in local crate and it corresponds to another delegation
247            // it means that we refer to another delegation as a callee, so in order to obtain
248            // a signature DefId we obtain NodeId of the callee delegation and try to get signature from it.
249            if let Some(local_id) = def_id.as_local()
250                && let Some(delegation_info) = self.resolver.delegation_info(local_id)
251            {
252                node_id = delegation_info.resolution_node;
253                if visited.contains(&node_id) {
254                    // We encountered a cycle in the resolution, or delegation callee refers to non-existent
255                    // entity, in this case emit an error.
256                    return Err(match visited.len() {
257                        1 => self.dcx().emit_err(UnresolvedDelegationCallee { span }),
258                        _ => self.dcx().emit_err(CycleInDelegationSignatureResolution { span }),
259                    });
260                }
261            } else {
262                return Ok(path[0]);
263            }
264        }
265    }
266
267    fn get_resolution_id(&self, node_id: NodeId) -> Option<DefId> {
268        self.resolver.get_partial_res(node_id).and_then(|r| r.expect_full_res().opt_def_id())
269    }
270
271    // Function parameter count, including C variadic `...` if present.
272    fn param_count(&self, def_id: DefId) -> (usize, bool /*c_variadic*/) {
273        let sig = self.tcx.fn_sig(def_id).skip_binder().skip_binder();
274        (sig.inputs().len() + usize::from(sig.c_variadic), sig.c_variadic)
275    }
276
277    fn lower_delegation_decl(
278        &mut self,
279        sig_id: DefId,
280        param_count: usize,
281        c_variadic: bool,
282        span: Span,
283        generics: &GenericsGenerationResults<'hir>,
284    ) -> &'hir hir::FnDecl<'hir> {
285        // The last parameter in C variadic functions is skipped in the signature,
286        // like during regular lowering.
287        let decl_param_count = param_count - c_variadic as usize;
288        let inputs = self.arena.alloc_from_iter((0..decl_param_count).map(|arg| hir::Ty {
289            hir_id: self.next_id(),
290            kind: hir::TyKind::InferDelegation(hir::InferDelegation::Sig(
291                sig_id,
292                hir::InferDelegationSig::Input(arg),
293            )),
294            span,
295        }));
296
297        let output = self.arena.alloc(hir::Ty {
298            hir_id: self.next_id(),
299            kind: hir::TyKind::InferDelegation(hir::InferDelegation::Sig(
300                sig_id,
301                hir::InferDelegationSig::Output(self.arena.alloc(hir::DelegationGenerics {
302                    child_args_segment_id: generics.child.args_segment_id,
303                    parent_args_segment_id: generics.parent.args_segment_id,
304                })),
305            )),
306            span,
307        });
308
309        self.arena.alloc(hir::FnDecl {
310            inputs,
311            output: hir::FnRetTy::Return(output),
312            c_variadic,
313            lifetime_elision_allowed: true,
314            implicit_self: hir::ImplicitSelfKind::None,
315        })
316    }
317
318    fn lower_delegation_sig(
319        &mut self,
320        sig_id: DefId,
321        decl: &'hir hir::FnDecl<'hir>,
322        span: Span,
323    ) -> hir::FnSig<'hir> {
324        let sig = self.tcx.fn_sig(sig_id).skip_binder().skip_binder();
325        let asyncness = match self.tcx.asyncness(sig_id) {
326            Asyncness::Yes => hir::IsAsync::Async(span),
327            Asyncness::No => hir::IsAsync::NotAsync,
328        };
329
330        let header = hir::FnHeader {
331            safety: if self.tcx.codegen_fn_attrs(sig_id).safe_target_features {
332                hir::HeaderSafety::SafeTargetFeatures
333            } else {
334                hir::HeaderSafety::Normal(sig.safety)
335            },
336            constness: self.tcx.constness(sig_id),
337            asyncness,
338            abi: sig.abi,
339        };
340
341        hir::FnSig { decl, header, span }
342    }
343
344    fn generate_param(
345        &mut self,
346        is_method: bool,
347        idx: usize,
348        span: Span,
349    ) -> (hir::Param<'hir>, NodeId) {
350        let pat_node_id = self.next_node_id();
351        let pat_id = self.lower_node_id(pat_node_id);
352        // FIXME(cjgillot) AssocItem currently relies on self parameter being exactly named `self`.
353        let name = if is_method && idx == 0 {
354            kw::SelfLower
355        } else {
356            Symbol::intern(&::alloc::__export::must_use({
        ::alloc::fmt::format(format_args!("arg{0}", idx))
    })format!("arg{idx}"))
357        };
358        let ident = Ident::with_dummy_span(name);
359        let pat = self.arena.alloc(hir::Pat {
360            hir_id: pat_id,
361            kind: hir::PatKind::Binding(hir::BindingMode::NONE, pat_id, ident, None),
362            span,
363            default_binding_modes: false,
364        });
365
366        (hir::Param { hir_id: self.next_id(), pat, ty_span: span, span }, pat_node_id)
367    }
368
369    fn generate_arg(
370        &mut self,
371        is_method: bool,
372        idx: usize,
373        param_id: HirId,
374        span: Span,
375    ) -> hir::Expr<'hir> {
376        // FIXME(cjgillot) AssocItem currently relies on self parameter being exactly named `self`.
377        let name = if is_method && idx == 0 {
378            kw::SelfLower
379        } else {
380            Symbol::intern(&::alloc::__export::must_use({
        ::alloc::fmt::format(format_args!("arg{0}", idx))
    })format!("arg{idx}"))
381        };
382
383        let segments = self.arena.alloc_from_iter(iter::once(hir::PathSegment {
384            ident: Ident::with_dummy_span(name),
385            hir_id: self.next_id(),
386            res: Res::Local(param_id),
387            args: None,
388            infer_args: false,
389        }));
390
391        let path = self.arena.alloc(hir::Path { span, res: Res::Local(param_id), segments });
392        self.mk_expr(hir::ExprKind::Path(hir::QPath::Resolved(None, path)), span)
393    }
394
395    fn lower_delegation_body(
396        &mut self,
397        delegation: &Delegation,
398        is_method: bool,
399        param_count: usize,
400        generics: &mut GenericsGenerationResults<'hir>,
401        span: Span,
402    ) -> BodyId {
403        let block = delegation.body.as_deref();
404
405        self.lower_body(|this| {
406            let mut parameters: Vec<hir::Param<'_>> = Vec::with_capacity(param_count);
407            let mut args: Vec<hir::Expr<'_>> = Vec::with_capacity(param_count);
408
409            for idx in 0..param_count {
410                let (param, pat_node_id) = this.generate_param(is_method, idx, span);
411                parameters.push(param);
412
413                let arg = if let Some(block) = block
414                    && idx == 0
415                {
416                    let mut self_resolver = SelfResolver {
417                        resolver: this.resolver,
418                        path_id: delegation.id,
419                        self_param_id: pat_node_id,
420                    };
421                    self_resolver.visit_block(block);
422                    // Target expr needs to lower `self` path.
423                    this.ident_and_label_to_local_id.insert(pat_node_id, param.pat.hir_id.local_id);
424                    this.lower_target_expr(&block)
425                } else {
426                    this.generate_arg(is_method, idx, param.pat.hir_id, span)
427                };
428                args.push(arg);
429            }
430
431            // If we have no params in signature function but user still wrote some code in
432            // delegation body, then add this code as first arg, eventually an error will be shown,
433            // also nested delegations may need to access information about this code (#154332),
434            // so it is better to leave this code as opposed to bodies of extern functions,
435            // which are completely erased from existence.
436            if param_count == 0
437                && let Some(block) = block
438            {
439                args.push(this.lower_target_expr(&block));
440            }
441
442            let final_expr = this.finalize_body_lowering(delegation, args, generics, span);
443
444            (this.arena.alloc_from_iter(parameters), final_expr)
445        })
446    }
447
448    // FIXME(fn_delegation): Alternatives for target expression lowering:
449    // https://github.com/rust-lang/rfcs/pull/3530#issuecomment-2197170600.
450    fn lower_target_expr(&mut self, block: &Block) -> hir::Expr<'hir> {
451        if let [stmt] = block.stmts.as_slice()
452            && let StmtKind::Expr(expr) = &stmt.kind
453        {
454            return self.lower_expr_mut(expr);
455        }
456
457        let block = self.lower_block(block, false);
458        self.mk_expr(hir::ExprKind::Block(block, None), block.span)
459    }
460
461    // Generates expression for the resulting body. If possible, `MethodCall` is used
462    // to allow autoref/autoderef for target expression. For example in:
463    //
464    // trait Trait : Sized {
465    //     fn by_value(self) -> i32 { 1 }
466    //     fn by_mut_ref(&mut self) -> i32 { 2 }
467    //     fn by_ref(&self) -> i32 { 3 }
468    // }
469    //
470    // struct NewType(SomeType);
471    // impl Trait for NewType {
472    //     reuse Trait::* { self.0 }
473    // }
474    //
475    // `self.0` will automatically coerce.
476    fn finalize_body_lowering(
477        &mut self,
478        delegation: &Delegation,
479        args: Vec<hir::Expr<'hir>>,
480        generics: &mut GenericsGenerationResults<'hir>,
481        span: Span,
482    ) -> hir::Expr<'hir> {
483        let args = self.arena.alloc_from_iter(args);
484
485        let has_generic_args =
486            delegation.path.segments.iter().rev().skip(1).any(|segment| segment.args.is_some());
487
488        let call = if self
489            .get_resolution_id(delegation.id)
490            .map(|def_id| self.is_method(def_id, span))
491            .unwrap_or_default()
492            && delegation.qself.is_none()
493            && !has_generic_args
494            && !args.is_empty()
495        {
496            let ast_segment = delegation.path.segments.last().unwrap();
497            let segment = self.lower_path_segment(
498                delegation.path.span,
499                ast_segment,
500                ParamMode::Optional,
501                GenericArgsMode::Err,
502                ImplTraitContext::Disallowed(ImplTraitPosition::Path),
503                None,
504            );
505
506            // FIXME(fn_delegation): proper support for parent generics propagation
507            // in method call scenario.
508            let segment = self.process_segment(span, &segment, &mut generics.child, false);
509            let segment = self.arena.alloc(segment);
510
511            self.arena.alloc(hir::Expr {
512                hir_id: self.next_id(),
513                kind: hir::ExprKind::MethodCall(segment, &args[0], &args[1..], span),
514                span,
515            })
516        } else {
517            let path = self.lower_qpath(
518                delegation.id,
519                &delegation.qself,
520                &delegation.path,
521                ParamMode::Optional,
522                AllowReturnTypeNotation::No,
523                ImplTraitContext::Disallowed(ImplTraitPosition::Path),
524                None,
525            );
526
527            let new_path = match path {
528                hir::QPath::Resolved(ty, path) => {
529                    let mut new_path = path.clone();
530                    let len = new_path.segments.len();
531
532                    new_path.segments = self.arena.alloc_from_iter(
533                        new_path.segments.iter().enumerate().map(|(idx, segment)| {
534                            let mut process_segment = |result, add_lifetimes| {
535                                self.process_segment(span, segment, result, add_lifetimes)
536                            };
537
538                            if idx + 2 == len {
539                                process_segment(&mut generics.parent, true)
540                            } else if idx + 1 == len {
541                                process_segment(&mut generics.child, false)
542                            } else {
543                                segment.clone()
544                            }
545                        }),
546                    );
547
548                    hir::QPath::Resolved(ty, self.arena.alloc(new_path))
549                }
550                hir::QPath::TypeRelative(ty, segment) => {
551                    let segment = self.process_segment(span, segment, &mut generics.child, false);
552
553                    hir::QPath::TypeRelative(ty, self.arena.alloc(segment))
554                }
555            };
556
557            let callee_path = self.arena.alloc(self.mk_expr(hir::ExprKind::Path(new_path), span));
558            self.arena.alloc(self.mk_expr(hir::ExprKind::Call(callee_path, args), span))
559        };
560
561        let block = self.arena.alloc(hir::Block {
562            stmts: &[],
563            expr: Some(call),
564            hir_id: self.next_id(),
565            rules: hir::BlockCheckMode::DefaultBlock,
566            span,
567            targeted_by_break: false,
568        });
569
570        self.mk_expr(hir::ExprKind::Block(block, None), span)
571    }
572
573    fn process_segment(
574        &mut self,
575        span: Span,
576        segment: &hir::PathSegment<'hir>,
577        result: &mut GenericsGenerationResult<'hir>,
578        add_lifetimes: bool,
579    ) -> hir::PathSegment<'hir> {
580        let details = result.generics.args_propagation_details();
581
582        let segment = if details.should_propagate {
583            let generics = result.generics.into_hir_generics(self, span);
584            let args = generics.into_generic_args(self, add_lifetimes, span);
585
586            // Needed for better error messages (`trait-impl-wrong-args-count.rs` test).
587            let args = if args.is_empty() { None } else { Some(args) };
588
589            hir::PathSegment { args, ..segment.clone() }
590        } else {
591            segment.clone()
592        };
593
594        if details.use_args_in_sig_inheritance {
595            result.args_segment_id = Some(segment.hir_id);
596        }
597
598        segment
599    }
600
601    fn generate_delegation_error(
602        &mut self,
603        span: Span,
604        delegation: &Delegation,
605    ) -> DelegationResults<'hir> {
606        let decl = self.arena.alloc(hir::FnDecl {
607            inputs: &[],
608            output: hir::FnRetTy::DefaultReturn(span),
609            c_variadic: false,
610            lifetime_elision_allowed: true,
611            implicit_self: hir::ImplicitSelfKind::None,
612        });
613
614        let header = self.generate_header_error();
615        let sig = hir::FnSig { decl, header, span };
616
617        let ident = self.lower_ident(delegation.ident);
618
619        let body_id = self.lower_body(|this| {
620            let path = this.lower_qpath(
621                delegation.id,
622                &delegation.qself,
623                &delegation.path,
624                ParamMode::Optional,
625                AllowReturnTypeNotation::No,
626                ImplTraitContext::Disallowed(ImplTraitPosition::Path),
627                None,
628            );
629
630            let callee_path = this.arena.alloc(this.mk_expr(hir::ExprKind::Path(path), span));
631            let args = if let Some(box block) = delegation.body.as_ref() {
632                this.arena.alloc_slice(&[this.lower_target_expr(block)])
633            } else {
634                &mut []
635            };
636
637            let call = this.arena.alloc(this.mk_expr(hir::ExprKind::Call(callee_path, args), span));
638
639            let block = this.arena.alloc(hir::Block {
640                stmts: &[],
641                expr: Some(call),
642                hir_id: this.next_id(),
643                rules: hir::BlockCheckMode::DefaultBlock,
644                span,
645                targeted_by_break: false,
646            });
647
648            (&[], this.mk_expr(hir::ExprKind::Block(block, None), span))
649        });
650
651        let generics = hir::Generics::empty();
652        DelegationResults { ident, generics, body_id, sig }
653    }
654
655    fn generate_header_error(&self) -> hir::FnHeader {
656        hir::FnHeader {
657            safety: hir::Safety::Safe.into(),
658            constness: hir::Constness::NotConst,
659            asyncness: hir::IsAsync::NotAsync,
660            abi: ExternAbi::Rust,
661        }
662    }
663
664    #[inline]
665    fn mk_expr(&mut self, kind: hir::ExprKind<'hir>, span: Span) -> hir::Expr<'hir> {
666        hir::Expr { hir_id: self.next_id(), kind, span }
667    }
668}
669
670struct SelfResolver<'a, R> {
671    resolver: &'a mut R,
672    path_id: NodeId,
673    self_param_id: NodeId,
674}
675
676impl<'tcx, R: ResolverAstLoweringExt<'tcx>> SelfResolver<'_, R> {
677    fn try_replace_id(&mut self, id: NodeId) {
678        if let Some(res) = self.resolver.get_partial_res(id)
679            && let Some(Res::Local(sig_id)) = res.full_res()
680            && sig_id == self.path_id
681        {
682            let new_res = PartialRes::new(Res::Local(self.self_param_id));
683            self.resolver.insert_partial_res(id, new_res);
684        }
685    }
686}
687
688impl<'ast, 'tcx, R: ResolverAstLoweringExt<'tcx>> Visitor<'ast> for SelfResolver<'_, R> {
689    fn visit_id(&mut self, id: NodeId) {
690        self.try_replace_id(id);
691    }
692}