// rustc_ast_lowering/delegation.rs

//! This module implements expansion of delegation items with early resolved paths.
//! It includes delegation to a free function:
//!
//! ```ignore (illustrative)
//! reuse module::name { target_expr_template }
//! ```
//!
//! And delegation to a trait method:
//!
//! ```ignore (illustrative)
//! reuse <Type as Trait>::name { target_expr_template }
//! ```
//!
//! After expansion for both cases we get:
//!
//! ```ignore (illustrative)
//! fn name(
//!     arg0: InferDelegation(sig_id, Input(0)),
//!     arg1: InferDelegation(sig_id, Input(1)),
//!     ...,
//!     argN: InferDelegation(sig_id, Input(N)),
//! ) -> InferDelegation(sig_id, Output) {
//!     callee_path(target_expr_template(arg0), arg1, ..., argN)
//! }
//! ```
//!
//! Where `callee_path` is a path in the delegation item, e.g. `<Type as Trait>::name`.
//! `sig_id` is the id of the item from which the signature is inherited. It may be the delegation
//! item's id (`item_id`) in case of an impl trait, or the path resolution id (`path_id`) otherwise.
//!
//! Since we do not have a proper way to obtain function type information by path resolution
//! in AST, we mark each function parameter type as `InferDelegation` and inherit it during
//! HIR ty lowering.
//!
//! Similarly, generics, predicates and the header are set to the "default" values.
//! In case of discrepancy with the callee function, the `UnsupportedDelegation` error will
//! also be emitted during HIR ty lowering.

39use std::iter;
40
41use ast::visit::Visitor;
42use hir::def::{DefKind, PartialRes, Res};
43use hir::{BodyId, HirId};
44use rustc_abi::ExternAbi;
45use rustc_ast::*;
46use rustc_attr_parsing::{AttributeParser, ShouldEmit};
47use rustc_errors::ErrorGuaranteed;
48use rustc_hir::Target;
49use rustc_hir::attrs::{AttributeKind, InlineAttr};
50use rustc_hir::def_id::DefId;
51use rustc_middle::span_bug;
52use rustc_middle::ty::{Asyncness, DelegationFnSigAttrs, ResolverAstLowering};
53use rustc_span::symbol::kw;
54use rustc_span::{DUMMY_SP, Ident, Span, Symbol};
55use {rustc_ast as ast, rustc_hir as hir};
56
57use super::{GenericArgsMode, ImplTraitContext, LoweringContext, ParamMode};
58use crate::{AllowReturnTypeNotation, ImplTraitPosition, ResolverAstLoweringExt};
59
/// The lowered HIR pieces of a delegation item, assembled by
/// `LoweringContext::lower_delegation` and consumed by item lowering.
pub(crate) struct DelegationResults<'hir> {
    pub body_id: hir::BodyId,
    pub sig: hir::FnSig<'hir>,
    pub ident: Ident,
    pub generics: &'hir hir::Generics<'hir>,
}
66
/// Describes one attribute that may be added to a lowered delegation item.
struct AttributeAdditionInfo {
    // Predicate matching an already-present attribute of the same kind,
    // used to avoid duplicating an attribute the user wrote explicitly.
    pub equals: fn(&hir::Attribute) -> bool,
    // How the attribute is produced when it is not already present.
    pub kind: AttributeAdditionKind,
}
71
/// How a missing attribute is produced for a delegation item.
enum AttributeAdditionKind {
    /// Always add the attribute, built only from the delegation item's span.
    Default { factory: fn(Span) -> hir::Attribute },
    /// Add the attribute only when the callee carries it (tracked via `flag`),
    /// rebuilding it from the callee's original attribute.
    Inherit { flag: DelegationFnSigAttrs, factory: fn(Span, &hir::Attribute) -> hir::Attribute },
}
76
// Attributes of the delegation item are attached to its own HIR node,
// which is the first local id within its owner.
const PARENT_ID: hir::ItemLocalId = hir::ItemLocalId::ZERO;
78
/// Attributes that may be added to a lowered delegation item:
/// `#[must_use]` is inherited from the callee when the callee carries it,
/// while `#[inline]` defaults to a hint when not written explicitly.
static ATTRIBUTES_ADDITIONS: &[AttributeAdditionInfo] = &[
    AttributeAdditionInfo {
        equals: |a| matches!(a, hir::Attribute::Parsed(AttributeKind::MustUse { .. })),
        kind: AttributeAdditionKind::Inherit {
            factory: |span, original_attribute| {
                // Carry over the callee's `#[must_use]` reason, if any.
                let reason = match original_attribute {
                    hir::Attribute::Parsed(AttributeKind::MustUse { reason, .. }) => *reason,
                    _ => None,
                };

                hir::Attribute::Parsed(AttributeKind::MustUse { span, reason })
            },
            flag: DelegationFnSigAttrs::MUST_USE,
        },
    },
    AttributeAdditionInfo {
        equals: |a| matches!(a, hir::Attribute::Parsed(AttributeKind::Inline(..))),
        kind: AttributeAdditionKind::Default {
            factory: |span| hir::Attribute::Parsed(AttributeKind::Inline(InlineAttr::Hint, span)),
        },
    },
];
101
102impl<'hir> LoweringContext<'_, 'hir> {
103    fn is_method(&self, def_id: DefId, span: Span) -> bool {
104        match self.tcx.def_kind(def_id) {
105            DefKind::Fn => false,
106            DefKind::AssocFn => match def_id.as_local() {
107                Some(local_def_id) => self
108                    .resolver
109                    .delegation_fn_sigs
110                    .get(&local_def_id)
111                    .is_some_and(|sig| sig.has_self),
112                None => self.tcx.associated_item(def_id).is_method(),
113            },
114            _ => span_bug!(span, "unexpected DefKind for delegation item"),
115        }
116    }
117
    /// Lowers a delegation item (`reuse path { ... }`) into a function:
    /// signature, body, ident and (empty) generics.
    ///
    /// `is_in_trait_impl` selects whether the inherited signature comes from
    /// the delegation item itself or from the resolved callee path.
    pub(crate) fn lower_delegation(
        &mut self,
        delegation: &Delegation,
        item_id: NodeId,
        is_in_trait_impl: bool,
    ) -> DelegationResults<'hir> {
        let span = self.lower_span(delegation.path.segments.last().unwrap().ident.span);
        let sig_id = self.get_delegation_sig_id(item_id, delegation.id, span, is_in_trait_impl);
        match sig_id {
            Ok(sig_id) => {
                self.add_attributes_if_needed(span, sig_id);

                let is_method = self.is_method(sig_id, span);
                let (param_count, c_variadic) = self.param_count(sig_id);
                let decl = self.lower_delegation_decl(sig_id, param_count, c_variadic, span);
                let sig = self.lower_delegation_sig(sig_id, decl, span);
                let body_id = self.lower_delegation_body(delegation, is_method, param_count, span);
                let ident = self.lower_ident(delegation.ident);
                let generics = self.lower_delegation_generics(span);
                DelegationResults { body_id, sig, ident, generics }
            }
            // Resolution failed: lower a stub item so later stages don't ICE.
            Err(err) => self.generate_delegation_error(err, span, delegation),
        }
    }
142
143    fn add_attributes_if_needed(&mut self, span: Span, sig_id: DefId) {
144        let new_attributes = self.create_new_attributes(
145            ATTRIBUTES_ADDITIONS,
146            span,
147            sig_id,
148            self.attrs.get(&PARENT_ID),
149        );
150
151        if new_attributes.is_empty() {
152            return;
153        }
154
155        let new_arena_allocated_attributes = match self.attrs.get(&PARENT_ID) {
156            Some(existing_attrs) => self.arena.alloc_from_iter(
157                existing_attrs.iter().map(|a| a.clone()).chain(new_attributes.into_iter()),
158            ),
159            None => self.arena.alloc_from_iter(new_attributes.into_iter()),
160        };
161
162        self.attrs.insert(PARENT_ID, new_arena_allocated_attributes);
163    }
164
165    fn create_new_attributes(
166        &self,
167        candidate_additions: &[AttributeAdditionInfo],
168        span: Span,
169        sig_id: DefId,
170        existing_attrs: Option<&&[hir::Attribute]>,
171    ) -> Vec<hir::Attribute> {
172        let local_original_attributes = self.parse_local_original_attributes(sig_id);
173
174        candidate_additions
175            .iter()
176            .filter_map(|addition_info| {
177                if let Some(existing_attrs) = existing_attrs
178                    && existing_attrs
179                        .iter()
180                        .any(|existing_attr| (addition_info.equals)(existing_attr))
181                {
182                    return None;
183                }
184
185                match addition_info.kind {
186                    AttributeAdditionKind::Default { factory } => Some(factory(span)),
187                    AttributeAdditionKind::Inherit { flag, factory } => {
188                        let original_attribute = match sig_id.as_local() {
189                            Some(local_id) => self
190                                .resolver
191                                .delegation_fn_sigs
192                                .get(&local_id)
193                                .is_some_and(|sig| sig.attrs_flags.contains(flag))
194                                .then(|| {
195                                    local_original_attributes
196                                        .as_ref()
197                                        .map(|attrs| {
198                                            attrs
199                                                .iter()
200                                                .find(|base_attr| (addition_info.equals)(base_attr))
201                                        })
202                                        .flatten()
203                                })
204                                .flatten(),
205                            None => self
206                                .tcx
207                                .get_all_attrs(sig_id)
208                                .iter()
209                                .find(|base_attr| (addition_info.equals)(base_attr)),
210                        };
211
212                        original_attribute.map(|a| factory(span, a))
213                    }
214                }
215            })
216            .collect::<Vec<_>>()
217    }
218
219    fn parse_local_original_attributes(&self, sig_id: DefId) -> Option<Vec<hir::Attribute>> {
220        if let Some(local_id) = sig_id.as_local()
221            && let Some(info) = self.resolver.delegation_fn_sigs.get(&local_id)
222            && !info.to_inherit_attrs.is_empty()
223        {
224            Some(AttributeParser::parse_limited_all(
225                self.tcx.sess,
226                info.to_inherit_attrs.as_slice(),
227                None,
228                Target::Fn,
229                DUMMY_SP,
230                DUMMY_NODE_ID,
231                Some(self.tcx.features()),
232                ShouldEmit::Nothing,
233            ))
234        } else {
235            None
236        }
237    }
238
239    fn get_delegation_sig_id(
240        &self,
241        item_id: NodeId,
242        path_id: NodeId,
243        span: Span,
244        is_in_trait_impl: bool,
245    ) -> Result<DefId, ErrorGuaranteed> {
246        let sig_id = if is_in_trait_impl { item_id } else { path_id };
247        self.get_resolution_id(sig_id, span)
248    }
249
250    fn get_resolution_id(&self, node_id: NodeId, span: Span) -> Result<DefId, ErrorGuaranteed> {
251        let def_id =
252            self.resolver.get_partial_res(node_id).and_then(|r| r.expect_full_res().opt_def_id());
253        def_id.ok_or_else(|| {
254            self.tcx.dcx().span_delayed_bug(
255                span,
256                format!("LoweringContext: couldn't resolve node {:?} in delegation item", node_id),
257            )
258        })
259    }
260
    /// Delegation items get "default" (empty) generics; a discrepancy with the
    /// callee is reported later during HIR ty lowering (see module docs).
    fn lower_delegation_generics(&mut self, span: Span) -> &'hir hir::Generics<'hir> {
        self.arena.alloc(hir::Generics {
            params: &[],
            predicates: &[],
            has_where_clause_predicates: false,
            where_clause_span: span,
            span,
        })
    }
270
271    // Function parameter count, including C variadic `...` if present.
272    fn param_count(&self, sig_id: DefId) -> (usize, bool /*c_variadic*/) {
273        if let Some(local_sig_id) = sig_id.as_local() {
274            // Map may be filled incorrectly due to recursive delegation.
275            // Error will be emitted later during HIR ty lowering.
276            match self.resolver.delegation_fn_sigs.get(&local_sig_id) {
277                Some(sig) => (sig.param_count, sig.c_variadic),
278                None => (0, false),
279            }
280        } else {
281            let sig = self.tcx.fn_sig(sig_id).skip_binder().skip_binder();
282            (sig.inputs().len() + usize::from(sig.c_variadic), sig.c_variadic)
283        }
284    }
285
    /// Builds the lowered `fn` declaration, marking every input and the output
    /// as `InferDelegation` so the actual types are inherited from `sig_id`
    /// during HIR ty lowering (see module docs).
    fn lower_delegation_decl(
        &mut self,
        sig_id: DefId,
        param_count: usize,
        c_variadic: bool,
        span: Span,
    ) -> &'hir hir::FnDecl<'hir> {
        // The last parameter in C variadic functions is skipped in the signature,
        // like during regular lowering.
        let decl_param_count = param_count - c_variadic as usize;
        let inputs = self.arena.alloc_from_iter((0..decl_param_count).map(|arg| hir::Ty {
            hir_id: self.next_id(),
            kind: hir::TyKind::InferDelegation(sig_id, hir::InferDelegationKind::Input(arg)),
            span,
        }));

        let output = self.arena.alloc(hir::Ty {
            hir_id: self.next_id(),
            kind: hir::TyKind::InferDelegation(sig_id, hir::InferDelegationKind::Output),
            span,
        });

        self.arena.alloc(hir::FnDecl {
            inputs,
            output: hir::FnRetTy::Return(output),
            c_variadic,
            lifetime_elision_allowed: true,
            implicit_self: hir::ImplicitSelfKind::None,
        })
    }
316
    /// Builds the lowered `FnSig`. The header is derived from the signature
    /// collected during AST resolution for local callees, or queried from
    /// crate metadata for foreign ones.
    fn lower_delegation_sig(
        &mut self,
        sig_id: DefId,
        decl: &'hir hir::FnDecl<'hir>,
        span: Span,
    ) -> hir::FnSig<'hir> {
        let header = if let Some(local_sig_id) = sig_id.as_local() {
            match self.resolver.delegation_fn_sigs.get(&local_sig_id) {
                Some(sig) => {
                    let parent = self.tcx.parent(sig_id);
                    // HACK: we override the default safety instead of generating attributes from the ether.
                    // We are not forwarding the attributes, as the delegation fn sigs are collected on the ast,
                    // and here we need the hir attributes.
                    let default_safety =
                        if sig.attrs_flags.contains(DelegationFnSigAttrs::TARGET_FEATURE)
                            || self.tcx.def_kind(parent) == DefKind::ForeignMod
                        {
                            hir::Safety::Unsafe
                        } else {
                            hir::Safety::Safe
                        };
                    self.lower_fn_header(sig.header, default_safety, &[])
                }
                // No collected signature (can happen with recursive delegation);
                // an error is reported later during HIR ty lowering.
                None => self.generate_header_error(),
            }
        } else {
            let sig = self.tcx.fn_sig(sig_id).skip_binder().skip_binder();
            let asyncness = match self.tcx.asyncness(sig_id) {
                Asyncness::Yes => hir::IsAsync::Async(span),
                Asyncness::No => hir::IsAsync::NotAsync,
            };
            hir::FnHeader {
                safety: if self.tcx.codegen_fn_attrs(sig_id).safe_target_features {
                    hir::HeaderSafety::SafeTargetFeatures
                } else {
                    hir::HeaderSafety::Normal(sig.safety)
                },
                constness: self.tcx.constness(sig_id),
                asyncness,
                abi: sig.abi,
            }
        };
        hir::FnSig { decl, header, span }
    }
361
362    fn generate_param(
363        &mut self,
364        is_method: bool,
365        idx: usize,
366        span: Span,
367    ) -> (hir::Param<'hir>, NodeId) {
368        let pat_node_id = self.next_node_id();
369        let pat_id = self.lower_node_id(pat_node_id);
370        // FIXME(cjgillot) AssocItem currently relies on self parameter being exactly named `self`.
371        let name = if is_method && idx == 0 {
372            kw::SelfLower
373        } else {
374            Symbol::intern(&format!("arg{idx}"))
375        };
376        let ident = Ident::with_dummy_span(name);
377        let pat = self.arena.alloc(hir::Pat {
378            hir_id: pat_id,
379            kind: hir::PatKind::Binding(hir::BindingMode::NONE, pat_id, ident, None),
380            span,
381            default_binding_modes: false,
382        });
383
384        (hir::Param { hir_id: self.next_id(), pat, ty_span: span, span }, pat_node_id)
385    }
386
387    fn generate_arg(
388        &mut self,
389        is_method: bool,
390        idx: usize,
391        param_id: HirId,
392        span: Span,
393    ) -> hir::Expr<'hir> {
394        // FIXME(cjgillot) AssocItem currently relies on self parameter being exactly named `self`.
395        let name = if is_method && idx == 0 {
396            kw::SelfLower
397        } else {
398            Symbol::intern(&format!("arg{idx}"))
399        };
400        let segments = self.arena.alloc_from_iter(iter::once(hir::PathSegment {
401            ident: Ident::with_dummy_span(name),
402            hir_id: self.next_id(),
403            res: Res::Local(param_id),
404            args: None,
405            infer_args: false,
406        }));
407
408        let path = self.arena.alloc(hir::Path { span, res: Res::Local(param_id), segments });
409        self.mk_expr(hir::ExprKind::Path(hir::QPath::Resolved(None, path)), span)
410    }
411
    /// Lowers the delegation body: generates the parameter list and argument
    /// expressions, substituting the target expression template (if any) for
    /// the first argument.
    fn lower_delegation_body(
        &mut self,
        delegation: &Delegation,
        is_method: bool,
        param_count: usize,
        span: Span,
    ) -> BodyId {
        let block = delegation.body.as_deref();

        self.lower_body(|this| {
            let mut parameters: Vec<hir::Param<'_>> = Vec::with_capacity(param_count);
            let mut args: Vec<hir::Expr<'_>> = Vec::with_capacity(param_count);

            for idx in 0..param_count {
                let (param, pat_node_id) = this.generate_param(is_method, idx, span);
                parameters.push(param);

                // The first argument is replaced by the target expression
                // template, when one was written.
                let arg = if let Some(block) = block
                    && idx == 0
                {
                    // Redirect resolutions of `self` inside the template to the
                    // freshly generated first parameter.
                    let mut self_resolver = SelfResolver {
                        resolver: this.resolver,
                        path_id: delegation.id,
                        self_param_id: pat_node_id,
                    };
                    self_resolver.visit_block(block);
                    // Target expr needs to lower `self` path.
                    this.ident_and_label_to_local_id.insert(pat_node_id, param.pat.hir_id.local_id);
                    this.lower_target_expr(&block)
                } else {
                    this.generate_arg(is_method, idx, param.pat.hir_id, span)
                };
                args.push(arg);
            }

            let final_expr = this.finalize_body_lowering(delegation, args, span);
            (this.arena.alloc_from_iter(parameters), final_expr)
        })
    }
451
452    // FIXME(fn_delegation): Alternatives for target expression lowering:
453    // https://github.com/rust-lang/rfcs/pull/3530#issuecomment-2197170600.
454    fn lower_target_expr(&mut self, block: &Block) -> hir::Expr<'hir> {
455        if let [stmt] = block.stmts.as_slice()
456            && let StmtKind::Expr(expr) = &stmt.kind
457        {
458            return self.lower_expr_mut(expr);
459        }
460
461        let block = self.lower_block(block, false);
462        self.mk_expr(hir::ExprKind::Block(block, None), block.span)
463    }
464
465    // Generates expression for the resulting body. If possible, `MethodCall` is used
466    // to allow autoref/autoderef for target expression. For example in:
467    //
468    // trait Trait : Sized {
469    //     fn by_value(self) -> i32 { 1 }
470    //     fn by_mut_ref(&mut self) -> i32 { 2 }
471    //     fn by_ref(&self) -> i32 { 3 }
472    // }
473    //
474    // struct NewType(SomeType);
475    // impl Trait for NewType {
476    //     reuse Trait::* { self.0 }
477    // }
478    //
479    // `self.0` will automatically coerce.
480    fn finalize_body_lowering(
481        &mut self,
482        delegation: &Delegation,
483        args: Vec<hir::Expr<'hir>>,
484        span: Span,
485    ) -> hir::Expr<'hir> {
486        let args = self.arena.alloc_from_iter(args);
487
488        let has_generic_args =
489            delegation.path.segments.iter().rev().skip(1).any(|segment| segment.args.is_some());
490
491        let call = if self
492            .get_resolution_id(delegation.id, span)
493            .and_then(|def_id| Ok(self.is_method(def_id, span)))
494            .unwrap_or_default()
495            && delegation.qself.is_none()
496            && !has_generic_args
497            && !args.is_empty()
498        {
499            let ast_segment = delegation.path.segments.last().unwrap();
500            let segment = self.lower_path_segment(
501                delegation.path.span,
502                ast_segment,
503                ParamMode::Optional,
504                GenericArgsMode::Err,
505                ImplTraitContext::Disallowed(ImplTraitPosition::Path),
506                None,
507            );
508            let segment = self.arena.alloc(segment);
509
510            self.arena.alloc(hir::Expr {
511                hir_id: self.next_id(),
512                kind: hir::ExprKind::MethodCall(segment, &args[0], &args[1..], span),
513                span,
514            })
515        } else {
516            let path = self.lower_qpath(
517                delegation.id,
518                &delegation.qself,
519                &delegation.path,
520                ParamMode::Optional,
521                AllowReturnTypeNotation::No,
522                ImplTraitContext::Disallowed(ImplTraitPosition::Path),
523                None,
524            );
525
526            let callee_path = self.arena.alloc(self.mk_expr(hir::ExprKind::Path(path), span));
527            self.arena.alloc(self.mk_expr(hir::ExprKind::Call(callee_path, args), span))
528        };
529        let block = self.arena.alloc(hir::Block {
530            stmts: &[],
531            expr: Some(call),
532            hir_id: self.next_id(),
533            rules: hir::BlockCheckMode::DefaultBlock,
534            span,
535            targeted_by_break: false,
536        });
537
538        self.mk_expr(hir::ExprKind::Block(block, None), span)
539    }
540
    /// Fallback lowering used when the delegation path failed to resolve:
    /// produces a stub function with an empty signature so later stages of
    /// analysis do not ICE.
    fn generate_delegation_error(
        &mut self,
        err: ErrorGuaranteed,
        span: Span,
        delegation: &Delegation,
    ) -> DelegationResults<'hir> {
        let generics = self.lower_delegation_generics(span);

        // Nullary signature with the default (unit) return type.
        let decl = self.arena.alloc(hir::FnDecl {
            inputs: &[],
            output: hir::FnRetTy::DefaultReturn(span),
            c_variadic: false,
            lifetime_elision_allowed: true,
            implicit_self: hir::ImplicitSelfKind::None,
        });

        let header = self.generate_header_error();
        let sig = hir::FnSig { decl, header, span };

        let ident = self.lower_ident(delegation.ident);

        let body_id = self.lower_body(|this| {
            let body_expr = match delegation.body.as_ref() {
                Some(box block) => {
                    // Generates a block when we failed to resolve delegation, where a target expression is its only statement,
                    // thus there will be no ICEs on further stages of analysis (see #144594)

                    // As we generate a void function we want to convert target expression to statement to avoid additional
                    // errors, such as mismatched return type
                    let stmts = this.arena.alloc_from_iter([hir::Stmt {
                        hir_id: this.next_id(),
                        kind: rustc_hir::StmtKind::Semi(
                            this.arena.alloc(this.lower_target_expr(block)),
                        ),
                        span,
                    }]);

                    let block = this.arena.alloc(hir::Block {
                        stmts,
                        expr: None,
                        hir_id: this.next_id(),
                        rules: hir::BlockCheckMode::DefaultBlock,
                        span,
                        targeted_by_break: false,
                    });

                    hir::ExprKind::Block(block, None)
                }
                // No target expression: the body is just the delayed error.
                None => hir::ExprKind::Err(err),
            };

            (&[], this.mk_expr(body_expr, span))
        });

        DelegationResults { ident, generics, body_id, sig }
    }
597
    /// A "default" function header used when the signature could not be
    /// inherited (resolution failure or missing collected signature).
    fn generate_header_error(&self) -> hir::FnHeader {
        hir::FnHeader {
            safety: hir::Safety::Safe.into(),
            constness: hir::Constness::NotConst,
            asyncness: hir::IsAsync::NotAsync,
            abi: ExternAbi::Rust,
        }
    }
606
    /// Allocates a fresh `HirId` and wraps `kind` into an expression at `span`.
    #[inline]
    fn mk_expr(&mut self, kind: hir::ExprKind<'hir>, span: Span) -> hir::Expr<'hir> {
        hir::Expr { hir_id: self.next_id(), kind, span }
    }
611}
612
/// AST visitor that rewrites resolutions pointing at the delegation path
/// binding so they point at the generated `self` parameter instead (used for
/// `self` inside the target expression template).
struct SelfResolver<'a> {
    resolver: &'a mut ResolverAstLowering,
    // NodeId of the delegation path that resolutions are matched against.
    path_id: NodeId,
    // NodeId of the generated `self` parameter binding to redirect to.
    self_param_id: NodeId,
}
618
619impl<'a> SelfResolver<'a> {
620    fn try_replace_id(&mut self, id: NodeId) {
621        if let Some(res) = self.resolver.partial_res_map.get(&id)
622            && let Some(Res::Local(sig_id)) = res.full_res()
623            && sig_id == self.path_id
624        {
625            let new_res = PartialRes::new(Res::Local(self.self_param_id));
626            self.resolver.partial_res_map.insert(id, new_res);
627        }
628    }
629}
630
impl<'ast, 'a> Visitor<'ast> for SelfResolver<'a> {
    // Every id in the visited AST is a candidate for replacement.
    fn visit_id(&mut self, id: NodeId) {
        self.try_replace_id(id);
    }
}