use std::iter;

use ast::visit::Visitor;
use hir::def::{DefKind, PartialRes, Res};
use hir::{BodyId, HirId};
use rustc_abi::ExternAbi;
use rustc_ast::*;
use rustc_errors::ErrorGuaranteed;
use rustc_hir::def_id::DefId;
use rustc_middle::span_bug;
use rustc_middle::ty::{Asyncness, ResolverAstLowering};
use rustc_span::{Ident, Span};
use {rustc_ast as ast, rustc_hir as hir};

use super::{GenericArgsMode, ImplTraitContext, LoweringContext, ParamMode};
use crate::{AllowReturnTypeNotation, ImplTraitPosition, ResolverAstLoweringExt};

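/// The pieces produced when lowering one delegation item: the synthesized
/// body, the lowered function signature, and an empty generics node.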
pub(crate) struct DelegationResults<'hir> {
    pub body_id: hir::BodyId,
    pub sig: hir::FnSig<'hir>,
    pub generics: &'hir hir::Generics<'hir>,
}

impl<'hir> LoweringContext<'_, 'hir> {
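    /// Returns whether the signature the delegation item resolves to has a
    /// `self` parameter. Resolution failures conservatively return `false`.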
    pub(crate) fn delegation_has_self(&self, item_id: NodeId, path_id: NodeId, span: Span) -> bool {
        let sig_id = self.get_delegation_sig_id(item_id, path_id, span);
        let Ok(sig_id) = sig_id else {
            return false;
        };
        self.has_self(sig_id, span)
    }

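    /// Whether the target signature takes `self`: local targets are looked up
    /// in the pre-collected `delegation_fn_sigs`, external ones via the `tcx`.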
    fn has_self(&self, def_id: DefId, span: Span) -> bool {
        if let Some(local_sig_id) = def_id.as_local() {
            self.resolver.delegation_fn_sigs.get(&local_sig_id).is_some_and(|sig| sig.has_self)
        } else {
            match self.tcx.def_kind(def_id) {
                DefKind::Fn => false,
                DefKind::AssocFn => self.tcx.associated_item(def_id).fn_has_self_parameter,
                _ => span_bug!(span, "unexpected DefKind for delegation item"),
            }
        }
    }

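    /// Lowers a delegation item into a `DelegationResults`: an inferred
    /// signature, empty generics, and a synthesized body that forwards to the
    /// target. An illustrative (not compiled) example of such an item:
    ///
    /// ```ignore (illustrative)
    /// reuse Trait::method { self.0 }
    /// ```
    ///
    /// On resolution errors, error stubs are generated instead.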
    pub(crate) fn lower_delegation(
        &mut self,
        delegation: &Delegation,
        item_id: NodeId,
    ) -> DelegationResults<'hir> {
        let span = self.lower_span(delegation.path.segments.last().unwrap().ident.span);
        let sig_id = self.get_delegation_sig_id(item_id, delegation.id, span);
        match sig_id {
            Ok(sig_id) => {
                let (param_count, c_variadic) = self.param_count(sig_id);
                let decl = self.lower_delegation_decl(sig_id, param_count, c_variadic, span);
                let sig = self.lower_delegation_sig(sig_id, decl, span);
                let body_id = self.lower_delegation_body(delegation, param_count, span);

                let generics = self.lower_delegation_generics(span);
                DelegationResults { body_id, sig, generics }
            }
            Err(err) => self.generate_delegation_error(err, span),
        }
    }

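    /// Picks the node whose resolution provides the target signature: in a
    /// trait impl the delegation item itself (`item_id`), otherwise the path
    /// (`path_id`).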
    fn get_delegation_sig_id(
        &self,
        item_id: NodeId,
        path_id: NodeId,
        span: Span,
    ) -> Result<DefId, ErrorGuaranteed> {
        let sig_id = if self.is_in_trait_impl { item_id } else { path_id };
        self.get_resolution_id(sig_id, span)
    }

    fn get_resolution_id(&self, node_id: NodeId, span: Span) -> Result<DefId, ErrorGuaranteed> {
        let def_id =
            self.resolver.get_partial_res(node_id).and_then(|r| r.expect_full_res().opt_def_id());
        def_id.ok_or_else(|| {
            self.tcx.dcx().span_delayed_bug(
                span,
                format!("LoweringContext: couldn't resolve node {:?} in delegation item", node_id),
            )
        })
    }

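    /// Delegation items carry no explicit generics at this stage, so an empty
    /// `hir::Generics` node spanning the item is produced.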
    fn lower_delegation_generics(&mut self, span: Span) -> &'hir hir::Generics<'hir> {
        self.arena.alloc(hir::Generics {
            params: &[],
            predicates: &[],
            has_where_clause_predicates: false,
            where_clause_span: span,
            span,
        })
    }

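    /// Returns the target's parameter count and whether it is C-variadic; for
    /// C-variadic targets the trailing `...` slot is included in the count.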
    fn param_count(&self, sig_id: DefId) -> (usize, bool /* c_variadic */) {
        if let Some(local_sig_id) = sig_id.as_local() {
            // The signature may be missing from `delegation_fn_sigs` (e.g. for
            // recursive delegation); fall back to an empty parameter list.
            match self.resolver.delegation_fn_sigs.get(&local_sig_id) {
                Some(sig) => (sig.param_count, sig.c_variadic),
                None => (0, false),
            }
        } else {
            let sig = self.tcx.fn_sig(sig_id).skip_binder().skip_binder();
            (sig.inputs().len() + usize::from(sig.c_variadic), sig.c_variadic)
        }
    }

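    /// Builds the function declaration with every input and the output typed
    /// as `TyKind::InferDelegation`, so the actual types are taken from the
    /// target during HIR ty lowering. The C-variadic `...` slot is excluded
    /// from the inputs.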
    fn lower_delegation_decl(
        &mut self,
        sig_id: DefId,
        param_count: usize,
        c_variadic: bool,
        span: Span,
    ) -> &'hir hir::FnDecl<'hir> {
        let decl_param_count = param_count - c_variadic as usize;
        let inputs = self.arena.alloc_from_iter((0..decl_param_count).map(|arg| hir::Ty {
            hir_id: self.next_id(),
            kind: hir::TyKind::InferDelegation(sig_id, hir::InferDelegationKind::Input(arg)),
            span,
        }));

        let output = self.arena.alloc(hir::Ty {
            hir_id: self.next_id(),
            kind: hir::TyKind::InferDelegation(sig_id, hir::InferDelegationKind::Output),
            span,
        });

        self.arena.alloc(hir::FnDecl {
            inputs,
            output: hir::FnRetTy::Return(output),
            c_variadic,
            lifetime_elision_allowed: true,
            implicit_self: hir::ImplicitSelfKind::None,
        })
    }

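    /// Builds the `FnSig` header: for local targets it is lowered from the
    /// pre-collected AST header, for external ones it is reconstructed from
    /// `tcx` queries (safety, constness, asyncness, ABI).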
    fn lower_delegation_sig(
        &mut self,
        sig_id: DefId,
        decl: &'hir hir::FnDecl<'hir>,
        span: Span,
    ) -> hir::FnSig<'hir> {
        let header = if let Some(local_sig_id) = sig_id.as_local() {
            match self.resolver.delegation_fn_sigs.get(&local_sig_id) {
                Some(sig) => self.lower_fn_header(
                    sig.header,
                    if sig.target_feature { hir::Safety::Unsafe } else { hir::Safety::Safe },
                    &[],
                ),
                None => self.generate_header_error(),
            }
        } else {
            let sig = self.tcx.fn_sig(sig_id).skip_binder().skip_binder();
            let asyncness = match self.tcx.asyncness(sig_id) {
                Asyncness::Yes => hir::IsAsync::Async(span),
                Asyncness::No => hir::IsAsync::NotAsync,
            };
            hir::FnHeader {
                safety: if self.tcx.codegen_fn_attrs(sig_id).safe_target_features {
                    hir::HeaderSafety::SafeTargetFeatures
                } else {
                    hir::HeaderSafety::Normal(sig.safety)
                },
                constness: self.tcx.constness(sig_id),
                asyncness,
                abi: sig.abi,
            }
        };
        hir::FnSig { decl, header, span }
    }

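    /// Creates a fresh body parameter with an empty-ident binding pattern,
    /// returning the parameter and the `NodeId` of its pattern.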
    fn generate_param(&mut self, span: Span) -> (hir::Param<'hir>, NodeId) {
        let pat_node_id = self.next_node_id();
        let pat_id = self.lower_node_id(pat_node_id);
        let pat = self.arena.alloc(hir::Pat {
            hir_id: pat_id,
            kind: hir::PatKind::Binding(hir::BindingMode::NONE, pat_id, Ident::empty(), None),
            span,
            default_binding_modes: false,
        });

        (hir::Param { hir_id: self.next_id(), pat, ty_span: span, span }, pat_node_id)
    }

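    /// Creates a path expression referring to the given parameter binding,
    /// used to forward that parameter to the callee.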
    fn generate_arg(&mut self, param_id: HirId, span: Span) -> hir::Expr<'hir> {
        let segments = self.arena.alloc_from_iter(iter::once(hir::PathSegment {
            ident: Ident::empty(),
            hir_id: self.next_id(),
            res: Res::Local(param_id),
            args: None,
            infer_args: false,
        }));

        let path = self.arena.alloc(hir::Path { span, res: Res::Local(param_id), segments });
        self.mk_expr(hir::ExprKind::Path(hir::QPath::Resolved(None, path)), span)
    }

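    /// Synthesizes the delegation body: one generated parameter per target
    /// parameter. If a target expression block is present, the first argument
    /// is produced from it (with `self` rebound to the first parameter via
    /// `SelfResolver`); every other argument simply forwards the corresponding
    /// parameter.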
    fn lower_delegation_body(
        &mut self,
        delegation: &Delegation,
        param_count: usize,
        span: Span,
    ) -> BodyId {
        let block = delegation.body.as_deref();

        self.lower_body(|this| {
            let mut parameters: Vec<hir::Param<'_>> = Vec::with_capacity(param_count);
            let mut args: Vec<hir::Expr<'_>> = Vec::with_capacity(param_count);

            for idx in 0..param_count {
                let (param, pat_node_id) = this.generate_param(span);
                parameters.push(param);

                let arg = if let Some(block) = block
                    && idx == 0
                {
                    let mut self_resolver = SelfResolver {
                        resolver: this.resolver,
                        path_id: delegation.id,
                        self_param_id: pat_node_id,
                    };
                    self_resolver.visit_block(block);
                    this.ident_and_label_to_local_id.insert(pat_node_id, param.pat.hir_id.local_id);
                    this.lower_target_expr(&block)
                } else {
                    this.generate_arg(param.pat.hir_id, span)
                };
                args.push(arg);
            }

            let final_expr = this.finalize_body_lowering(delegation, args, span);
            (this.arena.alloc_from_iter(parameters), final_expr)
        })
    }

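    /// Lowers the target expression block; a block consisting of a single
    /// expression statement is unwrapped to that expression.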
    fn lower_target_expr(&mut self, block: &Block) -> hir::Expr<'hir> {
        if let [stmt] = block.stmts.as_slice()
            && let StmtKind::Expr(expr) = &stmt.kind
        {
            return self.lower_expr_mut(expr);
        }

        let block = self.lower_block(block, false);
        self.mk_expr(hir::ExprKind::Block(block, None), block.span)
    }

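    /// Builds the final forwarding call. A method call is preferred when the
    /// target has a `self` parameter and the path has no qself or generic
    /// arguments, so autoref/autoderef can apply to the target expression;
    /// otherwise a plain path call is emitted. The call is then wrapped in a
    /// block expression.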
    fn finalize_body_lowering(
        &mut self,
        delegation: &Delegation,
        args: Vec<hir::Expr<'hir>>,
        span: Span,
    ) -> hir::Expr<'hir> {
        let args = self.arena.alloc_from_iter(args);

        let has_generic_args =
            delegation.path.segments.iter().rev().skip(1).any(|segment| segment.args.is_some());

        let call = if self
            .get_resolution_id(delegation.id, span)
            .map(|def_id| self.has_self(def_id, span))
            .unwrap_or_default()
            && delegation.qself.is_none()
            && !has_generic_args
        {
            let ast_segment = delegation.path.segments.last().unwrap();
            let segment = self.lower_path_segment(
                delegation.path.span,
                ast_segment,
                ParamMode::Optional,
                GenericArgsMode::Err,
                ImplTraitContext::Disallowed(ImplTraitPosition::Path),
                None,
            );
            let segment = self.arena.alloc(segment);

            self.arena.alloc(hir::Expr {
                hir_id: self.next_id(),
                kind: hir::ExprKind::MethodCall(segment, &args[0], &args[1..], span),
                span,
            })
        } else {
            let path = self.lower_qpath(
                delegation.id,
                &delegation.qself,
                &delegation.path,
                ParamMode::Optional,
                AllowReturnTypeNotation::No,
                ImplTraitContext::Disallowed(ImplTraitPosition::Path),
                None,
            );

            let callee_path = self.arena.alloc(self.mk_expr(hir::ExprKind::Path(path), span));
            self.arena.alloc(self.mk_expr(hir::ExprKind::Call(callee_path, args), span))
        };
        let block = self.arena.alloc(hir::Block {
            stmts: &[],
            expr: Some(call),
            hir_id: self.next_id(),
            rules: hir::BlockCheckMode::DefaultBlock,
            span,
            targeted_by_break: false,
        });

        self.mk_expr(hir::ExprKind::Block(block, None), span)
    }

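    /// Fallback used when the delegation target failed to resolve: produces an
    /// empty signature, a default header, and a body containing `ExprKind::Err`.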
    fn generate_delegation_error(
        &mut self,
        err: ErrorGuaranteed,
        span: Span,
    ) -> DelegationResults<'hir> {
        let generics = self.lower_delegation_generics(span);

        let decl = self.arena.alloc(hir::FnDecl {
            inputs: &[],
            output: hir::FnRetTy::DefaultReturn(span),
            c_variadic: false,
            lifetime_elision_allowed: true,
            implicit_self: hir::ImplicitSelfKind::None,
        });

        let header = self.generate_header_error();
        let sig = hir::FnSig { decl, header, span };

        let body_id = self.lower_body(|this| (&[], this.mk_expr(hir::ExprKind::Err(err), span)));
        DelegationResults { generics, body_id, sig }
    }

    fn generate_header_error(&self) -> hir::FnHeader {
        hir::FnHeader {
            safety: hir::Safety::Safe.into(),
            constness: hir::Constness::NotConst,
            asyncness: hir::IsAsync::NotAsync,
            abi: ExternAbi::Rust,
        }
    }

    #[inline]
    fn mk_expr(&mut self, kind: hir::ExprKind<'hir>, span: Span) -> hir::Expr<'hir> {
        hir::Expr { hir_id: self.next_id(), kind, span }
    }
}

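/// AST visitor that rewrites resolutions pointing at the delegation item's
/// node (`path_id`) to point at the generated first parameter
/// (`self_param_id`), so `self` in the target expression refers to that
/// parameter.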
struct SelfResolver<'a> {
    resolver: &'a mut ResolverAstLowering,
    path_id: NodeId,
    self_param_id: NodeId,
}

impl<'a> SelfResolver<'a> {
    fn try_replace_id(&mut self, id: NodeId) {
        if let Some(res) = self.resolver.partial_res_map.get(&id)
            && let Some(Res::Local(sig_id)) = res.full_res()
            && sig_id == self.path_id
        {
            let new_res = PartialRes::new(Res::Local(self.self_param_id));
            self.resolver.partial_res_map.insert(id, new_res);
        }
    }
}

impl<'ast, 'a> Visitor<'ast> for SelfResolver<'a> {
    fn visit_path(&mut self, path: &'ast Path, id: NodeId) {
        self.try_replace_id(id);
        visit::walk_path(self, path);
    }

    fn visit_path_segment(&mut self, seg: &'ast PathSegment) {
        self.try_replace_id(seg.id);
        visit::walk_path_segment(self, seg);
    }
}