rustc_expand/mbe/transcribe.rs
use std::mem;
use std::sync::Arc;

use rustc_ast::ExprKind;
use rustc_ast::mut_visit::{self, MutVisitor};
use rustc_ast::token::{self, Delimiter, IdentIsRaw, Lit, LitKind, Nonterminal, Token, TokenKind};
use rustc_ast::tokenstream::{DelimSpacing, DelimSpan, Spacing, TokenStream, TokenTree};
use rustc_data_structures::fx::FxHashMap;
use rustc_errors::{Diag, DiagCtxtHandle, PResult, pluralize};
use rustc_parse::lexer::nfc_normalize;
use rustc_parse::parser::ParseNtResult;
use rustc_session::parse::{ParseSess, SymbolGallery};
use rustc_span::hygiene::{LocalExpnId, Transparency};
use rustc_span::{
    Ident, MacroRulesNormalizedIdent, Span, Symbol, SyntaxContext, sym, with_metavar_spans,
};
use smallvec::{SmallVec, smallvec};

use crate::errors::{
    CountRepetitionMisplaced, MetaVarExprUnrecognizedVar, MetaVarsDifSeqMatchers, MustRepeatOnce,
    NoSyntaxVarsExprRepeat, VarStillRepeating,
};
use crate::mbe::macro_parser::NamedMatch;
use crate::mbe::macro_parser::NamedMatch::*;
use crate::mbe::metavar_expr::{MetaVarExprConcatElem, RAW_IDENT_ERR};
use crate::mbe::{self, KleeneOp, MetaVarExpr};

// A Marker adds the given mark to the syntax context.
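// For example (illustrative): given
//
//     macro_rules! m { () => { let x = 1; } }
//     fn f() { let x = 2; m!(); }
//
// every token produced by `m!` has its syntax context marked with the macro's expansion id, which
// is what keeps the macro's `x` from colliding with the caller's `x` under the usual macro_rules
// hygiene.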
struct Marker(LocalExpnId, Transparency, FxHashMap<SyntaxContext, SyntaxContext>);

impl MutVisitor for Marker {
    const VISIT_TOKENS: bool = true;

    fn visit_span(&mut self, span: &mut Span) {
        // `apply_mark` is a relatively expensive operation, both due to taking the hygiene lock,
        // and by itself. All tokens in a macro body typically have the same syntactic context,
        // unless it's some advanced case with macro-generated macros. So if we cache the marked
        // version of that context once, we'll typically have a 100% cache hit rate after that.
        let Marker(expn_id, transparency, ref mut cache) = *self;
        *span = span.map_ctxt(|ctxt| {
            *cache
                .entry(ctxt)
                .or_insert_with(|| ctxt.apply_mark(expn_id.to_expn_id(), transparency))
        });
    }
}

/// An iterator over the token trees in a delimited token tree (`{ ... }`) or a sequence (`$(...)`).
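/// For example, transcribing the RHS `{ $( $x ),* }` uses a `Delimited` frame for the braces and a
/// nested `Sequence` frame for `$( $x ),*` (illustrative example).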
struct Frame<'a> {
    tts: &'a [mbe::TokenTree],
    idx: usize,
    kind: FrameKind,
}

enum FrameKind {
    Delimited { delim: Delimiter, span: DelimSpan, spacing: DelimSpacing },
    Sequence { sep: Option<Token>, kleene_op: KleeneOp },
}

impl<'a> Frame<'a> {
    fn new_delimited(src: &'a mbe::Delimited, span: DelimSpan, spacing: DelimSpacing) -> Frame<'a> {
        Frame {
            tts: &src.tts,
            idx: 0,
            kind: FrameKind::Delimited { delim: src.delim, span, spacing },
        }
    }

    fn new_sequence(
        src: &'a mbe::SequenceRepetition,
        sep: Option<Token>,
        kleene_op: KleeneOp,
    ) -> Frame<'a> {
        Frame { tts: &src.tts, idx: 0, kind: FrameKind::Sequence { sep, kleene_op } }
    }
}

impl<'a> Iterator for Frame<'a> {
    type Item = &'a mbe::TokenTree;

    fn next(&mut self) -> Option<&'a mbe::TokenTree> {
        let res = self.tts.get(self.idx);
        self.idx += 1;
        res
    }
}

/// This can do Macro-By-Example transcription.
/// - `interp` is a map of meta-variables to the tokens (non-terminals) they matched in the
///   invocation. We are assuming we already know there is a match.
/// - `src` is the RHS of the MBE, that is, the "example" we are filling in.
///
/// For example,
///
/// ```rust
/// macro_rules! foo {
///     ($id:ident) => { println!("{}", stringify!($id)); }
/// }
///
/// foo!(bar);
/// ```
///
/// `interp` would contain `$id => bar` and `src` would contain `println!("{}", stringify!($id));`.
///
/// `transcribe` would return a `TokenStream` containing `println!("{}", stringify!(bar));`.
///
/// Along the way, we do some additional error checking.
pub(super) fn transcribe<'a>(
    psess: &'a ParseSess,
    interp: &FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
    src: &mbe::Delimited,
    src_span: DelimSpan,
    transparency: Transparency,
    expand_id: LocalExpnId,
) -> PResult<'a, TokenStream> {
    // Nothing for us to transcribe...
    if src.tts.is_empty() {
        return Ok(TokenStream::default());
    }

    // We descend into the RHS (`src`), expanding things as we go. This stack contains the things
    // we have yet to expand/are still expanding. We start the stack off with the whole RHS. The
    // choice of spacing values doesn't matter.
    let mut stack: SmallVec<[Frame<'_>; 1]> = smallvec![Frame::new_delimited(
        src,
        src_span,
        DelimSpacing::new(Spacing::Alone, Spacing::Alone)
    )];

    // As we descend in the RHS, we will need to be able to match nested sequences of matchers.
    // `repeats` keeps track of where we are in matching at each level, with the last element being
    // the most deeply nested sequence. This is used as a stack.
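    // For example (illustrative): while transcribing the second element of the inner sequence in
    // `$( $( $x )* )*` during the first iteration of the outer sequence, `repeats` would be
    // `[(0, outer_len), (1, inner_len)]`.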
    let mut repeats: Vec<(usize, usize)> = Vec::new();

    // `result` contains the resulting token stream from the TokenTree we just finished
    // processing. At the end, this will contain the full result of transcription, but at
    // arbitrary points during `transcribe`, `result` will contain subsets of the final result.
    //
    // Specifically, as we descend into each TokenTree, we will push the existing results onto the
    // `result_stack` and clear `result`. We will then produce the results of transcribing the
    // TokenTree into `result`. Then, as we unwind back out of the `TokenTree`, we will pop the
    // `result_stack` and append `result` to it to produce the new `result` up to that point.
    //
    // Thus, if we try to pop the `result_stack` and it is empty, we have reached the top-level
    // again, and we are done transcribing.
    let mut result: Vec<TokenTree> = Vec::new();
    let mut result_stack = Vec::new();
    let mut marker = Marker(expand_id, transparency, Default::default());

    let dcx = psess.dcx();
    loop {
        // Look at the last frame on the stack.
        // If it still has a TokenTree we have not looked at yet, use that tree.
        let Some(tree) = stack.last_mut().unwrap().next() else {
            // This else-case never produces a value for `tree` (it `continue`s or `return`s).

            // Otherwise, if we have just reached the end of a sequence and we can keep repeating,
            // go back to the beginning of the sequence.
            let frame = stack.last_mut().unwrap();
            if let FrameKind::Sequence { sep, .. } = &frame.kind {
                let (repeat_idx, repeat_len) = repeats.last_mut().unwrap();
                *repeat_idx += 1;
                if repeat_idx < repeat_len {
                    frame.idx = 0;
                    if let Some(sep) = sep {
                        result.push(TokenTree::Token(sep.clone(), Spacing::Alone));
                    }
                    continue;
                }
            }

            // We are done with the top of the stack. Pop it. Depending on what it was, we do
            // different things. Note that the outermost item must be the delimited, wrapped RHS
            // that was passed in originally to `transcribe`.
            match stack.pop().unwrap().kind {
                // Done with a sequence. Pop from repeats.
                FrameKind::Sequence { .. } => {
                    repeats.pop();
                }

                // We are done processing a Delimited. If this is the top-level delimited, we are
                // done. Otherwise, we unwind the result_stack to append what we have produced to
                // any previous results.
                FrameKind::Delimited { delim, span, mut spacing, .. } => {
                    // Hack to force-insert a space after `]` in certain cases.
                    // See discussion of the `hex-literal` crate in #114571.
                    if delim == Delimiter::Bracket {
                        spacing.close = Spacing::Alone;
                    }
                    if result_stack.is_empty() {
                        // No results left to compute! We are back at the top-level.
                        return Ok(TokenStream::new(result));
                    }

                    // Step back into the parent Delimited.
                    let tree = TokenTree::Delimited(span, spacing, delim, TokenStream::new(result));
                    result = result_stack.pop().unwrap();
                    result.push(tree);
                }
            }
            continue;
        };

        // At this point, we know we are in the middle of a TokenTree (the last one on `stack`).
        // `tree` contains the next `TokenTree` to be processed.
        match tree {
            // We are descending into a sequence. We first make sure that the matchers in the RHS
            // and the matches in `interp` have the same shape. Otherwise, either the caller or the
            // macro writer has made a mistake.
            seq @ mbe::TokenTree::Sequence(_, seq_rep) => {
                match lockstep_iter_size(seq, interp, &repeats) {
                    LockstepIterSize::Unconstrained => {
                        return Err(dcx.create_err(NoSyntaxVarsExprRepeat { span: seq.span() }));
                    }

                    LockstepIterSize::Contradiction(msg) => {
                        // FIXME: this really ought to be caught at macro definition time... It
                        // happens when two meta-variables are used in the same repetition in a
                        // sequence, but they come from different sequence matchers and repeat
                        // different amounts.
                        return Err(
                            dcx.create_err(MetaVarsDifSeqMatchers { span: seq.span(), msg })
                        );
                    }

                    LockstepIterSize::Constraint(len, _) => {
                        // We do this to avoid an extra clone above. We know that this is a
                        // sequence already.
                        let mbe::TokenTree::Sequence(sp, seq) = seq else { unreachable!() };

                        // Is the repetition empty?
                        if len == 0 {
                            if seq.kleene.op == KleeneOp::OneOrMore {
                                // FIXME: this really ought to be caught at macro definition
                                // time... It happens when the Kleene operator in the matcher and
                                // the body for the same meta-variable do not match.
                                return Err(dcx.create_err(MustRepeatOnce { span: sp.entire() }));
                            }
                        } else {
                            // 0 is the initial counter (we have done 0 repetitions so far). `len`
                            // is the total number of repetitions we should generate.
                            repeats.push((0, len));

                            // The first time we encounter the sequence we push it to the stack. It
                            // then gets reused (see the beginning of the loop) until we are done
                            // repeating.
                            stack.push(Frame::new_sequence(
                                seq_rep,
                                seq.separator.clone(),
                                seq.kleene.op,
                            ));
                        }
                    }
                }
            }

            // Replace the meta-var with the matched token tree from the invocation.
            mbe::TokenTree::MetaVar(mut sp, mut original_ident) => {
                // Find the matched nonterminal from the macro invocation, and use it to replace
                // the meta-var.
                //
                // We use `Spacing::Alone` everywhere here, because that's the conservative choice
                // and spacing of declarative macros is tricky. E.g. in this macro:
                // ```
                // macro_rules! idents {
                //     ($($a:ident,)*) => { stringify!($($a)*) }
                // }
                // ```
                // `$a` has no whitespace after it and will be marked `JointHidden`. If you then
                // call `idents!(x,y,z,)`, each of `x`, `y`, and `z` will be marked as `Joint`. So
                // if you choose to use `$a`'s spacing or the identifier's spacing, you'll end up
                // producing "xyz", which is bad because it effectively merges tokens.
                // `Spacing::Alone` is the safer option. Fortunately, `space_between` will avoid
                // some of the unnecessary whitespace.
                let ident = MacroRulesNormalizedIdent::new(original_ident);
                if let Some(cur_matched) = lookup_cur_matched(ident, interp, &repeats) {
                    let tt = match cur_matched {
                        MatchedSingle(ParseNtResult::Tt(tt)) => {
                            // `tt`s are emitted into the output stream directly as "raw tokens",
                            // without wrapping them into groups.
                            maybe_use_metavar_location(psess, &stack, sp, tt, &mut marker)
                        }
                        MatchedSingle(ParseNtResult::Ident(ident, is_raw)) => {
                            marker.visit_span(&mut sp);
                            with_metavar_spans(|mspans| mspans.insert(ident.span, sp));
                            let kind = token::NtIdent(*ident, *is_raw);
                            TokenTree::token_alone(kind, sp)
                        }
                        MatchedSingle(ParseNtResult::Lifetime(ident, is_raw)) => {
                            marker.visit_span(&mut sp);
                            with_metavar_spans(|mspans| mspans.insert(ident.span, sp));
                            let kind = token::NtLifetime(*ident, *is_raw);
                            TokenTree::token_alone(kind, sp)
                        }
                        MatchedSingle(ParseNtResult::Nt(nt)) => {
                            // Other variables are emitted into the output stream as groups with
                            // `Delimiter::Invisible` to maintain parsing priorities.
                            // `Interpolated` is currently used for such groups in rustc parser.
                            marker.visit_span(&mut sp);
                            let use_span = nt.use_span();
                            with_metavar_spans(|mspans| mspans.insert(use_span, sp));
                            TokenTree::token_alone(token::Interpolated(Arc::clone(nt)), sp)
                        }
                        MatchedSeq(..) => {
                            // We were unable to descend far enough. This is an error.
                            return Err(dcx.create_err(VarStillRepeating { span: sp, ident }));
                        }
                    };
                    result.push(tt)
                } else {
                    // If we aren't able to match the meta-var, we push it back into the result but
                    // with modified syntax context. (I believe this supports nested macros).
                    marker.visit_span(&mut sp);
                    marker.visit_ident(&mut original_ident);
                    result.push(TokenTree::token_joint_hidden(token::Dollar, sp));
                    result.push(TokenTree::Token(
                        Token::from_ast_ident(original_ident),
                        Spacing::Alone,
                    ));
                }
            }

            // Replace meta-variable expressions with the result of their expansion.
            mbe::TokenTree::MetaVarExpr(sp, expr) => {
                transcribe_metavar_expr(
                    dcx,
                    expr,
                    interp,
                    &mut marker,
                    &repeats,
                    &mut result,
                    sp,
                    &psess.symbol_gallery,
                )?;
            }

            // If we are entering a new delimiter, we push its contents to the `stack` to be
            // processed, and we push all of the currently produced results to the `result_stack`.
            // We will produce all of the results of the inside of the `Delimited` and then we will
            // jump back out of the Delimited, pop the result_stack and add the new results back to
            // the previous results (from outside the Delimited).
            mbe::TokenTree::Delimited(mut span, spacing, delimited) => {
                mut_visit::visit_delim_span(&mut marker, &mut span);
                stack.push(Frame::new_delimited(delimited, span, *spacing));
                result_stack.push(mem::take(&mut result));
            }

            // Nothing much to do here. Just push the token to the result, being careful to
            // preserve syntax context.
            mbe::TokenTree::Token(token) => {
                let mut token = token.clone();
                mut_visit::visit_token(&mut marker, &mut token);
                let tt = TokenTree::Token(token, Spacing::Alone);
                result.push(tt);
            }

            // There should be no meta-var declarations in the invocation of a macro.
            mbe::TokenTree::MetaVarDecl(..) => panic!("unexpected `TokenTree::MetaVarDecl`"),
        }
    }
}

/// Store the metavariable span for this original span into a side table.
/// FIXME: Try to put the metavariable span into `SpanData` instead of a side table (#118517).
/// An optimal encoding for inlined spans will need to be selected to minimize regressions.
/// The side table approach is relatively good, but not perfect due to collisions.
/// In particular, collisions happen when a token is passed as an argument through several macro
/// calls, like in recursive macros.
/// The old heuristic below is used to improve spans in case of collisions, but diagnostics are
/// still degraded sometimes in those cases.
///
/// The old heuristic:
///
/// Usually metavariables `$var` produce interpolated tokens, which have an additional place for
/// keeping both the original span and the metavariable span. For `tt` metavariables that's not the
/// case however, and there's no place for keeping a second span. So we try to give the single
/// produced span a location that would be most useful in practice (the hygiene part of the span
/// must not be changed).
///
/// Different locations are useful for different purposes:
/// - The original location is useful when we need to report a diagnostic for the original token in
///   isolation, without combining it with any surrounding tokens. This case occurs, but it is not
///   very common in practice.
/// - The metavariable location is useful when we need to somehow combine the token span with spans
///   of its surrounding tokens. This is the most common way to use token spans.
///
/// So this function replaces the original location with the metavariable location in all cases
/// except these two:
/// - The metavariable is an element of an undelimited sequence `$($tt)*`.
///   These are typically used for passing larger amounts of code, and tokens in that code usually
///   combine with each other and not with tokens outside of the sequence.
/// - The metavariable span comes from a different crate, in which case we prefer the more local
///   span.
fn maybe_use_metavar_location(
    psess: &ParseSess,
    stack: &[Frame<'_>],
    mut metavar_span: Span,
    orig_tt: &TokenTree,
    marker: &mut Marker,
) -> TokenTree {
    let undelimited_seq = matches!(
        stack.last(),
        Some(Frame {
            tts: [_],
            kind: FrameKind::Sequence {
                sep: None,
                kleene_op: KleeneOp::ZeroOrMore | KleeneOp::OneOrMore,
                ..
            },
            ..
        })
    );
    if undelimited_seq {
        // Do not record metavar spans for tokens from undelimited sequences, for perf reasons.
        return orig_tt.clone();
    }

    marker.visit_span(&mut metavar_span);
    let no_collision = match orig_tt {
        TokenTree::Token(token, ..) => {
            with_metavar_spans(|mspans| mspans.insert(token.span, metavar_span))
        }
        TokenTree::Delimited(dspan, ..) => with_metavar_spans(|mspans| {
            mspans.insert(dspan.open, metavar_span)
                && mspans.insert(dspan.close, metavar_span)
                && mspans.insert(dspan.entire(), metavar_span)
        }),
    };
    if no_collision || psess.source_map().is_imported(metavar_span) {
        return orig_tt.clone();
    }

    // Setting metavar spans for the heuristic spans gives better opportunities for combining them
    // with neighboring spans despite their different syntactic contexts.
    match orig_tt {
        TokenTree::Token(Token { kind, span }, spacing) => {
            let span = metavar_span.with_ctxt(span.ctxt());
            with_metavar_spans(|mspans| mspans.insert(span, metavar_span));
            TokenTree::Token(Token { kind: kind.clone(), span }, *spacing)
        }
        TokenTree::Delimited(dspan, dspacing, delimiter, tts) => {
            let open = metavar_span.with_ctxt(dspan.open.ctxt());
            let close = metavar_span.with_ctxt(dspan.close.ctxt());
            with_metavar_spans(|mspans| {
                mspans.insert(open, metavar_span) && mspans.insert(close, metavar_span)
            });
            let dspan = DelimSpan::from_pair(open, close);
            TokenTree::Delimited(dspan, *dspacing, *delimiter, tts.clone())
        }
    }
}

/// Look up the meta-var named `ident` and return the matched token tree from the invocation using
/// the set of matches `interpolations`.
///
/// See the definition of `repeats` in the `transcribe` function. `repeats` is used to descend
/// into the right place in nested matchers. If we attempt to descend too far, the macro writer has
/// made a mistake, and we return `None`.
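/// For example (illustrative), if `ident` matched `[[a, b], [c]]` in a doubly-nested repetition
/// and `repeats` is `[(1, 2), (0, 1)]`, this descends to the match for `c`; with an empty
/// `repeats` it would return the whole outer `MatchedSeq`.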
fn lookup_cur_matched<'a>(
    ident: MacroRulesNormalizedIdent,
    interpolations: &'a FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
    repeats: &[(usize, usize)],
) -> Option<&'a NamedMatch> {
    interpolations.get(&ident).map(|mut matched| {
        for &(idx, _) in repeats {
            match matched {
                MatchedSingle(_) => break,
                MatchedSeq(ads) => matched = ads.get(idx).unwrap(),
            }
        }

        matched
    })
}

/// An accumulator over a TokenTree to be used with `fold`. During transcription, we need to make
/// sure that the size of each sequence and all of its nested sequences are the same as the sizes
/// of all the matched (nested) sequences in the macro invocation. If they don't match, somebody
/// has made a mistake (either the macro writer or caller).
#[derive(Clone)]
enum LockstepIterSize {
    /// No constraints on length of matcher. This is true for any TokenTree variants except a
    /// `MetaVar` with an actual `MatchedSeq` (as opposed to a `MatchedSingle`).
    Unconstrained,

    /// A `MetaVar` with an actual `MatchedSeq`. The length of the match and the name of the
    /// meta-var are returned.
    Constraint(usize, MacroRulesNormalizedIdent),

    /// Two `Constraint`s on the same sequence had different lengths. This is an error.
    Contradiction(String),
}

impl LockstepIterSize {
    /// Find incompatibilities in matcher/invocation sizes.
    /// - `Unconstrained` is compatible with everything.
    /// - `Contradiction` is incompatible with everything.
    /// - `Constraint(len)` is only compatible with other constraints of the same length.
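    /// For example (illustrative), `Constraint(3, a).with(Constraint(3, b))` stays a length-3
    /// constraint, while `Constraint(3, a).with(Constraint(4, b))` becomes a `Contradiction`
    /// reporting the mismatched repetition counts.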
    fn with(self, other: LockstepIterSize) -> LockstepIterSize {
        match self {
            LockstepIterSize::Unconstrained => other,
            LockstepIterSize::Contradiction(_) => self,
            LockstepIterSize::Constraint(l_len, l_id) => match other {
                LockstepIterSize::Unconstrained => self,
                LockstepIterSize::Contradiction(_) => other,
                LockstepIterSize::Constraint(r_len, _) if l_len == r_len => self,
                LockstepIterSize::Constraint(r_len, r_id) => {
                    let msg = format!(
                        "meta-variable `{}` repeats {} time{}, but `{}` repeats {} time{}",
                        l_id,
                        l_len,
                        pluralize!(l_len),
                        r_id,
                        r_len,
                        pluralize!(r_len),
                    );
                    LockstepIterSize::Contradiction(msg)
                }
            },
        }
    }
}

/// Given a `tree`, make sure that all sequences have the same length as the matches for the
/// appropriate meta-vars in `interpolations`.
///
/// Note that if `repeats` does not match the exact correct depth of a meta-var,
/// `lookup_cur_matched` will return `None`, which is why this still works even in the presence of
/// multiple nested matcher sequences.
///
/// Example: `$($($x $y)+*);+` -- we need to make sure that `x` and `y` repeat the same amount as
/// each other at the given depth when the macro was invoked. If they don't, it might mean they
/// were declared at unequal depths, or there was a compiler bug. For example, if we have 3
/// repetitions of the outer sequence and 4 repetitions of the inner sequence for `x`, we should
/// have the same for `y`; otherwise, we can't transcribe them both at the given depth.
fn lockstep_iter_size(
    tree: &mbe::TokenTree,
    interpolations: &FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
    repeats: &[(usize, usize)],
) -> LockstepIterSize {
    use mbe::TokenTree;
    match tree {
        TokenTree::Delimited(.., delimited) => {
            delimited.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| {
                size.with(lockstep_iter_size(tt, interpolations, repeats))
            })
        }
        TokenTree::Sequence(_, seq) => {
            seq.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| {
                size.with(lockstep_iter_size(tt, interpolations, repeats))
            })
        }
        TokenTree::MetaVar(_, name) | TokenTree::MetaVarDecl(_, name, _) => {
            let name = MacroRulesNormalizedIdent::new(*name);
            match lookup_cur_matched(name, interpolations, repeats) {
                Some(matched) => match matched {
                    MatchedSingle(_) => LockstepIterSize::Unconstrained,
                    MatchedSeq(ads) => LockstepIterSize::Constraint(ads.len(), name),
                },
                _ => LockstepIterSize::Unconstrained,
            }
        }
        TokenTree::MetaVarExpr(_, expr) => {
            expr.for_each_metavar(LockstepIterSize::Unconstrained, |lis, ident| {
                lis.with(lockstep_iter_size(
                    &TokenTree::MetaVar(ident.span, *ident),
                    interpolations,
                    repeats,
                ))
            })
        }
        TokenTree::Token(..) => LockstepIterSize::Unconstrained,
    }
}

/// Used solely by the `count` meta-variable expression, counts the outermost repetitions at a
/// given optional nested depth.
///
/// For example, a macro parameter of `$( { $( $foo:ident ),* } )*` called with `{ a, b } { c }`:
///
/// * `[ $( ${count(foo)} ),* ]` will return [2, 1] with a, b = 2 and c = 1
/// * `[ $( ${count(foo, 0)} ),* ]` will be the same as `[ $( ${count(foo)} ),* ]`
/// * `[ $( ${count(foo, 1)} ),* ]` will return an error because `${count(foo, 1)}` is
///   declared inside a single repetition and the index `1` implies two nested repetitions.
fn count_repetitions<'a>(
    dcx: DiagCtxtHandle<'a>,
    depth_user: usize,
    mut matched: &NamedMatch,
    repeats: &[(usize, usize)],
    sp: &DelimSpan,
) -> PResult<'a, usize> {
    // Recursively count the number of matches in `matched` at the given depth
    // (or at the top-level of `matched` if no depth is given).
    fn count<'a>(depth_curr: usize, depth_max: usize, matched: &NamedMatch) -> PResult<'a, usize> {
        match matched {
            MatchedSingle(_) => Ok(1),
            MatchedSeq(named_matches) => {
                if depth_curr == depth_max {
                    Ok(named_matches.len())
                } else {
                    named_matches.iter().map(|elem| count(depth_curr + 1, depth_max, elem)).sum()
                }
            }
        }
    }

    /// Maximum depth
    fn depth(counter: usize, matched: &NamedMatch) -> usize {
        match matched {
            MatchedSingle(_) => counter,
            MatchedSeq(named_matches) => {
                let rslt = counter + 1;
                if let Some(elem) = named_matches.first() { depth(rslt, elem) } else { rslt }
            }
        }
    }

    let depth_max = depth(0, matched)
        .checked_sub(1)
        .and_then(|el| el.checked_sub(repeats.len()))
        .unwrap_or_default();
    if depth_user > depth_max {
        return Err(out_of_bounds_err(dcx, depth_max + 1, sp.entire(), "count"));
    }

    // `repeats` records all of the nested levels at which we are currently
    // matching meta-variables. The meta-var-expr `count($x)` only counts
    // matches that occur in this "subtree" of the `NamedMatch` where we
    // are currently transcribing, so we need to descend to that subtree
    // before we start counting. `matched` contains the various levels of the
    // tree as we descend, and its final value is the subtree we are currently at.
    for &(idx, _) in repeats {
        if let MatchedSeq(ads) = matched {
            matched = &ads[idx];
        }
    }

    if let MatchedSingle(_) = matched {
        return Err(dcx.create_err(CountRepetitionMisplaced { span: sp.entire() }));
    }

    count(depth_user, depth_max, matched)
}

/// Returns a `NamedMatch` item declared on the LHS given an arbitrary [Ident]
fn matched_from_ident<'ctx, 'interp, 'rslt>(
    dcx: DiagCtxtHandle<'ctx>,
    ident: Ident,
    interp: &'interp FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
) -> PResult<'ctx, &'rslt NamedMatch>
where
    'interp: 'rslt,
{
    let span = ident.span;
    let key = MacroRulesNormalizedIdent::new(ident);
    interp.get(&key).ok_or_else(|| dcx.create_err(MetaVarExprUnrecognizedVar { span, key }))
}

/// Used by meta-variable expressions when a user input is out of the actual declared bounds. For
/// example, `index(999999)` in a repetition of only three elements.
fn out_of_bounds_err<'a>(dcx: DiagCtxtHandle<'a>, max: usize, span: Span, ty: &str) -> Diag<'a> {
    let msg = if max == 0 {
        format!(
            "meta-variable expression `{ty}` with depth parameter \
             must be called inside of a macro repetition"
        )
    } else {
        format!(
            "depth parameter of meta-variable expression `{ty}` \
             must be less than {max}"
        )
    };
    dcx.struct_span_err(span, msg)
}

fn transcribe_metavar_expr<'a>(
    dcx: DiagCtxtHandle<'a>,
    expr: &MetaVarExpr,
    interp: &FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
    marker: &mut Marker,
    repeats: &[(usize, usize)],
    result: &mut Vec<TokenTree>,
    sp: &DelimSpan,
    symbol_gallery: &SymbolGallery,
) -> PResult<'a, ()> {
    let mut visited_span = || {
        let mut span = sp.entire();
        marker.visit_span(&mut span);
        span
    };
    match *expr {
        MetaVarExpr::Concat(ref elements) => {
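            // `${concat(..)}` joins its elements (plain identifiers, literals, and the contents
            // of matched metavariables) into a single identifier. For example (illustrative),
            // `${concat($first, _suffix)}` with `$first` matching `foo` produces `foo_suffix`.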
            let mut concatenated = String::new();
            for element in elements.into_iter() {
                let symbol = match element {
                    MetaVarExprConcatElem::Ident(elem) => elem.name,
                    MetaVarExprConcatElem::Literal(elem) => *elem,
                    MetaVarExprConcatElem::Var(ident) => {
                        match matched_from_ident(dcx, *ident, interp)? {
                            NamedMatch::MatchedSeq(named_matches) => {
                                let Some((curr_idx, _)) = repeats.last() else {
                                    return Err(dcx.struct_span_err(sp.entire(), "invalid syntax"));
                                };
                                match &named_matches[*curr_idx] {
                                    // FIXME(c410-f3r) Nested repetitions are unimplemented
                                    MatchedSeq(_) => unimplemented!(),
                                    MatchedSingle(pnr) => {
                                        extract_symbol_from_pnr(dcx, pnr, ident.span)?
                                    }
                                }
                            }
                            NamedMatch::MatchedSingle(pnr) => {
                                extract_symbol_from_pnr(dcx, pnr, ident.span)?
                            }
                        }
                    }
                };
                concatenated.push_str(symbol.as_str());
            }
            let symbol = nfc_normalize(&concatenated);
            let concatenated_span = visited_span();
            if !rustc_lexer::is_ident(symbol.as_str()) {
                return Err(dcx.struct_span_err(
                    concatenated_span,
                    "`${concat(..)}` is not generating a valid identifier",
                ));
            }
            symbol_gallery.insert(symbol, concatenated_span);
            // The current implementation marks the span as coming from the macro regardless of
            // the contexts of the concatenated identifiers, but this behavior may change in the
            // future.
            result.push(TokenTree::Token(
                Token::from_ast_ident(Ident::new(symbol, concatenated_span)),
                Spacing::Alone,
            ));
        }
        MetaVarExpr::Count(original_ident, depth) => {
            let matched = matched_from_ident(dcx, original_ident, interp)?;
            let count = count_repetitions(dcx, depth, matched, repeats, sp)?;
            let tt = TokenTree::token_alone(
                TokenKind::lit(token::Integer, sym::integer(count), None),
                visited_span(),
            );
            result.push(tt);
        }
        MetaVarExpr::Ignore(original_ident) => {
            // Used to ensure that `original_ident` is present in the LHS
            let _ = matched_from_ident(dcx, original_ident, interp)?;
        }
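        // `${index(depth)}` expands to the current iteration index (zero-based) of the repetition
        // at the given depth, and `${len(depth)}` to that repetition's total number of iterations;
        // depth 0 refers to the innermost repetition currently being transcribed.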
        MetaVarExpr::Index(depth) => match repeats.iter().nth_back(depth) {
            Some((index, _)) => {
                result.push(TokenTree::token_alone(
                    TokenKind::lit(token::Integer, sym::integer(*index), None),
                    visited_span(),
                ));
            }
            None => return Err(out_of_bounds_err(dcx, repeats.len(), sp.entire(), "index")),
        },
        MetaVarExpr::Len(depth) => match repeats.iter().nth_back(depth) {
            Some((_, length)) => {
                result.push(TokenTree::token_alone(
                    TokenKind::lit(token::Integer, sym::integer(*length), None),
                    visited_span(),
                ));
            }
            None => return Err(out_of_bounds_err(dcx, repeats.len(), sp.entire(), "len")),
        },
    }
    Ok(())
}

/// Extracts a metavariable symbol that can be an identifier, a token tree or a literal.
fn extract_symbol_from_pnr<'a>(
    dcx: DiagCtxtHandle<'a>,
    pnr: &ParseNtResult,
    span_err: Span,
) -> PResult<'a, Symbol> {
    match pnr {
        ParseNtResult::Ident(nt_ident, is_raw) => {
            if let IdentIsRaw::Yes = is_raw {
                Err(dcx.struct_span_err(span_err, RAW_IDENT_ERR))
            } else {
                Ok(nt_ident.name)
            }
        }
        ParseNtResult::Tt(TokenTree::Token(
            Token { kind: TokenKind::Ident(symbol, is_raw), .. },
            _,
        )) => {
            if let IdentIsRaw::Yes = is_raw {
                Err(dcx.struct_span_err(span_err, RAW_IDENT_ERR))
            } else {
                Ok(*symbol)
            }
        }
        ParseNtResult::Tt(TokenTree::Token(
            Token {
                kind: TokenKind::Literal(Lit { kind: LitKind::Str, symbol, suffix: None }),
                ..
            },
            _,
        )) => Ok(*symbol),
        ParseNtResult::Nt(nt)
            if let Nonterminal::NtLiteral(expr) = &**nt
                && let ExprKind::Lit(Lit { kind: LitKind::Str, symbol, suffix: None }) =
                    &expr.kind =>
        {
            Ok(*symbol)
        }
        _ => Err(dcx
            .struct_err(
                "metavariables of `${concat(..)}` must be of type `ident`, `literal` or `tt`",
            )
            .with_note("currently only string literals are supported")
            .with_span(span_err)),
    }
}