1use std::mem;
2
3use rustc_ast::token::{
4 self, Delimiter, IdentIsRaw, InvisibleOrigin, Lit, LitKind, MetaVarKind, Token, TokenKind,
5};
6use rustc_ast::tokenstream::{DelimSpacing, DelimSpan, Spacing, TokenStream, TokenTree};
7use rustc_ast::{ExprKind, StmtKind, TyKind, UnOp};
8use rustc_data_structures::fx::FxHashMap;
9use rustc_errors::{Diag, DiagCtxtHandle, PResult, pluralize};
10use rustc_parse::lexer::nfc_normalize;
11use rustc_parse::parser::ParseNtResult;
12use rustc_session::parse::ParseSess;
13use rustc_span::hygiene::{LocalExpnId, Transparency};
14use rustc_span::{
15 Ident, MacroRulesNormalizedIdent, Span, Symbol, SyntaxContext, sym, with_metavar_spans,
16};
17use smallvec::{SmallVec, smallvec};
18
19use crate::errors::{
20 CountRepetitionMisplaced, MacroVarStillRepeating, MetaVarsDifSeqMatchers, MustRepeatOnce,
21 MveUnrecognizedVar, NoSyntaxVarsExprRepeat,
22};
23use crate::mbe::macro_parser::NamedMatch;
24use crate::mbe::macro_parser::NamedMatch::*;
25use crate::mbe::metavar_expr::{MetaVarExprConcatElem, RAW_IDENT_ERR};
26use crate::mbe::{self, KleeneOp, MetaVarExpr};
27
/// Mutable state threaded through `transcribe` and its helpers while a single
/// `macro_rules!` RHS is being transcribed.
struct TranscrCtx<'psess, 'itp> {
    psess: &'psess ParseSess,

    /// The matches produced for this invocation, keyed by normalized
    /// metavariable name.
    interp: &'itp FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,

    /// Applies this expansion's hygiene mark to spans (with a per-context cache).
    marker: Marker,

    /// Frames still being transcribed (delimited groups and sequence passes);
    /// the last entry is the innermost.
    stack: SmallVec<[Frame<'itp>; 1]>,

    /// For each sequence frame currently on `stack`, the
    /// `(current iteration index, total iteration count)` pair.
    repeats: Vec<(usize, usize)>,

    /// Output token trees accumulated for the innermost delimited group.
    result: Vec<TokenTree>,

    /// Saved `result` vectors of the enclosing delimited groups, restored as
    /// groups are closed.
    result_stack: Vec<Vec<TokenTree>>,
}
70
71impl<'psess> TranscrCtx<'psess, '_> {
72 fn visited_dspan(&mut self, dspan: DelimSpan) -> Span {
74 let mut span = dspan.entire();
75 self.marker.mark_span(&mut span);
76 span
77 }
78}
79
/// Applies a macro expansion's hygiene mark to spans.
///
/// `cache` memoizes the rewritten `SyntaxContext` per original context, so
/// each distinct context is marked only once.
struct Marker {
    expand_id: LocalExpnId,
    transparency: Transparency,
    cache: FxHashMap<SyntaxContext, SyntaxContext>,
}
86
87impl Marker {
88 fn mark_span(&mut self, span: &mut Span) {
90 *span = span.map_ctxt(|ctxt| {
95 *self
96 .cache
97 .entry(ctxt)
98 .or_insert_with(|| ctxt.apply_mark(self.expand_id.to_expn_id(), self.transparency))
99 });
100 }
101}
102
/// One unit of transcription work: a slice of RHS token trees plus a cursor
/// (`idx`) into it, and what kind of construct the slice came from.
struct Frame<'a> {
    tts: &'a [mbe::TokenTree],
    idx: usize,
    kind: FrameKind,
}
109
/// The construct a `Frame` is transcribing.
enum FrameKind {
    /// A delimited group, e.g. `(...)`, `[...]`, `{...}`.
    Delimited { delim: Delimiter, span: DelimSpan, spacing: DelimSpacing },
    /// One pass over the body of a `$(...)` sequence repetition.
    Sequence { sep: Option<Token>, kleene_op: KleeneOp },
}
114
115impl<'a> Frame<'a> {
116 fn new_delimited(src: &'a mbe::Delimited, span: DelimSpan, spacing: DelimSpacing) -> Frame<'a> {
117 Frame {
118 tts: &src.tts,
119 idx: 0,
120 kind: FrameKind::Delimited { delim: src.delim, span, spacing },
121 }
122 }
123
124 fn new_sequence(
125 src: &'a mbe::SequenceRepetition,
126 sep: Option<Token>,
127 kleene_op: KleeneOp,
128 ) -> Frame<'a> {
129 Frame { tts: &src.tts, idx: 0, kind: FrameKind::Sequence { sep, kleene_op } }
130 }
131}
132
133impl<'a> Iterator for Frame<'a> {
134 type Item = &'a mbe::TokenTree;
135
136 fn next(&mut self) -> Option<&'a mbe::TokenTree> {
137 let res = self.tts.get(self.idx);
138 self.idx += 1;
139 res
140 }
141}
142
/// Transcribes the RHS (`src`) of a `macro_rules!` rule into a `TokenStream`,
/// substituting metavariables using the matches in `interp`.
///
/// Spans are re-marked for hygiene using `expand_id`/`transparency`. Errors
/// are returned for malformed repetitions and metavariable expressions.
pub(super) fn transcribe<'a>(
    psess: &'a ParseSess,
    interp: &FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
    src: &mbe::Delimited,
    src_span: DelimSpan,
    transparency: Transparency,
    expand_id: LocalExpnId,
) -> PResult<'a, TokenStream> {
    // Fast path: an empty RHS produces an empty stream.
    if src.tts.is_empty() {
        return Ok(TokenStream::default());
    }

    // Start with a single frame covering the whole RHS.
    let mut tscx = TranscrCtx {
        psess,
        interp,
        marker: Marker { expand_id, transparency, cache: Default::default() },
        repeats: Vec::new(),
        stack: smallvec![Frame::new_delimited(
            src,
            src_span,
            DelimSpacing::new(Spacing::Alone, Spacing::Alone)
        )],
        result: Vec::new(),
        result_stack: Vec::new(),
    };

    loop {
        // Pull the next token tree from the innermost frame. If that frame is
        // exhausted, either rewind it (sequence with iterations left) or pop it.
        let Some(tree) = tscx.stack.last_mut().unwrap().next() else {
            let frame = tscx.stack.last_mut().unwrap();
            if let FrameKind::Sequence { sep, .. } = &frame.kind {
                let (repeat_idx, repeat_len) = tscx.repeats.last_mut().unwrap();
                *repeat_idx += 1;
                if repeat_idx < repeat_len {
                    // More iterations to go: rewind the frame and emit the
                    // separator (if any) between iterations.
                    frame.idx = 0;
                    if let Some(sep) = sep {
                        tscx.result.push(TokenTree::Token(*sep, Spacing::Alone));
                    }
                    continue;
                }
            }

            match tscx.stack.pop().unwrap().kind {
                FrameKind::Sequence { .. } => {
                    // The sequence is fully transcribed; drop its repeat entry.
                    tscx.repeats.pop();
                }

                FrameKind::Delimited { delim, span, mut spacing, .. } => {
                    // NOTE(review): close spacing of bracketed groups is forced
                    // to `Alone` — rationale not visible here; confirm upstream.
                    if delim == Delimiter::Bracket {
                        spacing.close = Spacing::Alone;
                    }
                    // Popping the outermost frame means transcription is done.
                    if tscx.result_stack.is_empty() {
                        return Ok(TokenStream::new(tscx.result));
                    }

                    // Wrap the finished inner stream in its delimiters and
                    // append it to the enclosing group's result.
                    let tree =
                        TokenTree::Delimited(span, spacing, delim, TokenStream::new(tscx.result));
                    tscx.result = tscx.result_stack.pop().unwrap();
                    tscx.result.push(tree);
                }
            }
            continue;
        };

        match tree {
            // `$(...)` repetition.
            seq @ mbe::TokenTree::Sequence(_, seq_rep) => {
                transcribe_sequence(&mut tscx, seq, seq_rep)?;
            }

            // `$var` metavariable.
            &mbe::TokenTree::MetaVar(sp, original_ident) => {
                transcribe_metavar(&mut tscx, sp, original_ident)?;
            }

            // `${...}` metavariable expression.
            mbe::TokenTree::MetaVarExpr(dspan, expr) => {
                transcribe_metavar_expr(&mut tscx, *dspan, expr)?;
            }

            // A nested delimited group: mark its delimiter spans, push a new
            // frame, and stash the current result until the group closes.
            &mbe::TokenTree::Delimited(mut span, ref spacing, ref delimited) => {
                tscx.marker.mark_span(&mut span.open);
                tscx.marker.mark_span(&mut span.close);
                tscx.stack.push(Frame::new_delimited(delimited, span, *spacing));
                tscx.result_stack.push(mem::take(&mut tscx.result));
            }

            // A plain token: mark its span (and, for NtIdent/NtLifetime, the
            // embedded ident's span) and emit it.
            &mbe::TokenTree::Token(mut token) => {
                tscx.marker.mark_span(&mut token.span);
                if let token::NtIdent(ident, _) | token::NtLifetime(ident, _) = &mut token.kind {
                    tscx.marker.mark_span(&mut ident.span);
                }
                let tt = TokenTree::Token(token, Spacing::Alone);
                tscx.result.push(tt);
            }

            // Metavariable declarations belong on the LHS only.
            mbe::TokenTree::MetaVarDecl { .. } => panic!("unexpected `TokenTree::MetaVarDecl`"),
        }
    }
}
290
291fn transcribe_sequence<'tx, 'itp>(
293 tscx: &mut TranscrCtx<'tx, 'itp>,
294 seq: &mbe::TokenTree,
295 seq_rep: &'itp mbe::SequenceRepetition,
296) -> PResult<'tx, ()> {
297 let dcx = tscx.psess.dcx();
298
299 match lockstep_iter_size(seq, tscx.interp, &tscx.repeats) {
303 LockstepIterSize::Unconstrained => {
304 return Err(dcx.create_err(NoSyntaxVarsExprRepeat { span: seq.span() }));
305 }
306
307 LockstepIterSize::Contradiction(msg) => {
308 return Err(dcx.create_err(MetaVarsDifSeqMatchers { span: seq.span(), msg }));
313 }
314
315 LockstepIterSize::Constraint(len, _) => {
316 let mbe::TokenTree::Sequence(sp, seq) = seq else { unreachable!() };
319
320 if len == 0 {
322 if seq.kleene.op == KleeneOp::OneOrMore {
323 return Err(dcx.create_err(MustRepeatOnce { span: sp.entire() }));
327 }
328 } else {
329 tscx.repeats.push((0, len));
332
333 tscx.stack.push(Frame::new_sequence(seq_rep, seq.separator.clone(), seq.kleene.op));
337 }
338 }
339 }
340
341 Ok(())
342}
343
344fn transcribe_metavar<'tx>(
361 tscx: &mut TranscrCtx<'tx, '_>,
362 mut sp: Span,
363 mut original_ident: Ident,
364) -> PResult<'tx, ()> {
365 let dcx = tscx.psess.dcx();
366
367 let ident = MacroRulesNormalizedIdent::new(original_ident);
368 let Some(cur_matched) = lookup_cur_matched(ident, tscx.interp, &tscx.repeats) else {
369 tscx.marker.mark_span(&mut sp);
372 tscx.marker.mark_span(&mut original_ident.span);
373 tscx.result.push(TokenTree::token_joint_hidden(token::Dollar, sp));
374 tscx.result.push(TokenTree::Token(Token::from_ast_ident(original_ident), Spacing::Alone));
375 return Ok(());
376 };
377
378 let MatchedSingle(pnr) = cur_matched else {
379 return Err(dcx.create_err(MacroVarStillRepeating { span: sp, ident }));
381 };
382
383 transcribe_pnr(tscx, sp, pnr)
384}
385
/// Emits the token tree for a single matched nonterminal (`pnr`), wrapping
/// parsed AST fragments in invisible metavar-kind delimiters.
fn transcribe_pnr<'tx>(
    tscx: &mut TranscrCtx<'tx, '_>,
    mut sp: Span,
    pnr: &ParseNtResult,
) -> PResult<'tx, ()> {
    // Wraps `stream` in invisible delimiters tagged with `mv_kind`, and
    // records `mk_span -> sp` in the global metavar-span table.
    let mut mk_delimited = |mk_span, mv_kind, mut stream: TokenStream| {
        // Avoid double-wrapping: if the stream is already exactly one
        // invisible group of the same metavar kind, reuse its contents.
        if stream.len() == 1 {
            let tree = stream.iter().next().unwrap();
            if let TokenTree::Delimited(_, _, delim, inner) = tree
                && let Delimiter::Invisible(InvisibleOrigin::MetaVar(mvk)) = delim
                && mv_kind == *mvk
            {
                stream = inner.clone();
            }
        }

        tscx.marker.mark_span(&mut sp);
        with_metavar_spans(|mspans| mspans.insert(mk_span, sp));
        TokenTree::Delimited(
            DelimSpan::from_single(sp),
            DelimSpacing::new(Spacing::Alone, Spacing::Alone),
            Delimiter::Invisible(InvisibleOrigin::MetaVar(mv_kind)),
            stream,
        )
    };

    let tt = match pnr {
        // `tt` fragments are passed through (possibly re-spanned).
        ParseNtResult::Tt(tt) => {
            maybe_use_metavar_location(tscx.psess, &tscx.stack, sp, tt, &mut tscx.marker)
        }
        // Idents and lifetimes become single NtIdent/NtLifetime tokens.
        ParseNtResult::Ident(ident, is_raw) => {
            tscx.marker.mark_span(&mut sp);
            with_metavar_spans(|mspans| mspans.insert(ident.span, sp));
            let kind = token::NtIdent(*ident, *is_raw);
            TokenTree::token_alone(kind, sp)
        }
        ParseNtResult::Lifetime(ident, is_raw) => {
            tscx.marker.mark_span(&mut sp);
            with_metavar_spans(|mspans| mspans.insert(ident.span, sp));
            let kind = token::NtLifetime(*ident, *is_raw);
            TokenTree::token_alone(kind, sp)
        }
        // Parsed AST fragments: re-tokenize and wrap in invisible delimiters.
        ParseNtResult::Item(item) => {
            mk_delimited(item.span, MetaVarKind::Item, TokenStream::from_ast(item))
        }
        ParseNtResult::Block(block) => {
            mk_delimited(block.span, MetaVarKind::Block, TokenStream::from_ast(block))
        }
        ParseNtResult::Stmt(stmt) => {
            // An empty statement re-tokenizes as a lone `;`.
            let stream = if let StmtKind::Empty = stmt.kind {
                TokenStream::token_alone(token::Semi, stmt.span)
            } else {
                TokenStream::from_ast(stmt)
            };
            mk_delimited(stmt.span, MetaVarKind::Stmt, stream)
        }
        ParseNtResult::Pat(pat, pat_kind) => {
            mk_delimited(pat.span, MetaVarKind::Pat(*pat_kind), TokenStream::from_ast(pat))
        }
        ParseNtResult::Expr(expr, kind) => {
            // Record whether the expression can start a (maybe negated)
            // literal, so later parsing of the invisible group knows.
            let (can_begin_literal_maybe_minus, can_begin_string_literal) = match &expr.kind {
                ExprKind::Lit(_) => (true, true),
                ExprKind::Unary(UnOp::Neg, e) if matches!(&e.kind, ExprKind::Lit(_)) => {
                    (true, false)
                }
                _ => (false, false),
            };
            mk_delimited(
                expr.span,
                MetaVarKind::Expr {
                    kind: *kind,
                    can_begin_literal_maybe_minus,
                    can_begin_string_literal,
                },
                TokenStream::from_ast(expr),
            )
        }
        ParseNtResult::Literal(lit) => {
            mk_delimited(lit.span, MetaVarKind::Literal, TokenStream::from_ast(lit))
        }
        ParseNtResult::Ty(ty) => {
            let is_path = matches!(&ty.kind, TyKind::Path(None, _path));
            mk_delimited(ty.span, MetaVarKind::Ty { is_path }, TokenStream::from_ast(ty))
        }
        ParseNtResult::Meta(attr_item) => {
            let has_meta_form = attr_item.meta_kind().is_some();
            mk_delimited(
                attr_item.span(),
                MetaVarKind::Meta { has_meta_form },
                TokenStream::from_ast(attr_item),
            )
        }
        ParseNtResult::Path(path) => {
            mk_delimited(path.span, MetaVarKind::Path, TokenStream::from_ast(path))
        }
        ParseNtResult::Vis(vis) => {
            mk_delimited(vis.span, MetaVarKind::Vis, TokenStream::from_ast(vis))
        }
    };

    tscx.result.push(tt);
    Ok(())
}
502
/// Transcribes a `${...}` metavariable expression (`concat`, `count`,
/// `ignore`, `index`, `len`) into zero or one output tokens.
fn transcribe_metavar_expr<'tx>(
    tscx: &mut TranscrCtx<'tx, '_>,
    dspan: DelimSpan,
    expr: &MetaVarExpr,
) -> PResult<'tx, ()> {
    let dcx = tscx.psess.dcx();
    let tt = match *expr {
        MetaVarExpr::Concat(ref elements) => metavar_expr_concat(tscx, dspan, elements)?,
        // `${count(ident, depth)}`: emit the repetition count of `ident` at
        // `depth` as an integer literal.
        MetaVarExpr::Count(original_ident, depth) => {
            let matched = matched_from_ident(dcx, original_ident, tscx.interp)?;
            let count = count_repetitions(dcx, depth, matched, &tscx.repeats, &dspan)?;
            TokenTree::token_alone(
                TokenKind::lit(token::Integer, sym::integer(count), None),
                tscx.visited_dspan(dspan),
            )
        }
        // `${ignore(ident)}`: validate the metavariable but emit nothing.
        MetaVarExpr::Ignore(original_ident) => {
            let _ = matched_from_ident(dcx, original_ident, tscx.interp)?;
            return Ok(());
        }
        // `${index(depth)}`: current iteration index of the repetition
        // `depth` levels out from the innermost one.
        MetaVarExpr::Index(depth) => match tscx.repeats.iter().nth_back(depth) {
            Some((index, _)) => TokenTree::token_alone(
                TokenKind::lit(token::Integer, sym::integer(*index), None),
                tscx.visited_dspan(dspan),
            ),
            None => {
                return Err(out_of_bounds_err(dcx, tscx.repeats.len(), dspan.entire(), "index"));
            }
        },
        // `${len(depth)}`: total iteration count of that repetition.
        MetaVarExpr::Len(depth) => match tscx.repeats.iter().nth_back(depth) {
            Some((_, length)) => TokenTree::token_alone(
                TokenKind::lit(token::Integer, sym::integer(*length), None),
                tscx.visited_dspan(dspan),
            ),
            None => {
                return Err(out_of_bounds_err(dcx, tscx.repeats.len(), dspan.entire(), "len"));
            }
        },
    };
    tscx.result.push(tt);
    Ok(())
}
547
548fn metavar_expr_concat<'tx>(
550 tscx: &mut TranscrCtx<'tx, '_>,
551 dspan: DelimSpan,
552 elements: &[MetaVarExprConcatElem],
553) -> PResult<'tx, TokenTree> {
554 let dcx = tscx.psess.dcx();
555 let mut concatenated = String::new();
556 for element in elements.into_iter() {
557 let symbol = match element {
558 MetaVarExprConcatElem::Ident(elem) => elem.name,
559 MetaVarExprConcatElem::Literal(elem) => *elem,
560 MetaVarExprConcatElem::Var(ident) => {
561 match matched_from_ident(dcx, *ident, tscx.interp)? {
562 NamedMatch::MatchedSeq(named_matches) => {
563 let Some((curr_idx, _)) = tscx.repeats.last() else {
564 return Err(dcx.struct_span_err(dspan.entire(), "invalid syntax"));
565 };
566 match &named_matches[*curr_idx] {
567 MatchedSeq(_) => {
569 return Err(dcx.struct_span_err(
570 ident.span,
571 "nested repetitions with `${concat(...)}` metavariable expressions are not yet supported",
572 ));
573 }
574 MatchedSingle(pnr) => extract_symbol_from_pnr(dcx, pnr, ident.span)?,
575 }
576 }
577 NamedMatch::MatchedSingle(pnr) => {
578 extract_symbol_from_pnr(dcx, pnr, ident.span)?
579 }
580 }
581 }
582 };
583 concatenated.push_str(symbol.as_str());
584 }
585 let symbol = nfc_normalize(&concatenated);
586 let concatenated_span = tscx.visited_dspan(dspan);
587 if !rustc_lexer::is_ident(symbol.as_str()) {
588 return Err(dcx.struct_span_err(
589 concatenated_span,
590 "`${concat(..)}` is not generating a valid identifier",
591 ));
592 }
593 tscx.psess.symbol_gallery.insert(symbol, concatenated_span);
594
595 Ok(TokenTree::Token(
599 Token::from_ast_ident(Ident::new(symbol, concatenated_span)),
600 Spacing::Alone,
601 ))
602}
603
/// Decides whether a `tt`-matched token tree keeps its original spans or is
/// re-spanned at the metavariable's location, recording span equivalences in
/// the global metavar-span table either way.
fn maybe_use_metavar_location(
    psess: &ParseSess,
    stack: &[Frame<'_>],
    mut metavar_span: Span,
    orig_tt: &TokenTree,
    marker: &mut Marker,
) -> TokenTree {
    // A separator-less `$($x)*`/`$($x)+` sequence whose body is exactly one
    // token tree: keep the original token tree untouched.
    let undelimited_seq = matches!(
        stack.last(),
        Some(Frame {
            tts: [_],
            kind: FrameKind::Sequence {
                sep: None,
                kleene_op: KleeneOp::ZeroOrMore | KleeneOp::OneOrMore,
                ..
            },
            ..
        })
    );
    if undelimited_seq {
        return orig_tt.clone();
    }

    marker.mark_span(&mut metavar_span);
    // Try to record "original span -> metavar span". `insert` returning
    // `false` indicates a span was already mapped (a collision).
    let no_collision = match orig_tt {
        TokenTree::Token(token, ..) => {
            with_metavar_spans(|mspans| mspans.insert(token.span, metavar_span))
        }
        TokenTree::Delimited(dspan, ..) => with_metavar_spans(|mspans| {
            mspans.insert(dspan.open, metavar_span)
                && mspans.insert(dspan.close, metavar_span)
                && mspans.insert(dspan.entire(), metavar_span)
        }),
    };
    if no_collision || psess.source_map().is_imported(metavar_span) {
        return orig_tt.clone();
    }

    // Collision on a local span: rebuild the token tree located at the
    // metavariable, keeping each original span's syntax context.
    match orig_tt {
        TokenTree::Token(Token { kind, span }, spacing) => {
            let span = metavar_span.with_ctxt(span.ctxt());
            with_metavar_spans(|mspans| mspans.insert(span, metavar_span));
            TokenTree::Token(Token { kind: kind.clone(), span }, *spacing)
        }
        TokenTree::Delimited(dspan, dspacing, delimiter, tts) => {
            let open = metavar_span.with_ctxt(dspan.open.ctxt());
            let close = metavar_span.with_ctxt(dspan.close.ctxt());
            with_metavar_spans(|mspans| {
                mspans.insert(open, metavar_span) && mspans.insert(close, metavar_span)
            });
            let dspan = DelimSpan::from_pair(open, close);
            TokenTree::Delimited(dspan, *dspacing, *delimiter, tts.clone())
        }
    }
}
692
693fn lookup_cur_matched<'a>(
700 ident: MacroRulesNormalizedIdent,
701 interpolations: &'a FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
702 repeats: &[(usize, usize)],
703) -> Option<&'a NamedMatch> {
704 interpolations.get(&ident).map(|mut matched| {
705 for &(idx, _) in repeats {
706 match matched {
707 MatchedSingle(_) => break,
708 MatchedSeq(ads) => matched = ads.get(idx).unwrap(),
709 }
710 }
711
712 matched
713 })
714}
715
/// The size constraint that a sequence repetition's metavariables impose on
/// its iteration count.
#[derive(Clone)]
enum LockstepIterSize {
    /// No metavariable constrains the count.
    Unconstrained,

    /// A metavariable (named for diagnostics) requires exactly this count.
    Constraint(usize, MacroRulesNormalizedIdent),

    /// Two metavariables demanded different counts; holds the error message.
    Contradiction(String),
}
733
734impl LockstepIterSize {
735 fn with(self, other: LockstepIterSize) -> LockstepIterSize {
740 match self {
741 LockstepIterSize::Unconstrained => other,
742 LockstepIterSize::Contradiction(_) => self,
743 LockstepIterSize::Constraint(l_len, l_id) => match other {
744 LockstepIterSize::Unconstrained => self,
745 LockstepIterSize::Contradiction(_) => other,
746 LockstepIterSize::Constraint(r_len, _) if l_len == r_len => self,
747 LockstepIterSize::Constraint(r_len, r_id) => {
748 let msg = format!(
749 "meta-variable `{}` repeats {} time{}, but `{}` repeats {} time{}",
750 l_id,
751 l_len,
752 pluralize!(l_len),
753 r_id,
754 r_len,
755 pluralize!(r_len),
756 );
757 LockstepIterSize::Contradiction(msg)
758 }
759 },
760 }
761 }
762}
763
764fn lockstep_iter_size(
777 tree: &mbe::TokenTree,
778 interpolations: &FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
779 repeats: &[(usize, usize)],
780) -> LockstepIterSize {
781 use mbe::TokenTree;
782 match tree {
783 TokenTree::Delimited(.., delimited) => {
784 delimited.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| {
785 size.with(lockstep_iter_size(tt, interpolations, repeats))
786 })
787 }
788 TokenTree::Sequence(_, seq) => {
789 seq.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| {
790 size.with(lockstep_iter_size(tt, interpolations, repeats))
791 })
792 }
793 TokenTree::MetaVar(_, name) | TokenTree::MetaVarDecl { name, .. } => {
794 let name = MacroRulesNormalizedIdent::new(*name);
795 match lookup_cur_matched(name, interpolations, repeats) {
796 Some(matched) => match matched {
797 MatchedSingle(_) => LockstepIterSize::Unconstrained,
798 MatchedSeq(ads) => LockstepIterSize::Constraint(ads.len(), name),
799 },
800 _ => LockstepIterSize::Unconstrained,
801 }
802 }
803 TokenTree::MetaVarExpr(_, expr) => {
804 expr.for_each_metavar(LockstepIterSize::Unconstrained, |lis, ident| {
805 lis.with(lockstep_iter_size(
806 &TokenTree::MetaVar(ident.span, *ident),
807 interpolations,
808 repeats,
809 ))
810 })
811 }
812 TokenTree::Token(..) => LockstepIterSize::Unconstrained,
813 }
814}
815
/// Implements `${count(ident, depth_user)}`: counts the repetitions of
/// `matched` at the requested depth, relative to the repetition nesting the
/// transcriber is currently inside of (`repeats`).
fn count_repetitions<'dx>(
    dcx: DiagCtxtHandle<'dx>,
    depth_user: usize,
    mut matched: &NamedMatch,
    repeats: &[(usize, usize)],
    sp: &DelimSpan,
) -> PResult<'dx, usize> {
    // Counts matches at `depth_max`: a `MatchedSingle` counts as 1, levels
    // above `depth_max` contribute the sum over their children.
    fn count<'a>(depth_curr: usize, depth_max: usize, matched: &NamedMatch) -> PResult<'a, usize> {
        match matched {
            MatchedSingle(_) => Ok(1),
            MatchedSeq(named_matches) => {
                if depth_curr == depth_max {
                    Ok(named_matches.len())
                } else {
                    named_matches.iter().map(|elem| count(depth_curr + 1, depth_max, elem)).sum()
                }
            }
        }
    }

    // Measures the nesting depth of the match, probing along the first
    // element of each nested sequence.
    fn depth(counter: usize, matched: &NamedMatch) -> usize {
        match matched {
            MatchedSingle(_) => counter,
            MatchedSeq(named_matches) => {
                let rslt = counter + 1;
                if let Some(elem) = named_matches.first() { depth(rslt, elem) } else { rslt }
            }
        }
    }

    // Maximum depth the user may request, after discounting the repetition
    // levels already entered (`repeats`); saturates to 0.
    let depth_max = depth(0, matched)
        .checked_sub(1)
        .and_then(|el| el.checked_sub(repeats.len()))
        .unwrap_or_default();
    if depth_user > depth_max {
        return Err(out_of_bounds_err(dcx, depth_max + 1, sp.entire(), "count"));
    }

    // Descend to the match for the current iteration of each enclosing
    // repetition before counting.
    for &(idx, _) in repeats {
        if let MatchedSeq(ads) = matched {
            matched = &ads[idx];
        }
    }

    // `count` on a fully-resolved single match means it was used outside a
    // repetition of the counted variable.
    if let MatchedSingle(_) = matched {
        return Err(dcx.create_err(CountRepetitionMisplaced { span: sp.entire() }));
    }

    count(depth_user, depth_max, matched)
}
884
885fn matched_from_ident<'ctx, 'interp, 'rslt>(
887 dcx: DiagCtxtHandle<'ctx>,
888 ident: Ident,
889 interp: &'interp FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
890) -> PResult<'ctx, &'rslt NamedMatch>
891where
892 'interp: 'rslt,
893{
894 let span = ident.span;
895 let key = MacroRulesNormalizedIdent::new(ident);
896 interp.get(&key).ok_or_else(|| dcx.create_err(MveUnrecognizedVar { span, key }))
897}
898
899fn out_of_bounds_err<'a>(dcx: DiagCtxtHandle<'a>, max: usize, span: Span, ty: &str) -> Diag<'a> {
902 let msg = if max == 0 {
903 format!(
904 "meta-variable expression `{ty}` with depth parameter \
905 must be called inside of a macro repetition"
906 )
907 } else {
908 format!(
909 "depth parameter of meta-variable expression `{ty}` \
910 must be less than {max}"
911 )
912 };
913 dcx.struct_span_err(span, msg)
914}
915
/// Extracts the symbol a matched nonterminal contributes to `${concat(..)}`:
/// a non-raw identifier, an unsuffixed string literal, or an unsuffixed
/// integer literal. Everything else is a diagnostic.
fn extract_symbol_from_pnr<'a>(
    dcx: DiagCtxtHandle<'a>,
    pnr: &ParseNtResult,
    span_err: Span,
) -> PResult<'a, Symbol> {
    match pnr {
        // `ident` metavariable; raw identifiers are rejected.
        ParseNtResult::Ident(nt_ident, is_raw) => {
            if let IdentIsRaw::Yes = is_raw {
                Err(dcx.struct_span_err(span_err, RAW_IDENT_ERR))
            } else {
                Ok(nt_ident.name)
            }
        }
        // `tt` metavariable holding a bare identifier token.
        ParseNtResult::Tt(TokenTree::Token(
            Token { kind: TokenKind::Ident(symbol, is_raw), .. },
            _,
        )) => {
            if let IdentIsRaw::Yes = is_raw {
                Err(dcx.struct_span_err(span_err, RAW_IDENT_ERR))
            } else {
                Ok(*symbol)
            }
        }
        // `tt` metavariable holding an unsuffixed string literal.
        ParseNtResult::Tt(TokenTree::Token(
            Token {
                kind: TokenKind::Literal(Lit { kind: LitKind::Str, symbol, suffix: None }),
                ..
            },
            _,
        )) => Ok(*symbol),
        // `literal` metavariable holding an unsuffixed string literal.
        ParseNtResult::Literal(expr)
            if let ExprKind::Lit(Lit { kind: LitKind::Str, symbol, suffix: None }) = &expr.kind =>
        {
            Ok(*symbol)
        }
        // `literal` metavariable holding an integer literal; semantic floats
        // and suffixed integers are rejected.
        ParseNtResult::Literal(expr)
            if let ExprKind::Lit(lit @ Lit { kind: LitKind::Integer, symbol, suffix }) =
                &expr.kind =>
        {
            if lit.is_semantic_float() {
                Err(dcx
                    .struct_err("floats are not supported as metavariables of `${concat(..)}`")
                    .with_span(span_err))
            } else if suffix.is_none() {
                Ok(*symbol)
            } else {
                Err(dcx
                    .struct_err("integer metavariables of `${concat(..)}` must not be suffixed")
                    .with_span(span_err))
            }
        }
        // Any other fragment kind is unsupported in `${concat(..)}`.
        _ => Err(dcx
            .struct_err(
                "metavariables of `${concat(..)}` must be of type `ident`, `literal` or `tt`",
            )
            .with_note("currently only string and integer literals are supported")
            .with_span(span_err)),
    }
}