1use std::mem;
2
3use rustc_ast::token::{
4 self, Delimiter, IdentIsRaw, InvisibleOrigin, Lit, LitKind, MetaVarKind, Token, TokenKind,
5};
6use rustc_ast::tokenstream::{DelimSpacing, DelimSpan, Spacing, TokenStream, TokenTree};
7use rustc_ast::{ExprKind, StmtKind, TyKind, UnOp};
8use rustc_data_structures::fx::FxHashMap;
9use rustc_errors::{Diag, DiagCtxtHandle, PResult, listify, pluralize};
10use rustc_parse::lexer::nfc_normalize;
11use rustc_parse::parser::ParseNtResult;
12use rustc_session::parse::ParseSess;
13use rustc_span::hygiene::{LocalExpnId, Transparency};
14use rustc_span::{
15 BytePos, Ident, MacroRulesNormalizedIdent, Span, Symbol, SyntaxContext, kw, sym,
16 with_metavar_spans,
17};
18use smallvec::{SmallVec, smallvec};
19
20use crate::errors::{
21 CountRepetitionMisplaced, MacroVarStillRepeating, MetaVarsDifSeqMatchers, MustRepeatOnce,
22 MveUnrecognizedVar, NoRepeatableVar, NoSyntaxVarsExprRepeat, VarNoTypo,
23 VarTypoSuggestionRepeatable, VarTypoSuggestionUnrepeatable, VarTypoSuggestionUnrepeatableLabel,
24};
25use crate::mbe::macro_parser::NamedMatch;
26use crate::mbe::macro_parser::NamedMatch::*;
27use crate::mbe::metavar_expr::{MetaVarExprConcatElem, RAW_IDENT_ERR};
28use crate::mbe::{self, KleeneOp, MetaVarExpr};
29
/// Context needed to transcribe a macro rule's RHS: the matched metavariable
/// bindings, hygiene-marking state, and the in-progress output buffers.
struct TranscrCtx<'psess, 'itp> {
    psess: &'psess ParseSess,

    /// Map from metavariable name to what it matched against the LHS matcher.
    interp: &'itp FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,

    /// Applies (and memoizes) this expansion's hygiene mark to spans.
    marker: Marker,

    /// Stack of template frames still being transcribed
    /// (delimited groups and repetition sequences).
    stack: SmallVec<[Frame<'itp>; 1]>,

    /// For each enclosing repetition, `(current iteration index, total length)`.
    repeats: Vec<(usize, usize)>,

    /// Tokens produced so far for the innermost delimited group.
    result: Vec<TokenTree>,

    /// Saved `result` buffers of the enclosing delimited groups.
    result_stack: Vec<Vec<TokenTree>>,
}
72
73impl<'psess> TranscrCtx<'psess, '_> {
74 fn visited_dspan(&mut self, dspan: DelimSpan) -> Span {
76 let mut span = dspan.entire();
77 self.marker.mark_span(&mut span);
78 span
79 }
80}
81
/// Hygiene-marking state for one macro expansion: the expansion id, the
/// transparency of produced spans, and a per-context memo cache.
struct Marker {
    expand_id: LocalExpnId,
    transparency: Transparency,
    // Memoizes the marked context computed for each input `SyntaxContext`.
    cache: FxHashMap<SyntaxContext, SyntaxContext>,
}
88
89impl Marker {
90 fn mark_span(&mut self, span: &mut Span) {
92 *span = span.map_ctxt(|ctxt| {
97 *self
98 .cache
99 .entry(ctxt)
100 .or_insert_with(|| ctxt.apply_mark(self.expand_id.to_expn_id(), self.transparency))
101 });
102 }
103}
104
/// One entry in the transcription stack: a slice of template token trees,
/// a cursor into it, and what kind of construct the slice came from.
struct Frame<'a> {
    tts: &'a [mbe::TokenTree],
    idx: usize,
    kind: FrameKind,
}
111
/// Distinguishes delimited groups from `$(...)` repetition sequences on the
/// transcription stack.
enum FrameKind {
    Delimited { delim: Delimiter, span: DelimSpan, spacing: DelimSpacing },
    Sequence { sep: Option<Token>, kleene_op: KleeneOp },
}
116
117impl<'a> Frame<'a> {
118 fn new_delimited(src: &'a mbe::Delimited, span: DelimSpan, spacing: DelimSpacing) -> Frame<'a> {
119 Frame {
120 tts: &src.tts,
121 idx: 0,
122 kind: FrameKind::Delimited { delim: src.delim, span, spacing },
123 }
124 }
125
126 fn new_sequence(
127 src: &'a mbe::SequenceRepetition,
128 sep: Option<Token>,
129 kleene_op: KleeneOp,
130 ) -> Frame<'a> {
131 Frame { tts: &src.tts, idx: 0, kind: FrameKind::Sequence { sep, kleene_op } }
132 }
133}
134
135impl<'a> Iterator for Frame<'a> {
136 type Item = &'a mbe::TokenTree;
137
138 fn next(&mut self) -> Option<&'a mbe::TokenTree> {
139 let res = self.tts.get(self.idx);
140 self.idx += 1;
141 res
142 }
143}
144
145pub(super) fn transcribe<'a>(
166 psess: &'a ParseSess,
167 interp: &FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
168 src: &mbe::Delimited,
169 src_span: DelimSpan,
170 transparency: Transparency,
171 expand_id: LocalExpnId,
172) -> PResult<'a, TokenStream> {
173 if src.tts.is_empty() {
175 return Ok(TokenStream::default());
176 }
177
178 let mut tscx = TranscrCtx {
179 psess,
180 interp,
181 marker: Marker { expand_id, transparency, cache: Default::default() },
182 repeats: Vec::new(),
183 stack: {
let count = 0usize + 1usize;
let mut vec = ::smallvec::SmallVec::new();
if count <= vec.inline_size() {
vec.push(Frame::new_delimited(src, src_span,
DelimSpacing::new(Spacing::Alone, Spacing::Alone)));
vec
} else {
::smallvec::SmallVec::from_vec(::alloc::boxed::box_assume_init_into_vec_unsafe(::alloc::intrinsics::write_box_via_move(::alloc::boxed::Box::new_uninit(),
[Frame::new_delimited(src, src_span,
DelimSpacing::new(Spacing::Alone, Spacing::Alone))])))
}
}smallvec![Frame::new_delimited(
184 src,
185 src_span,
186 DelimSpacing::new(Spacing::Alone, Spacing::Alone)
187 )],
188 result: Vec::new(),
189 result_stack: Vec::new(),
190 };
191
192 loop {
193 let Some(tree) = tscx.stack.last_mut().unwrap().next() else {
196 let frame = tscx.stack.last_mut().unwrap();
201 if let FrameKind::Sequence { sep, .. } = &frame.kind {
202 let (repeat_idx, repeat_len) = tscx.repeats.last_mut().unwrap();
203 *repeat_idx += 1;
204 if repeat_idx < repeat_len {
205 frame.idx = 0;
206 if let Some(sep) = sep {
207 tscx.result.push(TokenTree::Token(*sep, Spacing::Alone));
208 }
209 continue;
210 }
211 }
212
213 match tscx.stack.pop().unwrap().kind {
217 FrameKind::Sequence { .. } => {
219 tscx.repeats.pop();
220 }
221
222 FrameKind::Delimited { delim, span, mut spacing, .. } => {
226 if delim == Delimiter::Bracket {
229 spacing.close = Spacing::Alone;
230 }
231 if tscx.result_stack.is_empty() {
232 return Ok(TokenStream::new(tscx.result));
234 }
235
236 let tree =
238 TokenTree::Delimited(span, spacing, delim, TokenStream::new(tscx.result));
239 tscx.result = tscx.result_stack.pop().unwrap();
240 tscx.result.push(tree);
241 }
242 }
243 continue;
244 };
245
246 match tree {
249 seq @ mbe::TokenTree::Sequence(_, seq_rep) => {
251 transcribe_sequence(&mut tscx, seq, seq_rep, interp)?;
252 }
253
254 &mbe::TokenTree::MetaVar(sp, original_ident) => {
256 transcribe_metavar(&mut tscx, sp, original_ident)?;
257 }
258
259 mbe::TokenTree::MetaVarExpr(dspan, expr) => {
261 transcribe_metavar_expr(&mut tscx, *dspan, expr)?;
262 }
263
264 &mbe::TokenTree::Delimited(mut span, ref spacing, ref delimited) => {
270 tscx.marker.mark_span(&mut span.open);
271 tscx.marker.mark_span(&mut span.close);
272 tscx.stack.push(Frame::new_delimited(delimited, span, *spacing));
273 tscx.result_stack.push(mem::take(&mut tscx.result));
274 }
275
276 &mbe::TokenTree::Token(mut token) => {
279 tscx.marker.mark_span(&mut token.span);
280 if let token::NtIdent(ident, _) | token::NtLifetime(ident, _) = &mut token.kind {
281 tscx.marker.mark_span(&mut ident.span);
282 }
283 let tt = TokenTree::Token(token, Spacing::Alone);
284 tscx.result.push(tt);
285 }
286
287 mbe::TokenTree::MetaVarDecl { .. } => {
::core::panicking::panic_fmt(format_args!("unexpected `TokenTree::MetaVarDecl`"));
}panic!("unexpected `TokenTree::MetaVarDecl`"),
289 }
290 }
291}
292
293fn transcribe_sequence<'tx, 'itp>(
295 tscx: &mut TranscrCtx<'tx, 'itp>,
296 seq: &mbe::TokenTree,
297 seq_rep: &'itp mbe::SequenceRepetition,
298 interp: &FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
300) -> PResult<'tx, ()> {
301 let dcx = tscx.psess.dcx();
302
303 match lockstep_iter_size(seq, tscx.interp, &tscx.repeats) {
307 LockstepIterSize::Unconstrained => {
308 let mut repeatables = Vec::new();
309 let mut non_repeatables = Vec::new();
310
311 #[allow(rustc::potential_query_instability)]
312 for (name, matcher) in interp.iter() {
313 if matcher.is_repeatable() {
314 repeatables.push(name);
315 } else {
316 non_repeatables.push(name);
317 }
318 }
319
320 let repeatable_names: Vec<Symbol> =
321 repeatables.iter().map(|&name| name.symbol()).collect();
322 let non_repeatable_names: Vec<Symbol> =
323 non_repeatables.iter().map(|&name| name.symbol()).collect();
324 let mut meta_vars = ::alloc::vec::Vec::new()vec![];
325 seq.meta_vars(&mut meta_vars);
326 let mut typo_repeatable = None;
327 let mut typo_unrepeatable = None;
328 let mut typo_unrepeatable_label = None;
329 let mut var_no_typo = None;
330 let mut no_repeatable_var = None;
331
332 for ident in meta_vars {
333 if let Some(name) = rustc_span::edit_distance::find_best_match_for_name(
334 &repeatable_names[..],
335 ident.name,
336 None,
337 ) {
338 typo_repeatable = Some(VarTypoSuggestionRepeatable { span: ident.span, name });
339 } else if let Some(name) = rustc_span::edit_distance::find_best_match_for_name(
340 &non_repeatable_names[..],
341 ident.name,
342 None,
343 ) {
344 typo_unrepeatable = Some(VarTypoSuggestionUnrepeatable { span: ident.span });
345 if let Some(&orig_ident) = non_repeatables.iter().find(|n| n.symbol() == name) {
346 typo_unrepeatable_label = Some(VarTypoSuggestionUnrepeatableLabel {
347 span: orig_ident.ident().span,
348 });
349 }
350 } else {
351 if !repeatable_names.is_empty()
352 && let Some(msg) = listify(&repeatable_names, |s| ::alloc::__export::must_use({
::alloc::fmt::format(format_args!("`${0}`", s))
})format!("`${s}`"))
353 {
354 var_no_typo = Some(VarNoTypo { span: ident.span, msg });
355 } else {
356 no_repeatable_var = Some(NoRepeatableVar { span: ident.span });
357 }
358 }
359 }
360 return Err(dcx.create_err(NoSyntaxVarsExprRepeat {
361 span: seq.span(),
362 typo_unrepeatable,
363 typo_repeatable,
364 typo_unrepeatable_label,
365 var_no_typo,
366 no_repeatable_var,
367 }));
368 }
369
370 LockstepIterSize::Contradiction(msg) => {
371 return Err(dcx.create_err(MetaVarsDifSeqMatchers { span: seq.span(), msg }));
376 }
377
378 LockstepIterSize::Constraint(len, _) => {
379 let mbe::TokenTree::Sequence(sp, seq) = seq else { ::core::panicking::panic("internal error: entered unreachable code")unreachable!() };
382
383 if len == 0 {
385 if seq.kleene.op == KleeneOp::OneOrMore {
386 return Err(dcx.create_err(MustRepeatOnce { span: sp.entire() }));
390 }
391 } else {
392 tscx.repeats.push((0, len));
395
396 tscx.stack.push(Frame::new_sequence(seq_rep, seq.separator, seq.kleene.op));
400 }
401 }
402 }
403
404 Ok(())
405}
406
407fn transcribe_metavar<'tx>(
424 tscx: &mut TranscrCtx<'tx, '_>,
425 mut sp: Span,
426 mut original_ident: Ident,
427) -> PResult<'tx, ()> {
428 let dcx = tscx.psess.dcx();
429
430 let ident = MacroRulesNormalizedIdent::new(original_ident);
431 let Some(cur_matched) = lookup_cur_matched(ident, tscx.interp, &tscx.repeats) else {
432 tscx.marker.mark_span(&mut sp);
435 tscx.marker.mark_span(&mut original_ident.span);
436 tscx.result.push(TokenTree::token_joint_hidden(token::Dollar, sp));
437 tscx.result.push(TokenTree::Token(Token::from_ast_ident(original_ident), Spacing::Alone));
438 return Ok(());
439 };
440
441 let MatchedSingle(pnr) = cur_matched else {
442 return Err(dcx.create_err(MacroVarStillRepeating { span: sp, ident }));
444 };
445
446 transcribe_pnr(tscx, sp, pnr)
447}
448
449fn transcribe_pnr<'tx>(
450 tscx: &mut TranscrCtx<'tx, '_>,
451 mut sp: Span,
452 pnr: &ParseNtResult,
453) -> PResult<'tx, ()> {
454 let mut mk_delimited = |mk_span, mv_kind, mut stream: TokenStream| {
459 if stream.len() == 1 {
460 let tree = stream.iter().next().unwrap();
461 if let TokenTree::Delimited(_, _, delim, inner) = tree
462 && let Delimiter::Invisible(InvisibleOrigin::MetaVar(mvk)) = delim
463 && mv_kind == *mvk
464 {
465 stream = inner.clone();
466 }
467 }
468
469 tscx.marker.mark_span(&mut sp);
472 with_metavar_spans(|mspans| mspans.insert(mk_span, sp));
473 TokenTree::Delimited(
476 DelimSpan::from_single(sp),
477 DelimSpacing::new(Spacing::Alone, Spacing::Alone),
478 Delimiter::Invisible(InvisibleOrigin::MetaVar(mv_kind)),
479 stream,
480 )
481 };
482
483 let tt = match pnr {
484 ParseNtResult::Tt(tt) => {
485 maybe_use_metavar_location(tscx.psess, &tscx.stack, sp, tt, &mut tscx.marker)
490 }
491 ParseNtResult::Ident(ident, is_raw) => {
492 tscx.marker.mark_span(&mut sp);
493 with_metavar_spans(|mspans| mspans.insert(ident.span, sp));
494 let kind = token::NtIdent(*ident, *is_raw);
495 TokenTree::token_alone(kind, sp)
496 }
497 ParseNtResult::Lifetime(ident, is_raw) => {
498 tscx.marker.mark_span(&mut sp);
499 with_metavar_spans(|mspans| mspans.insert(ident.span, sp));
500 let kind = token::NtLifetime(*ident, *is_raw);
501 TokenTree::token_alone(kind, sp)
502 }
503 ParseNtResult::Item(item) => {
504 mk_delimited(item.span, MetaVarKind::Item, TokenStream::from_ast(item))
505 }
506 ParseNtResult::Block(block) => {
507 mk_delimited(block.span, MetaVarKind::Block, TokenStream::from_ast(block))
508 }
509 ParseNtResult::Stmt(stmt) => {
510 let stream = if let StmtKind::Empty = stmt.kind {
511 TokenStream::token_alone(token::Semi, stmt.span)
513 } else {
514 TokenStream::from_ast(stmt)
515 };
516 mk_delimited(stmt.span, MetaVarKind::Stmt, stream)
517 }
518 ParseNtResult::Pat(pat, pat_kind) => {
519 mk_delimited(pat.span, MetaVarKind::Pat(*pat_kind), TokenStream::from_ast(pat))
520 }
521 ParseNtResult::Expr(expr, kind) => {
522 let (can_begin_literal_maybe_minus, can_begin_string_literal) = match &expr.kind {
523 ExprKind::Lit(_) => (true, true),
524 ExprKind::Unary(UnOp::Neg, e) if #[allow(non_exhaustive_omitted_patterns)] match &e.kind {
ExprKind::Lit(_) => true,
_ => false,
}matches!(&e.kind, ExprKind::Lit(_)) => {
525 (true, false)
526 }
527 _ => (false, false),
528 };
529 mk_delimited(
530 expr.span,
531 MetaVarKind::Expr {
532 kind: *kind,
533 can_begin_literal_maybe_minus,
534 can_begin_string_literal,
535 },
536 TokenStream::from_ast(expr),
537 )
538 }
539 ParseNtResult::Literal(lit) => {
540 mk_delimited(lit.span, MetaVarKind::Literal, TokenStream::from_ast(lit))
541 }
542 ParseNtResult::Ty(ty) => {
543 let is_path = #[allow(non_exhaustive_omitted_patterns)] match &ty.kind {
TyKind::Path(None, _path) => true,
_ => false,
}matches!(&ty.kind, TyKind::Path(None, _path));
544 mk_delimited(ty.span, MetaVarKind::Ty { is_path }, TokenStream::from_ast(ty))
545 }
546 ParseNtResult::Meta(attr_item) => {
547 let has_meta_form = attr_item.meta_kind().is_some();
548 mk_delimited(
549 attr_item.span(),
550 MetaVarKind::Meta { has_meta_form },
551 TokenStream::from_ast(attr_item),
552 )
553 }
554 ParseNtResult::Path(path) => {
555 mk_delimited(path.span, MetaVarKind::Path, TokenStream::from_ast(path))
556 }
557 ParseNtResult::Vis(vis) => {
558 mk_delimited(vis.span, MetaVarKind::Vis, TokenStream::from_ast(vis))
559 }
560 ParseNtResult::Guard(guard) => {
561 let leading_if_span =
566 guard.span_with_leading_if.with_hi(guard.span_with_leading_if.lo() + BytePos(2));
567 let mut ts =
568 TokenStream::token_alone(token::Ident(kw::If, IdentIsRaw::No), leading_if_span);
569 ts.push_stream(TokenStream::from_ast(&guard.cond));
570
571 mk_delimited(guard.span_with_leading_if, MetaVarKind::Guard, ts)
572 }
573 };
574
575 tscx.result.push(tt);
576 Ok(())
577}
578
579fn transcribe_metavar_expr<'tx>(
581 tscx: &mut TranscrCtx<'tx, '_>,
582 dspan: DelimSpan,
583 expr: &MetaVarExpr,
584) -> PResult<'tx, ()> {
585 let dcx = tscx.psess.dcx();
586 let tt = match *expr {
587 MetaVarExpr::Concat(ref elements) => metavar_expr_concat(tscx, dspan, elements)?,
588 MetaVarExpr::Count(original_ident, depth) => {
589 let matched = matched_from_ident(dcx, original_ident, tscx.interp)?;
590 let count = count_repetitions(dcx, depth, matched, &tscx.repeats, &dspan)?;
591 TokenTree::token_alone(
592 TokenKind::lit(token::Integer, sym::integer(count), None),
593 tscx.visited_dspan(dspan),
594 )
595 }
596 MetaVarExpr::Ignore(original_ident) => {
597 let _ = matched_from_ident(dcx, original_ident, tscx.interp)?;
599 return Ok(());
600 }
601 MetaVarExpr::Index(depth) => match tscx.repeats.iter().nth_back(depth) {
602 Some((index, _)) => TokenTree::token_alone(
603 TokenKind::lit(token::Integer, sym::integer(*index), None),
604 tscx.visited_dspan(dspan),
605 ),
606 None => {
607 return Err(out_of_bounds_err(dcx, tscx.repeats.len(), dspan.entire(), "index"));
608 }
609 },
610 MetaVarExpr::Len(depth) => match tscx.repeats.iter().nth_back(depth) {
611 Some((_, length)) => TokenTree::token_alone(
612 TokenKind::lit(token::Integer, sym::integer(*length), None),
613 tscx.visited_dspan(dspan),
614 ),
615 None => {
616 return Err(out_of_bounds_err(dcx, tscx.repeats.len(), dspan.entire(), "len"));
617 }
618 },
619 };
620 tscx.result.push(tt);
621 Ok(())
622}
623
624fn metavar_expr_concat<'tx>(
626 tscx: &mut TranscrCtx<'tx, '_>,
627 dspan: DelimSpan,
628 elements: &[MetaVarExprConcatElem],
629) -> PResult<'tx, TokenTree> {
630 let dcx = tscx.psess.dcx();
631 let mut concatenated = String::new();
632 for element in elements {
633 let symbol = match element {
634 MetaVarExprConcatElem::Ident(elem) => elem.name,
635 MetaVarExprConcatElem::Literal(elem) => *elem,
636 MetaVarExprConcatElem::Var(ident) => {
637 let key = MacroRulesNormalizedIdent::new(*ident);
638 match lookup_cur_matched(key, tscx.interp, &tscx.repeats) {
639 Some(NamedMatch::MatchedSingle(pnr)) => {
640 extract_symbol_from_pnr(dcx, pnr, ident.span)?
641 }
642 Some(NamedMatch::MatchedSeq(..)) => {
643 return Err(dcx.struct_span_err(
644 ident.span,
645 "`${concat(...)}` variable is still repeating at this depth",
646 ));
647 }
648 None => {
649 return Err(dcx.create_err(MveUnrecognizedVar { span: ident.span, key }));
650 }
651 }
652 }
653 };
654 concatenated.push_str(symbol.as_str());
655 }
656 let symbol = nfc_normalize(&concatenated);
657 let concatenated_span = tscx.visited_dspan(dspan);
658 if !rustc_lexer::is_ident(symbol.as_str()) {
659 return Err(dcx.struct_span_err(
660 concatenated_span,
661 "`${concat(..)}` is not generating a valid identifier",
662 ));
663 }
664 tscx.psess.symbol_gallery.insert(symbol, concatenated_span);
665
666 Ok(TokenTree::Token(
670 Token::from_ast_ident(Ident::new(symbol, concatenated_span)),
671 Spacing::Alone,
672 ))
673}
674
675fn maybe_use_metavar_location(
706 psess: &ParseSess,
707 stack: &[Frame<'_>],
708 mut metavar_span: Span,
709 orig_tt: &TokenTree,
710 marker: &mut Marker,
711) -> TokenTree {
712 let undelimited_seq = #[allow(non_exhaustive_omitted_patterns)] match stack.last() {
Some(Frame {
tts: [_],
kind: FrameKind::Sequence {
sep: None, kleene_op: KleeneOp::ZeroOrMore | KleeneOp::OneOrMore,
..
}, .. }) => true,
_ => false,
}matches!(
713 stack.last(),
714 Some(Frame {
715 tts: [_],
716 kind: FrameKind::Sequence {
717 sep: None,
718 kleene_op: KleeneOp::ZeroOrMore | KleeneOp::OneOrMore,
719 ..
720 },
721 ..
722 })
723 );
724 if undelimited_seq {
725 return orig_tt.clone();
727 }
728
729 marker.mark_span(&mut metavar_span);
730 let no_collision = match orig_tt {
731 TokenTree::Token(token, ..) => {
732 with_metavar_spans(|mspans| mspans.insert(token.span, metavar_span))
733 }
734 TokenTree::Delimited(dspan, ..) => with_metavar_spans(|mspans| {
735 mspans.insert(dspan.open, metavar_span)
736 && mspans.insert(dspan.close, metavar_span)
737 && mspans.insert(dspan.entire(), metavar_span)
738 }),
739 };
740 if no_collision || psess.source_map().is_imported(metavar_span) {
741 return orig_tt.clone();
742 }
743
744 match orig_tt {
747 TokenTree::Token(Token { kind, span }, spacing) => {
748 let span = metavar_span.with_ctxt(span.ctxt());
749 with_metavar_spans(|mspans| mspans.insert(span, metavar_span));
750 TokenTree::Token(Token { kind: *kind, span }, *spacing)
751 }
752 TokenTree::Delimited(dspan, dspacing, delimiter, tts) => {
753 let open = metavar_span.with_ctxt(dspan.open.ctxt());
754 let close = metavar_span.with_ctxt(dspan.close.ctxt());
755 with_metavar_spans(|mspans| {
756 mspans.insert(open, metavar_span) && mspans.insert(close, metavar_span)
757 });
758 let dspan = DelimSpan::from_pair(open, close);
759 TokenTree::Delimited(dspan, *dspacing, *delimiter, tts.clone())
760 }
761 }
762}
763
764fn lookup_cur_matched<'a>(
771 ident: MacroRulesNormalizedIdent,
772 interpolations: &'a FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
773 repeats: &[(usize, usize)],
774) -> Option<&'a NamedMatch> {
775 interpolations.get(&ident).map(|mut matched| {
776 for &(idx, _) in repeats {
777 match matched {
778 MatchedSingle(_) => break,
779 MatchedSeq(ads) => matched = ads.get(idx).unwrap(),
780 }
781 }
782
783 matched
784 })
785}
786
787#[derive(#[automatically_derived]
impl ::core::clone::Clone for LockstepIterSize {
#[inline]
fn clone(&self) -> LockstepIterSize {
match self {
LockstepIterSize::Unconstrained =>
LockstepIterSize::Unconstrained,
LockstepIterSize::Constraint(__self_0, __self_1) =>
LockstepIterSize::Constraint(::core::clone::Clone::clone(__self_0),
::core::clone::Clone::clone(__self_1)),
LockstepIterSize::Contradiction(__self_0) =>
LockstepIterSize::Contradiction(::core::clone::Clone::clone(__self_0)),
}
}
}Clone)]
792enum LockstepIterSize {
793 Unconstrained,
796
797 Constraint(usize, MacroRulesNormalizedIdent),
800
801 Contradiction(String),
803}
804
805impl LockstepIterSize {
806 fn with(self, other: LockstepIterSize) -> LockstepIterSize {
811 match self {
812 LockstepIterSize::Unconstrained => other,
813 LockstepIterSize::Contradiction(_) => self,
814 LockstepIterSize::Constraint(l_len, l_id) => match other {
815 LockstepIterSize::Unconstrained => self,
816 LockstepIterSize::Contradiction(_) => other,
817 LockstepIterSize::Constraint(r_len, _) if l_len == r_len => self,
818 LockstepIterSize::Constraint(r_len, r_id) => {
819 let msg = ::alloc::__export::must_use({
::alloc::fmt::format(format_args!("meta-variable `{0}` repeats {1} time{2}, but `{3}` repeats {4} time{5}",
l_id, l_len, if l_len == 1 { "" } else { "s" }, r_id, r_len,
if r_len == 1 { "" } else { "s" }))
})format!(
820 "meta-variable `{}` repeats {} time{}, but `{}` repeats {} time{}",
821 l_id,
822 l_len,
823 pluralize!(l_len),
824 r_id,
825 r_len,
826 pluralize!(r_len),
827 );
828 LockstepIterSize::Contradiction(msg)
829 }
830 },
831 }
832 }
833}
834
835fn lockstep_iter_size(
848 tree: &mbe::TokenTree,
849 interpolations: &FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
850 repeats: &[(usize, usize)],
851) -> LockstepIterSize {
852 use mbe::TokenTree;
853 match tree {
854 TokenTree::Delimited(.., delimited) => {
855 delimited.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| {
856 size.with(lockstep_iter_size(tt, interpolations, repeats))
857 })
858 }
859 TokenTree::Sequence(_, seq) => {
860 seq.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| {
861 size.with(lockstep_iter_size(tt, interpolations, repeats))
862 })
863 }
864 TokenTree::MetaVar(_, name) | TokenTree::MetaVarDecl { name, .. } => {
865 let name = MacroRulesNormalizedIdent::new(*name);
866 match lookup_cur_matched(name, interpolations, repeats) {
867 Some(matched) => match matched {
868 MatchedSingle(_) => LockstepIterSize::Unconstrained,
869 MatchedSeq(ads) => LockstepIterSize::Constraint(ads.len(), name),
870 },
871 _ => LockstepIterSize::Unconstrained,
872 }
873 }
874 TokenTree::MetaVarExpr(_, expr) => {
875 expr.for_each_metavar(LockstepIterSize::Unconstrained, |lis, ident| {
876 lis.with(lockstep_iter_size(
877 &TokenTree::MetaVar(ident.span, *ident),
878 interpolations,
879 repeats,
880 ))
881 })
882 }
883 TokenTree::Token(..) => LockstepIterSize::Unconstrained,
884 }
885}
886
/// Implements `${count($var, depth)}`: returns how many times `matched`
/// repeats at the requested depth, relative to the currently-active
/// repetitions in `repeats`.
///
/// Errors if the depth is out of bounds or if, after descending through the
/// active repetitions, there is nothing left repeating.
fn count_repetitions<'dx>(
    dcx: DiagCtxtHandle<'dx>,
    depth_user: usize,
    mut matched: &NamedMatch,
    repeats: &[(usize, usize)],
    sp: &DelimSpan,
) -> PResult<'dx, usize> {
    // Sums the number of matches at `depth_max`; above it, recurses into
    // each nested sequence and adds the sub-counts.
    fn count<'a>(depth_curr: usize, depth_max: usize, matched: &NamedMatch) -> PResult<'a, usize> {
        match matched {
            MatchedSingle(_) => Ok(1),
            MatchedSeq(named_matches) => {
                if depth_curr == depth_max {
                    Ok(named_matches.len())
                } else {
                    named_matches.iter().map(|elem| count(depth_curr + 1, depth_max, elem)).sum()
                }
            }
        }
    }

    // Depth of sequence nesting, probed along the first element of each
    // sequence (assumes uniform nesting depth across siblings).
    fn depth(counter: usize, matched: &NamedMatch) -> usize {
        match matched {
            MatchedSingle(_) => counter,
            MatchedSeq(named_matches) => {
                let rslt = counter + 1;
                if let Some(elem) = named_matches.first() { depth(rslt, elem) } else { rslt }
            }
        }
    }

    // Max depth the user may ask for: total nesting, minus one (depths are
    // zero-based), minus the repetitions already entered; saturates at 0.
    let depth_max = depth(0, matched)
        .checked_sub(1)
        .and_then(|el| el.checked_sub(repeats.len()))
        .unwrap_or_default();
    if depth_user > depth_max {
        return Err(out_of_bounds_err(dcx, depth_max + 1, sp.entire(), "count"));
    }

    // Descend into the currently-active repetition iterations.
    for &(idx, _) in repeats {
        if let MatchedSeq(ads) = matched {
            matched = &ads[idx];
        }
    }

    // Nothing left repeating: `count` was used outside any repetition of the
    // variable.
    if let MatchedSingle(_) = matched {
        return Err(dcx.create_err(CountRepetitionMisplaced { span: sp.entire() }));
    }

    count(depth_user, depth_max, matched)
}
955
956fn matched_from_ident<'ctx, 'interp, 'rslt>(
958 dcx: DiagCtxtHandle<'ctx>,
959 ident: Ident,
960 interp: &'interp FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
961) -> PResult<'ctx, &'rslt NamedMatch>
962where
963 'interp: 'rslt,
964{
965 let span = ident.span;
966 let key = MacroRulesNormalizedIdent::new(ident);
967 interp.get(&key).ok_or_else(|| dcx.create_err(MveUnrecognizedVar { span, key }))
968}
969
970fn out_of_bounds_err<'a>(dcx: DiagCtxtHandle<'a>, max: usize, span: Span, ty: &str) -> Diag<'a> {
973 let msg = if max == 0 {
974 ::alloc::__export::must_use({
::alloc::fmt::format(format_args!("meta-variable expression `{0}` with depth parameter must be called inside of a macro repetition",
ty))
})format!(
975 "meta-variable expression `{ty}` with depth parameter \
976 must be called inside of a macro repetition"
977 )
978 } else {
979 ::alloc::__export::must_use({
::alloc::fmt::format(format_args!("depth parameter of meta-variable expression `{0}` must be less than {1}",
ty, max))
})format!(
980 "depth parameter of meta-variable expression `{ty}` \
981 must be less than {max}"
982 )
983 };
984 dcx.struct_span_err(span, msg)
985}
986
/// Extracts the symbol a `${concat(..)}` element contributes from a resolved
/// metavariable match.
///
/// Accepts non-raw identifiers (as `ident` or `tt`), unsuffixed string
/// literals (as `tt` or `literal`), and unsuffixed non-float integer
/// literals; everything else is rejected with a diagnostic at `span_err`.
fn extract_symbol_from_pnr<'a>(
    dcx: DiagCtxtHandle<'a>,
    pnr: &ParseNtResult,
    span_err: Span,
) -> PResult<'a, Symbol> {
    match pnr {
        // `$x:ident` — raw identifiers are not allowed in concatenation.
        ParseNtResult::Ident(nt_ident, is_raw) => {
            if let IdentIsRaw::Yes = is_raw {
                Err(dcx.struct_span_err(span_err, RAW_IDENT_ERR))
            } else {
                Ok(nt_ident.name)
            }
        }
        // `$x:tt` matching a plain identifier token.
        ParseNtResult::Tt(TokenTree::Token(
            Token { kind: TokenKind::Ident(symbol, is_raw), .. },
            _,
        )) => {
            if let IdentIsRaw::Yes = is_raw {
                Err(dcx.struct_span_err(span_err, RAW_IDENT_ERR))
            } else {
                Ok(*symbol)
            }
        }
        // `$x:tt` matching an unsuffixed string literal token.
        ParseNtResult::Tt(TokenTree::Token(
            Token {
                kind: TokenKind::Literal(Lit { kind: LitKind::Str, symbol, suffix: None }),
                ..
            },
            _,
        )) => Ok(*symbol),
        // `$x:literal` that is an unsuffixed string literal expression.
        ParseNtResult::Literal(expr)
            if let ExprKind::Lit(Lit { kind: LitKind::Str, symbol, suffix: None }) = &expr.kind =>
        {
            Ok(*symbol)
        }
        // `$x:literal` that is an integer literal: reject semantic floats
        // (e.g. `1e3`) and suffixed integers (e.g. `1u8`).
        ParseNtResult::Literal(expr)
            if let ExprKind::Lit(lit @ Lit { kind: LitKind::Integer, symbol, suffix }) =
                &expr.kind =>
        {
            if lit.is_semantic_float() {
                Err(dcx
                    .struct_err("floats are not supported as metavariables of `${concat(..)}`")
                    .with_span(span_err))
            } else if suffix.is_none() {
                Ok(*symbol)
            } else {
                Err(dcx
                    .struct_err("integer metavariables of `${concat(..)}` must not be suffixed")
                    .with_span(span_err))
            }
        }
        // Any other fragment kind or token shape is unsupported.
        _ => Err(dcx
            .struct_err(
                "metavariables of `${concat(..)}` must be of type `ident`, `literal` or `tt`",
            )
            .with_note("currently only string and integer literals are supported")
            .with_span(span_err)),
    }
}