1use std::mem;
2
3use rustc_ast::token::{
4 self, Delimiter, IdentIsRaw, InvisibleOrigin, Lit, LitKind, MetaVarKind, Token, TokenKind,
5};
6use rustc_ast::tokenstream::{DelimSpacing, DelimSpan, Spacing, TokenStream, TokenTree};
7use rustc_ast::{ExprKind, StmtKind, TyKind, UnOp};
8use rustc_data_structures::fx::FxHashMap;
9use rustc_errors::{Diag, DiagCtxtHandle, PResult, listify, pluralize};
10use rustc_parse::lexer::nfc_normalize;
11use rustc_parse::parser::ParseNtResult;
12use rustc_session::parse::ParseSess;
13use rustc_span::hygiene::{LocalExpnId, Transparency};
14use rustc_span::{
15 Ident, MacroRulesNormalizedIdent, Span, Symbol, SyntaxContext, sym, with_metavar_spans,
16};
17use smallvec::{SmallVec, smallvec};
18
19use crate::errors::{
20 CountRepetitionMisplaced, MacroVarStillRepeating, MetaVarsDifSeqMatchers, MustRepeatOnce,
21 MveUnrecognizedVar, NoRepeatableVar, NoSyntaxVarsExprRepeat, VarNoTypo,
22 VarTypoSuggestionRepeatable, VarTypoSuggestionUnrepeatable, VarTypoSuggestionUnrepeatableLabel,
23};
24use crate::mbe::macro_parser::NamedMatch;
25use crate::mbe::macro_parser::NamedMatch::*;
26use crate::mbe::metavar_expr::{MetaVarExprConcatElem, RAW_IDENT_ERR};
27use crate::mbe::{self, KleeneOp, MetaVarExpr};
28
/// State used while transcribing a macro's RHS into a `TokenStream`.
struct TranscrCtx<'psess, 'itp> {
    /// Parse session, used for diagnostics and the symbol gallery.
    psess: &'psess ParseSess,

    /// The matches recorded for each metavariable by the macro parser.
    interp: &'itp FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,

    /// Applies this expansion's hygiene mark to spans.
    marker: Marker,

    /// Stack of frames still being transcribed (delimited groups and sequences).
    stack: SmallVec<[Frame<'itp>; 1]>,

    /// For each enclosing repetition: `(current iteration index, total length)`.
    repeats: Vec<(usize, usize)>,

    /// Token trees produced so far for the innermost delimited group.
    result: Vec<TokenTree>,

    /// Saved `result` buffers of outer delimited groups, restored when a
    /// delimited frame is popped.
    result_stack: Vec<Vec<TokenTree>>,
}
71
72impl<'psess> TranscrCtx<'psess, '_> {
73 fn visited_dspan(&mut self, dspan: DelimSpan) -> Span {
75 let mut span = dspan.entire();
76 self.marker.mark_span(&mut span);
77 span
78 }
79}
80
/// Applies the hygiene mark of one macro expansion to spans, memoizing the
/// translation per `SyntaxContext`.
struct Marker {
    /// Identity of this macro expansion.
    expand_id: LocalExpnId,
    /// Hygiene transparency applied when marking.
    transparency: Transparency,
    /// Memoized context translations; many spans share the same context.
    cache: FxHashMap<SyntaxContext, SyntaxContext>,
}
87
88impl Marker {
89 fn mark_span(&mut self, span: &mut Span) {
91 *span = span.map_ctxt(|ctxt| {
96 *self
97 .cache
98 .entry(ctxt)
99 .or_insert_with(|| ctxt.apply_mark(self.expand_id.to_expn_id(), self.transparency))
100 });
101 }
102}
103
/// One level of the transcription traversal: a slice of `mbe::TokenTree`s
/// together with a cursor into it.
struct Frame<'a> {
    /// The token trees of this delimited group or sequence body.
    tts: &'a [mbe::TokenTree],
    /// Index of the next token tree to process.
    idx: usize,
    /// Whether this frame is a delimited group or a repetition sequence.
    kind: FrameKind,
}
110
/// Distinguishes the two kinds of [`Frame`].
enum FrameKind {
    /// A delimited group such as `(...)`, `[...]`, or `{...}`.
    Delimited { delim: Delimiter, span: DelimSpan, spacing: DelimSpacing },
    /// The body of a `$(...)` repetition, with its separator and Kleene operator.
    Sequence { sep: Option<Token>, kleene_op: KleeneOp },
}
115
116impl<'a> Frame<'a> {
117 fn new_delimited(src: &'a mbe::Delimited, span: DelimSpan, spacing: DelimSpacing) -> Frame<'a> {
118 Frame {
119 tts: &src.tts,
120 idx: 0,
121 kind: FrameKind::Delimited { delim: src.delim, span, spacing },
122 }
123 }
124
125 fn new_sequence(
126 src: &'a mbe::SequenceRepetition,
127 sep: Option<Token>,
128 kleene_op: KleeneOp,
129 ) -> Frame<'a> {
130 Frame { tts: &src.tts, idx: 0, kind: FrameKind::Sequence { sep, kleene_op } }
131 }
132}
133
134impl<'a> Iterator for Frame<'a> {
135 type Item = &'a mbe::TokenTree;
136
137 fn next(&mut self) -> Option<&'a mbe::TokenTree> {
138 let res = self.tts.get(self.idx);
139 self.idx += 1;
140 res
141 }
142}
143
144pub(super) fn transcribe<'a>(
165 psess: &'a ParseSess,
166 interp: &FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
167 src: &mbe::Delimited,
168 src_span: DelimSpan,
169 transparency: Transparency,
170 expand_id: LocalExpnId,
171) -> PResult<'a, TokenStream> {
172 if src.tts.is_empty() {
174 return Ok(TokenStream::default());
175 }
176
177 let mut tscx = TranscrCtx {
178 psess,
179 interp,
180 marker: Marker { expand_id, transparency, cache: Default::default() },
181 repeats: Vec::new(),
182 stack: {
let count = 0usize + 1usize;
let mut vec = ::smallvec::SmallVec::new();
if count <= vec.inline_size() {
vec.push(Frame::new_delimited(src, src_span,
DelimSpacing::new(Spacing::Alone, Spacing::Alone)));
vec
} else {
::smallvec::SmallVec::from_vec(::alloc::boxed::box_assume_init_into_vec_unsafe(::alloc::intrinsics::write_box_via_move(::alloc::boxed::Box::new_uninit(),
[Frame::new_delimited(src, src_span,
DelimSpacing::new(Spacing::Alone, Spacing::Alone))])))
}
}smallvec![Frame::new_delimited(
183 src,
184 src_span,
185 DelimSpacing::new(Spacing::Alone, Spacing::Alone)
186 )],
187 result: Vec::new(),
188 result_stack: Vec::new(),
189 };
190
191 loop {
192 let Some(tree) = tscx.stack.last_mut().unwrap().next() else {
195 let frame = tscx.stack.last_mut().unwrap();
200 if let FrameKind::Sequence { sep, .. } = &frame.kind {
201 let (repeat_idx, repeat_len) = tscx.repeats.last_mut().unwrap();
202 *repeat_idx += 1;
203 if repeat_idx < repeat_len {
204 frame.idx = 0;
205 if let Some(sep) = sep {
206 tscx.result.push(TokenTree::Token(*sep, Spacing::Alone));
207 }
208 continue;
209 }
210 }
211
212 match tscx.stack.pop().unwrap().kind {
216 FrameKind::Sequence { .. } => {
218 tscx.repeats.pop();
219 }
220
221 FrameKind::Delimited { delim, span, mut spacing, .. } => {
225 if delim == Delimiter::Bracket {
228 spacing.close = Spacing::Alone;
229 }
230 if tscx.result_stack.is_empty() {
231 return Ok(TokenStream::new(tscx.result));
233 }
234
235 let tree =
237 TokenTree::Delimited(span, spacing, delim, TokenStream::new(tscx.result));
238 tscx.result = tscx.result_stack.pop().unwrap();
239 tscx.result.push(tree);
240 }
241 }
242 continue;
243 };
244
245 match tree {
248 seq @ mbe::TokenTree::Sequence(_, seq_rep) => {
250 transcribe_sequence(&mut tscx, seq, seq_rep, interp)?;
251 }
252
253 &mbe::TokenTree::MetaVar(sp, original_ident) => {
255 transcribe_metavar(&mut tscx, sp, original_ident)?;
256 }
257
258 mbe::TokenTree::MetaVarExpr(dspan, expr) => {
260 transcribe_metavar_expr(&mut tscx, *dspan, expr)?;
261 }
262
263 &mbe::TokenTree::Delimited(mut span, ref spacing, ref delimited) => {
269 tscx.marker.mark_span(&mut span.open);
270 tscx.marker.mark_span(&mut span.close);
271 tscx.stack.push(Frame::new_delimited(delimited, span, *spacing));
272 tscx.result_stack.push(mem::take(&mut tscx.result));
273 }
274
275 &mbe::TokenTree::Token(mut token) => {
278 tscx.marker.mark_span(&mut token.span);
279 if let token::NtIdent(ident, _) | token::NtLifetime(ident, _) = &mut token.kind {
280 tscx.marker.mark_span(&mut ident.span);
281 }
282 let tt = TokenTree::Token(token, Spacing::Alone);
283 tscx.result.push(tt);
284 }
285
286 mbe::TokenTree::MetaVarDecl { .. } => {
::core::panicking::panic_fmt(format_args!("unexpected `TokenTree::MetaVarDecl`"));
}panic!("unexpected `TokenTree::MetaVarDecl`"),
288 }
289 }
290}
291
292fn transcribe_sequence<'tx, 'itp>(
294 tscx: &mut TranscrCtx<'tx, 'itp>,
295 seq: &mbe::TokenTree,
296 seq_rep: &'itp mbe::SequenceRepetition,
297 interp: &FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
299) -> PResult<'tx, ()> {
300 let dcx = tscx.psess.dcx();
301
302 match lockstep_iter_size(seq, tscx.interp, &tscx.repeats) {
306 LockstepIterSize::Unconstrained => {
307 let mut repeatables = Vec::new();
308 let mut non_repeatables = Vec::new();
309
310 #[allow(rustc::potential_query_instability)]
311 for (name, matcher) in interp.iter() {
312 if matcher.is_repeatable() {
313 repeatables.push(name);
314 } else {
315 non_repeatables.push(name);
316 }
317 }
318
319 let repeatable_names: Vec<Symbol> =
320 repeatables.iter().map(|&name| name.symbol()).collect();
321 let non_repeatable_names: Vec<Symbol> =
322 non_repeatables.iter().map(|&name| name.symbol()).collect();
323 let mut meta_vars = ::alloc::vec::Vec::new()vec![];
324 seq.meta_vars(&mut meta_vars);
325 let mut typo_repeatable = None;
326 let mut typo_unrepeatable = None;
327 let mut typo_unrepeatable_label = None;
328 let mut var_no_typo = None;
329 let mut no_repeatable_var = None;
330
331 for ident in meta_vars {
332 if let Some(name) = rustc_span::edit_distance::find_best_match_for_name(
333 &repeatable_names[..],
334 ident.name,
335 None,
336 ) {
337 typo_repeatable = Some(VarTypoSuggestionRepeatable { span: ident.span, name });
338 } else if let Some(name) = rustc_span::edit_distance::find_best_match_for_name(
339 &non_repeatable_names[..],
340 ident.name,
341 None,
342 ) {
343 typo_unrepeatable = Some(VarTypoSuggestionUnrepeatable { span: ident.span });
344 if let Some(&orig_ident) = non_repeatables.iter().find(|n| n.symbol() == name) {
345 typo_unrepeatable_label = Some(VarTypoSuggestionUnrepeatableLabel {
346 span: orig_ident.ident().span,
347 });
348 }
349 } else {
350 if !repeatable_names.is_empty()
351 && let Some(msg) = listify(&repeatable_names, |s| ::alloc::__export::must_use({
::alloc::fmt::format(format_args!("`${0}`", s))
})format!("`${s}`"))
352 {
353 var_no_typo = Some(VarNoTypo { span: ident.span, msg });
354 } else {
355 no_repeatable_var = Some(NoRepeatableVar { span: ident.span });
356 }
357 }
358 }
359 return Err(dcx.create_err(NoSyntaxVarsExprRepeat {
360 span: seq.span(),
361 typo_unrepeatable,
362 typo_repeatable,
363 typo_unrepeatable_label,
364 var_no_typo,
365 no_repeatable_var,
366 }));
367 }
368
369 LockstepIterSize::Contradiction(msg) => {
370 return Err(dcx.create_err(MetaVarsDifSeqMatchers { span: seq.span(), msg }));
375 }
376
377 LockstepIterSize::Constraint(len, _) => {
378 let mbe::TokenTree::Sequence(sp, seq) = seq else { ::core::panicking::panic("internal error: entered unreachable code")unreachable!() };
381
382 if len == 0 {
384 if seq.kleene.op == KleeneOp::OneOrMore {
385 return Err(dcx.create_err(MustRepeatOnce { span: sp.entire() }));
389 }
390 } else {
391 tscx.repeats.push((0, len));
394
395 tscx.stack.push(Frame::new_sequence(seq_rep, seq.separator.clone(), seq.kleene.op));
399 }
400 }
401 }
402
403 Ok(())
404}
405
406fn transcribe_metavar<'tx>(
423 tscx: &mut TranscrCtx<'tx, '_>,
424 mut sp: Span,
425 mut original_ident: Ident,
426) -> PResult<'tx, ()> {
427 let dcx = tscx.psess.dcx();
428
429 let ident = MacroRulesNormalizedIdent::new(original_ident);
430 let Some(cur_matched) = lookup_cur_matched(ident, tscx.interp, &tscx.repeats) else {
431 tscx.marker.mark_span(&mut sp);
434 tscx.marker.mark_span(&mut original_ident.span);
435 tscx.result.push(TokenTree::token_joint_hidden(token::Dollar, sp));
436 tscx.result.push(TokenTree::Token(Token::from_ast_ident(original_ident), Spacing::Alone));
437 return Ok(());
438 };
439
440 let MatchedSingle(pnr) = cur_matched else {
441 return Err(dcx.create_err(MacroVarStillRepeating { span: sp, ident }));
443 };
444
445 transcribe_pnr(tscx, sp, pnr)
446}
447
448fn transcribe_pnr<'tx>(
449 tscx: &mut TranscrCtx<'tx, '_>,
450 mut sp: Span,
451 pnr: &ParseNtResult,
452) -> PResult<'tx, ()> {
453 let mut mk_delimited = |mk_span, mv_kind, mut stream: TokenStream| {
458 if stream.len() == 1 {
459 let tree = stream.iter().next().unwrap();
460 if let TokenTree::Delimited(_, _, delim, inner) = tree
461 && let Delimiter::Invisible(InvisibleOrigin::MetaVar(mvk)) = delim
462 && mv_kind == *mvk
463 {
464 stream = inner.clone();
465 }
466 }
467
468 tscx.marker.mark_span(&mut sp);
471 with_metavar_spans(|mspans| mspans.insert(mk_span, sp));
472 TokenTree::Delimited(
475 DelimSpan::from_single(sp),
476 DelimSpacing::new(Spacing::Alone, Spacing::Alone),
477 Delimiter::Invisible(InvisibleOrigin::MetaVar(mv_kind)),
478 stream,
479 )
480 };
481
482 let tt = match pnr {
483 ParseNtResult::Tt(tt) => {
484 maybe_use_metavar_location(tscx.psess, &tscx.stack, sp, tt, &mut tscx.marker)
489 }
490 ParseNtResult::Ident(ident, is_raw) => {
491 tscx.marker.mark_span(&mut sp);
492 with_metavar_spans(|mspans| mspans.insert(ident.span, sp));
493 let kind = token::NtIdent(*ident, *is_raw);
494 TokenTree::token_alone(kind, sp)
495 }
496 ParseNtResult::Lifetime(ident, is_raw) => {
497 tscx.marker.mark_span(&mut sp);
498 with_metavar_spans(|mspans| mspans.insert(ident.span, sp));
499 let kind = token::NtLifetime(*ident, *is_raw);
500 TokenTree::token_alone(kind, sp)
501 }
502 ParseNtResult::Item(item) => {
503 mk_delimited(item.span, MetaVarKind::Item, TokenStream::from_ast(item))
504 }
505 ParseNtResult::Block(block) => {
506 mk_delimited(block.span, MetaVarKind::Block, TokenStream::from_ast(block))
507 }
508 ParseNtResult::Stmt(stmt) => {
509 let stream = if let StmtKind::Empty = stmt.kind {
510 TokenStream::token_alone(token::Semi, stmt.span)
512 } else {
513 TokenStream::from_ast(stmt)
514 };
515 mk_delimited(stmt.span, MetaVarKind::Stmt, stream)
516 }
517 ParseNtResult::Pat(pat, pat_kind) => {
518 mk_delimited(pat.span, MetaVarKind::Pat(*pat_kind), TokenStream::from_ast(pat))
519 }
520 ParseNtResult::Expr(expr, kind) => {
521 let (can_begin_literal_maybe_minus, can_begin_string_literal) = match &expr.kind {
522 ExprKind::Lit(_) => (true, true),
523 ExprKind::Unary(UnOp::Neg, e) if #[allow(non_exhaustive_omitted_patterns)] match &e.kind {
ExprKind::Lit(_) => true,
_ => false,
}matches!(&e.kind, ExprKind::Lit(_)) => {
524 (true, false)
525 }
526 _ => (false, false),
527 };
528 mk_delimited(
529 expr.span,
530 MetaVarKind::Expr {
531 kind: *kind,
532 can_begin_literal_maybe_minus,
533 can_begin_string_literal,
534 },
535 TokenStream::from_ast(expr),
536 )
537 }
538 ParseNtResult::Literal(lit) => {
539 mk_delimited(lit.span, MetaVarKind::Literal, TokenStream::from_ast(lit))
540 }
541 ParseNtResult::Ty(ty) => {
542 let is_path = #[allow(non_exhaustive_omitted_patterns)] match &ty.kind {
TyKind::Path(None, _path) => true,
_ => false,
}matches!(&ty.kind, TyKind::Path(None, _path));
543 mk_delimited(ty.span, MetaVarKind::Ty { is_path }, TokenStream::from_ast(ty))
544 }
545 ParseNtResult::Meta(attr_item) => {
546 let has_meta_form = attr_item.meta_kind().is_some();
547 mk_delimited(
548 attr_item.span(),
549 MetaVarKind::Meta { has_meta_form },
550 TokenStream::from_ast(attr_item),
551 )
552 }
553 ParseNtResult::Path(path) => {
554 mk_delimited(path.span, MetaVarKind::Path, TokenStream::from_ast(path))
555 }
556 ParseNtResult::Vis(vis) => {
557 mk_delimited(vis.span, MetaVarKind::Vis, TokenStream::from_ast(vis))
558 }
559 };
560
561 tscx.result.push(tt);
562 Ok(())
563}
564
/// Expands a metavariable expression (`${concat(..)}`, `${count(..)}`,
/// `${ignore(..)}`, `${index(..)}`, `${len(..)}`) into a token, if any.
fn transcribe_metavar_expr<'tx>(
    tscx: &mut TranscrCtx<'tx, '_>,
    dspan: DelimSpan,
    expr: &MetaVarExpr,
) -> PResult<'tx, ()> {
    let dcx = tscx.psess.dcx();
    let tt = match *expr {
        // `${concat(..)}`: build a single identifier token from the elements.
        MetaVarExpr::Concat(ref elements) => metavar_expr_concat(tscx, dspan, elements)?,
        // `${count(ident, depth)}`: emit the repetition count as an integer literal.
        MetaVarExpr::Count(original_ident, depth) => {
            let matched = matched_from_ident(dcx, original_ident, tscx.interp)?;
            let count = count_repetitions(dcx, depth, matched, &tscx.repeats, &dspan)?;
            TokenTree::token_alone(
                TokenKind::lit(token::Integer, sym::integer(count), None),
                tscx.visited_dspan(dspan),
            )
        }
        // `${ignore(ident)}`: validate that the variable exists, emit nothing.
        MetaVarExpr::Ignore(original_ident) => {
            let _ = matched_from_ident(dcx, original_ident, tscx.interp)?;
            return Ok(());
        }
        // `${index(depth)}`: emit the current iteration index of the repetition
        // `depth` levels out, as an integer literal.
        MetaVarExpr::Index(depth) => match tscx.repeats.iter().nth_back(depth) {
            Some((index, _)) => TokenTree::token_alone(
                TokenKind::lit(token::Integer, sym::integer(*index), None),
                tscx.visited_dspan(dspan),
            ),
            None => {
                // `depth` exceeds the number of enclosing repetitions.
                return Err(out_of_bounds_err(dcx, tscx.repeats.len(), dspan.entire(), "index"));
            }
        },
        // `${len(depth)}`: emit the total length of the repetition `depth`
        // levels out, as an integer literal.
        MetaVarExpr::Len(depth) => match tscx.repeats.iter().nth_back(depth) {
            Some((_, length)) => TokenTree::token_alone(
                TokenKind::lit(token::Integer, sym::integer(*length), None),
                tscx.visited_dspan(dspan),
            ),
            None => {
                return Err(out_of_bounds_err(dcx, tscx.repeats.len(), dspan.entire(), "len"));
            }
        },
    };
    tscx.result.push(tt);
    Ok(())
}
609
610fn metavar_expr_concat<'tx>(
612 tscx: &mut TranscrCtx<'tx, '_>,
613 dspan: DelimSpan,
614 elements: &[MetaVarExprConcatElem],
615) -> PResult<'tx, TokenTree> {
616 let dcx = tscx.psess.dcx();
617 let mut concatenated = String::new();
618 for element in elements.into_iter() {
619 let symbol = match element {
620 MetaVarExprConcatElem::Ident(elem) => elem.name,
621 MetaVarExprConcatElem::Literal(elem) => *elem,
622 MetaVarExprConcatElem::Var(ident) => {
623 let key = MacroRulesNormalizedIdent::new(*ident);
624 match lookup_cur_matched(key, tscx.interp, &tscx.repeats) {
625 Some(NamedMatch::MatchedSingle(pnr)) => {
626 extract_symbol_from_pnr(dcx, pnr, ident.span)?
627 }
628 Some(NamedMatch::MatchedSeq(..)) => {
629 return Err(dcx.struct_span_err(
630 ident.span,
631 "`${concat(...)}` variable is still repeating at this depth",
632 ));
633 }
634 None => {
635 return Err(dcx.create_err(MveUnrecognizedVar { span: ident.span, key }));
636 }
637 }
638 }
639 };
640 concatenated.push_str(symbol.as_str());
641 }
642 let symbol = nfc_normalize(&concatenated);
643 let concatenated_span = tscx.visited_dspan(dspan);
644 if !rustc_lexer::is_ident(symbol.as_str()) {
645 return Err(dcx.struct_span_err(
646 concatenated_span,
647 "`${concat(..)}` is not generating a valid identifier",
648 ));
649 }
650 tscx.psess.symbol_gallery.insert(symbol, concatenated_span);
651
652 Ok(TokenTree::Token(
656 Token::from_ast_ident(Ident::new(symbol, concatenated_span)),
657 Spacing::Alone,
658 ))
659}
660
661fn maybe_use_metavar_location(
692 psess: &ParseSess,
693 stack: &[Frame<'_>],
694 mut metavar_span: Span,
695 orig_tt: &TokenTree,
696 marker: &mut Marker,
697) -> TokenTree {
698 let undelimited_seq = #[allow(non_exhaustive_omitted_patterns)] match stack.last() {
Some(Frame {
tts: [_],
kind: FrameKind::Sequence {
sep: None, kleene_op: KleeneOp::ZeroOrMore | KleeneOp::OneOrMore,
..
}, .. }) => true,
_ => false,
}matches!(
699 stack.last(),
700 Some(Frame {
701 tts: [_],
702 kind: FrameKind::Sequence {
703 sep: None,
704 kleene_op: KleeneOp::ZeroOrMore | KleeneOp::OneOrMore,
705 ..
706 },
707 ..
708 })
709 );
710 if undelimited_seq {
711 return orig_tt.clone();
713 }
714
715 marker.mark_span(&mut metavar_span);
716 let no_collision = match orig_tt {
717 TokenTree::Token(token, ..) => {
718 with_metavar_spans(|mspans| mspans.insert(token.span, metavar_span))
719 }
720 TokenTree::Delimited(dspan, ..) => with_metavar_spans(|mspans| {
721 mspans.insert(dspan.open, metavar_span)
722 && mspans.insert(dspan.close, metavar_span)
723 && mspans.insert(dspan.entire(), metavar_span)
724 }),
725 };
726 if no_collision || psess.source_map().is_imported(metavar_span) {
727 return orig_tt.clone();
728 }
729
730 match orig_tt {
733 TokenTree::Token(Token { kind, span }, spacing) => {
734 let span = metavar_span.with_ctxt(span.ctxt());
735 with_metavar_spans(|mspans| mspans.insert(span, metavar_span));
736 TokenTree::Token(Token { kind: kind.clone(), span }, *spacing)
737 }
738 TokenTree::Delimited(dspan, dspacing, delimiter, tts) => {
739 let open = metavar_span.with_ctxt(dspan.open.ctxt());
740 let close = metavar_span.with_ctxt(dspan.close.ctxt());
741 with_metavar_spans(|mspans| {
742 mspans.insert(open, metavar_span) && mspans.insert(close, metavar_span)
743 });
744 let dspan = DelimSpan::from_pair(open, close);
745 TokenTree::Delimited(dspan, *dspacing, *delimiter, tts.clone())
746 }
747 }
748}
749
750fn lookup_cur_matched<'a>(
757 ident: MacroRulesNormalizedIdent,
758 interpolations: &'a FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
759 repeats: &[(usize, usize)],
760) -> Option<&'a NamedMatch> {
761 interpolations.get(&ident).map(|mut matched| {
762 for &(idx, _) in repeats {
763 match matched {
764 MatchedSingle(_) => break,
765 MatchedSeq(ads) => matched = ads.get(idx).unwrap(),
766 }
767 }
768
769 matched
770 })
771}
772
773#[derive(#[automatically_derived]
impl ::core::clone::Clone for LockstepIterSize {
#[inline]
fn clone(&self) -> LockstepIterSize {
match self {
LockstepIterSize::Unconstrained =>
LockstepIterSize::Unconstrained,
LockstepIterSize::Constraint(__self_0, __self_1) =>
LockstepIterSize::Constraint(::core::clone::Clone::clone(__self_0),
::core::clone::Clone::clone(__self_1)),
LockstepIterSize::Contradiction(__self_0) =>
LockstepIterSize::Contradiction(::core::clone::Clone::clone(__self_0)),
}
}
}Clone)]
778enum LockstepIterSize {
779 Unconstrained,
782
783 Constraint(usize, MacroRulesNormalizedIdent),
786
787 Contradiction(String),
789}
790
791impl LockstepIterSize {
792 fn with(self, other: LockstepIterSize) -> LockstepIterSize {
797 match self {
798 LockstepIterSize::Unconstrained => other,
799 LockstepIterSize::Contradiction(_) => self,
800 LockstepIterSize::Constraint(l_len, l_id) => match other {
801 LockstepIterSize::Unconstrained => self,
802 LockstepIterSize::Contradiction(_) => other,
803 LockstepIterSize::Constraint(r_len, _) if l_len == r_len => self,
804 LockstepIterSize::Constraint(r_len, r_id) => {
805 let msg = ::alloc::__export::must_use({
::alloc::fmt::format(format_args!("meta-variable `{0}` repeats {1} time{2}, but `{3}` repeats {4} time{5}",
l_id, l_len, if l_len == 1 { "" } else { "s" }, r_id, r_len,
if r_len == 1 { "" } else { "s" }))
})format!(
806 "meta-variable `{}` repeats {} time{}, but `{}` repeats {} time{}",
807 l_id,
808 l_len,
809 pluralize!(l_len),
810 r_id,
811 r_len,
812 pluralize!(r_len),
813 );
814 LockstepIterSize::Contradiction(msg)
815 }
816 },
817 }
818 }
819}
820
821fn lockstep_iter_size(
834 tree: &mbe::TokenTree,
835 interpolations: &FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
836 repeats: &[(usize, usize)],
837) -> LockstepIterSize {
838 use mbe::TokenTree;
839 match tree {
840 TokenTree::Delimited(.., delimited) => {
841 delimited.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| {
842 size.with(lockstep_iter_size(tt, interpolations, repeats))
843 })
844 }
845 TokenTree::Sequence(_, seq) => {
846 seq.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| {
847 size.with(lockstep_iter_size(tt, interpolations, repeats))
848 })
849 }
850 TokenTree::MetaVar(_, name) | TokenTree::MetaVarDecl { name, .. } => {
851 let name = MacroRulesNormalizedIdent::new(*name);
852 match lookup_cur_matched(name, interpolations, repeats) {
853 Some(matched) => match matched {
854 MatchedSingle(_) => LockstepIterSize::Unconstrained,
855 MatchedSeq(ads) => LockstepIterSize::Constraint(ads.len(), name),
856 },
857 _ => LockstepIterSize::Unconstrained,
858 }
859 }
860 TokenTree::MetaVarExpr(_, expr) => {
861 expr.for_each_metavar(LockstepIterSize::Unconstrained, |lis, ident| {
862 lis.with(lockstep_iter_size(
863 &TokenTree::MetaVar(ident.span, *ident),
864 interpolations,
865 repeats,
866 ))
867 })
868 }
869 TokenTree::Token(..) => LockstepIterSize::Unconstrained,
870 }
871}
872
/// Implements `${count(ident, depth)}`: counts how many times `matched`
/// repeats at `depth_user` levels below the current repetition nesting.
fn count_repetitions<'dx>(
    dcx: DiagCtxtHandle<'dx>,
    depth_user: usize,
    mut matched: &NamedMatch,
    repeats: &[(usize, usize)],
    sp: &DelimSpan,
) -> PResult<'dx, usize> {
    // Sums the sequence lengths found at `depth_max`, recursing from `depth_curr`.
    fn count<'a>(depth_curr: usize, depth_max: usize, matched: &NamedMatch) -> PResult<'a, usize> {
        match matched {
            MatchedSingle(_) => Ok(1),
            MatchedSeq(named_matches) => {
                if depth_curr == depth_max {
                    Ok(named_matches.len())
                } else {
                    named_matches.iter().map(|elem| count(depth_curr + 1, depth_max, elem)).sum()
                }
            }
        }
    }

    // Measures the nesting depth of `matched` by following first elements.
    fn depth(counter: usize, matched: &NamedMatch) -> usize {
        match matched {
            MatchedSingle(_) => counter,
            MatchedSeq(named_matches) => {
                let rslt = counter + 1;
                if let Some(elem) = named_matches.first() { depth(rslt, elem) } else { rslt }
            }
        }
    }

    // Depth remaining below the repetitions we are already inside of
    // (saturating at 0 via `unwrap_or_default`).
    let depth_max = depth(0, matched)
        .checked_sub(1)
        .and_then(|el| el.checked_sub(repeats.len()))
        .unwrap_or_default();
    if depth_user > depth_max {
        return Err(out_of_bounds_err(dcx, depth_max + 1, sp.entire(), "count"));
    }

    // Descend into the match following the current repetition indices.
    for &(idx, _) in repeats {
        if let MatchedSeq(ads) = matched {
            matched = &ads[idx];
        }
    }

    // `count` on a single (non-repeating) match is an error.
    if let MatchedSingle(_) = matched {
        return Err(dcx.create_err(CountRepetitionMisplaced { span: sp.entire() }));
    }

    count(depth_user, depth_max, matched)
}
941
942fn matched_from_ident<'ctx, 'interp, 'rslt>(
944 dcx: DiagCtxtHandle<'ctx>,
945 ident: Ident,
946 interp: &'interp FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
947) -> PResult<'ctx, &'rslt NamedMatch>
948where
949 'interp: 'rslt,
950{
951 let span = ident.span;
952 let key = MacroRulesNormalizedIdent::new(ident);
953 interp.get(&key).ok_or_else(|| dcx.create_err(MveUnrecognizedVar { span, key }))
954}
955
956fn out_of_bounds_err<'a>(dcx: DiagCtxtHandle<'a>, max: usize, span: Span, ty: &str) -> Diag<'a> {
959 let msg = if max == 0 {
960 ::alloc::__export::must_use({
::alloc::fmt::format(format_args!("meta-variable expression `{0}` with depth parameter must be called inside of a macro repetition",
ty))
})format!(
961 "meta-variable expression `{ty}` with depth parameter \
962 must be called inside of a macro repetition"
963 )
964 } else {
965 ::alloc::__export::must_use({
::alloc::fmt::format(format_args!("depth parameter of meta-variable expression `{0}` must be less than {1}",
ty, max))
})format!(
966 "depth parameter of meta-variable expression `{ty}` \
967 must be less than {max}"
968 )
969 };
970 dcx.struct_span_err(span, msg)
971}
972
/// Extracts the `Symbol` that a matched nonterminal contributes to
/// `${concat(..)}`: a non-raw identifier, an unsuffixed string literal, or an
/// unsuffixed integer literal. Anything else is an error.
fn extract_symbol_from_pnr<'a>(
    dcx: DiagCtxtHandle<'a>,
    pnr: &ParseNtResult,
    span_err: Span,
) -> PResult<'a, Symbol> {
    match pnr {
        // `ident` metavariable: allowed unless it is a raw identifier.
        ParseNtResult::Ident(nt_ident, is_raw) => {
            if let IdentIsRaw::Yes = is_raw {
                Err(dcx.struct_span_err(span_err, RAW_IDENT_ERR))
            } else {
                Ok(nt_ident.name)
            }
        }
        // `tt` metavariable holding a single identifier token.
        ParseNtResult::Tt(TokenTree::Token(
            Token { kind: TokenKind::Ident(symbol, is_raw), .. },
            _,
        )) => {
            if let IdentIsRaw::Yes = is_raw {
                Err(dcx.struct_span_err(span_err, RAW_IDENT_ERR))
            } else {
                Ok(*symbol)
            }
        }
        // `tt` metavariable holding an unsuffixed string literal.
        ParseNtResult::Tt(TokenTree::Token(
            Token {
                kind: TokenKind::Literal(Lit { kind: LitKind::Str, symbol, suffix: None }),
                ..
            },
            _,
        )) => Ok(*symbol),
        // `literal` metavariable holding an unsuffixed string literal.
        ParseNtResult::Literal(expr)
            if let ExprKind::Lit(Lit { kind: LitKind::Str, symbol, suffix: None }) = &expr.kind =>
        {
            Ok(*symbol)
        }
        // `literal` metavariable holding an integer literal: rejected when it
        // is semantically a float or carries a suffix.
        ParseNtResult::Literal(expr)
            if let ExprKind::Lit(lit @ Lit { kind: LitKind::Integer, symbol, suffix }) =
                &expr.kind =>
        {
            if lit.is_semantic_float() {
                Err(dcx
                    .struct_err("floats are not supported as metavariables of `${concat(..)}`")
                    .with_span(span_err))
            } else if suffix.is_none() {
                Ok(*symbol)
            } else {
                Err(dcx
                    .struct_err("integer metavariables of `${concat(..)}` must not be suffixed")
                    .with_span(span_err))
            }
        }
        // Everything else is unsupported by `${concat(..)}`.
        _ => Err(dcx
            .struct_err(
                "metavariables of `${concat(..)}` must be of type `ident`, `literal` or `tt`",
            )
            .with_note("currently only string and integer literals are supported")
            .with_span(span_err)),
    }
}