use std::borrow::Cow;
use std::collections::hash_map::Entry;
use std::sync::Arc;
use std::{mem, slice};

use ast::token::IdentIsRaw;
use rustc_ast::token::NtPatKind::*;
use rustc_ast::token::TokenKind::*;
use rustc_ast::token::{self, Delimiter, NonterminalKind, Token, TokenKind};
use rustc_ast::tokenstream::{self, DelimSpan, TokenStream};
use rustc_ast::{self as ast, DUMMY_NODE_ID, NodeId};
use rustc_ast_pretty::pprust;
use rustc_data_structures::fx::{FxHashMap, FxIndexMap};
use rustc_errors::{Applicability, Diag, ErrorGuaranteed, MultiSpan};
use rustc_feature::Features;
use rustc_hir as hir;
use rustc_hir::attrs::AttributeKind;
use rustc_hir::def::MacroKinds;
use rustc_hir::find_attr;
use rustc_lint_defs::builtin::{
    RUST_2021_INCOMPATIBLE_OR_PATTERNS, SEMICOLON_IN_EXPRESSIONS_FROM_MACROS,
};
use rustc_parse::exp;
use rustc_parse::parser::{Parser, Recovery};
use rustc_session::Session;
use rustc_session::parse::{ParseSess, feature_err};
use rustc_span::edition::Edition;
use rustc_span::hygiene::Transparency;
use rustc_span::{Ident, Span, Symbol, kw, sym};
use tracing::{debug, instrument, trace, trace_span};

use super::diagnostics::{FailedMacro, failed_to_match_macro};
use super::macro_parser::{NamedMatches, NamedParseResult};
use super::{SequenceRepetition, diagnostics};
use crate::base::{
    AttrProcMacro, BangProcMacro, DummyResult, ExpandResult, ExtCtxt, MacResult,
    MacroExpanderResult, SyntaxExtension, SyntaxExtensionKind, TTMacroExpander,
};
use crate::errors;
use crate::expand::{AstFragment, AstFragmentKind, ensure_complete_parse, parse_ast_fragment};
use crate::mbe::macro_check::check_meta_variables;
use crate::mbe::macro_parser::{Error, ErrorReported, Failure, MatcherLoc, Success, TtParser};
use crate::mbe::quoted::{RulePart, parse_one_tt};
use crate::mbe::transcribe::transcribe;
use crate::mbe::{self, KleeneOp};

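/// The result of a successful macro expansion: a parser positioned at the
/// expanded tokens, plus the metadata needed to parse them into an AST
/// fragment and to point diagnostics at the right spans.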
pub(crate) struct ParserAnyMacro<'a> {
    parser: Parser<'a>,

    site_span: Span,
    macro_ident: Ident,
    lint_node_id: NodeId,
    is_trailing_mac: bool,
    arm_span: Span,
    is_local: bool,
}

impl<'a> ParserAnyMacro<'a> {
    pub(crate) fn make(mut self: Box<ParserAnyMacro<'a>>, kind: AstFragmentKind) -> AstFragment {
        let ParserAnyMacro {
            site_span,
            macro_ident,
            ref mut parser,
            lint_node_id,
            arm_span,
            is_trailing_mac,
            is_local,
        } = *self;
        let snapshot = &mut parser.create_snapshot_for_diagnostic();
        let fragment = match parse_ast_fragment(parser, kind) {
            Ok(f) => f,
            Err(err) => {
                let guar = diagnostics::emit_frag_parse_err(
                    err, parser, snapshot, site_span, arm_span, kind,
                );
                return kind.dummy(site_span, guar);
            }
        };

        if kind == AstFragmentKind::Expr && parser.token == token::Semi {
            if is_local {
                parser.psess.buffer_lint(
                    SEMICOLON_IN_EXPRESSIONS_FROM_MACROS,
                    parser.token.span,
                    lint_node_id,
                    errors::TrailingMacro { is_trailing: is_trailing_mac, name: macro_ident },
                );
            }
            parser.bump();
        }

        let path = ast::Path::from_ident(macro_ident.with_span_pos(site_span));
        ensure_complete_parse(parser, &path, kind.name(), site_span);
        fragment
    }

    #[instrument(skip(cx, tts))]
    pub(crate) fn from_tts<'cx>(
        cx: &'cx mut ExtCtxt<'a>,
        tts: TokenStream,
        site_span: Span,
        arm_span: Span,
        is_local: bool,
        macro_ident: Ident,
    ) -> Self {
        Self {
            parser: Parser::new(&cx.sess.psess, tts, None),

            site_span,
            macro_ident,
            lint_node_id: cx.current_expansion.lint_node_id,
            is_trailing_mac: cx.current_expansion.is_trailing_mac,
            arm_span,
            is_local,
        }
    }
}

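/// A single rule of a declarative macro, with its matcher(s) pre-lowered to
/// `MatcherLoc`s: a function-like rule, an `attr` rule, or a `derive` rule.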
pub(super) enum MacroRule {
    Func { lhs: Vec<MatcherLoc>, lhs_span: Span, rhs: mbe::TokenTree },
    Attr {
        args: Vec<MatcherLoc>,
        args_span: Span,
        body: Vec<MatcherLoc>,
        body_span: Span,
        rhs: mbe::TokenTree,
    },
    Derive { body: Vec<MatcherLoc>, body_span: Span, rhs: mbe::TokenTree },
}

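/// The expander for a `macro_rules!` macro: the parsed rules together with
/// the metadata (name, definition span, transparency, supported macro kinds)
/// needed to expand invocations.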
pub struct MacroRulesMacroExpander {
    node_id: NodeId,
    name: Ident,
    span: Span,
    transparency: Transparency,
    kinds: MacroKinds,
    rules: Vec<MacroRule>,
}

impl MacroRulesMacroExpander {
    pub fn get_unused_rule(&self, rule_i: usize) -> Option<(&Ident, MultiSpan)> {
        let (span, rhs) = match self.rules[rule_i] {
            MacroRule::Func { lhs_span, ref rhs, .. } => (MultiSpan::from_span(lhs_span), rhs),
            MacroRule::Attr { args_span, body_span, ref rhs, .. } => {
                (MultiSpan::from_spans(vec![args_span, body_span]), rhs)
            }
            MacroRule::Derive { body_span, ref rhs, .. } => (MultiSpan::from_span(body_span), rhs),
        };
        if has_compile_error_macro(rhs) { None } else { Some((&self.name, span)) }
    }

    pub fn kinds(&self) -> MacroKinds {
        self.kinds
    }

    pub fn expand_derive(
        &self,
        cx: &mut ExtCtxt<'_>,
        sp: Span,
        body: &TokenStream,
    ) -> Result<TokenStream, ErrorGuaranteed> {
        let Self { name, ref rules, node_id, .. } = *self;
        let psess = &cx.sess.psess;

        if cx.trace_macros() {
            let msg = format!("expanding `#[derive({name})] {}`", pprust::tts_to_string(body));
            trace_macros_note(&mut cx.expansions, sp, msg);
        }

        match try_match_macro_derive(psess, name, body, rules, &mut NoopTracker) {
            Ok((rule_index, rule, named_matches)) => {
                let MacroRule::Derive { rhs, .. } = rule else {
                    panic!("try_match_macro_derive returned non-derive rule");
                };
                let mbe::TokenTree::Delimited(rhs_span, _, rhs) = rhs else {
                    cx.dcx().span_bug(sp, "malformed macro derive rhs");
                };

                let id = cx.current_expansion.id;
                let tts = transcribe(psess, &named_matches, rhs, *rhs_span, self.transparency, id)
                    .map_err(|e| e.emit())?;

                if cx.trace_macros() {
                    let msg = format!("to `{}`", pprust::tts_to_string(&tts));
                    trace_macros_note(&mut cx.expansions, sp, msg);
                }

                if is_defined_in_current_crate(node_id) {
                    cx.resolver.record_macro_rule_usage(node_id, rule_index);
                }

                Ok(tts)
            }
            Err(CanRetry::No(guar)) => Err(guar),
            Err(CanRetry::Yes) => {
                let (_, guar) = failed_to_match_macro(
                    cx.psess(),
                    sp,
                    self.span,
                    name,
                    FailedMacro::Derive,
                    body,
                    rules,
                );
                cx.macro_error_and_trace_macros_diag();
                Err(guar)
            }
        }
    }
}

impl TTMacroExpander for MacroRulesMacroExpander {
    fn expand<'cx>(
        &self,
        cx: &'cx mut ExtCtxt<'_>,
        sp: Span,
        input: TokenStream,
    ) -> MacroExpanderResult<'cx> {
        ExpandResult::Ready(expand_macro(
            cx,
            sp,
            self.span,
            self.node_id,
            self.name,
            self.transparency,
            input,
            &self.rules,
        ))
    }
}

impl AttrProcMacro for MacroRulesMacroExpander {
    fn expand(
        &self,
        cx: &mut ExtCtxt<'_>,
        sp: Span,
        args: TokenStream,
        body: TokenStream,
    ) -> Result<TokenStream, ErrorGuaranteed> {
        expand_macro_attr(
            cx,
            sp,
            self.span,
            self.node_id,
            self.name,
            self.transparency,
            args,
            body,
            &self.rules,
        )
    }
}

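/// A stub bang-macro expander used when compiling a macro definition failed;
/// it only replays the already-emitted error.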
struct DummyBang(ErrorGuaranteed);

impl BangProcMacro for DummyBang {
    fn expand<'cx>(
        &self,
        _: &'cx mut ExtCtxt<'_>,
        _: Span,
        _: TokenStream,
    ) -> Result<TokenStream, ErrorGuaranteed> {
        Err(self.0)
    }
}

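/// Appends a `trace_macros` message, attributed to the outermost macro call site.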
fn trace_macros_note(cx_expansions: &mut FxIndexMap<Span, Vec<String>>, sp: Span, message: String) {
    let sp = sp.macro_backtrace().last().map_or(sp, |trace| trace.call_site);
    cx_expansions.entry(sp).or_default().push(message);
}

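/// Hooks for observing the macro matcher at work. The hot path matches with
/// [`NoopTracker`]; on failure, diagnostics can rerun matching with a tracker
/// that records what went wrong.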
pub(super) trait Tracker<'matcher> {
    type Failure;

    fn build_failure(tok: Token, position: u32, msg: &'static str) -> Self::Failure;

    fn before_match_loc(&mut self, _parser: &TtParser, _matcher: &'matcher MatcherLoc) {}

    fn after_arm(&mut self, _in_body: bool, _result: &NamedParseResult<Self::Failure>) {}

    fn description() -> &'static str;

    fn recovery() -> Recovery {
        Recovery::Forbidden
    }
}

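/// A tracker that records nothing; used for the initial, fast matching pass.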
pub(super) struct NoopTracker;

impl<'matcher> Tracker<'matcher> for NoopTracker {
    type Failure = ();

    fn build_failure(_tok: Token, _position: u32, _msg: &'static str) -> Self::Failure {}

    fn description() -> &'static str {
        "none"
    }
}

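/// Expands a function-like macro invocation: tries each `Func` rule in order
/// and transcribes the right-hand side of the first rule that matches `arg`.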
#[instrument(skip(cx, transparency, arg, rules))]
fn expand_macro<'cx>(
    cx: &'cx mut ExtCtxt<'_>,
    sp: Span,
    def_span: Span,
    node_id: NodeId,
    name: Ident,
    transparency: Transparency,
    arg: TokenStream,
    rules: &[MacroRule],
) -> Box<dyn MacResult + 'cx> {
    let psess = &cx.sess.psess;

    if cx.trace_macros() {
        let msg = format!("expanding `{}! {{ {} }}`", name, pprust::tts_to_string(&arg));
        trace_macros_note(&mut cx.expansions, sp, msg);
    }

    let try_success_result = try_match_macro(psess, name, &arg, rules, &mut NoopTracker);

    match try_success_result {
        Ok((rule_index, rule, named_matches)) => {
            let MacroRule::Func { rhs, .. } = rule else {
                panic!("try_match_macro returned non-func rule");
            };
            let mbe::TokenTree::Delimited(rhs_span, _, rhs) = rhs else {
                cx.dcx().span_bug(sp, "malformed macro rhs");
            };
            let arm_span = rhs_span.entire();

            let id = cx.current_expansion.id;
            let tts = match transcribe(psess, &named_matches, rhs, *rhs_span, transparency, id) {
                Ok(tts) => tts,
                Err(err) => {
                    let guar = err.emit();
                    return DummyResult::any(arm_span, guar);
                }
            };

            if cx.trace_macros() {
                let msg = format!("to `{}`", pprust::tts_to_string(&tts));
                trace_macros_note(&mut cx.expansions, sp, msg);
            }

            let is_local = is_defined_in_current_crate(node_id);
            if is_local {
                cx.resolver.record_macro_rule_usage(node_id, rule_index);
            }

            Box::new(ParserAnyMacro::from_tts(cx, tts, sp, arm_span, is_local, name))
        }
        Err(CanRetry::No(guar)) => {
            debug!("Will not retry matching as an error was emitted already");
            DummyResult::any(sp, guar)
        }
        Err(CanRetry::Yes) => {
            let (span, guar) = failed_to_match_macro(
                cx.psess(),
                sp,
                def_span,
                name,
                FailedMacro::Func,
                &arg,
                rules,
            );
            cx.macro_error_and_trace_macros_diag();
            DummyResult::any(span, guar)
        }
    }
}

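/// Expands an attribute macro invocation: matches `args` and `body` against
/// each `Attr` rule in order and transcribes the first match.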
#[instrument(skip(cx, transparency, args, body, rules))]
fn expand_macro_attr(
    cx: &mut ExtCtxt<'_>,
    sp: Span,
    def_span: Span,
    node_id: NodeId,
    name: Ident,
    transparency: Transparency,
    args: TokenStream,
    body: TokenStream,
    rules: &[MacroRule],
) -> Result<TokenStream, ErrorGuaranteed> {
    let psess = &cx.sess.psess;
    let is_local = is_defined_in_current_crate(node_id);

    if cx.trace_macros() {
        let msg = format!(
            "expanding `#[{name}({})] {}`",
            pprust::tts_to_string(&args),
            pprust::tts_to_string(&body),
        );
        trace_macros_note(&mut cx.expansions, sp, msg);
    }

    match try_match_macro_attr(psess, name, &args, &body, rules, &mut NoopTracker) {
        Ok((i, rule, named_matches)) => {
            let MacroRule::Attr { rhs, .. } = rule else {
                panic!("try_match_macro_attr returned non-attr rule");
            };
            let mbe::TokenTree::Delimited(rhs_span, _, rhs) = rhs else {
                cx.dcx().span_bug(sp, "malformed macro rhs");
            };

            let id = cx.current_expansion.id;
            let tts = transcribe(psess, &named_matches, rhs, *rhs_span, transparency, id)
                .map_err(|e| e.emit())?;

            if cx.trace_macros() {
                let msg = format!("to `{}`", pprust::tts_to_string(&tts));
                trace_macros_note(&mut cx.expansions, sp, msg);
            }

            if is_local {
                cx.resolver.record_macro_rule_usage(node_id, i);
            }

            Ok(tts)
        }
        Err(CanRetry::No(guar)) => Err(guar),
        Err(CanRetry::Yes) => {
            let (_, guar) = failed_to_match_macro(
                cx.psess(),
                sp,
                def_span,
                name,
                FailedMacro::Attr(&args),
                &body,
                rules,
            );
            cx.trace_macros_diag();
            Err(guar)
        }
    }
}

pub(super) enum CanRetry {
    Yes,
    No(ErrorGuaranteed),
}

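/// Tries matching the invocation against each function-like rule in order.
/// On success, returns the rule index, the rule, and its named matches.
/// On failure, the error indicates whether matching may be retried (e.g. by
/// the diagnostics machinery, with a tracker that records failures).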
#[instrument(level = "debug", skip(psess, arg, rules, track), fields(tracking = %T::description()))]
pub(super) fn try_match_macro<'matcher, T: Tracker<'matcher>>(
    psess: &ParseSess,
    name: Ident,
    arg: &TokenStream,
    rules: &'matcher [MacroRule],
    track: &mut T,
) -> Result<(usize, &'matcher MacroRule, NamedMatches), CanRetry> {
    let parser = parser_from_cx(psess, arg.clone(), T::recovery());
    let mut tt_parser = TtParser::new(name);
    for (i, rule) in rules.iter().enumerate() {
        let MacroRule::Func { lhs, .. } = rule else { continue };
        let _tracing_span = trace_span!("Matching arm", %i);

        let mut gated_spans_snapshot = mem::take(&mut *psess.gated_spans.spans.borrow_mut());

        let result = tt_parser.parse_tt(&mut Cow::Borrowed(&parser), lhs, track);

        track.after_arm(true, &result);

        match result {
            Success(named_matches) => {
                debug!("Parsed arm successfully");
                psess.gated_spans.merge(gated_spans_snapshot);

                return Ok((i, rule, named_matches));
            }
            Failure(_) => {
                trace!("Failed to match arm, trying the next one");
            }
            Error(_, _) => {
                debug!("Fatal error occurred during matching");
                return Err(CanRetry::Yes);
            }
            ErrorReported(guarantee) => {
                debug!("Fatal error occurred and was reported during matching");
                return Err(CanRetry::No(guarantee));
            }
        }

        mem::swap(&mut gated_spans_snapshot, &mut psess.gated_spans.spans.borrow_mut());
    }

    Err(CanRetry::Yes)
}

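/// Like [`try_match_macro`], but for `attr` rules: a rule's `args` matcher
/// runs against the attribute arguments and its `body` matcher against the
/// annotated item, and the two sets of named matches are merged.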
#[instrument(level = "debug", skip(psess, attr_args, attr_body, rules, track), fields(tracking = %T::description()))]
pub(super) fn try_match_macro_attr<'matcher, T: Tracker<'matcher>>(
    psess: &ParseSess,
    name: Ident,
    attr_args: &TokenStream,
    attr_body: &TokenStream,
    rules: &'matcher [MacroRule],
    track: &mut T,
) -> Result<(usize, &'matcher MacroRule, NamedMatches), CanRetry> {
    let args_parser = parser_from_cx(psess, attr_args.clone(), T::recovery());
    let body_parser = parser_from_cx(psess, attr_body.clone(), T::recovery());
    let mut tt_parser = TtParser::new(name);
    for (i, rule) in rules.iter().enumerate() {
        let MacroRule::Attr { args, body, .. } = rule else { continue };

        let mut gated_spans_snapshot = mem::take(&mut *psess.gated_spans.spans.borrow_mut());

        let result = tt_parser.parse_tt(&mut Cow::Borrowed(&args_parser), args, track);
        track.after_arm(false, &result);

        let mut named_matches = match result {
            Success(named_matches) => named_matches,
            Failure(_) => {
                mem::swap(&mut gated_spans_snapshot, &mut psess.gated_spans.spans.borrow_mut());
                continue;
            }
            Error(_, _) => return Err(CanRetry::Yes),
            ErrorReported(guar) => return Err(CanRetry::No(guar)),
        };

        let result = tt_parser.parse_tt(&mut Cow::Borrowed(&body_parser), body, track);
        track.after_arm(true, &result);

        match result {
            Success(body_named_matches) => {
                psess.gated_spans.merge(gated_spans_snapshot);
                #[allow(rustc::potential_query_instability)]
                named_matches.extend(body_named_matches);
                return Ok((i, rule, named_matches));
            }
            Failure(_) => {
                mem::swap(&mut gated_spans_snapshot, &mut psess.gated_spans.spans.borrow_mut())
            }
            Error(_, _) => return Err(CanRetry::Yes),
            ErrorReported(guar) => return Err(CanRetry::No(guar)),
        }
    }

    Err(CanRetry::Yes)
}

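/// Like [`try_match_macro`], but for `derive` rules, matching only the body
/// the derive is applied to.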
#[instrument(level = "debug", skip(psess, body, rules, track), fields(tracking = %T::description()))]
pub(super) fn try_match_macro_derive<'matcher, T: Tracker<'matcher>>(
    psess: &ParseSess,
    name: Ident,
    body: &TokenStream,
    rules: &'matcher [MacroRule],
    track: &mut T,
) -> Result<(usize, &'matcher MacroRule, NamedMatches), CanRetry> {
    let body_parser = parser_from_cx(psess, body.clone(), T::recovery());
    let mut tt_parser = TtParser::new(name);
    for (i, rule) in rules.iter().enumerate() {
        let MacroRule::Derive { body, .. } = rule else { continue };

        let mut gated_spans_snapshot = mem::take(&mut *psess.gated_spans.spans.borrow_mut());

        let result = tt_parser.parse_tt(&mut Cow::Borrowed(&body_parser), body, track);
        track.after_arm(true, &result);

        match result {
            Success(named_matches) => {
                psess.gated_spans.merge(gated_spans_snapshot);
                return Ok((i, rule, named_matches));
            }
            Failure(_) => {
                mem::swap(&mut gated_spans_snapshot, &mut psess.gated_spans.spans.borrow_mut())
            }
            Error(_, _) => return Err(CanRetry::Yes),
            ErrorReported(guar) => return Err(CanRetry::No(guar)),
        }
    }

    Err(CanRetry::Yes)
}

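/// Parses a declarative macro definition into rules, checks their validity,
/// and builds the corresponding [`SyntaxExtension`]. Also returns the number
/// of rules, used for unused-rule diagnostics in the local crate. On a hard
/// error, a dummy extension that replays the error is returned instead.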
pub fn compile_declarative_macro(
    sess: &Session,
    features: &Features,
    macro_def: &ast::MacroDef,
    ident: Ident,
    attrs: &[hir::Attribute],
    span: Span,
    node_id: NodeId,
    edition: Edition,
) -> (SyntaxExtension, usize) {
    let mk_syn_ext = |kind| {
        let is_local = is_defined_in_current_crate(node_id);
        SyntaxExtension::new(sess, kind, span, Vec::new(), edition, ident.name, attrs, is_local)
    };
    let dummy_syn_ext =
        |guar| (mk_syn_ext(SyntaxExtensionKind::Bang(Arc::new(DummyBang(guar)))), 0);

    let macro_rules = macro_def.macro_rules;
    let exp_sep = if macro_rules { exp!(Semi) } else { exp!(Comma) };

    let body = macro_def.body.tokens.clone();
    let mut p = Parser::new(&sess.psess, body, rustc_parse::MACRO_ARGUMENTS);

    let mut guar = None;
    let mut check_emission = |ret: Result<(), ErrorGuaranteed>| guar = guar.or(ret.err());

    let mut kinds = MacroKinds::empty();
    let mut rules = Vec::new();

    while p.token != token::Eof {
        let (args, is_derive) = if p.eat_keyword_noexpect(sym::attr) {
            kinds |= MacroKinds::ATTR;
            if !features.macro_attr() {
                feature_err(sess, sym::macro_attr, span, "`macro_rules!` attributes are unstable")
                    .emit();
            }
            if let Some(guar) = check_no_eof(sess, &p, "expected macro attr args") {
                return dummy_syn_ext(guar);
            }
            let args = p.parse_token_tree();
            check_args_parens(sess, sym::attr, &args);
            let args = parse_one_tt(args, RulePart::Pattern, sess, node_id, features, edition);
            check_emission(check_lhs(sess, node_id, &args));
            if let Some(guar) = check_no_eof(sess, &p, "expected macro attr body") {
                return dummy_syn_ext(guar);
            }
            (Some(args), false)
        } else if p.eat_keyword_noexpect(sym::derive) {
            kinds |= MacroKinds::DERIVE;
            let derive_keyword_span = p.prev_token.span;
            if !features.macro_derive() {
                feature_err(sess, sym::macro_derive, span, "`macro_rules!` derives are unstable")
                    .emit();
            }
            if let Some(guar) = check_no_eof(sess, &p, "expected `()` after `derive`") {
                return dummy_syn_ext(guar);
            }
            let args = p.parse_token_tree();
            check_args_parens(sess, sym::derive, &args);
            let args_empty_result = check_args_empty(sess, &args);
            let args_not_empty = args_empty_result.is_err();
            check_emission(args_empty_result);
            if let Some(guar) = check_no_eof(sess, &p, "expected macro derive body") {
                return dummy_syn_ext(guar);
            }
            if p.token == token::FatArrow {
                let mut err = sess
                    .dcx()
                    .struct_span_err(p.token.span, "expected macro derive body, got `=>`");
                if args_not_empty {
                    err.span_label(derive_keyword_span, "need `()` after this `derive`");
                }
                return dummy_syn_ext(err.emit());
            }
            (None, true)
        } else {
            kinds |= MacroKinds::BANG;
            (None, false)
        };
        let lhs_tt = p.parse_token_tree();
        let lhs_tt = parse_one_tt(lhs_tt, RulePart::Pattern, sess, node_id, features, edition);
        check_emission(check_lhs(sess, node_id, &lhs_tt));
        if let Err(e) = p.expect(exp!(FatArrow)) {
            return dummy_syn_ext(e.emit());
        }
        if let Some(guar) = check_no_eof(sess, &p, "expected right-hand side of macro rule") {
            return dummy_syn_ext(guar);
        }
        let rhs_tt = p.parse_token_tree();
        let rhs_tt = parse_one_tt(rhs_tt, RulePart::Body, sess, node_id, features, edition);
        check_emission(check_rhs(sess, &rhs_tt));
        check_emission(check_meta_variables(&sess.psess, node_id, args.as_ref(), &lhs_tt, &rhs_tt));
        let lhs_span = lhs_tt.span();
        let lhs = if let mbe::TokenTree::Delimited(.., delimited) = lhs_tt {
            mbe::macro_parser::compute_locs(&delimited.tts)
        } else {
            return dummy_syn_ext(guar.unwrap());
        };
        if let Some(args) = args {
            let args_span = args.span();
            let mbe::TokenTree::Delimited(.., delimited) = args else {
                return dummy_syn_ext(guar.unwrap());
            };
            let args = mbe::macro_parser::compute_locs(&delimited.tts);
            let body_span = lhs_span;
            rules.push(MacroRule::Attr { args, args_span, body: lhs, body_span, rhs: rhs_tt });
        } else if is_derive {
            rules.push(MacroRule::Derive { body: lhs, body_span: lhs_span, rhs: rhs_tt });
        } else {
            rules.push(MacroRule::Func { lhs, lhs_span, rhs: rhs_tt });
        }
        if p.token == token::Eof {
            break;
        }
        if let Err(e) = p.expect(exp_sep) {
            return dummy_syn_ext(e.emit());
        }
    }

    if rules.is_empty() {
        let guar = sess.dcx().span_err(span, "macros must contain at least one rule");
        return dummy_syn_ext(guar);
    }
    assert!(!kinds.is_empty());

    let transparency = find_attr!(attrs, AttributeKind::MacroTransparency(x) => *x)
        .unwrap_or(Transparency::fallback(macro_rules));

    if let Some(guar) = guar {
        return dummy_syn_ext(guar);
    }

    let nrules = if is_defined_in_current_crate(node_id) { rules.len() } else { 0 };

    let exp = MacroRulesMacroExpander { name: ident, kinds, span, node_id, transparency, rules };
    (mk_syn_ext(SyntaxExtensionKind::MacroRules(Arc::new(exp))), nrules)
}

fn check_no_eof(sess: &Session, p: &Parser<'_>, msg: &'static str) -> Option<ErrorGuaranteed> {
    if p.token == token::Eof {
        let err_sp = p.token.span.shrink_to_hi();
        let guar = sess
            .dcx()
            .struct_span_err(err_sp, "macro definition ended unexpectedly")
            .with_span_label(err_sp, msg)
            .emit();
        return Some(guar);
    }
    None
}

fn check_args_parens(sess: &Session, rule_kw: Symbol, args: &tokenstream::TokenTree) {
    if let tokenstream::TokenTree::Delimited(dspan, _, delim, _) = args
        && *delim != Delimiter::Parenthesis
    {
        sess.dcx().emit_err(errors::MacroArgsBadDelim {
            span: dspan.entire(),
            sugg: errors::MacroArgsBadDelimSugg { open: dspan.open, close: dspan.close },
            rule_kw,
        });
    }
}

fn check_args_empty(sess: &Session, args: &tokenstream::TokenTree) -> Result<(), ErrorGuaranteed> {
    match args {
        tokenstream::TokenTree::Delimited(.., delimited) if delimited.is_empty() => Ok(()),
        _ => {
            let msg = "`derive` rules do not accept arguments; `derive` must be followed by `()`";
            Err(sess.dcx().span_err(args.span(), msg))
        }
    }
}

fn check_lhs(sess: &Session, node_id: NodeId, lhs: &mbe::TokenTree) -> Result<(), ErrorGuaranteed> {
    let e1 = check_lhs_nt_follows(sess, node_id, lhs);
    let e2 = check_lhs_no_empty_seq(sess, slice::from_ref(lhs));
    e1.and(e2)
}

fn check_lhs_nt_follows(
    sess: &Session,
    node_id: NodeId,
    lhs: &mbe::TokenTree,
) -> Result<(), ErrorGuaranteed> {
    if let mbe::TokenTree::Delimited(.., delimited) = lhs {
        check_matcher(sess, node_id, &delimited.tts)
    } else {
        let msg = "invalid macro matcher; matchers must be contained in balanced delimiters";
        Err(sess.dcx().span_err(lhs.span(), msg))
    }
}

fn is_empty_token_tree(sess: &Session, seq: &mbe::SequenceRepetition) -> bool {
    if seq.separator.is_some() {
        false
    } else {
        let mut is_empty = true;
        let mut iter = seq.tts.iter().peekable();
        while let Some(tt) = iter.next() {
            match tt {
                mbe::TokenTree::MetaVarDecl { kind: NonterminalKind::Vis, .. } => {}
                mbe::TokenTree::Token(t @ Token { kind: DocComment(..), .. }) => {
                    let mut now = t;
                    while let Some(&mbe::TokenTree::Token(
                        next @ Token { kind: DocComment(..), .. },
                    )) = iter.peek()
                    {
                        now = next;
                        iter.next();
                    }
                    let span = t.span.to(now.span);
                    sess.dcx().span_note(span, "doc comments are ignored in matcher position");
                }
                mbe::TokenTree::Sequence(_, sub_seq)
                    if (sub_seq.kleene.op == mbe::KleeneOp::ZeroOrMore
                        || sub_seq.kleene.op == mbe::KleeneOp::ZeroOrOne) => {}
                _ => is_empty = false,
            }
        }
        is_empty
    }
}

fn check_redundant_vis_repetition(
    err: &mut Diag<'_>,
    sess: &Session,
    seq: &SequenceRepetition,
    span: &DelimSpan,
) {
    if seq.kleene.op == KleeneOp::ZeroOrOne
        && matches!(
            seq.tts.first(),
            Some(mbe::TokenTree::MetaVarDecl { kind: NonterminalKind::Vis, .. })
        )
    {
        err.note("a `vis` fragment can already be empty");
        err.multipart_suggestion(
            "remove the `$(` and `)?`",
            vec![
                (
                    sess.source_map().span_extend_to_prev_char_before(span.open, '$', true),
                    "".to_string(),
                ),
                (span.close.with_hi(seq.kleene.span.hi()), "".to_string()),
            ],
            Applicability::MaybeIncorrect,
        );
    }
}

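/// Checks (recursively) that the LHS contains no repetition that could match
/// an empty token tree, since such a repetition is ill-formed.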
fn check_lhs_no_empty_seq(sess: &Session, tts: &[mbe::TokenTree]) -> Result<(), ErrorGuaranteed> {
    use mbe::TokenTree;
    for tt in tts {
        match tt {
            TokenTree::Token(..)
            | TokenTree::MetaVar(..)
            | TokenTree::MetaVarDecl { .. }
            | TokenTree::MetaVarExpr(..) => (),
            TokenTree::Delimited(.., del) => check_lhs_no_empty_seq(sess, &del.tts)?,
            TokenTree::Sequence(span, seq) => {
                if is_empty_token_tree(sess, seq) {
                    let sp = span.entire();
                    let mut err =
                        sess.dcx().struct_span_err(sp, "repetition matches empty token tree");
                    check_redundant_vis_repetition(&mut err, sess, seq, span);
                    return Err(err.emit());
                }
                check_lhs_no_empty_seq(sess, &seq.tts)?
            }
        }
    }

    Ok(())
}

fn check_rhs(sess: &Session, rhs: &mbe::TokenTree) -> Result<(), ErrorGuaranteed> {
    match *rhs {
        mbe::TokenTree::Delimited(..) => Ok(()),
        _ => Err(sess.dcx().span_err(rhs.span(), "macro rhs must be delimited")),
    }
}

fn check_matcher(
    sess: &Session,
    node_id: NodeId,
    matcher: &[mbe::TokenTree],
) -> Result<(), ErrorGuaranteed> {
    let first_sets = FirstSets::new(matcher);
    let empty_suffix = TokenSet::empty();
    check_matcher_core(sess, node_id, &first_sets, matcher, &empty_suffix)?;
    Ok(())
}

fn has_compile_error_macro(rhs: &mbe::TokenTree) -> bool {
    match rhs {
        mbe::TokenTree::Delimited(.., d) => {
            let has_compile_error = d.tts.array_windows::<3>().any(|[ident, bang, args]| {
                if let mbe::TokenTree::Token(ident) = ident
                    && let TokenKind::Ident(ident, _) = ident.kind
                    && ident == sym::compile_error
                    && let mbe::TokenTree::Token(bang) = bang
                    && let TokenKind::Bang = bang.kind
                    && let mbe::TokenTree::Delimited(.., del) = args
                    && !del.delim.skip()
                {
                    true
                } else {
                    false
                }
            });
            if has_compile_error { true } else { d.tts.iter().any(has_compile_error_macro) }
        }
        _ => false,
    }
}

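/// `FirstSets` maps the span of each `Sequence` in a matcher to the FIRST set
/// of its contents: the tokens that can begin a match of that sequence, with
/// `maybe_empty` recording whether it can match nothing at all. An entry of
/// `None` marks a span shared by several sequences, whose set must instead be
/// recomputed on demand by [`FirstSets::first`].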
struct FirstSets<'tt> {
    first: FxHashMap<Span, Option<TokenSet<'tt>>>,
}

impl<'tt> FirstSets<'tt> {
    fn new(tts: &'tt [mbe::TokenTree]) -> FirstSets<'tt> {
        use mbe::TokenTree;

        let mut sets = FirstSets { first: FxHashMap::default() };
        build_recur(&mut sets, tts);
        return sets;

        fn build_recur<'tt>(sets: &mut FirstSets<'tt>, tts: &'tt [TokenTree]) -> TokenSet<'tt> {
            let mut first = TokenSet::empty();
            for tt in tts.iter().rev() {
                match tt {
                    TokenTree::Token(..)
                    | TokenTree::MetaVar(..)
                    | TokenTree::MetaVarDecl { .. }
                    | TokenTree::MetaVarExpr(..) => {
                        first.replace_with(TtHandle::TtRef(tt));
                    }
                    TokenTree::Delimited(span, _, delimited) => {
                        build_recur(sets, &delimited.tts);
                        first.replace_with(TtHandle::from_token_kind(
                            delimited.delim.as_open_token_kind(),
                            span.open,
                        ));
                    }
                    TokenTree::Sequence(sp, seq_rep) => {
                        let subfirst = build_recur(sets, &seq_rep.tts);

                        match sets.first.entry(sp.entire()) {
                            Entry::Vacant(vac) => {
                                vac.insert(Some(subfirst.clone()));
                            }
                            Entry::Occupied(mut occ) => {
                                occ.insert(None);
                            }
                        }

                        if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
                            first.add_one_maybe(TtHandle::from_token(*sep));
                        }

                        if subfirst.maybe_empty
                            || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrMore
                            || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrOne
                        {
                            first.add_all(&TokenSet { maybe_empty: true, ..subfirst });
                        } else {
                            first = subfirst;
                        }
                    }
                }
            }

            first
        }
    }

    fn first(&self, tts: &'tt [mbe::TokenTree]) -> TokenSet<'tt> {
        use mbe::TokenTree;

        let mut first = TokenSet::empty();
        for tt in tts.iter() {
            assert!(first.maybe_empty);
            match tt {
                TokenTree::Token(..)
                | TokenTree::MetaVar(..)
                | TokenTree::MetaVarDecl { .. }
                | TokenTree::MetaVarExpr(..) => {
                    first.add_one(TtHandle::TtRef(tt));
                    return first;
                }
                TokenTree::Delimited(span, _, delimited) => {
                    first.add_one(TtHandle::from_token_kind(
                        delimited.delim.as_open_token_kind(),
                        span.open,
                    ));
                    return first;
                }
                TokenTree::Sequence(sp, seq_rep) => {
                    let subfirst_owned;
                    let subfirst = match self.first.get(&sp.entire()) {
                        Some(Some(subfirst)) => subfirst,
                        Some(&None) => {
                            subfirst_owned = self.first(&seq_rep.tts);
                            &subfirst_owned
                        }
                        None => {
                            panic!("We missed a sequence during FirstSets construction");
                        }
                    };

                    if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
                        first.add_one_maybe(TtHandle::from_token(*sep));
                    }

                    assert!(first.maybe_empty);
                    first.add_all(subfirst);
                    if subfirst.maybe_empty
                        || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrMore
                        || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrOne
                    {
                        first.maybe_empty = true;
                        continue;
                    } else {
                        return first;
                    }
                }
            }
        }

        assert!(first.maybe_empty);
        first
    }
}

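/// A handle to a token tree that is either borrowed from a matcher or owned,
/// for tokens synthesized during the analysis (e.g. delimiter tokens);
/// [`TtHandle::get`] gives a uniform reference to either.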
#[derive(Debug)]
enum TtHandle<'tt> {
    TtRef(&'tt mbe::TokenTree),

    Token(mbe::TokenTree),
}

impl<'tt> TtHandle<'tt> {
    fn from_token(tok: Token) -> Self {
        TtHandle::Token(mbe::TokenTree::Token(tok))
    }

    fn from_token_kind(kind: TokenKind, span: Span) -> Self {
        TtHandle::from_token(Token::new(kind, span))
    }

    fn get(&'tt self) -> &'tt mbe::TokenTree {
        match self {
            TtHandle::TtRef(tt) => tt,
            TtHandle::Token(token_tt) => token_tt,
        }
    }
}

impl<'tt> PartialEq for TtHandle<'tt> {
    fn eq(&self, other: &TtHandle<'tt>) -> bool {
        self.get() == other.get()
    }
}

impl<'tt> Clone for TtHandle<'tt> {
    fn clone(&self) -> Self {
        match self {
            TtHandle::TtRef(tt) => TtHandle::TtRef(tt),

            TtHandle::Token(mbe::TokenTree::Token(tok)) => {
                TtHandle::Token(mbe::TokenTree::Token(*tok))
            }

            _ => unreachable!(),
        }
    }
}

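/// A set of token trees together with a `maybe_empty` flag recording whether
/// the matcher region it describes can match the empty token sequence.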
#[derive(Clone, Debug)]
struct TokenSet<'tt> {
    tokens: Vec<TtHandle<'tt>>,
    maybe_empty: bool,
}

impl<'tt> TokenSet<'tt> {
    fn empty() -> Self {
        TokenSet { tokens: Vec::new(), maybe_empty: true }
    }

    fn singleton(tt: TtHandle<'tt>) -> Self {
        TokenSet { tokens: vec![tt], maybe_empty: false }
    }

    fn replace_with(&mut self, tt: TtHandle<'tt>) {
        self.tokens.clear();
        self.tokens.push(tt);
        self.maybe_empty = false;
    }

    fn replace_with_irrelevant(&mut self) {
        self.tokens.clear();
        self.maybe_empty = false;
    }

    fn add_one(&mut self, tt: TtHandle<'tt>) {
        if !self.tokens.contains(&tt) {
            self.tokens.push(tt);
        }
        self.maybe_empty = false;
    }

    fn add_one_maybe(&mut self, tt: TtHandle<'tt>) {
        if !self.tokens.contains(&tt) {
            self.tokens.push(tt);
        }
    }

    fn add_all(&mut self, other: &Self) {
        for tt in &other.tokens {
            if !self.tokens.contains(tt) {
                self.tokens.push(tt.clone());
            }
        }
        if !other.maybe_empty {
            self.maybe_empty = false;
        }
    }
}

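/// Walks `matcher`, checking every metavariable declaration against the FOLLOW
/// rules for its fragment kind. `follow` is the set of tokens that may come
/// after the whole matcher; the returned set is the tokens that can end it.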
fn check_matcher_core<'tt>(
    sess: &Session,
    node_id: NodeId,
    first_sets: &FirstSets<'tt>,
    matcher: &'tt [mbe::TokenTree],
    follow: &TokenSet<'tt>,
) -> Result<TokenSet<'tt>, ErrorGuaranteed> {
    use mbe::TokenTree;

    let mut last = TokenSet::empty();

    let mut errored = Ok(());

    'each_token: for i in 0..matcher.len() {
        let token = &matcher[i];
        let suffix = &matcher[i + 1..];

        let build_suffix_first = || {
            let mut s = first_sets.first(suffix);
            if s.maybe_empty {
                s.add_all(follow);
            }
            s
        };

        let suffix_first;

        match token {
            TokenTree::Token(..)
            | TokenTree::MetaVar(..)
            | TokenTree::MetaVarDecl { .. }
            | TokenTree::MetaVarExpr(..) => {
                if token_can_be_followed_by_any(token) {
                    last.replace_with_irrelevant();
                    continue 'each_token;
                } else {
                    last.replace_with(TtHandle::TtRef(token));
                    suffix_first = build_suffix_first();
                }
            }
            TokenTree::Delimited(span, _, d) => {
                let my_suffix = TokenSet::singleton(TtHandle::from_token_kind(
                    d.delim.as_close_token_kind(),
                    span.close,
                ));
                check_matcher_core(sess, node_id, first_sets, &d.tts, &my_suffix)?;
                last.replace_with_irrelevant();

                continue 'each_token;
            }
            TokenTree::Sequence(_, seq_rep) => {
                suffix_first = build_suffix_first();
                let mut new;
                let my_suffix = if let Some(sep) = &seq_rep.separator {
                    new = suffix_first.clone();
                    new.add_one_maybe(TtHandle::from_token(*sep));
                    &new
                } else {
                    &suffix_first
                };

                let next = check_matcher_core(sess, node_id, first_sets, &seq_rep.tts, my_suffix)?;
                if next.maybe_empty {
                    last.add_all(&next);
                } else {
                    last = next;
                }

                continue 'each_token;
            }
        }

        for tt in &last.tokens {
            if let &TokenTree::MetaVarDecl { span, name, kind } = tt.get() {
                for next_token in &suffix_first.tokens {
                    let next_token = next_token.get();

                    if is_defined_in_current_crate(node_id)
                        && matches!(kind, NonterminalKind::Pat(PatParam { inferred: true }))
                        && matches!(
                            next_token,
                            TokenTree::Token(token) if *token == token::Or
                        )
                    {
                        let suggestion = quoted_tt_to_string(&TokenTree::MetaVarDecl {
                            span,
                            name,
                            kind: NonterminalKind::Pat(PatParam { inferred: false }),
                        });
                        sess.psess.buffer_lint(
                            RUST_2021_INCOMPATIBLE_OR_PATTERNS,
                            span,
                            ast::CRATE_NODE_ID,
                            errors::OrPatternsBackCompat { span, suggestion },
                        );
                    }
                    match is_in_follow(next_token, kind) {
                        IsInFollow::Yes => {}
                        IsInFollow::No(possible) => {
                            let may_be = if last.tokens.len() == 1 && suffix_first.tokens.len() == 1
                            {
                                "is"
                            } else {
                                "may be"
                            };

                            let sp = next_token.span();
                            let mut err = sess.dcx().struct_span_err(
                                sp,
                                format!(
                                    "`${name}:{frag}` {may_be} followed by `{next}`, which \
                                     is not allowed for `{frag}` fragments",
                                    name = name,
                                    frag = kind,
                                    next = quoted_tt_to_string(next_token),
                                    may_be = may_be
                                ),
                            );
                            err.span_label(sp, format!("not allowed after `{kind}` fragments"));

                            if kind == NonterminalKind::Pat(PatWithOr)
                                && sess.psess.edition.at_least_rust_2021()
                                && next_token.is_token(&token::Or)
                            {
                                let suggestion = quoted_tt_to_string(&TokenTree::MetaVarDecl {
                                    span,
                                    name,
                                    kind: NonterminalKind::Pat(PatParam { inferred: false }),
                                });
                                err.span_suggestion(
                                    span,
                                    "try a `pat_param` fragment specifier instead",
                                    suggestion,
                                    Applicability::MaybeIncorrect,
                                );
                            }

                            let msg = "allowed there are: ";
                            match possible {
                                &[] => {}
                                &[t] => {
                                    err.note(format!(
                                        "only {t} is allowed after `{kind}` fragments",
                                    ));
                                }
                                ts => {
                                    err.note(format!(
                                        "{}{} or {}",
                                        msg,
                                        ts[..ts.len() - 1].to_vec().join(", "),
                                        ts[ts.len() - 1],
                                    ));
                                }
                            }
                            errored = Err(err.emit());
                        }
                    }
                }
            }
        }
    }
    errored?;
    Ok(last)
}

fn token_can_be_followed_by_any(tok: &mbe::TokenTree) -> bool {
    if let mbe::TokenTree::MetaVarDecl { kind, .. } = *tok {
        frag_can_be_followed_by_any(kind)
    } else {
        true
    }
}

fn frag_can_be_followed_by_any(kind: NonterminalKind) -> bool {
    matches!(
        kind,
        NonterminalKind::Item
            | NonterminalKind::Block
            | NonterminalKind::Ident
            | NonterminalKind::Literal
            | NonterminalKind::Meta
            | NonterminalKind::Lifetime
            | NonterminalKind::TT
    )
}

enum IsInFollow {
    Yes,
    No(&'static [&'static str]),
}

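/// Decides whether `tok` is in the FOLLOW set of a fragment of kind `kind`,
/// i.e. whether it may legally come after such a fragment; on rejection, the
/// error payload lists the tokens that would be allowed.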
fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow {
    use mbe::TokenTree;

    if let TokenTree::Token(Token { kind, .. }) = tok
        && kind.close_delim().is_some()
    {
        IsInFollow::Yes
    } else {
        match kind {
            NonterminalKind::Item => {
                IsInFollow::Yes
            }
            NonterminalKind::Block => {
                IsInFollow::Yes
            }
            NonterminalKind::Stmt | NonterminalKind::Expr(_) => {
                const TOKENS: &[&str] = &["`=>`", "`,`", "`;`"];
                match tok {
                    TokenTree::Token(token) => match token.kind {
                        FatArrow | Comma | Semi => IsInFollow::Yes,
                        _ => IsInFollow::No(TOKENS),
                    },
                    _ => IsInFollow::No(TOKENS),
                }
            }
            NonterminalKind::Pat(PatParam { .. }) => {
                const TOKENS: &[&str] = &["`=>`", "`,`", "`=`", "`|`", "`if`", "`in`"];
                match tok {
                    TokenTree::Token(token) => match token.kind {
                        FatArrow | Comma | Eq | Or => IsInFollow::Yes,
                        Ident(name, IdentIsRaw::No) if name == kw::If || name == kw::In => {
                            IsInFollow::Yes
                        }
                        _ => IsInFollow::No(TOKENS),
                    },
                    _ => IsInFollow::No(TOKENS),
                }
            }
            NonterminalKind::Pat(PatWithOr) => {
                const TOKENS: &[&str] = &["`=>`", "`,`", "`=`", "`if`", "`in`"];
                match tok {
                    TokenTree::Token(token) => match token.kind {
                        FatArrow | Comma | Eq => IsInFollow::Yes,
                        Ident(name, IdentIsRaw::No) if name == kw::If || name == kw::In => {
                            IsInFollow::Yes
                        }
                        _ => IsInFollow::No(TOKENS),
                    },
                    _ => IsInFollow::No(TOKENS),
                }
            }
            NonterminalKind::Path | NonterminalKind::Ty => {
                const TOKENS: &[&str] = &[
                    "`{`", "`[`", "`=>`", "`,`", "`>`", "`=`", "`:`", "`;`", "`|`", "`as`",
                    "`where`",
                ];
                match tok {
                    TokenTree::Token(token) => match token.kind {
                        OpenBrace | OpenBracket | Comma | FatArrow | Colon | Eq | Gt | Shr
                        | Semi | Or => IsInFollow::Yes,
                        Ident(name, IdentIsRaw::No) if name == kw::As || name == kw::Where => {
                            IsInFollow::Yes
                        }
                        _ => IsInFollow::No(TOKENS),
                    },
                    TokenTree::MetaVarDecl { kind: NonterminalKind::Block, .. } => IsInFollow::Yes,
                    _ => IsInFollow::No(TOKENS),
                }
            }
            NonterminalKind::Ident | NonterminalKind::Lifetime => {
                IsInFollow::Yes
            }
            NonterminalKind::Literal => {
                IsInFollow::Yes
            }
            NonterminalKind::Meta | NonterminalKind::TT => {
                IsInFollow::Yes
            }
            NonterminalKind::Vis => {
                const TOKENS: &[&str] = &["`,`", "an ident", "a type"];
                match tok {
                    TokenTree::Token(token) => match token.kind {
                        Comma => IsInFollow::Yes,
                        Ident(_, IdentIsRaw::Yes) => IsInFollow::Yes,
                        Ident(name, _) if name != kw::Priv => IsInFollow::Yes,
                        _ => {
                            if token.can_begin_type() {
                                IsInFollow::Yes
                            } else {
                                IsInFollow::No(TOKENS)
                            }
                        }
                    },
                    TokenTree::MetaVarDecl {
                        kind: NonterminalKind::Ident | NonterminalKind::Ty | NonterminalKind::Path,
                        ..
                    } => IsInFollow::Yes,
                    _ => IsInFollow::No(TOKENS),
                }
            }
        }
    }
}

fn quoted_tt_to_string(tt: &mbe::TokenTree) -> String {
    match tt {
        mbe::TokenTree::Token(token) => pprust::token_to_string(token).into(),
        mbe::TokenTree::MetaVar(_, name) => format!("${name}"),
        mbe::TokenTree::MetaVarDecl { name, kind, .. } => format!("${name}:{kind}"),
        _ => panic!(
            "{}",
            "unexpected mbe::TokenTree::{Sequence or Delimited} \
             in follow set checker"
        ),
    }
}

fn is_defined_in_current_crate(node_id: NodeId) -> bool {
    node_id != DUMMY_NODE_ID
}

pub(super) fn parser_from_cx(
    psess: &ParseSess,
    mut tts: TokenStream,
    recovery: Recovery,
) -> Parser<'_> {
    tts.desugar_doc_comments();
    Parser::new(psess, tts, rustc_parse::MACRO_ARGUMENTS).recovery(recovery)
}