use std::borrow::Cow;
use std::collections::hash_map::Entry;
use std::sync::Arc;
use std::{mem, slice};

use ast::token::IdentIsRaw;
use rustc_ast::token::NtPatKind::*;
use rustc_ast::token::TokenKind::*;
use rustc_ast::token::{self, Delimiter, NonterminalKind, Token, TokenKind};
use rustc_ast::tokenstream::{self, DelimSpan, TokenStream};
use rustc_ast::{self as ast, DUMMY_NODE_ID, NodeId, Safety};
use rustc_ast_pretty::pprust;
use rustc_data_structures::fx::{FxHashMap, FxIndexMap};
use rustc_errors::{Applicability, Diag, ErrorGuaranteed, MultiSpan};
use rustc_feature::Features;
use rustc_hir as hir;
use rustc_hir::attrs::AttributeKind;
use rustc_hir::def::MacroKinds;
use rustc_hir::find_attr;
use rustc_lint_defs::builtin::{
    RUST_2021_INCOMPATIBLE_OR_PATTERNS, SEMICOLON_IN_EXPRESSIONS_FROM_MACROS,
};
use rustc_parse::exp;
use rustc_parse::parser::{Parser, Recovery};
use rustc_session::Session;
use rustc_session::parse::{ParseSess, feature_err};
use rustc_span::edition::Edition;
use rustc_span::hygiene::Transparency;
use rustc_span::{Ident, Span, Symbol, kw, sym};
use tracing::{debug, instrument, trace, trace_span};

use super::diagnostics::{FailedMacro, failed_to_match_macro};
use super::macro_parser::{NamedMatches, NamedParseResult};
use super::{SequenceRepetition, diagnostics};
use crate::base::{
    AttrProcMacro, BangProcMacro, DummyResult, ExpandResult, ExtCtxt, MacResult,
    MacroExpanderResult, SyntaxExtension, SyntaxExtensionKind, TTMacroExpander,
};
use crate::errors;
use crate::expand::{AstFragment, AstFragmentKind, ensure_complete_parse, parse_ast_fragment};
use crate::mbe::macro_check::check_meta_variables;
use crate::mbe::macro_parser::{Error, ErrorReported, Failure, MatcherLoc, Success, TtParser};
use crate::mbe::quoted::{RulePart, parse_one_tt};
use crate::mbe::transcribe::transcribe;
use crate::mbe::{self, KleeneOp};

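/// The result of a successful `macro_rules!` expansion: a parser positioned at
/// the expanded tokens, plus enough context to parse them into the right kind
/// of AST fragment and to report useful errors if that parse fails.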
pub(crate) struct ParserAnyMacro<'a> {
    parser: Parser<'a>,

    site_span: Span,
    macro_ident: Ident,
    lint_node_id: NodeId,
    is_trailing_mac: bool,
    arm_span: Span,
    is_local: bool,
}

impl<'a> ParserAnyMacro<'a> {
    pub(crate) fn make(mut self: Box<ParserAnyMacro<'a>>, kind: AstFragmentKind) -> AstFragment {
        let ParserAnyMacro {
            site_span,
            macro_ident,
            ref mut parser,
            lint_node_id,
            arm_span,
            is_trailing_mac,
            is_local,
        } = *self;
        let snapshot = &mut parser.create_snapshot_for_diagnostic();
        let fragment = match parse_ast_fragment(parser, kind) {
            Ok(f) => f,
            Err(err) => {
                let guar = diagnostics::emit_frag_parse_err(
                    err, parser, snapshot, site_span, arm_span, kind,
                );
                return kind.dummy(site_span, guar);
            }
        };

        // We allow semicolons at the end of expressions -- e.g., the semicolon in
        // `macro_rules! m { () => { panic!(); } }` isn't parsed by `.parse_expr()`,
        // but `m!()` is allowed in expression positions.
        if kind == AstFragmentKind::Expr && parser.token == token::Semi {
            if is_local {
                parser.psess.buffer_lint(
                    SEMICOLON_IN_EXPRESSIONS_FROM_MACROS,
                    parser.token.span,
                    lint_node_id,
                    errors::TrailingMacro { is_trailing: is_trailing_mac, name: macro_ident },
                );
            }
            parser.bump();
        }

        // Make sure we don't have any tokens left to parse so we don't silently drop anything.
        let path = ast::Path::from_ident(macro_ident.with_span_pos(site_span));
        ensure_complete_parse(parser, &path, kind.name(), site_span);
        fragment
    }

    #[instrument(skip(cx, tts))]
    pub(crate) fn from_tts<'cx>(
        cx: &'cx mut ExtCtxt<'a>,
        tts: TokenStream,
        site_span: Span,
        arm_span: Span,
        is_local: bool,
        macro_ident: Ident,
    ) -> Self {
        Self {
            parser: Parser::new(&cx.sess.psess, tts, None),

            // Pass along the original expansion site and the name of the macro,
            // so we can print a useful error message if the parse of the expanded
            // macro leaves unparsed tokens.
            site_span,
            macro_ident,
            lint_node_id: cx.current_expansion.lint_node_id,
            is_trailing_mac: cx.current_expansion.is_trailing_mac,
            arm_span,
            is_local,
        }
    }
}

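/// A single rule in a macro definition, with its matchers in `MatcherLoc`
/// form. Besides ordinary function-like rules, the unstable `macro_attr` and
/// `macro_derive` features allow `attr` and `derive` rules. An illustrative
/// sketch of the surface syntax (unstable, subject to change):
///
/// ```ignore (illustrative)
/// macro_rules! m {
///     ($e:expr) => { /* ... */ };                       // MacroRule::Func
///     attr($($args:tt)*) ($body:item) => { /* ... */ }; // MacroRule::Attr
///     derive() ($body:item) => { /* ... */ };           // MacroRule::Derive
/// }
/// ```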
pub(super) enum MacroRule {
    /// A rule for a function-style macro invocation, `mac!(...)`.
    Func { lhs: Vec<MatcherLoc>, lhs_span: Span, rhs: mbe::TokenTree },
    /// A rule for an attribute macro, `#[mac(...)]`, declared with an `attr(...)` matcher.
    Attr {
        /// Whether the rule was declared with the `unsafe` keyword.
        unsafe_rule: bool,
        args: Vec<MatcherLoc>,
        args_span: Span,
        body: Vec<MatcherLoc>,
        body_span: Span,
        rhs: mbe::TokenTree,
    },
    /// A rule for a derive macro, `#[derive(mac)]`, declared with a `derive()` matcher.
    Derive { body: Vec<MatcherLoc>, body_span: Span, rhs: mbe::TokenTree },
}

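/// The expander for a `macro_rules!` macro, shared by all of the macro's
/// invocations; it holds the compiled rules and dispatches function-like,
/// attribute, and derive expansions to the matching rule kind.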
pub struct MacroRulesMacroExpander {
    node_id: NodeId,
    name: Ident,
    span: Span,
    transparency: Transparency,
    kinds: MacroKinds,
    rules: Vec<MacroRule>,
}

impl MacroRulesMacroExpander {
    /// Returns the name and matcher span(s) of a rule, for use by the
    /// `unused_macro_rules` lint; returns `None` if the rule expands to
    /// `compile_error!`, since such rules are deliberately never matched.
    pub fn get_unused_rule(&self, rule_i: usize) -> Option<(&Ident, MultiSpan)> {
        let (span, rhs) = match self.rules[rule_i] {
            MacroRule::Func { lhs_span, ref rhs, .. } => (MultiSpan::from_span(lhs_span), rhs),
            MacroRule::Attr { args_span, body_span, ref rhs, .. } => {
                (MultiSpan::from_spans(vec![args_span, body_span]), rhs)
            }
            MacroRule::Derive { body_span, ref rhs, .. } => (MultiSpan::from_span(body_span), rhs),
        };
        if has_compile_error_macro(rhs) { None } else { Some((&self.name, span)) }
    }

    pub fn kinds(&self) -> MacroKinds {
        self.kinds
    }

    pub fn expand_derive(
        &self,
        cx: &mut ExtCtxt<'_>,
        sp: Span,
        body: &TokenStream,
    ) -> Result<TokenStream, ErrorGuaranteed> {
        let Self { name, ref rules, node_id, .. } = *self;
        let psess = &cx.sess.psess;

        if cx.trace_macros() {
            let msg = format!("expanding `#[derive({name})] {}`", pprust::tts_to_string(body));
            trace_macros_note(&mut cx.expansions, sp, msg);
        }

        match try_match_macro_derive(psess, name, body, rules, &mut NoopTracker) {
            Ok((rule_index, rule, named_matches)) => {
                let MacroRule::Derive { rhs, .. } = rule else {
                    panic!("try_match_macro_derive returned non-derive rule");
                };
                let mbe::TokenTree::Delimited(rhs_span, _, rhs) = rhs else {
                    cx.dcx().span_bug(sp, "malformed macro derive rhs");
                };

                let id = cx.current_expansion.id;
                let tts = transcribe(psess, &named_matches, rhs, *rhs_span, self.transparency, id)
                    .map_err(|e| e.emit())?;

                if cx.trace_macros() {
                    let msg = format!("to `{}`", pprust::tts_to_string(&tts));
                    trace_macros_note(&mut cx.expansions, sp, msg);
                }

                if is_defined_in_current_crate(node_id) {
                    cx.resolver.record_macro_rule_usage(node_id, rule_index);
                }

                Ok(tts)
            }
            Err(CanRetry::No(guar)) => Err(guar),
            Err(CanRetry::Yes) => {
                // Retry the match on the slow path to collect diagnostics.
                let (_, guar) = failed_to_match_macro(
                    cx.psess(),
                    sp,
                    self.span,
                    name,
                    FailedMacro::Derive,
                    body,
                    rules,
                );
                cx.macro_error_and_trace_macros_diag();
                Err(guar)
            }
        }
    }
}

impl TTMacroExpander for MacroRulesMacroExpander {
    fn expand<'cx>(
        &self,
        cx: &'cx mut ExtCtxt<'_>,
        sp: Span,
        input: TokenStream,
    ) -> MacroExpanderResult<'cx> {
        ExpandResult::Ready(expand_macro(
            cx,
            sp,
            self.span,
            self.node_id,
            self.name,
            self.transparency,
            input,
            &self.rules,
        ))
    }
}

impl AttrProcMacro for MacroRulesMacroExpander {
    fn expand(
        &self,
        _cx: &mut ExtCtxt<'_>,
        _sp: Span,
        _args: TokenStream,
        _body: TokenStream,
    ) -> Result<TokenStream, ErrorGuaranteed> {
        unreachable!("`expand` called on `MacroRulesMacroExpander`, expected `expand_with_safety`")
    }

    fn expand_with_safety(
        &self,
        cx: &mut ExtCtxt<'_>,
        safety: Safety,
        sp: Span,
        args: TokenStream,
        body: TokenStream,
    ) -> Result<TokenStream, ErrorGuaranteed> {
        expand_macro_attr(
            cx,
            sp,
            self.span,
            self.node_id,
            self.name,
            self.transparency,
            safety,
            args,
            body,
            &self.rules,
        )
    }
}

/// A bang macro that does nothing but report an already-emitted error; used
/// when compiling a macro definition fails.
struct DummyBang(ErrorGuaranteed);

impl BangProcMacro for DummyBang {
    fn expand<'cx>(
        &self,
        _: &'cx mut ExtCtxt<'_>,
        _: Span,
        _: TokenStream,
    ) -> Result<TokenStream, ErrorGuaranteed> {
        Err(self.0)
    }
}

/// Records a `trace_macros` note, keyed by the outermost expansion's call site
/// so that nested expansions are grouped together.
fn trace_macros_note(cx_expansions: &mut FxIndexMap<Span, Vec<String>>, sp: Span, message: String) {
    let sp = sp.macro_backtrace().last().map_or(sp, |trace| trace.call_site);
    cx_expansions.entry(sp).or_default().push(message);
}

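/// A tracker that is invoked at key points during matching. A noop tracker is
/// used for the fast initial match, and a diagnostic tracker is used on the
/// slow retry path to collect information for error reporting.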
pub(super) trait Tracker<'matcher> {
    /// The contents of `ParseResult::Failure`.
    type Failure;

    /// Arm failed to match. If the token is `token::Eof`, it indicates an unexpected
    /// end of macro invocation. Otherwise, it indicates that we expected the token.
    fn build_failure(tok: Token, position: u32, msg: &'static str) -> Self::Failure;

    /// This is called before trying to match the next `MatcherLoc` on the current token.
    fn before_match_loc(&mut self, _parser: &TtParser, _matcher: &'matcher MatcherLoc) {}

    /// This is called after an arm has been parsed, either successfully or unsuccessfully.
    fn after_arm(&mut self, _in_body: bool, _result: &NamedParseResult<Self::Failure>) {}

    /// For tracing.
    fn description() -> &'static str;

    fn recovery() -> Recovery {
        Recovery::Forbidden
    }
}

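/// A tracker that records nothing; used on the macro expansion hot path.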
pub(super) struct NoopTracker;

impl<'matcher> Tracker<'matcher> for NoopTracker {
    type Failure = ();

    fn build_failure(_tok: Token, _position: u32, _msg: &'static str) -> Self::Failure {}

    fn description() -> &'static str {
        "none"
    }
}

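/// Expands the `rules`-based macro `name` with the given input `arg`, as a
/// function-like (bang) macro invocation.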
#[instrument(skip(cx, transparency, arg, rules))]
fn expand_macro<'cx>(
    cx: &'cx mut ExtCtxt<'_>,
    sp: Span,
    def_span: Span,
    node_id: NodeId,
    name: Ident,
    transparency: Transparency,
    arg: TokenStream,
    rules: &[MacroRule],
) -> Box<dyn MacResult + 'cx> {
    let psess = &cx.sess.psess;

    if cx.trace_macros() {
        let msg = format!("expanding `{}! {{ {} }}`", name, pprust::tts_to_string(&arg));
        trace_macros_note(&mut cx.expansions, sp, msg);
    }

    // Track nothing for the best performance.
    let try_success_result = try_match_macro(psess, name, &arg, rules, &mut NoopTracker);

    match try_success_result {
        Ok((rule_index, rule, named_matches)) => {
            let MacroRule::Func { rhs, .. } = rule else {
                panic!("try_match_macro returned non-func rule");
            };
            let mbe::TokenTree::Delimited(rhs_span, _, rhs) = rhs else {
                cx.dcx().span_bug(sp, "malformed macro rhs");
            };
            let arm_span = rhs_span.entire();

            // The rhs contains "holes" (`$id` and `$(...)`) that need filling in.
            let id = cx.current_expansion.id;
            let tts = match transcribe(psess, &named_matches, rhs, *rhs_span, transparency, id) {
                Ok(tts) => tts,
                Err(err) => {
                    let guar = err.emit();
                    return DummyResult::any(arm_span, guar);
                }
            };

            if cx.trace_macros() {
                let msg = format!("to `{}`", pprust::tts_to_string(&tts));
                trace_macros_note(&mut cx.expansions, sp, msg);
            }

            let is_local = is_defined_in_current_crate(node_id);
            if is_local {
                cx.resolver.record_macro_rule_usage(node_id, rule_index);
            }

            // Let the context choose how to interpret the result.
            Box::new(ParserAnyMacro::from_tts(cx, tts, sp, arm_span, is_local, name))
        }
        Err(CanRetry::No(guar)) => {
            debug!("Will not retry matching as an error was emitted already");
            DummyResult::any(sp, guar)
        }
        Err(CanRetry::Yes) => {
            // Retry and emit a better error.
            let (span, guar) = failed_to_match_macro(
                cx.psess(),
                sp,
                def_span,
                name,
                FailedMacro::Func,
                &arg,
                rules,
            );
            cx.macro_error_and_trace_macros_diag();
            DummyResult::any(span, guar)
        }
    }
}

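/// Expands the `rules`-based macro `name` as an attribute, with `args` coming
/// from the attribute itself and `body` being the annotated item.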
#[instrument(skip(cx, transparency, args, body, rules))]
fn expand_macro_attr(
    cx: &mut ExtCtxt<'_>,
    sp: Span,
    def_span: Span,
    node_id: NodeId,
    name: Ident,
    transparency: Transparency,
    safety: Safety,
    args: TokenStream,
    body: TokenStream,
    rules: &[MacroRule],
) -> Result<TokenStream, ErrorGuaranteed> {
    let psess = &cx.sess.psess;
    // Macros defined in the current crate have a real node id,
    // whereas macros from an external crate have a dummy id.
    let is_local = node_id != DUMMY_NODE_ID;

    if cx.trace_macros() {
        let msg = format!(
            "expanding `#[{name}({})] {}`",
            pprust::tts_to_string(&args),
            pprust::tts_to_string(&body),
        );
        trace_macros_note(&mut cx.expansions, sp, msg);
    }

    // Track nothing for the best performance.
    match try_match_macro_attr(psess, name, &args, &body, rules, &mut NoopTracker) {
        Ok((i, rule, named_matches)) => {
            let MacroRule::Attr { rhs, unsafe_rule, .. } = rule else {
                panic!("try_match_macro_attr returned non-attr rule");
            };
            let mbe::TokenTree::Delimited(rhs_span, _, rhs) = rhs else {
                cx.dcx().span_bug(sp, "malformed macro rhs");
            };

            // Check that the invocation's safety matches the rule's.
            match (safety, unsafe_rule) {
                (Safety::Default, false) | (Safety::Unsafe(_), true) => {}
                (Safety::Default, true) => {
                    cx.dcx().span_err(sp, "unsafe attribute invocation requires `unsafe`");
                }
                (Safety::Unsafe(span), false) => {
                    cx.dcx().span_err(span, "unnecessary `unsafe` on safe attribute invocation");
                }
                (Safety::Safe(span), _) => {
                    cx.dcx().span_bug(span, "unexpected `safe` keyword");
                }
            }

            let id = cx.current_expansion.id;
            let tts = transcribe(psess, &named_matches, rhs, *rhs_span, transparency, id)
                .map_err(|e| e.emit())?;

            if cx.trace_macros() {
                let msg = format!("to `{}`", pprust::tts_to_string(&tts));
                trace_macros_note(&mut cx.expansions, sp, msg);
            }

            if is_local {
                cx.resolver.record_macro_rule_usage(node_id, i);
            }

            Ok(tts)
        }
        Err(CanRetry::No(guar)) => Err(guar),
        Err(CanRetry::Yes) => {
            // Retry and emit a better error.
            let (_, guar) = failed_to_match_macro(
                cx.psess(),
                sp,
                def_span,
                name,
                FailedMacro::Attr(&args),
                &body,
                rules,
            );
            cx.trace_macros_diag();
            Err(guar)
        }
    }
}

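/// Whether matching may be retried (on the slow diagnostics path) after a
/// failed attempt.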
pub(super) enum CanRetry {
    Yes,
    /// We are not allowed to retry macro expansion as a fatal error has been emitted already.
    No(ErrorGuaranteed),
}

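/// Tries to match the macro's function-like rules against `arg`. On success,
/// returns the index of the matching rule, the rule itself, and its bound
/// metavariables. On failure, the caller may retry with a diagnostics tracker
/// if `CanRetry::Yes` is returned.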
#[instrument(level = "debug", skip(psess, arg, rules, track), fields(tracking = %T::description()))]
pub(super) fn try_match_macro<'matcher, T: Tracker<'matcher>>(
    psess: &ParseSess,
    name: Ident,
    arg: &TokenStream,
    rules: &'matcher [MacroRule],
    track: &mut T,
) -> Result<(usize, &'matcher MacroRule, NamedMatches), CanRetry> {
    // We create a base parser that can be used for the "black box" parts.
    // Every iteration needs a fresh copy of that parser, but the parser
    // is not mutated on most iterations, so the `Cow` below lets us clone
    // it lazily.
    let parser = parser_from_cx(psess, arg.clone(), T::recovery());
    // Try each arm's matchers.
    let mut tt_parser = TtParser::new(name);
    for (i, rule) in rules.iter().enumerate() {
        let MacroRule::Func { lhs, .. } = rule else { continue };
        let _tracing_span = trace_span!("Matching arm", %i);

        // Take a snapshot of the state of pre-expansion gating. This is used so we
        // can roll back gated spans from arms that failed to match.
        let mut gated_spans_snapshot = mem::take(&mut *psess.gated_spans.spans.borrow_mut());

        let result = tt_parser.parse_tt(&mut Cow::Borrowed(&parser), lhs, track);

        track.after_arm(true, &result);

        match result {
            Success(named_matches) => {
                debug!("Parsed arm successfully");
                // Merge the gated spans from parsing the matcher with the preexisting ones.
                psess.gated_spans.merge(gated_spans_snapshot);

                return Ok((i, rule, named_matches));
            }
            Failure(_) => {
                trace!("Failed to match arm, trying the next one");
            }
            Error(_, _) => {
                debug!("Fatal error occurred during matching");
                // We haven't emitted an error yet, so we can retry.
                return Err(CanRetry::Yes);
            }
            ErrorReported(guarantee) => {
                debug!("Fatal error occurred and was reported during matching");
                // An error has been reported already, we cannot retry as that
                // would cause duplicate errors.
                return Err(CanRetry::No(guarantee));
            }
        }

        // The matcher was not `Success(..)`ful.
        // Restore to the state before snapshotting and maybe try again.
        mem::swap(&mut gated_spans_snapshot, &mut psess.gated_spans.spans.borrow_mut());
    }

    Err(CanRetry::Yes)
}

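/// Tries to match the macro's `attr` rules: each rule's args matcher is run
/// against the attribute's arguments and its body matcher against the
/// annotated item, and the two sets of matches are merged on success.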
#[instrument(level = "debug", skip(psess, attr_args, attr_body, rules, track), fields(tracking = %T::description()))]
pub(super) fn try_match_macro_attr<'matcher, T: Tracker<'matcher>>(
    psess: &ParseSess,
    name: Ident,
    attr_args: &TokenStream,
    attr_body: &TokenStream,
    rules: &'matcher [MacroRule],
    track: &mut T,
) -> Result<(usize, &'matcher MacroRule, NamedMatches), CanRetry> {
    // See `try_match_macro` for the structure of this loop; here each arm has
    // two matchers, one for the attribute's arguments and one for the body.
    let args_parser = parser_from_cx(psess, attr_args.clone(), T::recovery());
    let body_parser = parser_from_cx(psess, attr_body.clone(), T::recovery());
    let mut tt_parser = TtParser::new(name);
    for (i, rule) in rules.iter().enumerate() {
        let MacroRule::Attr { args, body, .. } = rule else { continue };

        let mut gated_spans_snapshot = mem::take(&mut *psess.gated_spans.spans.borrow_mut());

        let result = tt_parser.parse_tt(&mut Cow::Borrowed(&args_parser), args, track);
        track.after_arm(false, &result);

        let mut named_matches = match result {
            Success(named_matches) => named_matches,
            Failure(_) => {
                mem::swap(&mut gated_spans_snapshot, &mut psess.gated_spans.spans.borrow_mut());
                continue;
            }
            Error(_, _) => return Err(CanRetry::Yes),
            ErrorReported(guar) => return Err(CanRetry::No(guar)),
        };

        let result = tt_parser.parse_tt(&mut Cow::Borrowed(&body_parser), body, track);
        track.after_arm(true, &result);

        match result {
            Success(body_named_matches) => {
                psess.gated_spans.merge(gated_spans_snapshot);
                #[allow(rustc::potential_query_instability)]
                named_matches.extend(body_named_matches);
                return Ok((i, rule, named_matches));
            }
            Failure(_) => {
                mem::swap(&mut gated_spans_snapshot, &mut psess.gated_spans.spans.borrow_mut())
            }
            Error(_, _) => return Err(CanRetry::Yes),
            ErrorReported(guar) => return Err(CanRetry::No(guar)),
        }
    }

    Err(CanRetry::Yes)
}

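/// Tries to match the macro's `derive` rules against the body of the item the
/// derive is applied to.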
#[instrument(level = "debug", skip(psess, body, rules, track), fields(tracking = %T::description()))]
pub(super) fn try_match_macro_derive<'matcher, T: Tracker<'matcher>>(
    psess: &ParseSess,
    name: Ident,
    body: &TokenStream,
    rules: &'matcher [MacroRule],
    track: &mut T,
) -> Result<(usize, &'matcher MacroRule, NamedMatches), CanRetry> {
    let body_parser = parser_from_cx(psess, body.clone(), T::recovery());
    let mut tt_parser = TtParser::new(name);
    for (i, rule) in rules.iter().enumerate() {
        let MacroRule::Derive { body, .. } = rule else { continue };

        let mut gated_spans_snapshot = mem::take(&mut *psess.gated_spans.spans.borrow_mut());

        let result = tt_parser.parse_tt(&mut Cow::Borrowed(&body_parser), body, track);
        track.after_arm(true, &result);

        match result {
            Success(named_matches) => {
                psess.gated_spans.merge(gated_spans_snapshot);
                return Ok((i, rule, named_matches));
            }
            Failure(_) => {
                mem::swap(&mut gated_spans_snapshot, &mut psess.gated_spans.spans.borrow_mut())
            }
            Error(_, _) => return Err(CanRetry::Yes),
            ErrorReported(guar) => return Err(CanRetry::No(guar)),
        }
    }

    Err(CanRetry::Yes)
}

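/// Converts a macro item into a syntax extension. Also returns the number of
/// rules the macro has, for the `unused_macro_rules` lint; this is zero for
/// macros from other crates, which are never linted.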
pub fn compile_declarative_macro(
    sess: &Session,
    features: &Features,
    macro_def: &ast::MacroDef,
    ident: Ident,
    attrs: &[hir::Attribute],
    span: Span,
    node_id: NodeId,
    edition: Edition,
) -> (SyntaxExtension, usize) {
    let mk_syn_ext = |kind| {
        let is_local = is_defined_in_current_crate(node_id);
        SyntaxExtension::new(sess, kind, span, Vec::new(), edition, ident.name, attrs, is_local)
    };
    let dummy_syn_ext =
        |guar| (mk_syn_ext(SyntaxExtensionKind::Bang(Arc::new(DummyBang(guar)))), 0);

    let macro_rules = macro_def.macro_rules;
    let exp_sep = if macro_rules { exp!(Semi) } else { exp!(Comma) };

    let body = macro_def.body.tokens.clone();
    let mut p = Parser::new(&sess.psess, body, rustc_parse::MACRO_ARGUMENTS);

    // Collect errors while parsing the rules instead of aborting early, so
    // that multiple problems can be reported at once.
    let mut guar = None;
    let mut check_emission = |ret: Result<(), ErrorGuaranteed>| guar = guar.or(ret.err());

    let mut kinds = MacroKinds::empty();
    let mut rules = Vec::new();

    while p.token != token::Eof {
        let unsafe_rule = p.eat_keyword_noexpect(kw::Unsafe);
        let unsafe_keyword_span = p.prev_token.span;
        if unsafe_rule && let Some(guar) = check_no_eof(sess, &p, "expected `attr`") {
            return dummy_syn_ext(guar);
        }
        let (args, is_derive) = if p.eat_keyword_noexpect(sym::attr) {
            kinds |= MacroKinds::ATTR;
            if !features.macro_attr() {
                feature_err(sess, sym::macro_attr, span, "`macro_rules!` attributes are unstable")
                    .emit();
            }
            if let Some(guar) = check_no_eof(sess, &p, "expected macro attr args") {
                return dummy_syn_ext(guar);
            }
            let args = p.parse_token_tree();
            check_args_parens(sess, sym::attr, &args);
            let args = parse_one_tt(args, RulePart::Pattern, sess, node_id, features, edition);
            check_emission(check_lhs(sess, node_id, &args));
            if let Some(guar) = check_no_eof(sess, &p, "expected macro attr body") {
                return dummy_syn_ext(guar);
            }
            (Some(args), false)
        } else if p.eat_keyword_noexpect(sym::derive) {
            kinds |= MacroKinds::DERIVE;
            let derive_keyword_span = p.prev_token.span;
            if !features.macro_derive() {
                feature_err(sess, sym::macro_derive, span, "`macro_rules!` derives are unstable")
                    .emit();
            }
            if unsafe_rule {
                sess.dcx()
                    .span_err(unsafe_keyword_span, "`unsafe` is only supported on `attr` rules");
            }
            if let Some(guar) = check_no_eof(sess, &p, "expected `()` after `derive`") {
                return dummy_syn_ext(guar);
            }
            let args = p.parse_token_tree();
            check_args_parens(sess, sym::derive, &args);
            let args_empty_result = check_args_empty(sess, &args);
            let args_not_empty = args_empty_result.is_err();
            check_emission(args_empty_result);
            if let Some(guar) = check_no_eof(sess, &p, "expected macro derive body") {
                return dummy_syn_ext(guar);
            }
            // Improve the error for cases like `derive(Foo) => ...`, where the
            // user wrote arguments in place of the body matcher.
            if p.token == token::FatArrow {
                let mut err = sess
                    .dcx()
                    .struct_span_err(p.token.span, "expected macro derive body, got `=>`");
                if args_not_empty {
                    err.span_label(derive_keyword_span, "need `()` after this `derive`");
                }
                return dummy_syn_ext(err.emit());
            }
            (None, true)
        } else {
            kinds |= MacroKinds::BANG;
            if unsafe_rule {
                sess.dcx()
                    .span_err(unsafe_keyword_span, "`unsafe` is only supported on `attr` rules");
            }
            (None, false)
        };
        let lhs_tt = p.parse_token_tree();
        let lhs_tt = parse_one_tt(lhs_tt, RulePart::Pattern, sess, node_id, features, edition);
        check_emission(check_lhs(sess, node_id, &lhs_tt));
        if let Err(e) = p.expect(exp!(FatArrow)) {
            return dummy_syn_ext(e.emit());
        }
        if let Some(guar) = check_no_eof(sess, &p, "expected right-hand side of macro rule") {
            return dummy_syn_ext(guar);
        }
        let rhs = p.parse_token_tree();
        let rhs = parse_one_tt(rhs, RulePart::Body, sess, node_id, features, edition);
        check_emission(check_rhs(sess, &rhs));
        check_emission(check_meta_variables(&sess.psess, node_id, args.as_ref(), &lhs_tt, &rhs));
        let lhs_span = lhs_tt.span();
        // If the matcher wasn't a delimited token tree, `check_lhs` already
        // emitted an error, so `guar` is set.
        let mbe::TokenTree::Delimited(.., delimited) = lhs_tt else {
            return dummy_syn_ext(guar.unwrap());
        };
        let lhs = mbe::macro_parser::compute_locs(&delimited.tts);
        if let Some(args) = args {
            let args_span = args.span();
            let mbe::TokenTree::Delimited(.., delimited) = args else {
                return dummy_syn_ext(guar.unwrap());
            };
            let args = mbe::macro_parser::compute_locs(&delimited.tts);
            let body_span = lhs_span;
            rules.push(MacroRule::Attr { unsafe_rule, args, args_span, body: lhs, body_span, rhs });
        } else if is_derive {
            rules.push(MacroRule::Derive { body: lhs, body_span: lhs_span, rhs });
        } else {
            rules.push(MacroRule::Func { lhs, lhs_span, rhs });
        }
        if p.token == token::Eof {
            break;
        }
        if let Err(e) = p.expect(exp_sep) {
            return dummy_syn_ext(e.emit());
        }
    }

    if rules.is_empty() {
        let guar = sess.dcx().span_err(span, "macros must contain at least one rule");
        return dummy_syn_ext(guar);
    }
    assert!(!kinds.is_empty());

    let transparency = find_attr!(attrs, AttributeKind::MacroTransparency(x) => *x)
        .unwrap_or(Transparency::fallback(macro_rules));

    if let Some(guar) = guar {
        // To avoid warning noise, only consider the rules of this
        // macro for the `unused_macro_rules` lint if all rules are valid.
        return dummy_syn_ext(guar);
    }

    // Return the number of rules for unused rule linting, if this is a local macro.
    let nrules = if is_defined_in_current_crate(node_id) { rules.len() } else { 0 };

    let exp = MacroRulesMacroExpander { name: ident, kinds, span, node_id, transparency, rules };
    (mk_syn_ext(SyntaxExtensionKind::MacroRules(Arc::new(exp))), nrules)
}

fn check_no_eof(sess: &Session, p: &Parser<'_>, msg: &'static str) -> Option<ErrorGuaranteed> {
    if p.token == token::Eof {
        let err_sp = p.token.span.shrink_to_hi();
        let guar = sess
            .dcx()
            .struct_span_err(err_sp, "macro definition ended unexpectedly")
            .with_span_label(err_sp, msg)
            .emit();
        return Some(guar);
    }
    None
}

fn check_args_parens(sess: &Session, rule_kw: Symbol, args: &tokenstream::TokenTree) {
    // The args matcher of an `attr` or `derive` rule must use parentheses.
    if let tokenstream::TokenTree::Delimited(dspan, _, delim, _) = args
        && *delim != Delimiter::Parenthesis
    {
        sess.dcx().emit_err(errors::MacroArgsBadDelim {
            span: dspan.entire(),
            sugg: errors::MacroArgsBadDelimSugg { open: dspan.open, close: dspan.close },
            rule_kw,
        });
    }
}

fn check_args_empty(sess: &Session, args: &tokenstream::TokenTree) -> Result<(), ErrorGuaranteed> {
    match args {
        tokenstream::TokenTree::Delimited(.., delimited) if delimited.is_empty() => Ok(()),
        _ => {
            let msg = "`derive` rules do not accept arguments; `derive` must be followed by `()`";
            Err(sess.dcx().span_err(args.span(), msg))
        }
    }
}

fn check_lhs(sess: &Session, node_id: NodeId, lhs: &mbe::TokenTree) -> Result<(), ErrorGuaranteed> {
    let e1 = check_lhs_nt_follows(sess, node_id, lhs);
    let e2 = check_lhs_no_empty_seq(sess, slice::from_ref(lhs));
    e1.and(e2)
}

fn check_lhs_nt_follows(
    sess: &Session,
    node_id: NodeId,
    lhs: &mbe::TokenTree,
) -> Result<(), ErrorGuaranteed> {
    // lhs is going to be like TokenTree::Delimited(...), where the
    // entire lhs is those tts. Or, it can be a "bare sequence", not wrapped in parens.
    if let mbe::TokenTree::Delimited(.., delimited) = lhs {
        check_matcher(sess, node_id, &delimited.tts)
    } else {
        let msg = "invalid macro matcher; matchers must be contained in balanced delimiters";
        Err(sess.dcx().span_err(lhs.span(), msg))
    }
}

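/// Conservatively decides whether a sequence repetition could match the empty
/// token stream, which would make the matcher loop forever. Doc comments and
/// `$x:vis` fragments can match emptiness, as can nested `*`/`?` repetitions.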
fn is_empty_token_tree(sess: &Session, seq: &mbe::SequenceRepetition) -> bool {
    if seq.separator.is_some() {
        false
    } else {
        let mut is_empty = true;
        let mut iter = seq.tts.iter().peekable();
        while let Some(tt) = iter.next() {
            match tt {
                mbe::TokenTree::MetaVarDecl { kind: NonterminalKind::Vis, .. } => {}
                mbe::TokenTree::Token(t @ Token { kind: DocComment(..), .. }) => {
                    let mut now = t;
                    while let Some(&mbe::TokenTree::Token(
                        next @ Token { kind: DocComment(..), .. },
                    )) = iter.peek()
                    {
                        now = next;
                        iter.next();
                    }
                    let span = t.span.to(now.span);
                    sess.dcx().span_note(span, "doc comments are ignored in matcher position");
                }
                mbe::TokenTree::Sequence(_, sub_seq)
                    if (sub_seq.kleene.op == mbe::KleeneOp::ZeroOrMore
                        || sub_seq.kleene.op == mbe::KleeneOp::ZeroOrOne) => {}
                _ => is_empty = false,
            }
        }
        is_empty
    }
}

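/// If an empty-matching repetition is just `$( $v:vis )?`, suggests removing
/// the repetition: a `vis` fragment can already match emptiness on its own.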
fn check_redundant_vis_repetition(
    err: &mut Diag<'_>,
    sess: &Session,
    seq: &SequenceRepetition,
    span: &DelimSpan,
) {
    if seq.kleene.op == KleeneOp::ZeroOrOne
        && matches!(
            seq.tts.first(),
            Some(mbe::TokenTree::MetaVarDecl { kind: NonterminalKind::Vis, .. })
        )
    {
        err.note("a `vis` fragment can already be empty");
        err.multipart_suggestion(
            "remove the `$(` and `)?`",
            vec![
                (
                    sess.source_map().span_extend_to_prev_char_before(span.open, '$', true),
                    "".to_string(),
                ),
                (span.close.with_hi(seq.kleene.span.hi()), "".to_string()),
            ],
            Applicability::MaybeIncorrect,
        );
    }
}

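/// Checks that the lhs contains no repetition which could match an empty
/// token tree, because then the matcher would hang indefinitely.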
fn check_lhs_no_empty_seq(sess: &Session, tts: &[mbe::TokenTree]) -> Result<(), ErrorGuaranteed> {
    use mbe::TokenTree;
    for tt in tts {
        match tt {
            TokenTree::Token(..)
            | TokenTree::MetaVar(..)
            | TokenTree::MetaVarDecl { .. }
            | TokenTree::MetaVarExpr(..) => (),
            TokenTree::Delimited(.., del) => check_lhs_no_empty_seq(sess, &del.tts)?,
            TokenTree::Sequence(span, seq) => {
                if is_empty_token_tree(sess, seq) {
                    let sp = span.entire();
                    let mut err =
                        sess.dcx().struct_span_err(sp, "repetition matches empty token tree");
                    check_redundant_vis_repetition(&mut err, sess, seq, span);
                    return Err(err.emit());
                }
                check_lhs_no_empty_seq(sess, &seq.tts)?
            }
        }
    }

    Ok(())
}

fn check_rhs(sess: &Session, rhs: &mbe::TokenTree) -> Result<(), ErrorGuaranteed> {
    match *rhs {
        mbe::TokenTree::Delimited(..) => Ok(()),
        _ => Err(sess.dcx().span_err(rhs.span(), "macro rhs must be delimited")),
    }
}

fn check_matcher(
    sess: &Session,
    node_id: NodeId,
    matcher: &[mbe::TokenTree],
) -> Result<(), ErrorGuaranteed> {
    let first_sets = FirstSets::new(matcher);
    let empty_suffix = TokenSet::empty();
    check_matcher_core(sess, node_id, &first_sets, matcher, &empty_suffix)?;
    Ok(())
}

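/// Checks whether the rhs contains a `compile_error!` invocation, in which
/// case the rule is intentionally never matched and is excluded from the
/// `unused_macro_rules` lint.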
fn has_compile_error_macro(rhs: &mbe::TokenTree) -> bool {
    match rhs {
        mbe::TokenTree::Delimited(.., d) => {
            let has_compile_error = d.tts.array_windows::<3>().any(|[ident, bang, args]| {
                if let mbe::TokenTree::Token(ident) = ident
                    && let TokenKind::Ident(ident, _) = ident.kind
                    && ident == sym::compile_error
                    && let mbe::TokenTree::Token(bang) = bang
                    && let TokenKind::Bang = bang.kind
                    && let mbe::TokenTree::Delimited(.., del) = args
                    && !del.delim.skip()
                {
                    true
                } else {
                    false
                }
            });
            if has_compile_error { true } else { d.tts.iter().any(has_compile_error_macro) }
        }
        _ => false,
    }
}

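/// `FirstSets` maps each sequence (by span) in a matcher to the set of tokens
/// that could begin a match of that sequence, precomputed by a backward scan
/// in `new`; `first` then answers FIRST queries for arbitrary suffixes of the
/// matcher, recomputing on demand where the precomputation was ambiguous.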
struct FirstSets<'tt> {
    // Maps from a sequence's span to its FIRST set. A `None` entry means the
    // span collided (appeared more than once), so we fall back to recomputing
    // the set on demand.
    first: FxHashMap<Span, Option<TokenSet<'tt>>>,
}

impl<'tt> FirstSets<'tt> {
    fn new(tts: &'tt [mbe::TokenTree]) -> FirstSets<'tt> {
        use mbe::TokenTree;

        let mut sets = FirstSets { first: FxHashMap::default() };
        build_recur(&mut sets, tts);
        return sets;

        // Walks the token trees in reverse, so that `first` at each point
        // holds the FIRST set of the suffix that starts there.
        fn build_recur<'tt>(sets: &mut FirstSets<'tt>, tts: &'tt [TokenTree]) -> TokenSet<'tt> {
            let mut first = TokenSet::empty();
            for tt in tts.iter().rev() {
                match tt {
                    TokenTree::Token(..)
                    | TokenTree::MetaVar(..)
                    | TokenTree::MetaVarDecl { .. }
                    | TokenTree::MetaVarExpr(..) => {
                        first.replace_with(TtHandle::TtRef(tt));
                    }
                    TokenTree::Delimited(span, _, delimited) => {
                        build_recur(sets, &delimited.tts);
                        first.replace_with(TtHandle::from_token_kind(
                            delimited.delim.as_open_token_kind(),
                            span.open,
                        ));
                    }
                    TokenTree::Sequence(sp, seq_rep) => {
                        let subfirst = build_recur(sets, &seq_rep.tts);

                        match sets.first.entry(sp.entire()) {
                            Entry::Vacant(vac) => {
                                vac.insert(Some(subfirst.clone()));
                            }
                            Entry::Occupied(mut occ) => {
                                // If there is already an entry, then a span must
                                // have collided. Mark the entry as ambiguous so
                                // `first` recomputes the set on demand.
                                occ.insert(None);
                            }
                        }

                        // If the sequence contents can be empty, then the first
                        // token could be the separator token itself.
                        if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
                            first.add_one_maybe(TtHandle::from_token(*sep));
                        }

                        // Reverse scan: Sequence comes before `first`.
                        if subfirst.maybe_empty
                            || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrMore
                            || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrOne
                        {
                            // If the sequence is potentially empty, then union
                            // the two sets (preserving emptiness of `first`).
                            first.add_all(&TokenSet { maybe_empty: true, ..subfirst });
                        } else {
                            // Otherwise, the sequence is guaranteed non-empty;
                            // it replaces `first` entirely.
                            first = subfirst;
                        }
                    }
                }
            }

            first
        }
    }

    // Walks forward over `tts` until all potential FIRST tokens are identified.
    fn first(&self, tts: &'tt [mbe::TokenTree]) -> TokenSet<'tt> {
        use mbe::TokenTree;

        let mut first = TokenSet::empty();
        for tt in tts.iter() {
            assert!(first.maybe_empty);
            match tt {
                TokenTree::Token(..)
                | TokenTree::MetaVar(..)
                | TokenTree::MetaVarDecl { .. }
                | TokenTree::MetaVarExpr(..) => {
                    first.add_one(TtHandle::TtRef(tt));
                    return first;
                }
                TokenTree::Delimited(span, _, delimited) => {
                    first.add_one(TtHandle::from_token_kind(
                        delimited.delim.as_open_token_kind(),
                        span.open,
                    ));
                    return first;
                }
                TokenTree::Sequence(sp, seq_rep) => {
                    let subfirst_owned;
                    let subfirst = match self.first.get(&sp.entire()) {
                        Some(Some(subfirst)) => subfirst,
                        Some(&None) => {
                            subfirst_owned = self.first(&seq_rep.tts);
                            &subfirst_owned
                        }
                        None => {
                            panic!("We missed a sequence during FirstSets construction");
                        }
                    };

                    // If the sequence contents can be empty, then the first
                    // token could be the separator token itself.
                    if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
                        first.add_one_maybe(TtHandle::from_token(*sep));
                    }

                    assert!(first.maybe_empty);
                    first.add_all(subfirst);
                    if subfirst.maybe_empty
                        || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrMore
                        || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrOne
                    {
                        // Continue scanning for more first tokens, but also make
                        // sure we restore the empty-tracking state.
                        first.maybe_empty = true;
                        continue;
                    } else {
                        return first;
                    }
                }
            }
        }

        // We only exit the loop if `tts` was empty or if every
        // element of `tts` matches the empty sequence.
        assert!(first.maybe_empty);
        first
    }
}

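/// Most `mbe::TokenTree`s are pre-existing in the matcher, but some are
/// defined implicitly, such as the opening and closing delimiters of a
/// delimited sequence; `TtHandle` lets a `TokenSet` hold both kinds.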
#[derive(Debug)]
enum TtHandle<'tt> {
    /// This is used in most cases.
    TtRef(&'tt mbe::TokenTree),

    /// This is only used for implicit token trees. The `mbe::TokenTree` *must*
    /// be `mbe::TokenTree::Token`; we store an `mbe::TokenTree` rather than a
    /// `Token` so that `get()` can return a `&mbe::TokenTree`.
    Token(mbe::TokenTree),
}

impl<'tt> TtHandle<'tt> {
    fn from_token(tok: Token) -> Self {
        TtHandle::Token(mbe::TokenTree::Token(tok))
    }

    fn from_token_kind(kind: TokenKind, span: Span) -> Self {
        TtHandle::from_token(Token::new(kind, span))
    }

    // Get a reference to a token tree.
    fn get(&'tt self) -> &'tt mbe::TokenTree {
        match self {
            TtHandle::TtRef(tt) => tt,
            TtHandle::Token(token_tt) => token_tt,
        }
    }
}

impl<'tt> PartialEq for TtHandle<'tt> {
    fn eq(&self, other: &TtHandle<'tt>) -> bool {
        self.get() == other.get()
    }
}

impl<'tt> Clone for TtHandle<'tt> {
    fn clone(&self) -> Self {
        match self {
            TtHandle::TtRef(tt) => TtHandle::TtRef(tt),

            // This variant *must* contain a `mbe::TokenTree::Token`, and not
            // any other variant of `mbe::TokenTree`.
            TtHandle::Token(mbe::TokenTree::Token(tok)) => {
                TtHandle::Token(mbe::TokenTree::Token(*tok))
            }

            _ => unreachable!(),
        }
    }
}

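/// A set of `mbe::TokenTree`s, implemented as a vector of handles since the
/// sets are small. The `maybe_empty` flag is true if and only if the matcher
/// subsequence this set describes can match an empty token sequence.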
#[derive(Clone, Debug)]
struct TokenSet<'tt> {
    tokens: Vec<TtHandle<'tt>>,
    maybe_empty: bool,
}

impl<'tt> TokenSet<'tt> {
    // Returns a set for the empty sequence.
    fn empty() -> Self {
        TokenSet { tokens: Vec::new(), maybe_empty: true }
    }

    // Returns the set `{ tok }` for the single-token (and thus
    // non-empty) sequence [tok].
    fn singleton(tt: TtHandle<'tt>) -> Self {
        TokenSet { tokens: vec![tt], maybe_empty: false }
    }

    // Changes self to be the set `{ tok }`.
    // Since `tok` is always present, marks self as non-empty.
    fn replace_with(&mut self, tt: TtHandle<'tt>) {
        self.tokens.clear();
        self.tokens.push(tt);
        self.maybe_empty = false;
    }

    // Changes self to be the empty set `{}`; meant for use when
    // the particular token does not matter, but we want to
    // record that it occurs.
    fn replace_with_irrelevant(&mut self) {
        self.tokens.clear();
        self.maybe_empty = false;
    }

    // Adds `tok` to the set for `self`, marking the sequence as non-empty.
    fn add_one(&mut self, tt: TtHandle<'tt>) {
        if !self.tokens.contains(&tt) {
            self.tokens.push(tt);
        }
        self.maybe_empty = false;
    }

    // Adds `tok` to the set for `self`. (Leaves the `maybe_empty` flag alone.)
    fn add_one_maybe(&mut self, tt: TtHandle<'tt>) {
        if !self.tokens.contains(&tt) {
            self.tokens.push(tt);
        }
    }

    // Adds all elements of `other` to this.
    //
    // (Since this is a set, we filter out duplicates.)
    //
    // If `other` is potentially empty, then preserves the previous
    // `maybe_empty` setting of this set.
    fn add_all(&mut self, other: &Self) {
        for tt in &other.tokens {
            if !self.tokens.contains(tt) {
                self.tokens.push(tt.clone());
            }
        }
        if !other.maybe_empty {
            self.maybe_empty = false;
        }
    }
}

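/// Checks that `matcher` is a "proper matcher": for each fragment-specifier
/// metavariable in it, every token that can follow it (within the matcher, or
/// from the surrounding `follow` set) must be in that fragment's FOLLOW set.
/// Returns the set of tokens that could be the last token matched.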
fn check_matcher_core<'tt>(
    sess: &Session,
    node_id: NodeId,
    first_sets: &FirstSets<'tt>,
    matcher: &'tt [mbe::TokenTree],
    follow: &TokenSet<'tt>,
) -> Result<TokenSet<'tt>, ErrorGuaranteed> {
    use mbe::TokenTree;

    let mut last = TokenSet::empty();

    let mut errored = Ok(());

    // For each token and suffix [T, SUFFIX] in the matcher: ensure that T can
    // be followed by SUFFIX, and if SUFFIX may be empty, then ensure T can
    // also be followed by any element of FOLLOW.
    'each_token: for i in 0..matcher.len() {
        let token = &matcher[i];
        let suffix = &matcher[i + 1..];

        let build_suffix_first = || {
            let mut s = first_sets.first(suffix);
            if s.maybe_empty {
                s.add_all(follow);
            }
            s
        };

        // (`suffix_first` is built on demand, since some of the arms below
        // never need it, and its computation can be a performance burden.)
        let suffix_first;

        // First, update `last` so that it corresponds to the set
        // of NT tokens that might end the sequence `... token`.
        match token {
            TokenTree::Token(..)
            | TokenTree::MetaVar(..)
            | TokenTree::MetaVarDecl { .. }
            | TokenTree::MetaVarExpr(..) => {
                if token_can_be_followed_by_any(token) {
                    // don't need to track tokens that work with any NT
                    last.replace_with_irrelevant();
                    // ... and don't need to check tokens that can be
                    // followed by anything against SUFFIX.
                    continue 'each_token;
                } else {
                    last.replace_with(TtHandle::TtRef(token));
                    suffix_first = build_suffix_first();
                }
            }
            TokenTree::Delimited(span, _, d) => {
                let my_suffix = TokenSet::singleton(TtHandle::from_token_kind(
                    d.delim.as_close_token_kind(),
                    span.close,
                ));
                check_matcher_core(sess, node_id, first_sets, &d.tts, &my_suffix)?;
                // don't track non NT tokens
                last.replace_with_irrelevant();

                // also, we don't need to check delimited sequences
                // against SUFFIX
                continue 'each_token;
            }
            TokenTree::Sequence(_, seq_rep) => {
                suffix_first = build_suffix_first();
                // The trick here: when we check the interior, we want
                // to include the separator (if any) as a potential
                // (but not guaranteed) element of FOLLOW. So in that
                // case, we make a temp copy of suffix and stuff the
                // separator in there.
                let mut new;
                let my_suffix = if let Some(sep) = &seq_rep.separator {
                    new = suffix_first.clone();
                    new.add_one_maybe(TtHandle::from_token(*sep));
                    &new
                } else {
                    &suffix_first
                };

                let next = check_matcher_core(sess, node_id, first_sets, &seq_rep.tts, my_suffix)?;
                if next.maybe_empty {
                    last.add_all(&next);
                } else {
                    last = next;
                }

                // the recursive call to check_matcher_core already ran the
                // check below against the sequence's interior, so we can just
                // keep going forward here.
                continue 'each_token;
            }
        }

        // (`suffix_first` is guaranteed to be initialized once we reach here.)

        // Now `last` holds the complete set of NT tokens that could end the
        // sequence before SUFFIX. Check that every one works with `suffix`.
        for tt in &last.tokens {
            if let &TokenTree::MetaVarDecl { span, name, kind } = tt.get() {
                for next_token in &suffix_first.tokens {
                    let next_token = next_token.get();

                    // Check if the old `pat` fragment is used and the next token
                    // is `|`, to warn about incompatibility with Rust 2021. We
                    // only emit this lint if we're parsing the original
                    // definition of this macro_rules, not while (re)parsing the
                    // macro when compiling another crate that is using it.
                    if is_defined_in_current_crate(node_id)
                        && matches!(kind, NonterminalKind::Pat(PatParam { inferred: true }))
                        && matches!(
                            next_token,
                            TokenTree::Token(token) if *token == token::Or
                        )
                    {
                        // Suggest `pat_param`, e.g. turning `$x:pat` into `$x:pat_param`.
                        let suggestion = quoted_tt_to_string(&TokenTree::MetaVarDecl {
                            span,
                            name,
                            kind: NonterminalKind::Pat(PatParam { inferred: false }),
                        });
                        sess.psess.buffer_lint(
                            RUST_2021_INCOMPATIBLE_OR_PATTERNS,
                            span,
                            ast::CRATE_NODE_ID,
                            errors::OrPatternsBackCompat { span, suggestion },
                        );
                    }
                    match is_in_follow(next_token, kind) {
                        IsInFollow::Yes => {}
                        IsInFollow::No(possible) => {
                            let may_be = if last.tokens.len() == 1 && suffix_first.tokens.len() == 1
                            {
                                "is"
                            } else {
                                "may be"
                            };

                            let sp = next_token.span();
                            let mut err = sess.dcx().struct_span_err(
                                sp,
                                format!(
                                    "`${name}:{frag}` {may_be} followed by `{next}`, which \
                                     is not allowed for `{frag}` fragments",
                                    name = name,
                                    frag = kind,
                                    next = quoted_tt_to_string(next_token),
                                    may_be = may_be
                                ),
                            );
                            err.span_label(sp, format!("not allowed after `{kind}` fragments"));

                            if kind == NonterminalKind::Pat(PatWithOr)
                                && sess.psess.edition.at_least_rust_2021()
                                && next_token.is_token(&token::Or)
                            {
                                let suggestion = quoted_tt_to_string(&TokenTree::MetaVarDecl {
                                    span,
                                    name,
                                    kind: NonterminalKind::Pat(PatParam { inferred: false }),
                                });
                                err.span_suggestion(
                                    span,
                                    "try a `pat_param` fragment specifier instead",
                                    suggestion,
                                    Applicability::MaybeIncorrect,
                                );
                            }

                            let msg = "allowed there are: ";
                            match possible {
                                &[] => {}
                                &[t] => {
                                    err.note(format!(
                                        "only {t} is allowed after `{kind}` fragments",
                                    ));
                                }
                                ts => {
                                    err.note(format!(
                                        "{}{} or {}",
                                        msg,
                                        ts[..ts.len() - 1].to_vec().join(", "),
                                        ts[ts.len() - 1],
                                    ));
                                }
                            }
                            errored = Err(err.emit());
                        }
                    }
                }
            }
        }
    }
    errored?;
    Ok(last)
}

fn token_can_be_followed_by_any(tok: &mbe::TokenTree) -> bool {
    if let mbe::TokenTree::MetaVarDecl { kind, .. } = *tok {
        frag_can_be_followed_by_any(kind)
    } else {
        // (Non NT's can always be followed by anything in matchers.)
        true
    }
}

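/// Returns `true` if a fragment of type `frag` can be followed by any sort of
/// token. We use this (among other things) as a useful approximation for when
/// `frag` can be followed by a repetition like `$(...)*` or `$(...)+`.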
fn frag_can_be_followed_by_any(kind: NonterminalKind) -> bool {
    matches!(
        kind,
        NonterminalKind::Item           // always terminated by `}` or `;`
            | NonterminalKind::Block    // exactly one token tree
            | NonterminalKind::Ident    // exactly one token tree
            | NonterminalKind::Literal  // exactly one token tree
            | NonterminalKind::Meta     // exactly one token tree
            | NonterminalKind::Lifetime // exactly one token tree
            | NonterminalKind::TT       // exactly one token tree
    )
}

enum IsInFollow {
    Yes,
    No(&'static [&'static str]),
}

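/// Returns whether `tok` may legally follow a fragment of kind `kind`. For
/// fragments that can consume an unbounded number of tokens, the follower must
/// be in a fixed FOLLOW set; this keeps macros forward compatible with future
/// grammar changes to those fragments.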
fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow {
    use mbe::TokenTree;

    if let TokenTree::Token(Token { kind, .. }) = tok
        && kind.close_delim().is_some()
    {
        // closing a token tree can never be matched by any fragment;
        // iow, we always require that `(` and `)` match, etc.
        IsInFollow::Yes
    } else {
        match kind {
            NonterminalKind::Item => {
                // since items *must* be followed by either a `;` or a `}`, we can
                // accept anything after them
                IsInFollow::Yes
            }
            NonterminalKind::Block => {
                // anything can follow block, the braces provide an easy boundary to
                // maintain
                IsInFollow::Yes
            }
            NonterminalKind::Stmt | NonterminalKind::Expr(_) => {
                const TOKENS: &[&str] = &["`=>`", "`,`", "`;`"];
                match tok {
                    TokenTree::Token(token) => match token.kind {
                        FatArrow | Comma | Semi => IsInFollow::Yes,
                        _ => IsInFollow::No(TOKENS),
                    },
                    _ => IsInFollow::No(TOKENS),
                }
            }
            NonterminalKind::Pat(PatParam { .. }) => {
                const TOKENS: &[&str] = &["`=>`", "`,`", "`=`", "`|`", "`if`", "`in`"];
                match tok {
                    TokenTree::Token(token) => match token.kind {
                        FatArrow | Comma | Eq | Or => IsInFollow::Yes,
                        Ident(name, IdentIsRaw::No) if name == kw::If || name == kw::In => {
                            IsInFollow::Yes
                        }
                        _ => IsInFollow::No(TOKENS),
                    },
                    _ => IsInFollow::No(TOKENS),
                }
            }
            NonterminalKind::Pat(PatWithOr) => {
                const TOKENS: &[&str] = &["`=>`", "`,`", "`=`", "`if`", "`in`"];
                match tok {
                    TokenTree::Token(token) => match token.kind {
                        FatArrow | Comma | Eq => IsInFollow::Yes,
                        Ident(name, IdentIsRaw::No) if name == kw::If || name == kw::In => {
                            IsInFollow::Yes
                        }
                        _ => IsInFollow::No(TOKENS),
                    },
                    _ => IsInFollow::No(TOKENS),
                }
            }
            NonterminalKind::Path | NonterminalKind::Ty => {
                const TOKENS: &[&str] = &[
                    "`{`", "`[`", "`=>`", "`,`", "`>`", "`=`", "`:`", "`;`", "`|`", "`as`",
                    "`where`",
                ];
                match tok {
                    TokenTree::Token(token) => match token.kind {
                        OpenBrace | OpenBracket | Comma | FatArrow | Colon | Eq | Gt | Shr
                        | Semi | Or => IsInFollow::Yes,
                        Ident(name, IdentIsRaw::No) if name == kw::As || name == kw::Where => {
                            IsInFollow::Yes
                        }
                        _ => IsInFollow::No(TOKENS),
                    },
                    TokenTree::MetaVarDecl { kind: NonterminalKind::Block, .. } => IsInFollow::Yes,
                    _ => IsInFollow::No(TOKENS),
                }
            }
            NonterminalKind::Ident | NonterminalKind::Lifetime => {
                // being a single token, idents and lifetimes are harmless
                IsInFollow::Yes
            }
            NonterminalKind::Literal => {
                // literals may be of a single token, or two tokens (negative numbers)
                IsInFollow::Yes
            }
            NonterminalKind::Meta | NonterminalKind::TT => {
                // being either a single token or a delimited sequence, tt is
                // harmless
                IsInFollow::Yes
            }
            NonterminalKind::Vis => {
                // Explicitly disallow `priv`, on the off chance it comes back.
                const TOKENS: &[&str] = &["`,`", "an ident", "a type"];
                match tok {
                    TokenTree::Token(token) => match token.kind {
                        Comma => IsInFollow::Yes,
                        Ident(_, IdentIsRaw::Yes) => IsInFollow::Yes,
                        Ident(name, _) if name != kw::Priv => IsInFollow::Yes,
                        _ => {
                            if token.can_begin_type() {
                                IsInFollow::Yes
                            } else {
                                IsInFollow::No(TOKENS)
                            }
                        }
                    },
                    TokenTree::MetaVarDecl {
                        kind: NonterminalKind::Ident | NonterminalKind::Ty | NonterminalKind::Path,
                        ..
                    } => IsInFollow::Yes,
                    _ => IsInFollow::No(TOKENS),
                }
            }
        }
    }
}

fn quoted_tt_to_string(tt: &mbe::TokenTree) -> String {
    match tt {
        mbe::TokenTree::Token(token) => pprust::token_to_string(token).into(),
        mbe::TokenTree::MetaVar(_, name) => format!("${name}"),
        mbe::TokenTree::MetaVarDecl { name, kind, .. } => format!("${name}:{kind}"),
        _ => panic!(
            "{}",
            "unexpected mbe::TokenTree::{Sequence or Delimited} \
             in follow set checker"
        ),
    }
}

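/// Whether a macro is defined in the current crate: such macros have a real
/// `NodeId`, whereas macros from an external crate have `DUMMY_NODE_ID`.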
fn is_defined_in_current_crate(node_id: NodeId) -> bool {
    node_id != DUMMY_NODE_ID
}

pub(super) fn parser_from_cx(
    psess: &ParseSess,
    mut tts: TokenStream,
    recovery: Recovery,
) -> Parser<'_> {
    tts.desugar_doc_comments();
    Parser::new(psess, tts, rustc_parse::MACRO_ARGUMENTS).recovery(recovery)
}