1use std::borrow::Cow;
2use std::collections::hash_map::Entry;
3use std::sync::Arc;
4use std::{mem, slice};
5
6use ast::token::IdentIsRaw;
7use rustc_ast::token::NtPatKind::*;
8use rustc_ast::token::TokenKind::*;
9use rustc_ast::token::{self, Delimiter, NonterminalKind, Token, TokenKind};
10use rustc_ast::tokenstream::{self, DelimSpan, TokenStream};
11use rustc_ast::{self as ast, DUMMY_NODE_ID, NodeId, Safety};
12use rustc_ast_pretty::pprust;
13use rustc_data_structures::fx::{FxHashMap, FxIndexMap};
14use rustc_errors::{Applicability, Diag, ErrorGuaranteed, MultiSpan};
15use rustc_feature::Features;
16use rustc_hir as hir;
17use rustc_hir::attrs::AttributeKind;
18use rustc_hir::def::MacroKinds;
19use rustc_hir::find_attr;
20use rustc_lint_defs::builtin::{
21 RUST_2021_INCOMPATIBLE_OR_PATTERNS, SEMICOLON_IN_EXPRESSIONS_FROM_MACROS,
22};
23use rustc_parse::exp;
24use rustc_parse::parser::{Parser, Recovery};
25use rustc_session::Session;
26use rustc_session::parse::{ParseSess, feature_err};
27use rustc_span::edition::Edition;
28use rustc_span::hygiene::Transparency;
29use rustc_span::{Ident, Span, Symbol, kw, sym};
30use tracing::{debug, instrument, trace, trace_span};
31
32use super::diagnostics::{FailedMacro, failed_to_match_macro};
33use super::macro_parser::{NamedMatches, NamedParseResult};
34use super::{SequenceRepetition, diagnostics};
35use crate::base::{
36 AttrProcMacro, BangProcMacro, DummyResult, ExpandResult, ExtCtxt, MacResult,
37 MacroExpanderResult, SyntaxExtension, SyntaxExtensionKind, TTMacroExpander,
38};
39use crate::errors;
40use crate::expand::{AstFragment, AstFragmentKind, ensure_complete_parse, parse_ast_fragment};
41use crate::mbe::macro_check::check_meta_variables;
42use crate::mbe::macro_parser::{Error, ErrorReported, Failure, MatcherLoc, Success, TtParser};
43use crate::mbe::quoted::{RulePart, parse_one_tt};
44use crate::mbe::transcribe::transcribe;
45use crate::mbe::{self, KleeneOp};
46
/// Holds a parser primed with the token stream produced by a successful
/// `macro_rules!` expansion, ready to be parsed into an AST fragment.
pub(crate) struct ParserAnyMacro<'a> {
    parser: Parser<'a>,

    /// Span of the expansion call site.
    site_span: Span,
    /// The ident of the macro being expanded.
    macro_ident: Ident,
    /// Node id used when buffering lints against the expansion.
    lint_node_id: NodeId,
    /// Passed through to the `TrailingMacro` lint diagnostic.
    is_trailing_mac: bool,
    /// Span of the rule's right-hand side that produced the tokens.
    arm_span: Span,
    /// Whether the macro is defined in the current crate (lints only fire locally).
    is_local: bool,
}
60
impl<'a> ParserAnyMacro<'a> {
    /// Parses the expanded tokens as the requested kind of AST fragment,
    /// emitting diagnostics against the call site / rule arm on failure.
    pub(crate) fn make(mut self: Box<ParserAnyMacro<'a>>, kind: AstFragmentKind) -> AstFragment {
        let ParserAnyMacro {
            site_span,
            macro_ident,
            ref mut parser,
            lint_node_id,
            arm_span,
            is_trailing_mac,
            is_local,
        } = *self;
        // Snapshot taken before parsing so errors can be diagnosed against
        // the unparsed state.
        let snapshot = &mut parser.create_snapshot_for_diagnostic();
        let fragment = match parse_ast_fragment(parser, kind) {
            Ok(f) => f,
            Err(err) => {
                let guar = diagnostics::emit_frag_parse_err(
                    err, parser, snapshot, site_span, arm_span, kind,
                );
                return kind.dummy(site_span, guar);
            }
        };

        // A trailing semicolon after an expression fragment is consumed, but
        // linted against for macros defined in the current crate.
        if kind == AstFragmentKind::Expr && parser.token == token::Semi {
            if is_local {
                parser.psess.buffer_lint(
                    SEMICOLON_IN_EXPRESSIONS_FROM_MACROS,
                    parser.token.span,
                    lint_node_id,
                    errors::TrailingMacro { is_trailing: is_trailing_mac, name: macro_ident },
                );
            }
            parser.bump();
        }

        // Make sure we don't have any tokens left to parse so we don't
        // silently drop anything.
        let path = ast::Path::from_ident(macro_ident.with_span_pos(site_span));
        ensure_complete_parse(parser, &path, kind.name(), site_span);
        fragment
    }

    /// Builds a `ParserAnyMacro` over an expansion's output tokens, capturing
    /// lint/trailing-macro state from the current expansion context.
    #[instrument(skip(cx, tts))]
    pub(crate) fn from_tts<'cx>(
        cx: &'cx mut ExtCtxt<'a>,
        tts: TokenStream,
        site_span: Span,
        arm_span: Span,
        is_local: bool,
        macro_ident: Ident,
    ) -> Self {
        Self {
            parser: Parser::new(&cx.sess.psess, tts, None),

            site_span,
            macro_ident,
            lint_node_id: cx.current_expansion.lint_node_id,
            is_trailing_mac: cx.current_expansion.is_trailing_mac,
            arm_span,
            is_local,
        }
    }
}
128
/// One rule of a macro definition. The variant determines which invocation
/// form (`foo!`, `#[foo]`, or `#[derive(foo)]`) the rule can match.
pub(super) enum MacroRule {
    /// A function-like rule: matcher `lhs` transcribing to `rhs`.
    Func { lhs: Vec<MatcherLoc>, lhs_span: Span, rhs: mbe::TokenTree },
    /// An attribute rule, with separate matchers for the attribute arguments
    /// and the annotated item body. May be declared `unsafe`.
    Attr {
        unsafe_rule: bool,
        args: Vec<MatcherLoc>,
        args_span: Span,
        body: Vec<MatcherLoc>,
        body_span: Span,
        rhs: mbe::TokenTree,
    },
    /// A derive rule: matches the body of the item being derived on.
    Derive { body: Vec<MatcherLoc>, body_span: Span, rhs: mbe::TokenTree },
}
144
/// The compiled form of a `macro_rules!` macro: its rules plus the metadata
/// needed to expand invocations and report diagnostics against the definition.
pub struct MacroRulesMacroExpander {
    node_id: NodeId,
    name: Ident,
    span: Span,
    transparency: Transparency,
    // Which invocation forms (bang / attr / derive) the rules cover.
    kinds: MacroKinds,
    rules: Vec<MacroRule>,
}
153
impl MacroRulesMacroExpander {
    /// For unused-rule reporting: returns the macro name and the span(s) of
    /// rule `rule_i`'s matcher. Returns `None` when the rule's rhs contains a
    /// `compile_error!` call (a deliberately-failing arm), which is exempt.
    pub fn get_unused_rule(&self, rule_i: usize) -> Option<(&Ident, MultiSpan)> {
        let (span, rhs) = match self.rules[rule_i] {
            MacroRule::Func { lhs_span, ref rhs, .. } => (MultiSpan::from_span(lhs_span), rhs),
            MacroRule::Attr { args_span, body_span, ref rhs, .. } => {
                (MultiSpan::from_spans(vec![args_span, body_span]), rhs)
            }
            MacroRule::Derive { body_span, ref rhs, .. } => (MultiSpan::from_span(body_span), rhs),
        };
        if has_compile_error_macro(rhs) { None } else { Some((&self.name, span)) }
    }

    /// The invocation forms this macro's rules support.
    pub fn kinds(&self) -> MacroKinds {
        self.kinds
    }

    /// Expands a `#[derive(name)]` invocation against this macro's `Derive`
    /// rules, returning the transcribed token stream.
    pub fn expand_derive(
        &self,
        cx: &mut ExtCtxt<'_>,
        sp: Span,
        body: &TokenStream,
    ) -> Result<TokenStream, ErrorGuaranteed> {
        let Self { name, ref rules, node_id, .. } = *self;
        let psess = &cx.sess.psess;

        if cx.trace_macros() {
            let msg = format!("expanding `#[derive({name})] {}`", pprust::tts_to_string(body));
            trace_macros_note(&mut cx.expansions, sp, msg);
        }

        // Fast path uses the no-op tracker; on failure we re-match with
        // tracking to build a detailed diagnostic.
        match try_match_macro_derive(psess, name, body, rules, &mut NoopTracker) {
            Ok((rule_index, rule, named_matches)) => {
                let MacroRule::Derive { rhs, .. } = rule else {
                    panic!("try_match_macro_derive returned non-derive rule");
                };
                let mbe::TokenTree::Delimited(rhs_span, _, rhs) = rhs else {
                    cx.dcx().span_bug(sp, "malformed macro derive rhs");
                };

                // Substitute the matched metavariables into the rhs.
                let id = cx.current_expansion.id;
                let tts = transcribe(psess, &named_matches, rhs, *rhs_span, self.transparency, id)
                    .map_err(|e| e.emit())?;

                if cx.trace_macros() {
                    let msg = format!("to `{}`", pprust::tts_to_string(&tts));
                    trace_macros_note(&mut cx.expansions, sp, msg);
                }

                // Unused-rule bookkeeping only applies to local macros.
                if is_defined_in_current_crate(node_id) {
                    cx.resolver.record_macro_rule_usage(node_id, rule_index);
                }

                Ok(tts)
            }
            Err(CanRetry::No(guar)) => Err(guar),
            Err(CanRetry::Yes) => {
                let (_, guar) = failed_to_match_macro(
                    cx.psess(),
                    sp,
                    self.span,
                    name,
                    FailedMacro::Derive,
                    body,
                    rules,
                );
                cx.macro_error_and_trace_macros_diag();
                Err(guar)
            }
        }
    }
}
228
229impl TTMacroExpander for MacroRulesMacroExpander {
230 fn expand<'cx>(
231 &self,
232 cx: &'cx mut ExtCtxt<'_>,
233 sp: Span,
234 input: TokenStream,
235 ) -> MacroExpanderResult<'cx> {
236 ExpandResult::Ready(expand_macro(
237 cx,
238 sp,
239 self.span,
240 self.node_id,
241 self.name,
242 self.transparency,
243 input,
244 &self.rules,
245 ))
246 }
247}
248
249impl AttrProcMacro for MacroRulesMacroExpander {
250 fn expand(
251 &self,
252 _cx: &mut ExtCtxt<'_>,
253 _sp: Span,
254 _args: TokenStream,
255 _body: TokenStream,
256 ) -> Result<TokenStream, ErrorGuaranteed> {
257 unreachable!("`expand` called on `MacroRulesMacroExpander`, expected `expand_with_safety`")
258 }
259
260 fn expand_with_safety(
261 &self,
262 cx: &mut ExtCtxt<'_>,
263 safety: Safety,
264 sp: Span,
265 args: TokenStream,
266 body: TokenStream,
267 ) -> Result<TokenStream, ErrorGuaranteed> {
268 expand_macro_attr(
269 cx,
270 sp,
271 self.span,
272 self.node_id,
273 self.name,
274 self.transparency,
275 safety,
276 args,
277 body,
278 &self.rules,
279 )
280 }
281}
282
/// A bang-macro expander that only replays a previously emitted error; used
/// as the extension for macro definitions that failed to compile.
struct DummyBang(ErrorGuaranteed);

impl BangProcMacro for DummyBang {
    fn expand<'cx>(
        &self,
        _: &'cx mut ExtCtxt<'_>,
        _: Span,
        _: TokenStream,
    ) -> Result<TokenStream, ErrorGuaranteed> {
        // The error was already emitted when the definition was compiled;
        // just propagate the guarantee.
        Err(self.0)
    }
}
295
296fn trace_macros_note(cx_expansions: &mut FxIndexMap<Span, Vec<String>>, sp: Span, message: String) {
297 let sp = sp.macro_backtrace().last().map_or(sp, |trace| trace.call_site);
298 cx_expansions.entry(sp).or_default().push(message);
299}
300
/// Hooks into macro matching so diagnostics passes can observe it; the fast
/// expansion path uses the do-nothing `NoopTracker`.
pub(super) trait Tracker<'matcher> {
    /// The payload carried by a `Failure` parse result.
    type Failure;

    /// Builds the `Failure` payload when `tok` fails to match at `position`.
    fn build_failure(tok: Token, position: u32, msg: &'static str) -> Self::Failure;

    /// Called before the parser tries the next `MatcherLoc`; default no-op.
    fn before_match_loc(&mut self, _parser: &TtParser, _matcher: &'matcher MatcherLoc) {}

    /// Called after an arm (or an attr rule's args/body half, per `_in_body`)
    /// has been parsed, successfully or not; default no-op.
    fn after_arm(&mut self, _in_body: bool, _result: &NamedParseResult<Self::Failure>) {}

    /// Short name used in tracing output.
    fn description() -> &'static str;

    /// Whether the parser may attempt error recovery while this tracker runs.
    fn recovery() -> Recovery {
        Recovery::Forbidden
    }
}
324
/// A tracker that records nothing; used for the initial (fast) match attempt.
pub(super) struct NoopTracker;

impl<'matcher> Tracker<'matcher> for NoopTracker {
    type Failure = ();

    fn build_failure(_tok: Token, _position: u32, _msg: &'static str) -> Self::Failure {}

    fn description() -> &'static str {
        "none"
    }
}
338
/// Expands a function-like (`name!(...)`) invocation: matches `arg` against
/// the macro's `Func` rules and, on success, returns a `ParserAnyMacro` over
/// the transcribed right-hand side.
#[instrument(skip(cx, transparency, arg, rules))]
fn expand_macro<'cx>(
    cx: &'cx mut ExtCtxt<'_>,
    sp: Span,
    def_span: Span,
    node_id: NodeId,
    name: Ident,
    transparency: Transparency,
    arg: TokenStream,
    rules: &[MacroRule],
) -> Box<dyn MacResult + 'cx> {
    let psess = &cx.sess.psess;

    if cx.trace_macros() {
        let msg = format!("expanding `{}! {{ {} }}`", name, pprust::tts_to_string(&arg));
        trace_macros_note(&mut cx.expansions, sp, msg);
    }

    // Fast path: match with the no-op tracker; on failure we re-match with
    // tracking enabled to build a detailed error.
    let try_success_result = try_match_macro(psess, name, &arg, rules, &mut NoopTracker);

    match try_success_result {
        Ok((rule_index, rule, named_matches)) => {
            let MacroRule::Func { rhs, .. } = rule else {
                panic!("try_match_macro returned non-func rule");
            };
            let mbe::TokenTree::Delimited(rhs_span, _, rhs) = rhs else {
                cx.dcx().span_bug(sp, "malformed macro rhs");
            };
            let arm_span = rhs_span.entire();

            // Substitute the matched metavariables into the rhs.
            let id = cx.current_expansion.id;
            let tts = match transcribe(psess, &named_matches, rhs, *rhs_span, transparency, id) {
                Ok(tts) => tts,
                Err(err) => {
                    let guar = err.emit();
                    return DummyResult::any(arm_span, guar);
                }
            };

            if cx.trace_macros() {
                let msg = format!("to `{}`", pprust::tts_to_string(&tts));
                trace_macros_note(&mut cx.expansions, sp, msg);
            }

            // Unused-rule bookkeeping only applies to local macros.
            let is_local = is_defined_in_current_crate(node_id);
            if is_local {
                cx.resolver.record_macro_rule_usage(node_id, rule_index);
            }

            Box::new(ParserAnyMacro::from_tts(cx, tts, sp, arm_span, is_local, name))
        }
        Err(CanRetry::No(guar)) => {
            debug!("Will not retry matching as an error was emitted already");
            DummyResult::any(sp, guar)
        }
        Err(CanRetry::Yes) => {
            // Retry the match with tracking to produce a good error message.
            let (span, guar) = failed_to_match_macro(
                cx.psess(),
                sp,
                def_span,
                name,
                FailedMacro::Func,
                &arg,
                rules,
            );
            cx.macro_error_and_trace_macros_diag();
            DummyResult::any(span, guar)
        }
    }
}
414
415#[instrument(skip(cx, transparency, args, body, rules))]
417fn expand_macro_attr(
418 cx: &mut ExtCtxt<'_>,
419 sp: Span,
420 def_span: Span,
421 node_id: NodeId,
422 name: Ident,
423 transparency: Transparency,
424 safety: Safety,
425 args: TokenStream,
426 body: TokenStream,
427 rules: &[MacroRule],
428) -> Result<TokenStream, ErrorGuaranteed> {
429 let psess = &cx.sess.psess;
430 let is_local = node_id != DUMMY_NODE_ID;
433
434 if cx.trace_macros() {
435 let msg = format!(
436 "expanding `#[{name}({})] {}`",
437 pprust::tts_to_string(&args),
438 pprust::tts_to_string(&body),
439 );
440 trace_macros_note(&mut cx.expansions, sp, msg);
441 }
442
443 match try_match_macro_attr(psess, name, &args, &body, rules, &mut NoopTracker) {
445 Ok((i, rule, named_matches)) => {
446 let MacroRule::Attr { rhs, unsafe_rule, .. } = rule else {
447 panic!("try_macro_match_attr returned non-attr rule");
448 };
449 let mbe::TokenTree::Delimited(rhs_span, _, rhs) = rhs else {
450 cx.dcx().span_bug(sp, "malformed macro rhs");
451 };
452
453 match (safety, unsafe_rule) {
454 (Safety::Default, false) | (Safety::Unsafe(_), true) => {}
455 (Safety::Default, true) => {
456 cx.dcx().span_err(sp, "unsafe attribute invocation requires `unsafe`");
457 }
458 (Safety::Unsafe(span), false) => {
459 cx.dcx().span_err(span, "unnecessary `unsafe` on safe attribute invocation");
460 }
461 (Safety::Safe(span), _) => {
462 cx.dcx().span_bug(span, "unexpected `safe` keyword");
463 }
464 }
465
466 let id = cx.current_expansion.id;
467 let tts = transcribe(psess, &named_matches, rhs, *rhs_span, transparency, id)
468 .map_err(|e| e.emit())?;
469
470 if cx.trace_macros() {
471 let msg = format!("to `{}`", pprust::tts_to_string(&tts));
472 trace_macros_note(&mut cx.expansions, sp, msg);
473 }
474
475 if is_local {
476 cx.resolver.record_macro_rule_usage(node_id, i);
477 }
478
479 Ok(tts)
480 }
481 Err(CanRetry::No(guar)) => Err(guar),
482 Err(CanRetry::Yes) => {
483 let (_, guar) = failed_to_match_macro(
485 cx.psess(),
486 sp,
487 def_span,
488 name,
489 FailedMacro::Attr(&args),
490 &body,
491 rules,
492 );
493 cx.trace_macros_diag();
494 Err(guar)
495 }
496 }
497}
498
/// Whether matching may be attempted again (e.g. re-run with a diagnostic
/// tracker) after an unsuccessful attempt.
pub(super) enum CanRetry {
    Yes,
    /// Retry is not allowed: a fatal error was already emitted.
    No(ErrorGuaranteed),
}
504
/// Tries to match the invocation `arg` against each `Func` rule in turn.
///
/// On success, returns the index of the matching rule, the rule itself, and
/// the metavariable bindings. On failure, `CanRetry` says whether matching
/// may be re-run (with a tracker) to produce diagnostics.
#[instrument(level = "debug", skip(psess, arg, rules, track), fields(tracking = %T::description()))]
pub(super) fn try_match_macro<'matcher, T: Tracker<'matcher>>(
    psess: &ParseSess,
    name: Ident,
    arg: &TokenStream,
    rules: &'matcher [MacroRule],
    track: &mut T,
) -> Result<(usize, &'matcher MacroRule, NamedMatches), CanRetry> {
    // One parser is shared across arms; each arm matches on a copy-on-write
    // (`Cow::Borrowed`) view of it.
    let parser = parser_from_cx(psess, arg.clone(), T::recovery());
    let mut tt_parser = TtParser::new(name);
    for (i, rule) in rules.iter().enumerate() {
        let MacroRule::Func { lhs, .. } = rule else { continue };
        let _tracing_span = trace_span!("Matching arm", %i);

        // Speculative matching may record feature-gated spans; stash the
        // current set so gating from a failed arm can be discarded.
        let mut gated_spans_snapshot = mem::take(&mut *psess.gated_spans.spans.borrow_mut());

        let result = tt_parser.parse_tt(&mut Cow::Borrowed(&parser), lhs, track);

        track.after_arm(true, &result);

        match result {
            Success(named_matches) => {
                debug!("Parsed arm successfully");
                // This arm matched, so keep the gated spans it recorded.
                psess.gated_spans.merge(gated_spans_snapshot);

                return Ok((i, rule, named_matches));
            }
            Failure(_) => {
                trace!("Failed to match arm, trying the next one");
            }
            Error(_, _) => {
                debug!("Fatal error occurred during matching");
                return Err(CanRetry::Yes);
            }
            ErrorReported(guarantee) => {
                debug!("Fatal error occurred and was reported during matching");
                return Err(CanRetry::No(guarantee));
            }
        }

        // The arm did not match: restore the pre-arm gated spans, discarding
        // anything this arm speculatively recorded.
        mem::swap(&mut gated_spans_snapshot, &mut psess.gated_spans.spans.borrow_mut());
    }

    Err(CanRetry::Yes)
}
584
/// Tries to match an attribute invocation against each `Attr` rule in turn:
/// the rule's `args` matcher runs against the attribute arguments and its
/// `body` matcher against the annotated item, and their bindings are merged.
#[instrument(level = "debug", skip(psess, attr_args, attr_body, rules, track), fields(tracking = %T::description()))]
pub(super) fn try_match_macro_attr<'matcher, T: Tracker<'matcher>>(
    psess: &ParseSess,
    name: Ident,
    attr_args: &TokenStream,
    attr_body: &TokenStream,
    rules: &'matcher [MacroRule],
    track: &mut T,
) -> Result<(usize, &'matcher MacroRule, NamedMatches), CanRetry> {
    // Separate parsers for the attr arguments and the item body; each arm
    // matches on copy-on-write views of them.
    let args_parser = parser_from_cx(psess, attr_args.clone(), T::recovery());
    let body_parser = parser_from_cx(psess, attr_body.clone(), T::recovery());
    let mut tt_parser = TtParser::new(name);
    for (i, rule) in rules.iter().enumerate() {
        let MacroRule::Attr { args, body, .. } = rule else { continue };

        // Stash feature-gated spans so gating from a failed arm is discarded.
        let mut gated_spans_snapshot = mem::take(&mut *psess.gated_spans.spans.borrow_mut());

        let result = tt_parser.parse_tt(&mut Cow::Borrowed(&args_parser), args, track);
        track.after_arm(false, &result);

        let mut named_matches = match result {
            Success(named_matches) => named_matches,
            Failure(_) => {
                // Args didn't match: restore gated spans and try the next arm.
                mem::swap(&mut gated_spans_snapshot, &mut psess.gated_spans.spans.borrow_mut());
                continue;
            }
            Error(_, _) => return Err(CanRetry::Yes),
            ErrorReported(guar) => return Err(CanRetry::No(guar)),
        };

        let result = tt_parser.parse_tt(&mut Cow::Borrowed(&body_parser), body, track);
        track.after_arm(true, &result);

        match result {
            Success(body_named_matches) => {
                // Both halves matched: keep gated spans and merge bindings.
                psess.gated_spans.merge(gated_spans_snapshot);
                #[allow(rustc::potential_query_instability)]
                named_matches.extend(body_named_matches);
                return Ok((i, rule, named_matches));
            }
            Failure(_) => {
                mem::swap(&mut gated_spans_snapshot, &mut psess.gated_spans.spans.borrow_mut())
            }
            Error(_, _) => return Err(CanRetry::Yes),
            ErrorReported(guar) => return Err(CanRetry::No(guar)),
        }
    }

    Err(CanRetry::Yes)
}
639
/// Tries to match a derive invocation's item body against each `Derive` rule
/// in turn, returning the first successful match.
#[instrument(level = "debug", skip(psess, body, rules, track), fields(tracking = %T::description()))]
pub(super) fn try_match_macro_derive<'matcher, T: Tracker<'matcher>>(
    psess: &ParseSess,
    name: Ident,
    body: &TokenStream,
    rules: &'matcher [MacroRule],
    track: &mut T,
) -> Result<(usize, &'matcher MacroRule, NamedMatches), CanRetry> {
    // One parser shared across arms, matched on copy-on-write views.
    let body_parser = parser_from_cx(psess, body.clone(), T::recovery());
    let mut tt_parser = TtParser::new(name);
    for (i, rule) in rules.iter().enumerate() {
        let MacroRule::Derive { body, .. } = rule else { continue };

        // Stash feature-gated spans so gating from a failed arm is discarded.
        let mut gated_spans_snapshot = mem::take(&mut *psess.gated_spans.spans.borrow_mut());

        let result = tt_parser.parse_tt(&mut Cow::Borrowed(&body_parser), body, track);
        track.after_arm(true, &result);

        match result {
            Success(named_matches) => {
                // This arm matched, so keep the gated spans it recorded.
                psess.gated_spans.merge(gated_spans_snapshot);
                return Ok((i, rule, named_matches));
            }
            Failure(_) => {
                mem::swap(&mut gated_spans_snapshot, &mut psess.gated_spans.spans.borrow_mut())
            }
            Error(_, _) => return Err(CanRetry::Yes),
            ErrorReported(guar) => return Err(CanRetry::No(guar)),
        }
    }

    Err(CanRetry::Yes)
}
677
/// Compiles the rules of a `macro_rules!` / `macro` definition into a
/// `SyntaxExtension`, returning it together with the number of rules
/// (used for unused-rule tracking; 0 for non-local or failed definitions).
pub fn compile_declarative_macro(
    sess: &Session,
    features: &Features,
    macro_def: &ast::MacroDef,
    ident: Ident,
    attrs: &[hir::Attribute],
    span: Span,
    node_id: NodeId,
    edition: Edition,
) -> (SyntaxExtension, usize) {
    let mk_syn_ext = |kind| {
        let is_local = is_defined_in_current_crate(node_id);
        SyntaxExtension::new(sess, kind, span, Vec::new(), edition, ident.name, attrs, is_local)
    };
    // On a hard error, hand back an extension that only replays the error.
    let dummy_syn_ext =
        |guar| (mk_syn_ext(SyntaxExtensionKind::Bang(Arc::new(DummyBang(guar)))), 0);

    let macro_rules = macro_def.macro_rules;
    // `macro_rules!` separates rules with `;`, `macro` with `,`.
    let exp_sep = if macro_rules { exp!(Semi) } else { exp!(Comma) };

    let body = macro_def.body.tokens.clone();
    let mut p = Parser::new(&sess.psess, body, rustc_parse::MACRO_ARGUMENTS);

    // Non-fatal check failures are accumulated (first one wins) and replayed
    // via a dummy extension once all rules have been checked.
    let mut guar = None;
    let mut check_emission = |ret: Result<(), ErrorGuaranteed>| guar = guar.or(ret.err());

    let mut kinds = MacroKinds::empty();
    let mut rules = Vec::new();

    while p.token != token::Eof {
        // An optional leading `unsafe` is only valid on `attr` rules.
        let unsafe_rule = p.eat_keyword_noexpect(kw::Unsafe);
        let unsafe_keyword_span = p.prev_token.span;
        if unsafe_rule && let Some(guar) = check_no_eof(sess, &p, "expected `attr`") {
            return dummy_syn_ext(guar);
        }
        // Determine the rule kind from the leading keyword (if any) and parse
        // the extra matcher an `attr` rule carries for its arguments.
        let (args, is_derive) = if p.eat_keyword_noexpect(sym::attr) {
            kinds |= MacroKinds::ATTR;
            if !features.macro_attr() {
                feature_err(sess, sym::macro_attr, span, "`macro_rules!` attributes are unstable")
                    .emit();
            }
            if let Some(guar) = check_no_eof(sess, &p, "expected macro attr args") {
                return dummy_syn_ext(guar);
            }
            let args = p.parse_token_tree();
            check_args_parens(sess, sym::attr, &args);
            let args = parse_one_tt(args, RulePart::Pattern, sess, node_id, features, edition);
            check_emission(check_lhs(sess, node_id, &args));
            if let Some(guar) = check_no_eof(sess, &p, "expected macro attr body") {
                return dummy_syn_ext(guar);
            }
            (Some(args), false)
        } else if p.eat_keyword_noexpect(sym::derive) {
            kinds |= MacroKinds::DERIVE;
            let derive_keyword_span = p.prev_token.span;
            if !features.macro_derive() {
                feature_err(sess, sym::macro_derive, span, "`macro_rules!` derives are unstable")
                    .emit();
            }
            if unsafe_rule {
                sess.dcx()
                    .span_err(unsafe_keyword_span, "`unsafe` is only supported on `attr` rules");
            }
            if let Some(guar) = check_no_eof(sess, &p, "expected `()` after `derive`") {
                return dummy_syn_ext(guar);
            }
            // `derive` must be followed by an empty `()`.
            let args = p.parse_token_tree();
            check_args_parens(sess, sym::derive, &args);
            let args_empty_result = check_args_empty(sess, &args);
            let args_not_empty = args_empty_result.is_err();
            check_emission(args_empty_result);
            if let Some(guar) = check_no_eof(sess, &p, "expected macro derive body") {
                return dummy_syn_ext(guar);
            }
            if p.token == token::FatArrow {
                // The user likely wrote `derive(args) => ...`; point at the
                // `derive` keyword when the `()` was not empty.
                let mut err = sess
                    .dcx()
                    .struct_span_err(p.token.span, "expected macro derive body, got `=>`");
                if args_not_empty {
                    err.span_label(derive_keyword_span, "need `()` after this `derive`");
                }
                return dummy_syn_ext(err.emit());
            }
            (None, true)
        } else {
            kinds |= MacroKinds::BANG;
            if unsafe_rule {
                sess.dcx()
                    .span_err(unsafe_keyword_span, "`unsafe` is only supported on `attr` rules");
            }
            (None, false)
        };
        // Parse and validate the matcher, the `=>`, and the transcriber.
        let lhs_tt = p.parse_token_tree();
        let lhs_tt = parse_one_tt(lhs_tt, RulePart::Pattern, sess, node_id, features, edition);
        check_emission(check_lhs(sess, node_id, &lhs_tt));
        if let Err(e) = p.expect(exp!(FatArrow)) {
            return dummy_syn_ext(e.emit());
        }
        if let Some(guar) = check_no_eof(sess, &p, "expected right-hand side of macro rule") {
            return dummy_syn_ext(guar);
        }
        let rhs = p.parse_token_tree();
        let rhs = parse_one_tt(rhs, RulePart::Body, sess, node_id, features, edition);
        check_emission(check_rhs(sess, &rhs));
        check_emission(check_meta_variables(&sess.psess, node_id, args.as_ref(), &lhs_tt, &rhs));
        let lhs_span = lhs_tt.span();
        // Compile the matcher into `MatcherLoc`s; a non-delimited lhs can only
        // occur if a check above already failed, so `guar` is set.
        let lhs = if let mbe::TokenTree::Delimited(.., delimited) = lhs_tt {
            mbe::macro_parser::compute_locs(&delimited.tts)
        } else {
            return dummy_syn_ext(guar.unwrap());
        };
        if let Some(args) = args {
            let args_span = args.span();
            let mbe::TokenTree::Delimited(.., delimited) = args else {
                return dummy_syn_ext(guar.unwrap());
            };
            let args = mbe::macro_parser::compute_locs(&delimited.tts);
            let body_span = lhs_span;
            rules.push(MacroRule::Attr { unsafe_rule, args, args_span, body: lhs, body_span, rhs });
        } else if is_derive {
            rules.push(MacroRule::Derive { body: lhs, body_span: lhs_span, rhs });
        } else {
            rules.push(MacroRule::Func { lhs, lhs_span, rhs });
        }
        // A trailing separator after the last rule is optional.
        if p.token == token::Eof {
            break;
        }
        if let Err(e) = p.expect(exp_sep) {
            return dummy_syn_ext(e.emit());
        }
    }

    if rules.is_empty() {
        let guar = sess.dcx().span_err(span, "macros must contain at least one rule");
        return dummy_syn_ext(guar);
    }
    assert!(!kinds.is_empty());

    let transparency = find_attr!(attrs, AttributeKind::MacroTransparency(x) => *x)
        .unwrap_or(Transparency::fallback(macro_rules));

    // Any accumulated (non-fatal) check failure still aborts the definition.
    if let Some(guar) = guar {
        return dummy_syn_ext(guar);
    }

    // Only rules of local macros participate in unused-rule tracking.
    let nrules = if is_defined_in_current_crate(node_id) { rules.len() } else { 0 };

    let exp = MacroRulesMacroExpander { name: ident, kinds, span, node_id, transparency, rules };
    (mk_syn_ext(SyntaxExtensionKind::MacroRules(Arc::new(exp))), nrules)
}
838
839fn check_no_eof(sess: &Session, p: &Parser<'_>, msg: &'static str) -> Option<ErrorGuaranteed> {
840 if p.token == token::Eof {
841 let err_sp = p.token.span.shrink_to_hi();
842 let guar = sess
843 .dcx()
844 .struct_span_err(err_sp, "macro definition ended unexpectedly")
845 .with_span_label(err_sp, msg)
846 .emit();
847 return Some(guar);
848 }
849 None
850}
851
852fn check_args_parens(sess: &Session, rule_kw: Symbol, args: &tokenstream::TokenTree) {
853 if let tokenstream::TokenTree::Delimited(dspan, _, delim, _) = args
855 && *delim != Delimiter::Parenthesis
856 {
857 sess.dcx().emit_err(errors::MacroArgsBadDelim {
858 span: dspan.entire(),
859 sugg: errors::MacroArgsBadDelimSugg { open: dspan.open, close: dspan.close },
860 rule_kw,
861 });
862 }
863}
864
865fn check_args_empty(sess: &Session, args: &tokenstream::TokenTree) -> Result<(), ErrorGuaranteed> {
866 match args {
867 tokenstream::TokenTree::Delimited(.., delimited) if delimited.is_empty() => Ok(()),
868 _ => {
869 let msg = "`derive` rules do not accept arguments; `derive` must be followed by `()`";
870 Err(sess.dcx().span_err(args.span(), msg))
871 }
872 }
873}
874
875fn check_lhs(sess: &Session, node_id: NodeId, lhs: &mbe::TokenTree) -> Result<(), ErrorGuaranteed> {
876 let e1 = check_lhs_nt_follows(sess, node_id, lhs);
877 let e2 = check_lhs_no_empty_seq(sess, slice::from_ref(lhs));
878 e1.and(e2)
879}
880
881fn check_lhs_nt_follows(
882 sess: &Session,
883 node_id: NodeId,
884 lhs: &mbe::TokenTree,
885) -> Result<(), ErrorGuaranteed> {
886 if let mbe::TokenTree::Delimited(.., delimited) = lhs {
889 check_matcher(sess, node_id, &delimited.tts)
890 } else {
891 let msg = "invalid macro matcher; matchers must be contained in balanced delimiters";
892 Err(sess.dcx().span_err(lhs.span(), msg))
893 }
894}
895
896fn is_empty_token_tree(sess: &Session, seq: &mbe::SequenceRepetition) -> bool {
897 if seq.separator.is_some() {
898 false
899 } else {
900 let mut is_empty = true;
901 let mut iter = seq.tts.iter().peekable();
902 while let Some(tt) = iter.next() {
903 match tt {
904 mbe::TokenTree::MetaVarDecl { kind: NonterminalKind::Vis, .. } => {}
905 mbe::TokenTree::Token(t @ Token { kind: DocComment(..), .. }) => {
906 let mut now = t;
907 while let Some(&mbe::TokenTree::Token(
908 next @ Token { kind: DocComment(..), .. },
909 )) = iter.peek()
910 {
911 now = next;
912 iter.next();
913 }
914 let span = t.span.to(now.span);
915 sess.dcx().span_note(span, "doc comments are ignored in matcher position");
916 }
917 mbe::TokenTree::Sequence(_, sub_seq)
918 if (sub_seq.kleene.op == mbe::KleeneOp::ZeroOrMore
919 || sub_seq.kleene.op == mbe::KleeneOp::ZeroOrOne) => {}
920 _ => is_empty = false,
921 }
922 }
923 is_empty
924 }
925}
926
927fn check_redundant_vis_repetition(
932 err: &mut Diag<'_>,
933 sess: &Session,
934 seq: &SequenceRepetition,
935 span: &DelimSpan,
936) {
937 if seq.kleene.op == KleeneOp::ZeroOrOne
938 && matches!(
939 seq.tts.first(),
940 Some(mbe::TokenTree::MetaVarDecl { kind: NonterminalKind::Vis, .. })
941 )
942 {
943 err.note("a `vis` fragment can already be empty");
944 err.multipart_suggestion(
945 "remove the `$(` and `)?`",
946 vec![
947 (
948 sess.source_map().span_extend_to_prev_char_before(span.open, '$', true),
949 "".to_string(),
950 ),
951 (span.close.with_hi(seq.kleene.span.hi()), "".to_string()),
952 ],
953 Applicability::MaybeIncorrect,
954 );
955 }
956}
957
958fn check_lhs_no_empty_seq(sess: &Session, tts: &[mbe::TokenTree]) -> Result<(), ErrorGuaranteed> {
961 use mbe::TokenTree;
962 for tt in tts {
963 match tt {
964 TokenTree::Token(..)
965 | TokenTree::MetaVar(..)
966 | TokenTree::MetaVarDecl { .. }
967 | TokenTree::MetaVarExpr(..) => (),
968 TokenTree::Delimited(.., del) => check_lhs_no_empty_seq(sess, &del.tts)?,
969 TokenTree::Sequence(span, seq) => {
970 if is_empty_token_tree(sess, seq) {
971 let sp = span.entire();
972 let mut err =
973 sess.dcx().struct_span_err(sp, "repetition matches empty token tree");
974 check_redundant_vis_repetition(&mut err, sess, seq, span);
975 return Err(err.emit());
976 }
977 check_lhs_no_empty_seq(sess, &seq.tts)?
978 }
979 }
980 }
981
982 Ok(())
983}
984
985fn check_rhs(sess: &Session, rhs: &mbe::TokenTree) -> Result<(), ErrorGuaranteed> {
986 match *rhs {
987 mbe::TokenTree::Delimited(..) => Ok(()),
988 _ => Err(sess.dcx().span_err(rhs.span(), "macro rhs must be delimited")),
989 }
990}
991
992fn check_matcher(
993 sess: &Session,
994 node_id: NodeId,
995 matcher: &[mbe::TokenTree],
996) -> Result<(), ErrorGuaranteed> {
997 let first_sets = FirstSets::new(matcher);
998 let empty_suffix = TokenSet::empty();
999 check_matcher_core(sess, node_id, &first_sets, matcher, &empty_suffix)?;
1000 Ok(())
1001}
1002
1003fn has_compile_error_macro(rhs: &mbe::TokenTree) -> bool {
1004 match rhs {
1005 mbe::TokenTree::Delimited(.., d) => {
1006 let has_compile_error = d.tts.array_windows::<3>().any(|[ident, bang, args]| {
1007 if let mbe::TokenTree::Token(ident) = ident
1008 && let TokenKind::Ident(ident, _) = ident.kind
1009 && ident == sym::compile_error
1010 && let mbe::TokenTree::Token(bang) = bang
1011 && let TokenKind::Bang = bang.kind
1012 && let mbe::TokenTree::Delimited(.., del) = args
1013 && !del.delim.skip()
1014 {
1015 true
1016 } else {
1017 false
1018 }
1019 });
1020 if has_compile_error { true } else { d.tts.iter().any(has_compile_error_macro) }
1021 }
1022 _ => false,
1023 }
1024}
1025
/// Precomputed FIRST sets, keyed by each repetition sequence's span. `None`
/// marks a span that was encountered more than once during construction;
/// its FIRST set is recomputed on demand instead (see `FirstSets::first`).
struct FirstSets<'tt> {
    first: FxHashMap<Span, Option<TokenSet<'tt>>>,
}
1047
impl<'tt> FirstSets<'tt> {
    /// Precomputes the FIRST set of every `Sequence` nested inside `tts`.
    fn new(tts: &'tt [mbe::TokenTree]) -> FirstSets<'tt> {
        use mbe::TokenTree;

        let mut sets = FirstSets { first: FxHashMap::default() };
        build_recur(&mut sets, tts);
        return sets;

        // Walks `tts` in reverse so that at each step `first` holds the FIRST
        // set of the suffix starting at the current token tree; records the
        // set of every sequence encountered along the way.
        fn build_recur<'tt>(sets: &mut FirstSets<'tt>, tts: &'tt [TokenTree]) -> TokenSet<'tt> {
            let mut first = TokenSet::empty();
            for tt in tts.iter().rev() {
                match tt {
                    TokenTree::Token(..)
                    | TokenTree::MetaVar(..)
                    | TokenTree::MetaVarDecl { .. }
                    | TokenTree::MetaVarExpr(..) => {
                        // A leaf starts with (exactly) itself.
                        first.replace_with(TtHandle::TtRef(tt));
                    }
                    TokenTree::Delimited(span, _, delimited) => {
                        build_recur(sets, &delimited.tts);
                        // A delimited group starts with its opening delimiter.
                        first.replace_with(TtHandle::from_token_kind(
                            delimited.delim.as_open_token_kind(),
                            span.open,
                        ));
                    }
                    TokenTree::Sequence(sp, seq_rep) => {
                        let subfirst = build_recur(sets, &seq_rep.tts);

                        match sets.first.entry(sp.entire()) {
                            Entry::Vacant(vac) => {
                                vac.insert(Some(subfirst.clone()));
                            }
                            Entry::Occupied(mut occ) => {
                                // Span seen more than once: store `None` so
                                // `first()` recomputes this set on demand.
                                occ.insert(None);
                            }
                        }

                        // If the sequence body can be empty, its separator
                        // (when present) may be the first token observed.
                        if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
                            first.add_one_maybe(TtHandle::from_token(*sep));
                        }

                        // A zero-able sequence lets whatever follows it also
                        // contribute to the FIRST set of this suffix.
                        if subfirst.maybe_empty
                            || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrMore
                            || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrOne
                        {
                            first.add_all(&TokenSet { maybe_empty: true, ..subfirst });
                        } else {
                            // Mandatory sequence: its own FIRST set replaces
                            // the suffix's.
                            first = subfirst;
                        }
                    }
                }
            }

            first
        }
    }

    /// Computes the FIRST set of the suffix `tts`, using the precomputed
    /// per-sequence sets where still valid.
    fn first(&self, tts: &'tt [mbe::TokenTree]) -> TokenSet<'tt> {
        use mbe::TokenTree;

        let mut first = TokenSet::empty();
        for tt in tts.iter() {
            // Invariant: everything consumed so far could match empty.
            assert!(first.maybe_empty);
            match tt {
                TokenTree::Token(..)
                | TokenTree::MetaVar(..)
                | TokenTree::MetaVarDecl { .. }
                | TokenTree::MetaVarExpr(..) => {
                    first.add_one(TtHandle::TtRef(tt));
                    return first;
                }
                TokenTree::Delimited(span, _, delimited) => {
                    first.add_one(TtHandle::from_token_kind(
                        delimited.delim.as_open_token_kind(),
                        span.open,
                    ));
                    return first;
                }
                TokenTree::Sequence(sp, seq_rep) => {
                    let subfirst_owned;
                    let subfirst = match self.first.get(&sp.entire()) {
                        Some(Some(subfirst)) => subfirst,
                        Some(&None) => {
                            // Span was ambiguous at construction time;
                            // recompute the set for this occurrence.
                            subfirst_owned = self.first(&seq_rep.tts);
                            &subfirst_owned
                        }
                        None => {
                            panic!("We missed a sequence during FirstSets construction");
                        }
                    };

                    // An empty-able body makes the separator a possible first token.
                    if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
                        first.add_one_maybe(TtHandle::from_token(*sep));
                    }

                    assert!(first.maybe_empty);
                    first.add_all(subfirst);
                    if subfirst.maybe_empty
                        || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrMore
                        || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrOne
                    {
                        // Zero-able sequence: continue into what follows it.
                        first.maybe_empty = true;
                        continue;
                    } else {
                        return first;
                    }
                }
            }
        }

        // Reached the end: the whole suffix can match empty.
        assert!(first.maybe_empty);
        first
    }
}
1188
/// Handle around a token tree for use in `TokenSet`s: either a borrow into
/// the matcher, or a small owned token synthesized during analysis.
#[derive(Debug)]
enum TtHandle<'tt> {
    /// A borrow of a token tree from the matcher itself.
    TtRef(&'tt mbe::TokenTree),

    /// An owned tree, built via `from_token`/`from_token_kind`; always wraps
    /// a `mbe::TokenTree::Token` (relied upon by the `Clone` impl).
    Token(mbe::TokenTree),
}
1204
1205impl<'tt> TtHandle<'tt> {
1206 fn from_token(tok: Token) -> Self {
1207 TtHandle::Token(mbe::TokenTree::Token(tok))
1208 }
1209
1210 fn from_token_kind(kind: TokenKind, span: Span) -> Self {
1211 TtHandle::from_token(Token::new(kind, span))
1212 }
1213
1214 fn get(&'tt self) -> &'tt mbe::TokenTree {
1216 match self {
1217 TtHandle::TtRef(tt) => tt,
1218 TtHandle::Token(token_tt) => token_tt,
1219 }
1220 }
1221}
1222
1223impl<'tt> PartialEq for TtHandle<'tt> {
1224 fn eq(&self, other: &TtHandle<'tt>) -> bool {
1225 self.get() == other.get()
1226 }
1227}
1228
1229impl<'tt> Clone for TtHandle<'tt> {
1230 fn clone(&self) -> Self {
1231 match self {
1232 TtHandle::TtRef(tt) => TtHandle::TtRef(tt),
1233
1234 TtHandle::Token(mbe::TokenTree::Token(tok)) => {
1237 TtHandle::Token(mbe::TokenTree::Token(*tok))
1238 }
1239
1240 _ => unreachable!(),
1241 }
1242 }
1243}
1244
/// A set of token trees used for FIRST/FOLLOW computations.
///
/// `maybe_empty` records whether the token sequence this set was computed for
/// could match the empty sequence; `tokens` holds the (deduplicated) tokens
/// that could start a match.
#[derive(Clone, Debug)]
struct TokenSet<'tt> {
    // Deduplicated member tokens (see `add_one`/`add_all`).
    tokens: Vec<TtHandle<'tt>>,
    // True while the corresponding position may match zero tokens.
    maybe_empty: bool,
}
1260
1261impl<'tt> TokenSet<'tt> {
1262 fn empty() -> Self {
1264 TokenSet { tokens: Vec::new(), maybe_empty: true }
1265 }
1266
1267 fn singleton(tt: TtHandle<'tt>) -> Self {
1270 TokenSet { tokens: vec![tt], maybe_empty: false }
1271 }
1272
1273 fn replace_with(&mut self, tt: TtHandle<'tt>) {
1276 self.tokens.clear();
1277 self.tokens.push(tt);
1278 self.maybe_empty = false;
1279 }
1280
1281 fn replace_with_irrelevant(&mut self) {
1285 self.tokens.clear();
1286 self.maybe_empty = false;
1287 }
1288
1289 fn add_one(&mut self, tt: TtHandle<'tt>) {
1291 if !self.tokens.contains(&tt) {
1292 self.tokens.push(tt);
1293 }
1294 self.maybe_empty = false;
1295 }
1296
1297 fn add_one_maybe(&mut self, tt: TtHandle<'tt>) {
1299 if !self.tokens.contains(&tt) {
1300 self.tokens.push(tt);
1301 }
1302 }
1303
1304 fn add_all(&mut self, other: &Self) {
1312 for tt in &other.tokens {
1313 if !self.tokens.contains(tt) {
1314 self.tokens.push(tt.clone());
1315 }
1316 }
1317 if !other.maybe_empty {
1318 self.maybe_empty = false;
1319 }
1320 }
1321}
1322
/// Walks `matcher` and checks, for every metavariable fragment, that each
/// token which could follow it is in that fragment kind's FOLLOW set
/// (see `is_in_follow`). Delimited groups and sequences are checked
/// recursively. `follow` is the set of tokens that may come after the whole
/// `matcher`; the returned set is the tokens the matcher may *end* with
/// ("LAST"), for use by the caller.
///
/// Errors are emitted as they are found but the walk continues, so multiple
/// violations are reported in one pass; the first `ErrorGuaranteed` is
/// returned at the end.
fn check_matcher_core<'tt>(
    sess: &Session,
    node_id: NodeId,
    first_sets: &FirstSets<'tt>,
    matcher: &'tt [mbe::TokenTree],
    follow: &TokenSet<'tt>,
) -> Result<TokenSet<'tt>, ErrorGuaranteed> {
    use mbe::TokenTree;

    // Tokens the portion of the matcher seen so far may end with.
    let mut last = TokenSet::empty();

    // First error emitted, if any; checking continues after an error.
    let mut errored = Ok(());

    'each_token: for i in 0..matcher.len() {
        let token = &matcher[i];
        let suffix = &matcher[i + 1..];

        // FIRST of everything after `token`; if that can be empty, the
        // caller-provided `follow` set applies as well.
        let build_suffix_first = || {
            let mut s = first_sets.first(suffix);
            if s.maybe_empty {
                s.add_all(follow);
            }
            s
        };

        // Only computed on the paths that fall through to the follow check
        // below; the `continue 'each_token` arms never need it.
        let suffix_first;

        match token {
            TokenTree::Token(..)
            | TokenTree::MetaVar(..)
            | TokenTree::MetaVarDecl { .. }
            | TokenTree::MetaVarExpr(..) => {
                if token_can_be_followed_by_any(token) {
                    // Unrestricted leaf: nothing to check, but it does end
                    // the matched-so-far prefix with a (irrelevant) token.
                    last.replace_with_irrelevant();
                    continue 'each_token;
                } else {
                    // Restricted fragment: remember it and fall through to
                    // validate its followers against the FOLLOW set.
                    last.replace_with(TtHandle::TtRef(token));
                    suffix_first = build_suffix_first();
                }
            }
            TokenTree::Delimited(span, _, d) => {
                // Inside the group, the only possible follower is the
                // closing delimiter itself.
                let my_suffix = TokenSet::singleton(TtHandle::from_token_kind(
                    d.delim.as_close_token_kind(),
                    span.close,
                ));
                check_matcher_core(sess, node_id, first_sets, &d.tts, &my_suffix)?;
                // The group ends with its closing delimiter, which never
                // restricts what may come next.
                last.replace_with_irrelevant();

                continue 'each_token;
            }
            TokenTree::Sequence(_, seq_rep) => {
                suffix_first = build_suffix_first();
                // Inside a repetition, an element may be followed either by
                // whatever follows the whole sequence, or — when a separator
                // exists — by that separator before the next iteration.
                let mut new;
                let my_suffix = if let Some(sep) = &seq_rep.separator {
                    new = suffix_first.clone();
                    new.add_one_maybe(TtHandle::from_token(*sep));
                    &new
                } else {
                    &suffix_first
                };

                let next = check_matcher_core(sess, node_id, first_sets, &seq_rep.tts, my_suffix)?;
                if next.maybe_empty {
                    // The sequence may match nothing, so the previous LAST
                    // tokens are still reachable; merge rather than replace.
                    last.add_all(&next);
                } else {
                    last = next;
                }

                continue 'each_token;
            }
        }

        // Validate every fragment the prefix may end with against every token
        // the suffix may begin with.
        for tt in &last.tokens {
            if let &TokenTree::MetaVarDecl { span, name, kind } = tt.get() {
                for next_token in &suffix_first.tokens {
                    let next_token = next_token.get();

                    // Edition-migration lint: in ≥2021, `$x:pat` accepts
                    // or-patterns, so `$x:pat | …` becomes ambiguous; suggest
                    // `pat_param`. Only for macros defined in this crate.
                    if is_defined_in_current_crate(node_id)
                        && matches!(kind, NonterminalKind::Pat(PatParam { inferred: true }))
                        && matches!(
                            next_token,
                            TokenTree::Token(token) if *token == token::Or
                        )
                    {
                        let suggestion = quoted_tt_to_string(&TokenTree::MetaVarDecl {
                            span,
                            name,
                            kind: NonterminalKind::Pat(PatParam { inferred: false }),
                        });
                        sess.psess.buffer_lint(
                            RUST_2021_INCOMPATIBLE_OR_PATTERNS,
                            span,
                            ast::CRATE_NODE_ID,
                            errors::OrPatternsBackCompat { span, suggestion },
                        );
                    }
                    match is_in_follow(next_token, kind) {
                        IsInFollow::Yes => {}
                        IsInFollow::No(possible) => {
                            // With a unique LAST and a unique follower the
                            // pairing is certain; otherwise it is only
                            // possible.
                            let may_be = if last.tokens.len() == 1 && suffix_first.tokens.len() == 1
                            {
                                "is"
                            } else {
                                "may be"
                            };

                            let sp = next_token.span();
                            let mut err = sess.dcx().struct_span_err(
                                sp,
                                format!(
                                    "`${name}:{frag}` {may_be} followed by `{next}`, which \
                                     is not allowed for `{frag}` fragments",
                                    name = name,
                                    frag = kind,
                                    next = quoted_tt_to_string(next_token),
                                    may_be = may_be
                                ),
                            );
                            err.span_label(sp, format!("not allowed after `{kind}` fragments"));

                            // For `$x:pat | …` in ≥2021, suggest downgrading
                            // the fragment to `pat_param`.
                            if kind == NonterminalKind::Pat(PatWithOr)
                                && sess.psess.edition.at_least_rust_2021()
                                && next_token.is_token(&token::Or)
                            {
                                let suggestion = quoted_tt_to_string(&TokenTree::MetaVarDecl {
                                    span,
                                    name,
                                    kind: NonterminalKind::Pat(PatParam { inferred: false }),
                                });
                                err.span_suggestion(
                                    span,
                                    "try a `pat_param` fragment specifier instead",
                                    suggestion,
                                    Applicability::MaybeIncorrect,
                                );
                            }

                            // List the tokens that *are* allowed after this
                            // fragment kind, if any are known.
                            let msg = "allowed there are: ";
                            match possible {
                                &[] => {}
                                &[t] => {
                                    err.note(format!(
                                        "only {t} is allowed after `{kind}` fragments",
                                    ));
                                }
                                ts => {
                                    err.note(format!(
                                        "{}{} or {}",
                                        msg,
                                        ts[..ts.len() - 1].to_vec().join(", "),
                                        ts[ts.len() - 1],
                                    ));
                                }
                            }
                            errored = Err(err.emit());
                        }
                    }
                }
            }
        }
    }
    errored?;
    Ok(last)
}
1538
1539fn token_can_be_followed_by_any(tok: &mbe::TokenTree) -> bool {
1540 if let mbe::TokenTree::MetaVarDecl { kind, .. } = *tok {
1541 frag_can_be_followed_by_any(kind)
1542 } else {
1543 true
1545 }
1546}
1547
1548fn frag_can_be_followed_by_any(kind: NonterminalKind) -> bool {
1557 matches!(
1558 kind,
1559 NonterminalKind::Item | NonterminalKind::Block | NonterminalKind::Ident | NonterminalKind::Literal | NonterminalKind::Meta | NonterminalKind::Lifetime | NonterminalKind::TT )
1567}
1568
/// Result of a FOLLOW-set membership test (see `is_in_follow`).
enum IsInFollow {
    Yes,
    /// Not allowed; carries human-readable descriptions of the tokens that
    /// *are* allowed, for use in the error message.
    No(&'static [&'static str]),
}
1573
/// Decides whether `tok` may legally follow a metavariable fragment of the
/// given `kind` inside a `macro_rules!` matcher.
///
/// Each restricted fragment kind has a fixed FOLLOW set; `IsInFollow::No`
/// carries the displayable list of allowed tokens for diagnostics.
fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow {
    use mbe::TokenTree;

    // NOTE: inside the `if let`, `kind` shadows the fragment kind with the
    // *token* kind of `tok`.
    if let TokenTree::Token(Token { kind, .. }) = tok
        && kind.close_delim().is_some()
    {
        // A closing delimiter can never be consumed by the fragment itself,
        // so it is always a valid follower.
        IsInFollow::Yes
    } else {
        match kind {
            NonterminalKind::Item => {
                // Unrestricted (also accepted by `frag_can_be_followed_by_any`).
                IsInFollow::Yes
            }
            NonterminalKind::Block => {
                // Unrestricted (also accepted by `frag_can_be_followed_by_any`).
                IsInFollow::Yes
            }
            NonterminalKind::Stmt | NonterminalKind::Expr(_) => {
                const TOKENS: &[&str] = &["`=>`", "`,`", "`;`"];
                match tok {
                    TokenTree::Token(token) => match token.kind {
                        FatArrow | Comma | Semi => IsInFollow::Yes,
                        _ => IsInFollow::No(TOKENS),
                    },
                    _ => IsInFollow::No(TOKENS),
                }
            }
            NonterminalKind::Pat(PatParam { .. }) => {
                // `|` is allowed here but not for `PatWithOr` below, where it
                // would be consumed by an or-pattern instead.
                const TOKENS: &[&str] = &["`=>`", "`,`", "`=`", "`|`", "`if`", "`in`"];
                match tok {
                    TokenTree::Token(token) => match token.kind {
                        FatArrow | Comma | Eq | Or => IsInFollow::Yes,
                        Ident(name, IdentIsRaw::No) if name == kw::If || name == kw::In => {
                            IsInFollow::Yes
                        }
                        _ => IsInFollow::No(TOKENS),
                    },
                    _ => IsInFollow::No(TOKENS),
                }
            }
            NonterminalKind::Pat(PatWithOr) => {
                const TOKENS: &[&str] = &["`=>`", "`,`", "`=`", "`if`", "`in`"];
                match tok {
                    TokenTree::Token(token) => match token.kind {
                        FatArrow | Comma | Eq => IsInFollow::Yes,
                        Ident(name, IdentIsRaw::No) if name == kw::If || name == kw::In => {
                            IsInFollow::Yes
                        }
                        _ => IsInFollow::No(TOKENS),
                    },
                    _ => IsInFollow::No(TOKENS),
                }
            }
            NonterminalKind::Path | NonterminalKind::Ty => {
                const TOKENS: &[&str] = &[
                    "`{`", "`[`", "`=>`", "`,`", "`>`", "`=`", "`:`", "`;`", "`|`", "`as`",
                    "`where`",
                ];
                match tok {
                    TokenTree::Token(token) => match token.kind {
                        // `Shr` covers `>>`, which begins with the allowed `>`.
                        OpenBrace | OpenBracket | Comma | FatArrow | Colon | Eq | Gt | Shr
                        | Semi | Or => IsInFollow::Yes,
                        Ident(name, IdentIsRaw::No) if name == kw::As || name == kw::Where => {
                            IsInFollow::Yes
                        }
                        _ => IsInFollow::No(TOKENS),
                    },
                    // A `block` fragment may directly follow a path/type.
                    TokenTree::MetaVarDecl { kind: NonterminalKind::Block, .. } => IsInFollow::Yes,
                    _ => IsInFollow::No(TOKENS),
                }
            }
            NonterminalKind::Ident | NonterminalKind::Lifetime => {
                // Single-token fragments: unrestricted.
                IsInFollow::Yes
            }
            NonterminalKind::Literal => {
                // Single-token fragment: unrestricted.
                IsInFollow::Yes
            }
            NonterminalKind::Meta | NonterminalKind::TT => {
                // Exactly one token tree: unrestricted.
                IsInFollow::Yes
            }
            NonterminalKind::Vis => {
                const TOKENS: &[&str] = &["`,`", "an ident", "a type"];
                match tok {
                    TokenTree::Token(token) => match token.kind {
                        Comma => IsInFollow::Yes,
                        // Any raw identifier, or any non-`priv` identifier
                        // (keyword or not), may follow a visibility.
                        Ident(_, IdentIsRaw::Yes) => IsInFollow::Yes,
                        Ident(name, _) if name != kw::Priv => IsInFollow::Yes,
                        _ => {
                            // Otherwise, anything that can start a type.
                            if token.can_begin_type() {
                                IsInFollow::Yes
                            } else {
                                IsInFollow::No(TOKENS)
                            }
                        }
                    },
                    TokenTree::MetaVarDecl {
                        kind: NonterminalKind::Ident | NonterminalKind::Ty | NonterminalKind::Path,
                        ..
                    } => IsInFollow::Yes,
                    _ => IsInFollow::No(TOKENS),
                }
            }
        }
    }
}
1696
1697fn quoted_tt_to_string(tt: &mbe::TokenTree) -> String {
1698 match tt {
1699 mbe::TokenTree::Token(token) => pprust::token_to_string(token).into(),
1700 mbe::TokenTree::MetaVar(_, name) => format!("${name}"),
1701 mbe::TokenTree::MetaVarDecl { name, kind, .. } => format!("${name}:{kind}"),
1702 _ => panic!(
1703 "{}",
1704 "unexpected mbe::TokenTree::{Sequence or Delimited} \
1705 in follow set checker"
1706 ),
1707 }
1708}
1709
1710fn is_defined_in_current_crate(node_id: NodeId) -> bool {
1711 node_id != DUMMY_NODE_ID
1714}
1715
1716pub(super) fn parser_from_cx(
1717 psess: &ParseSess,
1718 mut tts: TokenStream,
1719 recovery: Recovery,
1720) -> Parser<'_> {
1721 tts.desugar_doc_comments();
1722 Parser::new(psess, tts, rustc_parse::MACRO_ARGUMENTS).recovery(recovery)
1723}