1pub mod attr;
2mod attr_wrapper;
3mod diagnostics;
4mod expr;
5mod generics;
6mod item;
7mod nonterminal;
8mod pat;
9mod path;
10mod stmt;
11pub mod token_type;
12mod ty;
13
14use std::assert_matches::debug_assert_matches;
15use std::ops::Range;
16use std::sync::Arc;
17use std::{fmt, mem, slice};
18
19use attr_wrapper::{AttrWrapper, UsePreAttrPos};
20pub use diagnostics::AttemptLocalParseRecovery;
21pub(crate) use expr::ForbiddenLetReason;
22pub(crate) use item::FnParseMode;
23pub use pat::{CommaRecoveryMode, RecoverColon, RecoverComma};
24use path::PathStyle;
25use rustc_ast::ptr::P;
26use rustc_ast::token::{
27 self, Delimiter, IdentIsRaw, InvisibleOrigin, MetaVarKind, Nonterminal, NtPatKind, Token,
28 TokenKind,
29};
30use rustc_ast::tokenstream::{AttrsTarget, Spacing, TokenStream, TokenTree};
31use rustc_ast::util::case::Case;
32use rustc_ast::{
33 self as ast, AnonConst, AttrArgs, AttrId, ByRef, Const, CoroutineKind, DUMMY_NODE_ID,
34 DelimArgs, Expr, ExprKind, Extern, HasAttrs, HasTokens, Mutability, Recovered, Safety, StrLit,
35 Visibility, VisibilityKind,
36};
37use rustc_ast_pretty::pprust;
38use rustc_data_structures::fx::FxHashMap;
39use rustc_errors::{Applicability, Diag, FatalError, MultiSpan, PResult};
40use rustc_index::interval::IntervalSet;
41use rustc_session::parse::ParseSess;
42use rustc_span::{DUMMY_SP, Ident, Span, Symbol, kw, sym};
43use thin_vec::ThinVec;
44use token_type::TokenTypeSet;
45pub use token_type::{ExpKeywordPair, ExpTokenPair, TokenType};
46use tracing::debug;
47
48use crate::errors::{
49 self, IncorrectVisibilityRestriction, MismatchedClosingDelimiter, NonStringAbiLiteral,
50};
51use crate::exp;
52use crate::lexer::UnmatchedDelim;
53
#[cfg(test)]
mod tests;

// These nested modules exist only so the on-disk `tokenstream/tests.rs` and
// `mut_visit/tests.rs` files are compiled solely for test builds.
#[cfg(test)]
mod tokenstream {
    mod tests;
}
#[cfg(test)]
mod mut_visit {
    mod tests;
}
67
bitflags::bitflags! {
    /// Context-dependent restrictions on what the parser may accept,
    /// temporarily swapped in via `Parser::with_res`.
    #[derive(Clone, Copy, Debug)]
    struct Restrictions: u8 {
        const STMT_EXPR = 1 << 0;
        const NO_STRUCT_LITERAL = 1 << 1;
        const CONST_EXPR = 1 << 2;
        const ALLOW_LET = 1 << 3;
        const IN_IF_GUARD = 1 << 4;
        const IS_PAT = 1 << 5;
    }
}
79
/// How statement-level error recovery should react to separators.
// NOTE(review): variants are consumed by recovery code outside this section
// (see the `diagnostics` module) — confirm exact semantics there.
#[derive(Clone, Copy, PartialEq, Debug)]
enum SemiColonMode {
    Break,
    Ignore,
    Comma,
}
86
/// How error recovery should react when it reaches a block boundary.
// NOTE(review): consumed by recovery code outside this section — confirm
// exact semantics there.
#[derive(Clone, Copy, PartialEq, Debug)]
enum BlockMode {
    Break,
    Ignore,
}
92
/// Whether to force collection of tokens for the parsed node, even when it
/// would otherwise be skipped (threaded into e.g.
/// `parse_stmt_without_recovery`).
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ForceCollect {
    Yes,
    No,
}
100
/// If the current token is an interpolated (already-parsed) nonterminal of
/// the kind named by `$constructor`, consumes it and returns early from the
/// enclosing function with `Ok($e)`, where `$x` is bound to a clone of the
/// interpolated AST fragment. Otherwise does nothing.
#[macro_export]
macro_rules! maybe_whole {
    ($p:expr, $constructor:ident, |$x:ident| $e:expr) => {
        if let token::Interpolated(nt) = &$p.token.kind
            && let token::$constructor(x) = &**nt
        {
            #[allow(unused_mut)]
            let mut $x = x.clone();
            $p.bump();
            return Ok($e);
        }
    };
}
114
/// If the current token is a `ty` metavariable sequence followed by `::`,
/// reparses the type and returns early via qualified-path recovery
/// (`maybe_recover_from_bad_qpath_stage_2`). No-op when recovery is
/// disallowed or the lookahead doesn't match.
#[macro_export]
macro_rules! maybe_recover_from_interpolated_ty_qpath {
    ($self: expr, $allow_qpath_recovery: expr) => {
        if $allow_qpath_recovery
            && $self.may_recover()
            && let Some(mv_kind) = $self.token.is_metavar_seq()
            && let token::MetaVarKind::Ty { .. } = mv_kind
            && $self.check_noexpect_past_close_delim(&token::PathSep)
        {
            let ty = $self
                .eat_metavar_seq(mv_kind, |this| this.parse_ty_no_question_mark_recover())
                .expect("metavar seq ty");

            return $self.maybe_recover_from_bad_qpath_stage_2($self.prev_token.span, ty);
        }
    };
}
134
/// Whether the parser may attempt error recovery (see `Parser::may_recover`).
#[derive(Clone, Copy, Debug)]
pub enum Recovery {
    Allowed,
    Forbidden,
}
140
#[derive(Clone)]
pub struct Parser<'a> {
    pub psess: &'a ParseSess,
    /// The current token.
    pub token: Token,
    /// The spacing of the current token.
    token_spacing: Spacing,
    /// The previous token, i.e. the one most recently bumped past.
    pub prev_token: Token,
    // NOTE(review): initialized to `false` in `new`; its effect on token
    // capture isn't visible in this section — confirm in `attr_wrapper`.
    pub capture_cfg: bool,
    /// Context-dependent restrictions; temporarily swapped via `with_res`.
    restrictions: Restrictions,
    /// Token types that would have been accepted at the current position,
    /// kept for "expected one of ..." diagnostics; cleared on every bump.
    expected_token_types: TokenTypeSet,
    token_cursor: TokenCursor,
    /// Number of calls to `bump`; reset to 0 after the priming bump in `new`.
    num_bump_calls: u32,
    /// Incremented by `break_and_eat` when it splits a compound token (e.g.
    /// `>>` into `>` `>`); reset to 0 on every real `bump`.
    break_last_token: u32,
    /// Count of `<` eaten without a matching `>` yet (see
    /// `eat_lt`/`expect_gt`), used for angle-bracket recovery.
    unmatched_angle_bracket_count: u16,
    angle_bracket_nesting: u16,

    /// Span of the last token we errored on, to avoid error loops
    /// (see `expect_one_of`).
    last_unexpected_token_span: Option<Span>,
    /// Name of the outer construct when this parser was spawned by another
    /// parse, used in diagnostics.
    subparser_name: Option<&'static str>,
    capture_state: CaptureState,
    /// Spans of the most recently parsed closure, for error recovery
    /// (see `recover_missing_braces_around_closure_body`).
    current_closure: Option<ClosureSpans>,
    /// Whether error recovery is permitted (see `may_recover`).
    recovery: Recovery,
}
194
// `Parser` is cloned in hot paths; keep its size from growing accidentally
// on common 64-bit targets.
#[cfg(all(target_pointer_width = "64", any(target_arch = "aarch64", target_arch = "x86_64")))]
rustc_data_structures::static_assert_size!(Parser<'_>, 288);
200
/// Spans of a recently parsed closure, retained for error recovery
/// (see `recover_missing_braces_around_closure_body`).
#[derive(Clone, Debug)]
struct ClosureSpans {
    whole_closure: Span,
    closing_pipe: Span,
    body: Span,
}
208
/// A range of positions relative to the start of the whole parse.
// NOTE(review): presumably measured in `num_bump_calls` units — confirm
// against the token-capture callers.
#[derive(Clone, Debug)]
struct ParserRange(Range<u32>);

/// A `ParserRange` rebased to the start of a particular node; produced by
/// `NodeRange::new`.
#[derive(Clone, Debug)]
struct NodeRange(Range<u32>);

/// A parser-relative range paired with an optional attribute target, used by
/// the token-capture machinery (see `CaptureState`).
type ParserReplacement = (ParserRange, Option<AttrsTarget>);

/// Like `ParserReplacement`, but with a node-relative range.
type NodeReplacement = (NodeRange, Option<AttrsTarget>);
242
243impl NodeRange {
244 fn new(ParserRange(parser_range): ParserRange, start_pos: u32) -> NodeRange {
254 assert!(!parser_range.is_empty());
255 assert!(parser_range.start >= start_pos);
256 NodeRange((parser_range.start - start_pos)..(parser_range.end - start_pos))
257 }
258}
259
/// Whether token capture is currently active (see `CaptureState`).
#[derive(Copy, Clone, Debug)]
enum Capturing {
    /// Not collecting tokens.
    No,
    /// Collecting tokens.
    Yes,
}

/// Bookkeeping for token capture, used when attributes may require the
/// captured token stream of the nodes they are attached to.
#[derive(Clone, Debug)]
struct CaptureState {
    capturing: Capturing,
    parser_replacements: Vec<ParserReplacement>,
    // Parser ranges of inner attributes, keyed by attribute id.
    inner_attr_parser_ranges: FxHashMap<AttrId, ParserRange>,
    // Attribute ids already observed during this parse.
    seen_attrs: IntervalSet<AttrId>,
}
281
/// A cursor over a single `TokenStream`, yielding its token trees in order.
#[derive(Clone, Debug)]
struct TokenTreeCursor {
    stream: TokenStream,
    /// Index of the current tree within `stream`.
    index: usize,
}

impl TokenTreeCursor {
    #[inline]
    fn new(stream: TokenStream) -> Self {
        TokenTreeCursor { stream, index: 0 }
    }

    /// The tree under the cursor, or `None` once the stream is exhausted.
    #[inline]
    fn curr(&self) -> Option<&TokenTree> {
        self.stream.get(self.index)
    }

    /// Advances the cursor past the current tree.
    #[inline]
    fn bump(&mut self) {
        self.index += 1;
    }
}
307
/// A cursor that walks a nested `TokenStream`, synthesizing explicit
/// open/close delimiter tokens as it descends into and climbs out of
/// delimited groups.
#[derive(Clone, Debug)]
struct TokenCursor {
    /// Cursor into the innermost (currently active) stream.
    curr: TokenTreeCursor,

    /// Cursors for the enclosing streams, innermost last. Each parent stays
    /// positioned on the `Delimited` tree we descended into, so the close
    /// delimiter can be recovered on the way back out.
    stack: Vec<TokenTreeCursor>,
}
325
impl TokenCursor {
    fn next(&mut self) -> (Token, Spacing) {
        self.inlined_next()
    }

    /// Returns the next token, descending into and climbing out of delimited
    /// groups as needed. Skippable (invisible) delimiters yield no token of
    /// their own, hence the loop.
    #[inline(always)]
    fn inlined_next(&mut self) -> (Token, Spacing) {
        loop {
            if let Some(tree) = self.curr.curr() {
                match tree {
                    &TokenTree::Token(ref token, spacing) => {
                        // Delimiters only ever appear as `Delimited` trees,
                        // never as plain tokens inside a stream.
                        debug_assert!(!matches!(
                            token.kind,
                            token::OpenDelim(_) | token::CloseDelim(_)
                        ));
                        let res = (token.clone(), spacing);
                        self.curr.bump();
                        return res;
                    }
                    &TokenTree::Delimited(sp, spacing, delim, ref tts) => {
                        // Descend; the parent cursor is pushed *without*
                        // bumping so it still points at this `Delimited`
                        // tree when we climb back out.
                        let trees = TokenTreeCursor::new(tts.clone());
                        self.stack.push(mem::replace(&mut self.curr, trees));
                        if !delim.skip() {
                            return (Token::new(token::OpenDelim(delim), sp.open), spacing.open);
                        }
                    }
                };
            } else if let Some(parent) = self.stack.pop() {
                // Inner stream exhausted: climb out. The parent must still
                // be positioned on the `Delimited` tree we entered.
                let Some(&TokenTree::Delimited(span, spacing, delim, _)) = parent.curr() else {
                    panic!("parent should be Delimited")
                };
                self.curr = parent;
                self.curr.bump(); if !delim.skip() {
                    return (Token::new(token::CloseDelim(delim), span.close), spacing.close);
                }
            } else {
                // Fully exhausted; keep yielding `Eof` from now on.
                return (Token::new(token::Eof, DUMMY_SP), Spacing::Alone);
            }
        }
    }
}
378
/// Separator policy for `parse_seq_to_before_tokens` and its wrappers.
#[derive(Debug)]
struct SeqSep<'a> {
    /// The separator token required between elements, if any.
    sep: Option<ExpTokenPair<'a>>,
    /// Whether a trailing separator before the closer is legal.
    trailing_sep_allowed: bool,
}
387
388impl<'a> SeqSep<'a> {
389 fn trailing_allowed(sep: ExpTokenPair<'a>) -> SeqSep<'a> {
390 SeqSep { sep: Some(sep), trailing_sep_allowed: true }
391 }
392
393 fn none() -> SeqSep<'a> {
394 SeqSep { sep: None, trailing_sep_allowed: false }
395 }
396}
397
/// Whether a type is expected to follow; consumed by `parse_visibility`.
#[derive(Debug)]
pub enum FollowedByType {
    Yes,
    No,
}

/// Whether a parsed sequence ended with a trailing separator.
#[derive(Copy, Clone, Debug)]
enum Trailing {
    No,
    Yes,
}
409
410impl From<bool> for Trailing {
411 fn from(b: bool) -> Trailing {
412 if b { Trailing::Yes } else { Trailing::No }
413 }
414}
415
/// A classification of a token used to build better "unexpected token"
/// messages (see `token_descr`).
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub(super) enum TokenDescription {
    ReservedIdentifier,
    Keyword,
    ReservedKeyword,
    DocComment,

    /// An invisible open delimiter originating from a macro metavariable.
    MetaVar(MetaVarKind),
}
429
impl TokenDescription {
    /// Classifies `token`, returning `None` for ordinary tokens.
    pub(super) fn from_token(token: &Token) -> Option<Self> {
        match token.kind {
            // Guard arms are tried in order: reserved/keyword status takes
            // priority over the kind-based arms below.
            _ if token.is_special_ident() => Some(TokenDescription::ReservedIdentifier),
            _ if token.is_used_keyword() => Some(TokenDescription::Keyword),
            _ if token.is_unused_keyword() => Some(TokenDescription::ReservedKeyword),
            token::DocComment(..) => Some(TokenDescription::DocComment),
            token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(kind))) => {
                Some(TokenDescription::MetaVar(kind))
            }
            _ => None,
        }
    }
}
444
/// Renders `token` for diagnostics, e.g. "keyword `fn`" or
/// "reserved identifier `_`".
pub fn token_descr(token: &Token) -> String {
    let s = pprust::token_to_string(token).to_string();

    match (TokenDescription::from_token(token), &token.kind) {
        (Some(TokenDescription::ReservedIdentifier), _) => format!("reserved identifier `{s}`"),
        (Some(TokenDescription::Keyword), _) => format!("keyword `{s}`"),
        (Some(TokenDescription::ReservedKeyword), _) => format!("reserved keyword `{s}`"),
        (Some(TokenDescription::DocComment), _) => format!("doc comment `{s}`"),
        (Some(TokenDescription::MetaVar(kind)), _) => format!("`{kind}` metavariable"),
        (None, TokenKind::NtIdent(..)) => format!("identifier `{s}`"),
        (None, TokenKind::NtLifetime(..)) => format!("lifetime `{s}`"),
        (None, TokenKind::Interpolated(node)) => format!("{} `{s}`", node.descr()),
        (None, _) => format!("`{s}`"),
    }
}
461
462impl<'a> Parser<'a> {
    /// Constructs a parser over `stream`. `subparser_name` names the outer
    /// construct (if any) on whose behalf this parser runs, for diagnostics.
    pub fn new(
        psess: &'a ParseSess,
        stream: TokenStream,
        subparser_name: Option<&'static str>,
    ) -> Self {
        let mut parser = Parser {
            psess,
            token: Token::dummy(),
            token_spacing: Spacing::Alone,
            prev_token: Token::dummy(),
            capture_cfg: false,
            restrictions: Restrictions::empty(),
            expected_token_types: TokenTypeSet::new(),
            token_cursor: TokenCursor { curr: TokenTreeCursor::new(stream), stack: Vec::new() },
            num_bump_calls: 0,
            break_last_token: 0,
            unmatched_angle_bracket_count: 0,
            angle_bracket_nesting: 0,
            last_unexpected_token_span: None,
            subparser_name,
            capture_state: CaptureState {
                capturing: Capturing::No,
                parser_replacements: Vec::new(),
                inner_attr_parser_ranges: Default::default(),
                seen_attrs: IntervalSet::new(u32::MAX as usize),
            },
            current_closure: None,
            recovery: Recovery::Allowed,
        };

        // Make `token` point at the first real token of the stream.
        parser.bump();

        // The priming `bump` above shouldn't count as a token consumption.
        parser.num_bump_calls = 0;

        parser
    }
503
504 #[inline]
505 pub fn recovery(mut self, recovery: Recovery) -> Self {
506 self.recovery = recovery;
507 self
508 }
509
510 #[inline]
518 fn may_recover(&self) -> bool {
519 matches!(self.recovery, Recovery::Allowed)
520 }
521
    /// Reports the current token as unexpected and returns the error.
    pub fn unexpected_any<T>(&mut self) -> PResult<'a, T> {
        match self.expect_one_of(&[], &[]) {
            Err(e) => Err(e),
            // With no edible or inedible tokens, `expect_one_of` cannot
            // legitimately succeed here.
            Ok(_) => FatalError.raise(),
        }
    }

    /// `unexpected_any` specialized to the common `()` case.
    pub fn unexpected(&mut self) -> PResult<'a, ()> {
        self.unexpected_any()
    }
536
    /// Expects and consumes the token `exp.tok`; signals an error otherwise.
    pub fn expect(&mut self, exp: ExpTokenPair<'_>) -> PResult<'a, Recovered> {
        if self.expected_token_types.is_empty() {
            // Fast path: nothing else was expected, so any error can mention
            // just this one token.
            if self.token == *exp.tok {
                self.bump();
                Ok(Recovered::No)
            } else {
                self.unexpected_try_recover(exp.tok)
            }
        } else {
            self.expect_one_of(slice::from_ref(&exp), &[])
        }
    }
550
    /// Expects one of `edible` (consumed on match) or `inedible` (matched
    /// but left in place for the caller); otherwise attempts recovery.
    fn expect_one_of(
        &mut self,
        edible: &[ExpTokenPair<'_>],
        inedible: &[ExpTokenPair<'_>],
    ) -> PResult<'a, Recovered> {
        if edible.iter().any(|exp| exp.tok == &self.token.kind) {
            self.bump();
            Ok(Recovered::No)
        } else if inedible.iter().any(|exp| exp.tok == &self.token.kind) {
            // Leave it in the input; the caller will consume it.
            Ok(Recovered::No)
        } else if self.token != token::Eof
            && self.last_unexpected_token_span == Some(self.token.span)
        {
            // We already errored on this exact token; bail out rather than
            // looping on it forever.
            FatalError.raise();
        } else {
            self.expected_one_of_not_found(edible, inedible)
                .map(|error_guaranteed| Recovered::Yes(error_guaranteed))
        }
    }
574
575 pub fn parse_ident(&mut self) -> PResult<'a, Ident> {
577 self.parse_ident_common(true)
578 }
579
580 fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, Ident> {
581 let (ident, is_raw) = self.ident_or_err(recover)?;
582
583 if matches!(is_raw, IdentIsRaw::No) && ident.is_reserved() {
584 let err = self.expected_ident_found_err();
585 if recover {
586 err.emit();
587 } else {
588 return Err(err);
589 }
590 }
591 self.bump();
592 Ok(ident)
593 }
594
595 fn ident_or_err(&mut self, recover: bool) -> PResult<'a, (Ident, IdentIsRaw)> {
596 match self.token.ident() {
597 Some(ident) => Ok(ident),
598 None => self.expected_ident_found(recover),
599 }
600 }
601
602 #[inline]
607 fn check(&mut self, exp: ExpTokenPair<'_>) -> bool {
608 let is_present = self.token == *exp.tok;
609 if !is_present {
610 self.expected_token_types.insert(exp.token_type);
611 }
612 is_present
613 }
614
615 #[inline]
616 #[must_use]
617 fn check_noexpect(&self, tok: &TokenKind) -> bool {
618 self.token == *tok
619 }
620
    /// Checks whether the token just past the current delimited sequence's
    /// closing delimiter is `tok`, without advancing.
    ///
    /// NOTE(review): the `unwrap` assumes the cursor sits inside at least one
    /// delimited sequence — callers appear to guarantee this; confirm.
    fn check_noexpect_past_close_delim(&self, tok: &TokenKind) -> bool {
        let mut tree_cursor = self.token_cursor.stack.last().unwrap().clone();
        tree_cursor.bump();
        matches!(
            tree_cursor.curr(),
            Some(TokenTree::Token(token::Token { kind, .. }, _)) if kind == tok
        )
    }
638
639 #[inline]
644 #[must_use]
645 fn eat_noexpect(&mut self, tok: &TokenKind) -> bool {
646 let is_present = self.check_noexpect(tok);
647 if is_present {
648 self.bump()
649 }
650 is_present
651 }
652
653 #[inline]
655 #[must_use]
656 pub fn eat(&mut self, exp: ExpTokenPair<'_>) -> bool {
657 let is_present = self.check(exp);
658 if is_present {
659 self.bump()
660 }
661 is_present
662 }
663
664 #[inline]
667 #[must_use]
668 fn check_keyword(&mut self, exp: ExpKeywordPair) -> bool {
669 let is_keyword = self.token.is_keyword(exp.kw);
670 if !is_keyword {
671 self.expected_token_types.insert(exp.token_type);
672 }
673 is_keyword
674 }
675
676 #[inline]
677 #[must_use]
678 fn check_keyword_case(&mut self, exp: ExpKeywordPair, case: Case) -> bool {
679 if self.check_keyword(exp) {
680 true
681 } else if case == Case::Insensitive
682 && let Some((ident, IdentIsRaw::No)) = self.token.ident()
683 && ident.as_str().eq_ignore_ascii_case(exp.kw.as_str())
685 {
686 true
687 } else {
688 false
689 }
690 }
691
692 #[inline]
696 #[must_use]
697 pub fn eat_keyword(&mut self, exp: ExpKeywordPair) -> bool {
698 let is_keyword = self.check_keyword(exp);
699 if is_keyword {
700 self.bump();
701 }
702 is_keyword
703 }
704
705 #[inline]
709 #[must_use]
710 fn eat_keyword_case(&mut self, exp: ExpKeywordPair, case: Case) -> bool {
711 if self.eat_keyword(exp) {
712 true
713 } else if case == Case::Insensitive
714 && let Some((ident, IdentIsRaw::No)) = self.token.ident()
715 && ident.as_str().eq_ignore_ascii_case(exp.kw.as_str())
717 {
718 self.dcx().emit_err(errors::KwBadCase { span: ident.span, kw: exp.kw.as_str() });
719 self.bump();
720 true
721 } else {
722 false
723 }
724 }
725
726 #[inline]
730 #[must_use]
731 pub fn eat_keyword_noexpect(&mut self, kw: Symbol) -> bool {
732 let is_keyword = self.token.is_keyword(kw);
733 if is_keyword {
734 self.bump();
735 }
736 is_keyword
737 }
738
739 pub fn expect_keyword(&mut self, exp: ExpKeywordPair) -> PResult<'a, ()> {
743 if !self.eat_keyword(exp) { self.unexpected() } else { Ok(()) }
744 }
745
    /// Consumes an invisible-delimited metavariable sequence of kind
    /// `mv_kind`, reparsing its contents with `f`. Returns `None` (without
    /// advancing) when the current token is not such a sequence.
    fn eat_metavar_seq<T>(
        &mut self,
        mv_kind: MetaVarKind,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> Option<T> {
        self.eat_metavar_seq_with_matcher(|mvk| mvk == mv_kind, f)
    }
754
755 fn eat_metavar_seq_with_matcher<T>(
759 &mut self,
760 match_mv_kind: impl Fn(MetaVarKind) -> bool,
761 mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
762 ) -> Option<T> {
763 if let token::OpenDelim(delim) = self.token.kind
764 && let Delimiter::Invisible(InvisibleOrigin::MetaVar(mv_kind)) = delim
765 && match_mv_kind(mv_kind)
766 {
767 self.bump();
768 let res = f(self).expect("failed to reparse {mv_kind:?}");
769 if let token::CloseDelim(delim) = self.token.kind
770 && let Delimiter::Invisible(InvisibleOrigin::MetaVar(mv_kind)) = delim
771 && match_mv_kind(mv_kind)
772 {
773 self.bump();
774 Some(res)
775 } else {
776 panic!("no close delim when reparsing {mv_kind:?}");
777 }
778 } else {
779 None
780 }
781 }
782
783 fn is_kw_followed_by_ident(&self, kw: Symbol) -> bool {
785 self.token.is_keyword(kw) && self.look_ahead(1, |t| t.is_ident() && !t.is_reserved_ident())
786 }
787
788 #[inline]
789 fn check_or_expected(&mut self, ok: bool, token_type: TokenType) -> bool {
790 if !ok {
791 self.expected_token_types.insert(token_type);
792 }
793 ok
794 }
795
    /// Checks for an identifier, recording the expectation otherwise.
    fn check_ident(&mut self) -> bool {
        self.check_or_expected(self.token.is_ident(), TokenType::Ident)
    }

    /// Checks for the start of a path.
    fn check_path(&mut self) -> bool {
        self.check_or_expected(self.token.is_path_start(), TokenType::Path)
    }

    /// Checks for the start of a type.
    fn check_type(&mut self) -> bool {
        self.check_or_expected(self.token.can_begin_type(), TokenType::Type)
    }

    /// Checks for the start of a const generic argument.
    fn check_const_arg(&mut self) -> bool {
        self.check_or_expected(self.token.can_begin_const_arg(), TokenType::Const)
    }
811
    /// Whether the upcoming tokens begin a `const` closure: `const` followed
    /// by `move`, `use`, `static`, `|`, or `||`.
    fn check_const_closure(&self) -> bool {
        self.is_keyword_ahead(0, &[kw::Const])
            && self.look_ahead(1, |t| match &t.kind {
                token::Ident(kw::Move | kw::Use | kw::Static, IdentIsRaw::No)
                | token::OrOr
                | token::Or => true,
                _ => false,
            })
    }

    /// Whether `const` at `dist` tokens ahead starts an inline const block,
    /// i.e. is followed by a brace-delimited (or interpolated) block.
    fn check_inline_const(&self, dist: usize) -> bool {
        self.is_keyword_ahead(dist, &[kw::Const])
            && self.look_ahead(dist + 1, |t| match &t.kind {
                token::Interpolated(nt) => matches!(&**nt, token::NtBlock(..)),
                token::OpenDelim(Delimiter::Brace) => true,
                _ => false,
            })
    }

    /// Checks for a `+`-like token (see `Token::is_like_plus`), recording the
    /// expectation otherwise.
    #[inline]
    fn check_plus(&mut self) -> bool {
        self.check_or_expected(self.token.is_like_plus(), TokenType::Plus)
    }
838
    /// Eats the token `exp.tok`, splitting a compound token when necessary
    /// (e.g. eating `>` out of `>>`). Returns whether a token was eaten.
    fn break_and_eat(&mut self, exp: ExpTokenPair<'_>) -> bool {
        if self.token == *exp.tok {
            self.bump();
            return true;
        }
        match self.token.kind.break_two_token_op(1) {
            Some((first, second)) if first == *exp.tok => {
                // Replace the compound token with its first half and queue
                // the second half (sharing the original spacing) as next.
                let first_span = self.psess.source_map().start_point(self.token.span);
                let second_span = self.token.span.with_lo(first_span.hi());
                self.token = Token::new(first, first_span);
                // Record the split so token capture can reconstruct the
                // original compound token.
                self.break_last_token += 1;
                self.bump_with((Token::new(second, second_span), self.token_spacing));
                true
            }
            _ => {
                self.expected_token_types.insert(exp.token_type);
                false
            }
        }
    }
870
    /// Eats `+`, possibly splitting it off a compound token. Never errors.
    fn eat_plus(&mut self) -> bool {
        self.break_and_eat(exp!(Plus))
    }

    /// Expects and consumes `&` (possibly split off `&&`); errors otherwise.
    fn expect_and(&mut self) -> PResult<'a, ()> {
        if self.break_and_eat(exp!(And)) { Ok(()) } else { self.unexpected() }
    }

    /// Expects and consumes `|` (possibly split off `||`); errors otherwise.
    fn expect_or(&mut self) -> PResult<'a, ()> {
        if self.break_and_eat(exp!(Or)) { Ok(()) } else { self.unexpected() }
    }

    /// Eats `<` (possibly split off a compound token), tracking the count of
    /// unmatched angle brackets for later recovery.
    fn eat_lt(&mut self) -> bool {
        let ate = self.break_and_eat(exp!(Lt));
        if ate {
            // Keep the count balanced so `expect_gt` can detect mismatches.
            self.unmatched_angle_bracket_count += 1;
            debug!("eat_lt: (increment) count={:?}", self.unmatched_angle_bracket_count);
        }
        ate
    }

    /// Expects and consumes `<`; errors otherwise.
    fn expect_lt(&mut self) -> PResult<'a, ()> {
        if self.eat_lt() { Ok(()) } else { self.unexpected() }
    }

    /// Expects and consumes `>` (possibly split off e.g. `>>`), decrementing
    /// the unmatched-`<` count; errors otherwise.
    fn expect_gt(&mut self) -> PResult<'a, ()> {
        if self.break_and_eat(exp!(Gt)) {
            if self.unmatched_angle_bracket_count > 0 {
                self.unmatched_angle_bracket_count -= 1;
                debug!("expect_gt: (decrement) count={:?}", self.unmatched_angle_bracket_count);
            }
            Ok(())
        } else {
            self.unexpected()
        }
    }
919
920 fn expect_any_with_type(
922 &mut self,
923 closes_expected: &[ExpTokenPair<'_>],
924 closes_not_expected: &[&TokenKind],
925 ) -> bool {
926 closes_expected.iter().any(|&close| self.check(close))
927 || closes_not_expected.iter().any(|k| self.check_noexpect(k))
928 }
929
    /// Parses a `sep`-separated sequence of elements (via `f`) up to — but
    /// not including — any of the closing tokens. Returns the elements,
    /// whether a trailing separator was present, and whether separator
    /// recovery happened.
    fn parse_seq_to_before_tokens<T>(
        &mut self,
        closes_expected: &[ExpTokenPair<'_>],
        closes_not_expected: &[&TokenKind],
        sep: SeqSep<'_>,
        mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, Trailing, Recovered)> {
        let mut first = true;
        let mut recovered = Recovered::No;
        let mut trailing = Trailing::No;
        let mut v = ThinVec::new();

        while !self.expect_any_with_type(closes_expected, closes_not_expected) {
            // Closer not found, but we've run out of scope: stop anyway.
            if let token::CloseDelim(..) | token::Eof = self.token.kind {
                break;
            }
            if let Some(exp) = sep.sep {
                if first {
                    // No separator before the first element.
                    first = false;
                } else {
                    match self.expect(exp) {
                        Ok(Recovered::No) => {
                            self.current_closure.take();
                        }
                        Ok(Recovered::Yes(guar)) => {
                            self.current_closure.take();
                            recovered = Recovered::Yes(guar);
                            break;
                        }
                        Err(mut expect_err) => {
                            let sp = self.prev_token.span.shrink_to_hi();
                            let token_str = pprust::token_kind_to_string(exp.tok);

                            match self.current_closure.take() {
                                // A closure body followed by `;` suggests the
                                // body was meant to be brace-delimited.
                                Some(closure_spans) if self.token == TokenKind::Semi => {
                                    self.recover_missing_braces_around_closure_body(
                                        closure_spans,
                                        expect_err,
                                    )?;

                                    continue;
                                }

                                _ => {
                                    // Skip a token that looks like a typo'd
                                    // separator (e.g. `;` instead of `,`).
                                    if exp.tok.similar_tokens().contains(&self.token.kind) {
                                        self.bump();
                                    }
                                }
                            }

                            // `ident DotDot` in a pattern likely meant
                            // `ident @ ..`; suggest that and stop.
                            if self.prev_token.is_ident() && self.token == token::DotDot {
                                let msg = format!(
                                    "if you meant to bind the contents of the rest of the array \
                                     pattern into `{}`, use `@`",
                                    pprust::token_to_string(&self.prev_token)
                                );
                                expect_err
                                    .with_span_suggestion_verbose(
                                        self.prev_token.span.shrink_to_hi().until(self.token.span),
                                        msg,
                                        " @ ",
                                        Applicability::MaybeIncorrect,
                                    )
                                    .emit();
                                break;
                            }

                            // Try parsing the next element anyway: if it
                            // works, the separator was simply missing.
                            self.last_unexpected_token_span = None;
                            match f(self) {
                                Ok(t) => {
                                    expect_err
                                        .with_span_suggestion_short(
                                            sp,
                                            format!("missing `{token_str}`"),
                                            token_str,
                                            Applicability::MaybeIncorrect,
                                        )
                                        .emit();

                                    v.push(t);
                                    continue;
                                }
                                Err(e) => {
                                    // Keep the sub-error's children on the
                                    // separator error before cancelling it.
                                    for xx in &e.children {
                                        expect_err.children.push(xx.clone());
                                    }
                                    e.cancel();
                                    if self.token == token::Colon {
                                        return Err(expect_err);
                                    } else if let [exp] = closes_expected
                                        && exp.token_type == TokenType::CloseParen
                                    {
                                        return Err(expect_err);
                                    } else {
                                        expect_err.emit();
                                        break;
                                    }
                                }
                            }
                        }
                    }
                }
            }
            if sep.trailing_sep_allowed
                && self.expect_any_with_type(closes_expected, closes_not_expected)
            {
                trailing = Trailing::Yes;
                break;
            }

            let t = f(self)?;
            v.push(t);
        }

        Ok((v, trailing, recovered))
    }
1070
    /// Error recovery for a closure whose multi-statement body lacked braces
    /// (so a `;` ended it early): swallows the stray statements, then emits
    /// `expect_err` reworked with explanatory notes and a brace-insertion
    /// suggestion.
    fn recover_missing_braces_around_closure_body(
        &mut self,
        closure_spans: ClosureSpans,
        mut expect_err: Diag<'_>,
    ) -> PResult<'a, ()> {
        let initial_semicolon = self.token.span;

        // Consume the statements that were meant to be part of the closure
        // body so parsing can resume cleanly afterwards.
        while self.eat(exp!(Semi)) {
            let _ = self
                .parse_stmt_without_recovery(false, ForceCollect::No, false)
                .unwrap_or_else(|e| {
                    e.cancel();
                    None
                });
        }

        expect_err
            .primary_message("closure bodies that contain statements must be surrounded by braces");

        let preceding_pipe_span = closure_spans.closing_pipe;
        let following_token_span = self.token.span;

        let mut first_note = MultiSpan::from(vec![initial_semicolon]);
        first_note.push_span_label(
            initial_semicolon,
            "this `;` turns the preceding closure into a statement",
        );
        first_note.push_span_label(
            closure_spans.body,
            "this expression is a statement because of the trailing semicolon",
        );
        expect_err.span_note(first_note, "statement found outside of a block");

        let mut second_note = MultiSpan::from(vec![closure_spans.whole_closure]);
        second_note.push_span_label(closure_spans.whole_closure, "this is the parsed closure...");
        second_note.push_span_label(
            following_token_span,
            "...but likely you meant the closure to end here",
        );
        expect_err.span_note(second_note, "the closure body may be incorrectly delimited");

        // Re-point the primary spans at the closing pipe and at where the
        // body apparently ended.
        expect_err.span(vec![preceding_pipe_span, following_token_span]);

        let opening_suggestion_str = " {".to_string();
        let closing_suggestion_str = "}".to_string();

        expect_err.multipart_suggestion(
            "try adding braces",
            vec![
                (preceding_pipe_span.shrink_to_hi(), opening_suggestion_str),
                (following_token_span.shrink_to_lo(), closing_suggestion_str),
            ],
            Applicability::MaybeIncorrect,
        );

        expect_err.emit();

        Ok(())
    }
1130
    /// Parses a sequence up to (but not consuming) the closing token.
    fn parse_seq_to_before_end<T>(
        &mut self,
        close: ExpTokenPair<'_>,
        sep: SeqSep<'_>,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, Trailing, Recovered)> {
        self.parse_seq_to_before_tokens(&[close], &[], sep, f)
    }

    /// Parses a sequence and consumes the closing token `close`.
    fn parse_seq_to_end<T>(
        &mut self,
        close: ExpTokenPair<'_>,
        sep: SeqSep<'_>,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
        let (val, trailing, recovered) = self.parse_seq_to_before_end(close, sep, f)?;
        if matches!(recovered, Recovered::No) && !self.eat(close) {
            // Without recovery the closer must be next; anything else is a
            // parser bug, reported lazily.
            self.dcx().span_delayed_bug(
                self.token.span,
                "recovered but `parse_seq_to_before_end` did not give us the close token",
            );
        }
        Ok((val, trailing))
    }

    /// Parses a sequence surrounded by `open` ... `close`, consuming both
    /// delimiters.
    fn parse_unspanned_seq<T>(
        &mut self,
        open: ExpTokenPair<'_>,
        close: ExpTokenPair<'_>,
        sep: SeqSep<'_>,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
        self.expect(open)?;
        self.parse_seq_to_end(close, sep, f)
    }

    /// Parses a comma-separated sequence (trailing comma allowed) delimited
    /// by `open` and `close`.
    fn parse_delim_comma_seq<T>(
        &mut self,
        open: ExpTokenPair<'_>,
        close: ExpTokenPair<'_>,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
        self.parse_unspanned_seq(open, close, SeqSep::trailing_allowed(exp!(Comma)), f)
    }

    /// Parses a parenthesized, comma-separated sequence (trailing comma
    /// allowed).
    fn parse_paren_comma_seq<T>(
        &mut self,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
        self.parse_delim_comma_seq(exp!(OpenParen), exp!(CloseParen), f)
    }
1197
    /// Advances to `next`, moving the current token into `prev_token`.
    fn bump_with(&mut self, next: (Token, Spacing)) {
        self.inlined_bump_with(next)
    }

    /// Hot-path version of `bump_with`.
    #[inline(always)]
    fn inlined_bump_with(&mut self, (next_token, next_spacing): (Token, Spacing)) {
        self.prev_token = mem::replace(&mut self.token, next_token);
        self.token_spacing = next_spacing;

        // A new token invalidates the accumulated expected-token set.
        self.expected_token_types.clear();
    }

    /// Advances the parser to the next token from the token stream.
    pub fn bump(&mut self) {
        let mut next = self.token_cursor.inlined_next();
        self.num_bump_calls += 1;
        // A real bump ends any pending compound-token split (which advances
        // via `bump_with` instead — see `break_and_eat`).
        self.break_last_token = 0;
        if next.0.span.is_dummy() {
            // Avoid exposing dummy spans: reuse the current span, keeping the
            // new token's syntax context.
            let fallback_span = self.token.span;
            next.0.span = fallback_span.with_ctxt(next.0.span.ctxt());
        }
        // Skippable delimiters should never surface as tokens.
        debug_assert!(!matches!(
            next.0.kind,
            token::OpenDelim(delim) | token::CloseDelim(delim) if delim.skip()
        ));
        self.inlined_bump_with(next)
    }
1234
    /// Applies `looker` to the token `dist` positions ahead, without
    /// advancing the parser. `dist == 0` inspects the current token.
    pub fn look_ahead<R>(&self, dist: usize, looker: impl FnOnce(&Token) -> R) -> R {
        if dist == 0 {
            return looker(&self.token);
        }

        // Fast path for the common `dist == 1` case, avoiding the cursor
        // clone in the general path below.
        if dist == 1 {
            match self.token_cursor.curr.curr() {
                Some(tree) => {
                    match tree {
                        TokenTree::Token(token, _) => return looker(token),
                        &TokenTree::Delimited(dspan, _, delim, _) => {
                            if !delim.skip() {
                                return looker(&Token::new(token::OpenDelim(delim), dspan.open));
                            }
                            // Skippable delimiter: fall through to the
                            // general path.
                        }
                    }
                }
                None => {
                    // Inner stream exhausted: the next token is the parent's
                    // close delimiter (unless it's skippable).
                    if let Some(last) = self.token_cursor.stack.last()
                        && let Some(&TokenTree::Delimited(span, _, delim, _)) = last.curr()
                        && !delim.skip()
                    {
                        return looker(&Token::new(token::CloseDelim(delim), span.close));
                    }
                }
            }
        }

        // General path: clone the cursor and walk forward, not counting
        // skippable delimiters toward `dist`.
        let mut cursor = self.token_cursor.clone();
        let mut i = 0;
        let mut token = Token::dummy();
        while i < dist {
            token = cursor.next().0;
            if matches!(
                token.kind,
                token::OpenDelim(delim) | token::CloseDelim(delim) if delim.skip()
            ) {
                continue;
            }
            i += 1;
        }
        looker(&token)
    }

    /// Whether the token `dist` positions ahead is one of the keywords `kws`.
    pub(crate) fn is_keyword_ahead(&self, dist: usize, kws: &[Symbol]) -> bool {
        self.look_ahead(dist, |t| kws.iter().any(|&kw| t.is_keyword(kw)))
    }
1297
    /// Parses an optional `async`, `gen`, or `async gen` prefix (the `gen`
    /// forms only in Rust 2024 and later), returning `None` when no
    /// coroutine keyword is present.
    fn parse_coroutine_kind(&mut self, case: Case) -> Option<CoroutineKind> {
        let span = self.token.uninterpolated_span();
        if self.eat_keyword_case(exp!(Async), case) {
            // `async gen` requires the 2024 edition for the `gen` part.
            if self.token.uninterpolated_span().at_least_rust_2024()
                && self.eat_keyword_case(exp!(Gen), case)
            {
                let gen_span = self.prev_token.uninterpolated_span();
                Some(CoroutineKind::AsyncGen {
                    span: span.to(gen_span),
                    closure_id: DUMMY_NODE_ID,
                    return_impl_trait_id: DUMMY_NODE_ID,
                })
            } else {
                Some(CoroutineKind::Async {
                    span,
                    closure_id: DUMMY_NODE_ID,
                    return_impl_trait_id: DUMMY_NODE_ID,
                })
            }
        } else if self.token.uninterpolated_span().at_least_rust_2024()
            && self.eat_keyword_case(exp!(Gen), case)
        {
            Some(CoroutineKind::Gen {
                span,
                closure_id: DUMMY_NODE_ID,
                return_impl_trait_id: DUMMY_NODE_ID,
            })
        } else {
            None
        }
    }
1332
1333 fn parse_safety(&mut self, case: Case) -> Safety {
1335 if self.eat_keyword_case(exp!(Unsafe), case) {
1336 Safety::Unsafe(self.prev_token.uninterpolated_span())
1337 } else if self.eat_keyword_case(exp!(Safe), case) {
1338 Safety::Safe(self.prev_token.uninterpolated_span())
1339 } else {
1340 Safety::Default
1341 }
1342 }
1343
    /// Parses an optional `const` modifier.
    fn parse_constness(&mut self, case: Case) -> Const {
        self.parse_constness_(case, false)
    }

    /// Parses an optional `const` on a closure, feature-gating it.
    fn parse_closure_constness(&mut self) -> Const {
        let constness = self.parse_constness_(Case::Sensitive, true);
        if let Const::Yes(span) = constness {
            self.psess.gated_spans.gate(sym::const_closures, span);
        }
        constness
    }

    /// Shared implementation: only eats `const` when the closure-prefix
    /// check agrees with the position (`is_closure`) and the next token
    /// doesn't start a block (which would make this an inline const).
    fn parse_constness_(&mut self, case: Case, is_closure: bool) -> Const {
        if (self.check_const_closure() == is_closure)
            && !self
                .look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace) || t.is_whole_block())
            && self.eat_keyword_case(exp!(Const), case)
        {
            Const::Yes(self.prev_token.uninterpolated_span())
        } else {
            Const::No
        }
    }
1370
    /// Parses an inline const expression `const { ... }` starting at the
    /// `const` keyword. `span` is where the construct began; `pat` indicates
    /// pattern position (which is feature-gated separately).
    fn parse_const_block(&mut self, span: Span, pat: bool) -> PResult<'a, P<Expr>> {
        if pat {
            self.psess.gated_spans.gate(sym::inline_const_pat, span);
        }
        self.expect_keyword(exp!(Const))?;
        let (attrs, blk) = self.parse_inner_attrs_and_block(None)?;
        let anon_const = AnonConst {
            id: DUMMY_NODE_ID,
            value: self.mk_expr(blk.span, ExprKind::Block(blk, None)),
        };
        let blk_span = anon_const.value.span;
        Ok(self.mk_expr_with_attrs(span.to(blk_span), ExprKind::ConstBlock(anon_const), attrs))
    }
1385
    /// Parses an optional `mut`, defaulting to immutable.
    fn parse_mutability(&mut self) -> Mutability {
        if self.eat_keyword(exp!(Mut)) { Mutability::Mut } else { Mutability::Not }
    }

    /// Parses an optional `ref` (with optional trailing `mut`) binding mode.
    fn parse_byref(&mut self) -> ByRef {
        if self.eat_keyword(exp!(Ref)) { ByRef::Yes(self.parse_mutability()) } else { ByRef::No }
    }

    /// Parses `mut` (mutable) or `const` (immutable), returning `None` when
    /// neither keyword is present.
    fn parse_const_or_mut(&mut self) -> Option<Mutability> {
        if self.eat_keyword(exp!(Mut)) {
            Some(Mutability::Mut)
        } else if self.eat_keyword(exp!(Const)) {
            Some(Mutability::Not)
        } else {
            None
        }
    }
1406
    /// Parses a field name: either an identifier or an unsuffixed integer
    /// literal (a tuple index); a suffix on the integer triggers an error.
    fn parse_field_name(&mut self) -> PResult<'a, Ident> {
        if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = self.token.kind
        {
            if let Some(suffix) = suffix {
                self.expect_no_tuple_index_suffix(self.token.span, suffix);
            }
            self.bump();
            Ok(Ident::new(symbol, self.prev_token.span))
        } else {
            self.parse_ident_common(true)
        }
    }
1419
    /// Parses delimited macro arguments, e.g. the `(a, b)` in `m!(a, b)`;
    /// errors when the current token opens no delimiter.
    fn parse_delim_args(&mut self) -> PResult<'a, P<DelimArgs>> {
        if let Some(args) = self.parse_delim_args_inner() {
            Ok(P(args))
        } else {
            self.unexpected_any()
        }
    }

    /// Parses attribute arguments: delimited (`#[attr(...)]`), `= expr`
    /// (`#[attr = x]`), or empty (`#[attr]`).
    fn parse_attr_args(&mut self) -> PResult<'a, AttrArgs> {
        Ok(if let Some(args) = self.parse_delim_args_inner() {
            AttrArgs::Delimited(args)
        } else if self.eat(exp!(Eq)) {
            let eq_span = self.prev_token.span;
            AttrArgs::Eq { eq_span, expr: self.parse_expr_force_collect()? }
        } else {
            AttrArgs::Empty
        })
    }

    /// Consumes a `(...)`, `[...]`, or `{...}` token tree when one is next.
    fn parse_delim_args_inner(&mut self) -> Option<DelimArgs> {
        let delimited = self.check(exp!(OpenParen))
            || self.check(exp!(OpenBracket))
            || self.check(exp!(OpenBrace));

        delimited.then(|| {
            // We just confirmed an open delimiter, so `parse_token_tree`
            // must yield a `Delimited` tree.
            let TokenTree::Delimited(dspan, _, delim, tokens) = self.parse_token_tree() else {
                unreachable!()
            };
            DelimArgs { dspan, delim, tokens }
        })
    }
1451
    /// Parses a single token tree from the current position.
    ///
    /// For an open delimiter this returns the whole delimited group and
    /// advances past its matching close delimiter; for any other token it
    /// returns that token (with its spacing) and advances past it.
    pub fn parse_token_tree(&mut self) -> TokenTree {
        match self.token.kind {
            token::OpenDelim(..) => {
                // Grab the `TokenTree::Delimited` we are currently positioned
                // at from the cursor stack; that is what we will return.
                let tree = self.token_cursor.stack.last().unwrap().curr().unwrap().clone();
                debug_assert_matches!(tree, TokenTree::Delimited(..));

                // Bump through the entire delimited sequence one token at a
                // time (so the cursor can observe every token). Having eaten
                // the open delimiter we are one level deeper than before;
                // when the stack shrinks back to `target_depth` we have just
                // reached the matching close delimiter.
                let target_depth = self.token_cursor.stack.len() - 1;
                loop {
                    self.bump();
                    if self.token_cursor.stack.len() == target_depth {
                        debug_assert_matches!(self.token.kind, token::CloseDelim(_));
                        break;
                    }
                }

                // Consume the close delimiter itself.
                self.bump();
                tree
            }
            // Callers must not invoke this at a close delimiter or EOF.
            token::CloseDelim(_) | token::Eof => unreachable!(),
            _ => {
                // A single non-delimiter token: remember its spacing before
                // bumping, since `bump` advances `token_spacing`.
                let prev_spacing = self.token_spacing;
                self.bump();
                TokenTree::Token(self.prev_token.clone(), prev_spacing)
            }
        }
    }
1489
1490 pub fn parse_tokens(&mut self) -> TokenStream {
1491 let mut result = Vec::new();
1492 loop {
1493 match self.token.kind {
1494 token::Eof | token::CloseDelim(..) => break,
1495 _ => result.push(self.parse_token_tree()),
1496 }
1497 }
1498 TokenStream::new(result)
1499 }
1500
1501 fn with_res<T>(&mut self, res: Restrictions, f: impl FnOnce(&mut Self) -> T) -> T {
1505 let old = self.restrictions;
1506 self.restrictions = res;
1507 let res = f(self);
1508 self.restrictions = old;
1509 res
1510 }
1511
1512 pub fn parse_visibility(&mut self, fbt: FollowedByType) -> PResult<'a, Visibility> {
1519 if let Some(vis) = self
1520 .eat_metavar_seq(MetaVarKind::Vis, |this| this.parse_visibility(FollowedByType::Yes))
1521 {
1522 return Ok(vis);
1523 }
1524
1525 if !self.eat_keyword(exp!(Pub)) {
1526 return Ok(Visibility {
1530 span: self.token.span.shrink_to_lo(),
1531 kind: VisibilityKind::Inherited,
1532 tokens: None,
1533 });
1534 }
1535 let lo = self.prev_token.span;
1536
1537 if self.check(exp!(OpenParen)) {
1538 if self.is_keyword_ahead(1, &[kw::In]) {
1543 self.bump(); self.bump(); let path = self.parse_path(PathStyle::Mod)?; self.expect(exp!(CloseParen))?; let vis = VisibilityKind::Restricted {
1549 path: P(path),
1550 id: ast::DUMMY_NODE_ID,
1551 shorthand: false,
1552 };
1553 return Ok(Visibility {
1554 span: lo.to(self.prev_token.span),
1555 kind: vis,
1556 tokens: None,
1557 });
1558 } else if self.look_ahead(2, |t| t == &token::CloseDelim(Delimiter::Parenthesis))
1559 && self.is_keyword_ahead(1, &[kw::Crate, kw::Super, kw::SelfLower])
1560 {
1561 self.bump(); let path = self.parse_path(PathStyle::Mod)?; self.expect(exp!(CloseParen))?; let vis = VisibilityKind::Restricted {
1566 path: P(path),
1567 id: ast::DUMMY_NODE_ID,
1568 shorthand: true,
1569 };
1570 return Ok(Visibility {
1571 span: lo.to(self.prev_token.span),
1572 kind: vis,
1573 tokens: None,
1574 });
1575 } else if let FollowedByType::No = fbt {
1576 self.recover_incorrect_vis_restriction()?;
1579 }
1581 }
1582
1583 Ok(Visibility { span: lo, kind: VisibilityKind::Public, tokens: None })
1584 }
1585
1586 fn recover_incorrect_vis_restriction(&mut self) -> PResult<'a, ()> {
1588 self.bump(); let path = self.parse_path(PathStyle::Mod)?;
1590 self.expect(exp!(CloseParen))?; let path_str = pprust::path_to_string(&path);
1593 self.dcx()
1594 .emit_err(IncorrectVisibilityRestriction { span: path.span, inner_str: path_str });
1595
1596 Ok(())
1597 }
1598
1599 fn parse_extern(&mut self, case: Case) -> Extern {
1601 if self.eat_keyword_case(exp!(Extern), case) {
1602 let mut extern_span = self.prev_token.span;
1603 let abi = self.parse_abi();
1604 if let Some(abi) = abi {
1605 extern_span = extern_span.to(abi.span);
1606 }
1607 Extern::from_abi(abi, extern_span)
1608 } else {
1609 Extern::None
1610 }
1611 }
1612
1613 fn parse_abi(&mut self) -> Option<StrLit> {
1615 match self.parse_str_lit() {
1616 Ok(str_lit) => Some(str_lit),
1617 Err(Some(lit)) => match lit.kind {
1618 ast::LitKind::Err(_) => None,
1619 _ => {
1620 self.dcx().emit_err(NonStringAbiLiteral { span: lit.span });
1621 None
1622 }
1623 },
1624 Err(None) => None,
1625 }
1626 }
1627
    /// Runs `f` while collecting the tokens it consumes, without any outer
    /// attribute handling (an empty `AttrWrapper`, no trailing token, no
    /// pre-attr position reuse).
    fn collect_tokens_no_attrs<R: HasAttrs + HasTokens>(
        &mut self,
        f: impl FnOnce(&mut Self) -> PResult<'a, R>,
    ) -> PResult<'a, R> {
        // The whole point of calling this method is to get tokens, so force
        // collection unconditionally.
        self.collect_tokens(None, AttrWrapper::empty(), ForceCollect::Yes, |this, _attrs| {
            Ok((f(this)?, Trailing::No, UsePreAttrPos::No))
        })
    }
1638
1639 fn check_path_sep_and_look_ahead(&mut self, looker: impl Fn(&Token) -> bool) -> bool {
1641 if self.check(exp!(PathSep)) {
1642 if self.may_recover() && self.look_ahead(1, |t| t.kind == token::Colon) {
1643 debug_assert!(!self.look_ahead(1, &looker), "Looker must not match on colon");
1644 self.look_ahead(2, looker)
1645 } else {
1646 self.look_ahead(1, looker)
1647 }
1648 } else {
1649 false
1650 }
1651 }
1652
    /// Is this the start of a `use` group or glob import, i.e. `::{` or
    /// `::*` (allowing for a recoverable stray `:` after the `::`)?
    fn is_import_coupler(&mut self) -> bool {
        self.check_path_sep_and_look_ahead(|t| {
            matches!(t.kind, token::OpenDelim(Delimiter::Brace) | token::Star)
        })
    }
1659
1660 #[allow(unused)]
1663 pub(crate) fn debug_lookahead(&self, lookahead: usize) -> impl fmt::Debug {
1664 fmt::from_fn(move |f| {
1665 let mut dbg_fmt = f.debug_struct("Parser"); dbg_fmt.field("prev_token", &self.prev_token);
1669 let mut tokens = vec![];
1670 for i in 0..lookahead {
1671 let tok = self.look_ahead(i, |tok| tok.kind.clone());
1672 let is_eof = tok == TokenKind::Eof;
1673 tokens.push(tok);
1674 if is_eof {
1675 break;
1677 }
1678 }
1679 dbg_fmt.field_with("tokens", |field| field.debug_list().entries(tokens).finish());
1680 dbg_fmt.field("approx_token_stream_pos", &self.num_bump_calls);
1681
1682 if let Some(subparser) = self.subparser_name {
1684 dbg_fmt.field("subparser_name", &subparser);
1685 }
1686 if let Recovery::Forbidden = self.recovery {
1687 dbg_fmt.field("recovery", &self.recovery);
1688 }
1689
1690 dbg_fmt.finish_non_exhaustive()
1692 })
1693 }
1694
    /// Clears the set of token types accumulated as "expected" so far.
    pub fn clear_expected_token_types(&mut self) {
        self.expected_token_types.clear();
    }
1698
    /// Returns the number of `bump` calls made so far, which serves as an
    /// approximation of the position within the token stream.
    pub fn approx_token_stream_pos(&self) -> u32 {
        self.num_bump_calls
    }
1702}
1703
1704pub(crate) fn make_unclosed_delims_error(
1705 unmatched: UnmatchedDelim,
1706 psess: &ParseSess,
1707) -> Option<Diag<'_>> {
1708 let found_delim = unmatched.found_delim?;
1711 let mut spans = vec![unmatched.found_span];
1712 if let Some(sp) = unmatched.unclosed_span {
1713 spans.push(sp);
1714 };
1715 let err = psess.dcx().create_err(MismatchedClosingDelimiter {
1716 spans,
1717 delimiter: pprust::token_kind_to_string(&token::CloseDelim(found_delim)).to_string(),
1718 unmatched: unmatched.found_span,
1719 opening_candidate: unmatched.candidate_span,
1720 unclosed: unmatched.unclosed_span,
1721 });
1722 Some(err)
1723}
1724
/// A token in flattened form, used while converting between token stream
/// representations. NOTE(review): appears to be an intermediate used when
/// (re)building attribute-annotated token streams — confirm against
/// `attr_wrapper`'s usage.
#[derive(Debug, Clone)]
enum FlatToken {
    /// An ordinary token paired with its spacing.
    Token((Token, Spacing)),
    /// The attribute target (and its attributes) for an AST node.
    AttrsTarget(AttrsTarget),
    /// A placeholder entry carrying no token at all; skipped when the flat
    /// sequence is consumed.
    Empty,
}
1744
/// The result of parsing a macro nonterminal (metavariable fragment). Most
/// fragment kinds have a dedicated variant holding the parsed AST piece;
/// `Nt` is the fallback for kinds still carried as a `Nonterminal`.
#[derive(Clone, Debug)]
pub enum ParseNtResult {
    /// A single token tree (`tt` fragments).
    Tt(TokenTree),
    /// An identifier, with its rawness (`r#ident` vs `ident`).
    Ident(Ident, IdentIsRaw),
    /// A lifetime, with its rawness.
    Lifetime(Ident, IdentIsRaw),
    Item(P<ast::Item>),
    Stmt(P<ast::Stmt>),
    /// A pattern, together with which pattern grammar it was parsed under.
    Pat(P<ast::Pat>, NtPatKind),
    Ty(P<ast::Ty>),
    Meta(P<ast::AttrItem>),
    Path(P<ast::Path>),
    Vis(P<ast::Visibility>),

    /// Fallback: a fragment kind still represented via `Nonterminal`.
    Nt(Arc<Nonterminal>),
}