1pub mod attr;
2mod attr_wrapper;
3mod diagnostics;
4mod expr;
5mod generics;
6mod item;
7mod nonterminal;
8mod pat;
9mod path;
10mod stmt;
11pub mod token_type;
12mod ty;
13
14use std::assert_matches::debug_assert_matches;
15use std::ops::Range;
16use std::sync::Arc;
17use std::{fmt, mem, slice};
18
19use attr_wrapper::{AttrWrapper, UsePreAttrPos};
20pub use diagnostics::AttemptLocalParseRecovery;
21pub(crate) use expr::ForbiddenLetReason;
22pub(crate) use item::FnParseMode;
23pub use pat::{CommaRecoveryMode, RecoverColon, RecoverComma};
24use path::PathStyle;
25use rustc_ast::ptr::P;
26use rustc_ast::token::{
27 self, Delimiter, IdentIsRaw, InvisibleOrigin, MetaVarKind, Nonterminal, Token, TokenKind,
28};
29use rustc_ast::tokenstream::{AttrsTarget, Spacing, TokenStream, TokenTree};
30use rustc_ast::util::case::Case;
31use rustc_ast::{
32 self as ast, AnonConst, AttrArgs, AttrId, ByRef, Const, CoroutineKind, DUMMY_NODE_ID,
33 DelimArgs, Expr, ExprKind, Extern, HasAttrs, HasTokens, Mutability, Recovered, Safety, StrLit,
34 Visibility, VisibilityKind,
35};
36use rustc_ast_pretty::pprust;
37use rustc_data_structures::fx::FxHashMap;
38use rustc_errors::{Applicability, Diag, FatalError, MultiSpan, PResult};
39use rustc_index::interval::IntervalSet;
40use rustc_session::parse::ParseSess;
41use rustc_span::{DUMMY_SP, Ident, Span, Symbol, kw, sym};
42use thin_vec::ThinVec;
43use token_type::TokenTypeSet;
44pub use token_type::{ExpKeywordPair, ExpTokenPair, TokenType};
45use tracing::debug;
46
47use crate::errors::{
48 self, IncorrectVisibilityRestriction, MismatchedClosingDelimiter, NonStringAbiLiteral,
49};
50use crate::exp;
51use crate::lexer::UnmatchedDelim;
52
53#[cfg(test)]
54mod tests;
55
56#[cfg(test)]
59mod tokenstream {
60 mod tests;
61}
62#[cfg(test)]
63mod mut_visit {
64 mod tests;
65}
66
bitflags::bitflags! {
    /// Restrictions applied while parsing expressions, stored as a bitset so
    /// nested parses can cheaply save and restore them (see `with_res`).
    /// NOTE(review): per-flag semantics live at the use sites in `expr`/`stmt`;
    /// the names here are the ground truth.
    #[derive(Clone, Copy, Debug)]
    struct Restrictions: u8 {
        const STMT_EXPR = 1 << 0;
        const NO_STRUCT_LITERAL = 1 << 1;
        const CONST_EXPR = 1 << 2;
        const ALLOW_LET = 1 << 3;
        const IN_IF_GUARD = 1 << 4;
        const IS_PAT = 1 << 5;
    }
}
78
/// How to treat a `;` during error recovery.
/// NOTE(review): consumed by the recovery routines in `diagnostics`; confirm
/// variant semantics there.
#[derive(Clone, Copy, PartialEq, Debug)]
enum SemiColonMode {
    Break,
    Ignore,
    Comma,
}

/// How to treat a block during error recovery.
/// NOTE(review): consumed by the recovery routines in `diagnostics`.
#[derive(Clone, Copy, PartialEq, Debug)]
enum BlockMode {
    Break,
    Ignore,
}

/// Whether to force collection of tokens for an AST node, even when they
/// would not otherwise be needed.
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ForceCollect {
    Yes,
    No,
}
99
/// If the current token is an interpolated nonterminal of kind
/// `$constructor`, binds a clone of its contents to `$x`, bumps past the
/// token, and returns `Ok($e)` from the *enclosing function*. Otherwise this
/// is a no-op and parsing falls through.
#[macro_export]
macro_rules! maybe_whole {
    ($p:expr, $constructor:ident, |$x:ident| $e:expr) => {
        if let token::Interpolated(nt) = &$p.token.kind
            && let token::$constructor(x) = &**nt
        {
            // `$e` may or may not mutate the clone, hence the allow.
            #[allow(unused_mut)]
            let mut $x = x.clone();
            $p.bump();
            return Ok($e);
        }
    };
}
113
/// If recovery is permitted and the parser sits on an interpolated `NtTy`
/// that is followed by `::`, treats it as the start of a malformed qualified
/// path and returns the recovery result from the *enclosing function*.
/// Otherwise this is a no-op.
#[macro_export]
macro_rules! maybe_recover_from_interpolated_ty_qpath {
    ($self: expr, $allow_qpath_recovery: expr) => {
        if $allow_qpath_recovery
            && $self.may_recover()
            && $self.look_ahead(1, |t| t == &token::PathSep)
            && let token::Interpolated(nt) = &$self.token.kind
            && let token::NtTy(ty) = &**nt
        {
            let ty = ty.clone();
            $self.bump();
            return $self.maybe_recover_from_bad_qpath_stage_2($self.prev_token.span, ty);
        }
    };
}
130
/// Whether the parser is allowed to apply error-recovery heuristics
/// (queried via `Parser::may_recover`).
#[derive(Clone, Copy, Debug)]
pub enum Recovery {
    Allowed,
    Forbidden,
}
136
/// The Rust parser: consumes a `TokenStream` and produces AST nodes.
#[derive(Clone)]
pub struct Parser<'a> {
    pub psess: &'a ParseSess,
    /// The current token.
    pub token: Token,
    /// The spacing of the current token.
    token_spacing: Spacing,
    /// The token most recently bumped past.
    pub prev_token: Token,
    pub capture_cfg: bool,
    restrictions: Restrictions,
    /// Token types acceptable at the current position, accumulated for
    /// "expected X, found Y" diagnostics; cleared on every bump.
    expected_token_types: TokenTypeSet,
    token_cursor: TokenCursor,
    /// Number of calls to `bump`; reset to 0 at the end of `Parser::new`
    /// so the priming bump does not count.
    num_bump_calls: u32,
    /// Incremented by `break_and_eat` when a compound token (e.g. `>>`) is
    /// split in two; reset to 0 by `bump`.
    break_last_token: u32,
    /// `<`s eaten (via `eat_lt`) without a matching `>` yet; used for
    /// angle-bracket recovery.
    unmatched_angle_bracket_count: u16,
    angle_bracket_nesting: u16,

    /// Span of the last token we errored on, to avoid emitting a second
    /// error for the same token (see `expect_one_of`).
    last_unexpected_token_span: Option<Span>,
    /// Label identifying a subparser, used in diagnostics.
    subparser_name: Option<&'static str>,
    capture_state: CaptureState,
    /// Spans of the most recently parsed closure, for the
    /// missing-braces-around-closure-body recovery.
    current_closure: Option<ClosureSpans>,
    /// Whether error-recovery heuristics may run (see `may_recover`).
    recovery: Recovery,
}

// `Parser` derives `Clone`; this assert guards against accidental size growth.
#[cfg(all(target_pointer_width = "64", any(target_arch = "aarch64", target_arch = "x86_64")))]
rustc_data_structures::static_assert_size!(Parser<'_>, 288);
196
/// Key spans of the closure most recently parsed, kept so
/// `recover_missing_braces_around_closure_body` can point at them.
#[derive(Clone, Debug)]
struct ClosureSpans {
    whole_closure: Span,
    closing_pipe: Span,
    body: Span,
}

/// A token range in parser-absolute positions.
/// NOTE(review): unit inferred from `NodeRange::new`'s `start_pos`
/// translation — confirm against the attribute-capture machinery.
#[derive(Clone, Debug)]
struct ParserRange(Range<u32>);

/// A token range relative to an AST node's own start position, produced
/// from a `ParserRange` by `NodeRange::new`.
#[derive(Clone, Debug)]
struct NodeRange(Range<u32>);

/// A parser-relative range plus the attribute target (if any) that replaces it.
type ParserReplacement = (ParserRange, Option<AttrsTarget>);

/// Like `ParserReplacement`, but node-relative.
type NodeReplacement = (NodeRange, Option<AttrsTarget>);
238
239impl NodeRange {
240 fn new(ParserRange(parser_range): ParserRange, start_pos: u32) -> NodeRange {
250 assert!(!parser_range.is_empty());
251 assert!(parser_range.start >= start_pos);
252 NodeRange((parser_range.start - start_pos)..(parser_range.end - start_pos))
253 }
254}
255
/// Whether token capturing is currently active.
#[derive(Copy, Clone, Debug)]
enum Capturing {
    No,
    Yes,
}

/// Parser state for capturing token streams of attribute-bearing AST nodes.
#[derive(Clone, Debug)]
struct CaptureState {
    capturing: Capturing,
    /// Ranges of captured tokens to replace, with their replacements.
    parser_replacements: Vec<ParserReplacement>,
    /// Token range of each inner attribute, keyed by attribute id.
    inner_attr_parser_ranges: FxHashMap<AttrId, ParserRange>,
    /// Attribute ids already seen.
    /// NOTE(review): presumably guards against double-processing; confirm
    /// against the attr-capture code in `attr_wrapper`.
    seen_attrs: IntervalSet<AttrId>,
}

/// A flat cursor over a single `TokenStream`; `TokenCursor` layers
/// delimiter nesting on top of this.
#[derive(Clone, Debug)]
struct TokenTreeCursor {
    stream: TokenStream,
    /// Index of the current tree within `stream`.
    index: usize,
}
286
impl TokenTreeCursor {
    /// Creates a cursor positioned at the start of `stream`.
    #[inline]
    fn new(stream: TokenStream) -> Self {
        TokenTreeCursor { stream, index: 0 }
    }

    /// The tree under the cursor, or `None` once the stream is exhausted.
    #[inline]
    fn curr(&self) -> Option<&TokenTree> {
        self.stream.get(self.index)
    }

    /// Advances past the current tree.
    #[inline]
    fn bump(&mut self) {
        self.index += 1;
    }
}
303
/// Yields tokens from a (possibly nested) token stream, materializing
/// open/close delimiter tokens on the way into and out of each
/// `TokenTree::Delimited` group.
#[derive(Clone, Debug)]
struct TokenCursor {
    /// Cursor into the innermost stream currently being read.
    curr: TokenTreeCursor,

    /// Cursors for the enclosing streams, innermost last. Each still points
    /// at the `Delimited` tree we descended into (see `inlined_next`).
    stack: Vec<TokenTreeCursor>,
}
321
322impl TokenCursor {
323 fn next(&mut self) -> (Token, Spacing) {
324 self.inlined_next()
325 }
326
327 #[inline(always)]
329 fn inlined_next(&mut self) -> (Token, Spacing) {
330 loop {
331 if let Some(tree) = self.curr.curr() {
335 match tree {
336 &TokenTree::Token(ref token, spacing) => {
337 debug_assert!(!matches!(
338 token.kind,
339 token::OpenDelim(_) | token::CloseDelim(_)
340 ));
341 let res = (token.clone(), spacing);
342 self.curr.bump();
343 return res;
344 }
345 &TokenTree::Delimited(sp, spacing, delim, ref tts) => {
346 let trees = TokenTreeCursor::new(tts.clone());
347 self.stack.push(mem::replace(&mut self.curr, trees));
348 if !delim.skip() {
349 return (Token::new(token::OpenDelim(delim), sp.open), spacing.open);
350 }
351 }
353 };
354 } else if let Some(parent) = self.stack.pop() {
355 let Some(&TokenTree::Delimited(span, spacing, delim, _)) = parent.curr() else {
357 panic!("parent should be Delimited")
358 };
359 self.curr = parent;
360 self.curr.bump(); if !delim.skip() {
362 return (Token::new(token::CloseDelim(delim), span.close), spacing.close);
363 }
364 } else {
366 return (Token::new(token::Eof, DUMMY_SP), Spacing::Alone);
370 }
371 }
372 }
373}
374
/// Separator configuration for `parse_seq_to_before_tokens` and friends.
#[derive(Debug)]
struct SeqSep<'a> {
    /// The separator token between elements, if any.
    sep: Option<ExpTokenPair<'a>>,
    /// Whether a trailing separator before the closing token is allowed.
    trailing_sep_allowed: bool,
}
383
384impl<'a> SeqSep<'a> {
385 fn trailing_allowed(sep: ExpTokenPair<'a>) -> SeqSep<'a> {
386 SeqSep { sep: Some(sep), trailing_sep_allowed: true }
387 }
388
389 fn none() -> SeqSep<'a> {
390 SeqSep { sep: None, trailing_sep_allowed: false }
391 }
392}
393
/// Whether a visibility is syntactically followed by a type, which limits
/// the recovery `parse_visibility` may attempt.
#[derive(Debug)]
pub enum FollowedByType {
    Yes,
    No,
}

/// Whether a parsed sequence ended with a trailing separator.
#[derive(Copy, Clone, Debug)]
enum Trailing {
    No,
    Yes,
}
405
406impl From<bool> for Trailing {
407 fn from(b: bool) -> Trailing {
408 if b { Trailing::Yes } else { Trailing::No }
409 }
410}
411
/// A classification of the current token used when phrasing diagnostics
/// (see `token_descr`).
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub(super) enum TokenDescription {
    ReservedIdentifier,
    Keyword,
    ReservedKeyword,
    DocComment,

    /// An invisible-delimiter group originating from a metavariable.
    MetaVar(MetaVarKind),
}
425
impl TokenDescription {
    /// Classifies `token`, or returns `None` for ordinary tokens.
    /// Arms are tried in order, so the identifier-based guards take
    /// precedence over the kind-based arms below them.
    pub(super) fn from_token(token: &Token) -> Option<Self> {
        match token.kind {
            _ if token.is_special_ident() => Some(TokenDescription::ReservedIdentifier),
            _ if token.is_used_keyword() => Some(TokenDescription::Keyword),
            _ if token.is_unused_keyword() => Some(TokenDescription::ReservedKeyword),
            token::DocComment(..) => Some(TokenDescription::DocComment),
            token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(kind))) => {
                Some(TokenDescription::MetaVar(kind))
            }
            _ => None,
        }
    }
}
440
441pub fn token_descr(token: &Token) -> String {
442 let s = pprust::token_to_string(token).to_string();
443
444 match (TokenDescription::from_token(token), &token.kind) {
445 (Some(TokenDescription::ReservedIdentifier), _) => format!("reserved identifier `{s}`"),
446 (Some(TokenDescription::Keyword), _) => format!("keyword `{s}`"),
447 (Some(TokenDescription::ReservedKeyword), _) => format!("reserved keyword `{s}`"),
448 (Some(TokenDescription::DocComment), _) => format!("doc comment `{s}`"),
449 (Some(TokenDescription::MetaVar(kind)), _) => format!("`{kind}` metavariable"),
451 (None, TokenKind::NtIdent(..)) => format!("identifier `{s}`"),
452 (None, TokenKind::NtLifetime(..)) => format!("lifetime `{s}`"),
453 (None, TokenKind::Interpolated(node)) => format!("{} `{s}`", node.descr()),
454 (None, _) => format!("`{s}`"),
455 }
456}
457
458impl<'a> Parser<'a> {
    /// Creates a parser over `stream`. `subparser_name`, if given, labels
    /// diagnostics produced by this (sub)parser.
    pub fn new(
        psess: &'a ParseSess,
        stream: TokenStream,
        subparser_name: Option<&'static str>,
    ) -> Self {
        let mut parser = Parser {
            psess,
            token: Token::dummy(),
            token_spacing: Spacing::Alone,
            prev_token: Token::dummy(),
            capture_cfg: false,
            restrictions: Restrictions::empty(),
            expected_token_types: TokenTypeSet::new(),
            token_cursor: TokenCursor { curr: TokenTreeCursor::new(stream), stack: Vec::new() },
            num_bump_calls: 0,
            break_last_token: 0,
            unmatched_angle_bracket_count: 0,
            angle_bracket_nesting: 0,
            last_unexpected_token_span: None,
            subparser_name,
            capture_state: CaptureState {
                capturing: Capturing::No,
                parser_replacements: Vec::new(),
                inner_attr_parser_ranges: Default::default(),
                seen_attrs: IntervalSet::new(u32::MAX as usize),
            },
            current_closure: None,
            recovery: Recovery::Allowed,
        };

        // Prime the pump: replace the dummy `token` with the first real one.
        parser.bump();

        // The priming bump must not count towards token positions.
        parser.num_bump_calls = 0;

        parser
    }
499
500 #[inline]
501 pub fn recovery(mut self, recovery: Recovery) -> Self {
502 self.recovery = recovery;
503 self
504 }
505
506 #[inline]
514 fn may_recover(&self) -> bool {
515 matches!(self.recovery, Recovery::Allowed)
516 }
517
518 pub fn unexpected_any<T>(&mut self) -> PResult<'a, T> {
521 match self.expect_one_of(&[], &[]) {
522 Err(e) => Err(e),
523 Ok(_) => FatalError.raise(),
526 }
527 }
528
529 pub fn unexpected(&mut self) -> PResult<'a, ()> {
530 self.unexpected_any()
531 }
532
533 pub fn expect(&mut self, exp: ExpTokenPair<'_>) -> PResult<'a, Recovered> {
535 if self.expected_token_types.is_empty() {
536 if self.token == *exp.tok {
537 self.bump();
538 Ok(Recovered::No)
539 } else {
540 self.unexpected_try_recover(exp.tok)
541 }
542 } else {
543 self.expect_one_of(slice::from_ref(&exp), &[])
544 }
545 }
546
    /// Expects one of `edible` (consumed on match) or `inedible` (matched but
    /// left in the stream). Otherwise signals an error, possibly recovering.
    fn expect_one_of(
        &mut self,
        edible: &[ExpTokenPair<'_>],
        inedible: &[ExpTokenPair<'_>],
    ) -> PResult<'a, Recovered> {
        if edible.iter().any(|exp| exp.tok == &self.token.kind) {
            self.bump();
            Ok(Recovered::No)
        } else if inedible.iter().any(|exp| exp.tok == &self.token.kind) {
            // Leave the token in place for the caller.
            Ok(Recovered::No)
        } else if self.token != token::Eof
            && self.last_unexpected_token_span == Some(self.token.span)
        {
            // We already errored on this exact token; avoid a duplicate
            // diagnostic by bailing out entirely.
            FatalError.raise();
        } else {
            self.expected_one_of_not_found(edible, inedible)
                .map(|error_guaranteed| Recovered::Yes(error_guaranteed))
        }
    }
570
571 pub fn parse_ident(&mut self) -> PResult<'a, Ident> {
573 self.parse_ident_common(true)
574 }
575
576 fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, Ident> {
577 let (ident, is_raw) = self.ident_or_err(recover)?;
578
579 if matches!(is_raw, IdentIsRaw::No) && ident.is_reserved() {
580 let err = self.expected_ident_found_err();
581 if recover {
582 err.emit();
583 } else {
584 return Err(err);
585 }
586 }
587 self.bump();
588 Ok(ident)
589 }
590
591 fn ident_or_err(&mut self, recover: bool) -> PResult<'a, (Ident, IdentIsRaw)> {
592 match self.token.ident() {
593 Some(ident) => Ok(ident),
594 None => self.expected_ident_found(recover),
595 }
596 }
597
598 #[inline]
603 fn check(&mut self, exp: ExpTokenPair<'_>) -> bool {
604 let is_present = self.token == *exp.tok;
605 if !is_present {
606 self.expected_token_types.insert(exp.token_type);
607 }
608 is_present
609 }
610
611 #[inline]
612 #[must_use]
613 fn check_noexpect(&self, tok: &TokenKind) -> bool {
614 self.token == *tok
615 }
616
617 #[inline]
622 #[must_use]
623 fn eat_noexpect(&mut self, tok: &TokenKind) -> bool {
624 let is_present = self.check_noexpect(tok);
625 if is_present {
626 self.bump()
627 }
628 is_present
629 }
630
631 #[inline]
633 #[must_use]
634 pub fn eat(&mut self, exp: ExpTokenPair<'_>) -> bool {
635 let is_present = self.check(exp);
636 if is_present {
637 self.bump()
638 }
639 is_present
640 }
641
642 #[inline]
645 #[must_use]
646 fn check_keyword(&mut self, exp: ExpKeywordPair) -> bool {
647 let is_keyword = self.token.is_keyword(exp.kw);
648 if !is_keyword {
649 self.expected_token_types.insert(exp.token_type);
650 }
651 is_keyword
652 }
653
654 #[inline]
655 #[must_use]
656 fn check_keyword_case(&mut self, exp: ExpKeywordPair, case: Case) -> bool {
657 if self.check_keyword(exp) {
658 true
659 } else if case == Case::Insensitive
660 && let Some((ident, IdentIsRaw::No)) = self.token.ident()
661 && ident.as_str().eq_ignore_ascii_case(exp.kw.as_str())
663 {
664 true
665 } else {
666 false
667 }
668 }
669
670 #[inline]
674 #[must_use]
675 pub fn eat_keyword(&mut self, exp: ExpKeywordPair) -> bool {
676 let is_keyword = self.check_keyword(exp);
677 if is_keyword {
678 self.bump();
679 }
680 is_keyword
681 }
682
683 #[inline]
687 #[must_use]
688 fn eat_keyword_case(&mut self, exp: ExpKeywordPair, case: Case) -> bool {
689 if self.eat_keyword(exp) {
690 true
691 } else if case == Case::Insensitive
692 && let Some((ident, IdentIsRaw::No)) = self.token.ident()
693 && ident.as_str().eq_ignore_ascii_case(exp.kw.as_str())
695 {
696 self.dcx().emit_err(errors::KwBadCase { span: ident.span, kw: exp.kw.as_str() });
697 self.bump();
698 true
699 } else {
700 false
701 }
702 }
703
704 #[inline]
708 #[must_use]
709 pub fn eat_keyword_noexpect(&mut self, kw: Symbol) -> bool {
710 let is_keyword = self.token.is_keyword(kw);
711 if is_keyword {
712 self.bump();
713 }
714 is_keyword
715 }
716
717 pub fn expect_keyword(&mut self, exp: ExpKeywordPair) -> PResult<'a, ()> {
721 if !self.eat_keyword(exp) { self.unexpected() } else { Ok(()) }
722 }
723
724 fn is_kw_followed_by_ident(&self, kw: Symbol) -> bool {
726 self.token.is_keyword(kw) && self.look_ahead(1, |t| t.is_ident() && !t.is_reserved_ident())
727 }
728
    /// Records `token_type` as expected when `ok` is false, then returns
    /// `ok`. Shared plumbing for the `check_*` predicates below.
    #[inline]
    fn check_or_expected(&mut self, ok: bool, token_type: TokenType) -> bool {
        if !ok {
            self.expected_token_types.insert(token_type);
        }
        ok
    }

    /// Is the current token an identifier?
    fn check_ident(&mut self) -> bool {
        self.check_or_expected(self.token.is_ident(), TokenType::Ident)
    }

    /// Can the current token start a path?
    fn check_path(&mut self) -> bool {
        self.check_or_expected(self.token.is_path_start(), TokenType::Path)
    }

    /// Can the current token start a type?
    fn check_type(&mut self) -> bool {
        self.check_or_expected(self.token.can_begin_type(), TokenType::Type)
    }

    /// Can the current token start a const generic argument?
    fn check_const_arg(&mut self) -> bool {
        self.check_or_expected(self.token.can_begin_const_arg(), TokenType::Const)
    }

    /// Does `const` here start a const closure, i.e. is it followed by
    /// `move`, `static`, `|`, or `||`?
    fn check_const_closure(&self) -> bool {
        self.is_keyword_ahead(0, &[kw::Const])
            && self.look_ahead(1, |t| match &t.kind {
                token::Ident(kw::Move | kw::Static, _) | token::OrOr | token::BinOp(token::Or) => {
                    true
                }
                _ => false,
            })
    }

    /// Does `const` at `dist` tokens ahead start an inline const block,
    /// i.e. is it followed by `{` or an interpolated block?
    fn check_inline_const(&self, dist: usize) -> bool {
        self.is_keyword_ahead(dist, &[kw::Const])
            && self.look_ahead(dist + 1, |t| match &t.kind {
                token::Interpolated(nt) => matches!(&**nt, token::NtBlock(..)),
                token::OpenDelim(Delimiter::Brace) => true,
                _ => false,
            })
    }

    /// Is the current token `+`-like (e.g. `+`, `+=`)?
    #[inline]
    fn check_plus(&mut self) -> bool {
        self.check_or_expected(self.token.is_like_plus(), TokenType::Plus)
    }
779
    /// Eats the expected token `exp` if possible, splitting compound tokens
    /// when necessary: expecting `<` and seeing `<<` splits the token,
    /// consumes the first `<`, and leaves the second as the current token.
    /// Returns whether a token was eaten.
    fn break_and_eat(&mut self, exp: ExpTokenPair<'_>) -> bool {
        if self.token == *exp.tok {
            self.bump();
            return true;
        }
        match self.token.kind.break_two_token_op(1) {
            Some((first, second)) if first == *exp.tok => {
                // Synthesize the two halves with spans that partition the
                // compound token's span.
                let first_span = self.psess.source_map().start_point(self.token.span);
                let second_span = self.token.span.with_lo(first_span.hi());
                self.token = Token::new(first, first_span);
                // Record the split so token collection can compensate for the
                // synthetic token (counter is reset on the next `bump`).
                self.break_last_token += 1;
                // Use `bump_with` so the remaining half becomes the current
                // token without consuming anything from the token cursor.
                self.bump_with((Token::new(second, second_span), self.token_spacing));
                true
            }
            _ => {
                self.expected_token_types.insert(exp.token_type);
                false
            }
        }
    }
811
812 fn eat_plus(&mut self) -> bool {
814 self.break_and_eat(exp!(Plus))
815 }
816
817 fn expect_and(&mut self) -> PResult<'a, ()> {
820 if self.break_and_eat(exp!(And)) { Ok(()) } else { self.unexpected() }
821 }
822
823 fn expect_or(&mut self) -> PResult<'a, ()> {
826 if self.break_and_eat(exp!(Or)) { Ok(()) } else { self.unexpected() }
827 }
828
829 fn eat_lt(&mut self) -> bool {
831 let ate = self.break_and_eat(exp!(Lt));
832 if ate {
833 self.unmatched_angle_bracket_count += 1;
835 debug!("eat_lt: (increment) count={:?}", self.unmatched_angle_bracket_count);
836 }
837 ate
838 }
839
840 fn expect_lt(&mut self) -> PResult<'a, ()> {
843 if self.eat_lt() { Ok(()) } else { self.unexpected() }
844 }
845
846 fn expect_gt(&mut self) -> PResult<'a, ()> {
849 if self.break_and_eat(exp!(Gt)) {
850 if self.unmatched_angle_bracket_count > 0 {
852 self.unmatched_angle_bracket_count -= 1;
853 debug!("expect_gt: (decrement) count={:?}", self.unmatched_angle_bracket_count);
854 }
855 Ok(())
856 } else {
857 self.unexpected()
858 }
859 }
860
861 fn expect_any_with_type(
863 &mut self,
864 closes_expected: &[ExpTokenPair<'_>],
865 closes_not_expected: &[&TokenKind],
866 ) -> bool {
867 closes_expected.iter().any(|&close| self.check(close))
868 || closes_not_expected.iter().any(|k| self.check_noexpect(k))
869 }
870
    /// Parses a `sep`-separated sequence of elements (each produced by `f`)
    /// up to, but not including, any of the closing tokens. Returns the
    /// elements, whether a trailing separator was present, and whether
    /// error recovery happened.
    fn parse_seq_to_before_tokens<T>(
        &mut self,
        closes_expected: &[ExpTokenPair<'_>],
        closes_not_expected: &[&TokenKind],
        sep: SeqSep<'_>,
        mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, Trailing, Recovered)> {
        let mut first = true;
        let mut recovered = Recovered::No;
        let mut trailing = Trailing::No;
        let mut v = ThinVec::new();

        while !self.expect_any_with_type(closes_expected, closes_not_expected) {
            // A stray close delimiter or EOF ends the sequence even when it
            // is not one of the requested closers.
            if let token::CloseDelim(..) | token::Eof = self.token.kind {
                break;
            }
            if let Some(exp) = sep.sep {
                if first {
                    // No separator before the first element.
                    first = false;
                } else {
                    match self.expect(exp) {
                        Ok(Recovered::No) => {
                            self.current_closure.take();
                        }
                        Ok(Recovered::Yes(guar)) => {
                            self.current_closure.take();
                            recovered = Recovered::Yes(guar);
                            break;
                        }
                        Err(mut expect_err) => {
                            let sp = self.prev_token.span.shrink_to_hi();
                            let token_str = pprust::token_kind_to_string(exp.tok);

                            match self.current_closure.take() {
                                // A `;` right after a closure suggests the
                                // closure body was missing braces.
                                Some(closure_spans) if self.token == TokenKind::Semi => {
                                    self.recover_missing_braces_around_closure_body(
                                        closure_spans,
                                        expect_err,
                                    )?;

                                    continue;
                                }

                                _ => {
                                    // Eat a token that looks like a typo'd
                                    // separator (e.g. `;` for `,`).
                                    if exp.tok.similar_tokens().contains(&self.token.kind) {
                                        self.bump();
                                    }
                                }
                            }

                            // `ident ..` in a pattern position: suggest `@`
                            // for binding the rest of an array pattern.
                            if self.prev_token.is_ident() && self.token == token::DotDot {
                                let msg = format!(
                                    "if you meant to bind the contents of the rest of the array \
                                     pattern into `{}`, use `@`",
                                    pprust::token_to_string(&self.prev_token)
                                );
                                expect_err
                                    .with_span_suggestion_verbose(
                                        self.prev_token.span.shrink_to_hi().until(self.token.span),
                                        msg,
                                        " @ ",
                                        Applicability::MaybeIncorrect,
                                    )
                                    .emit();
                                break;
                            }

                            // Attempt to parse the next element anyway: if it
                            // works, the user just forgot the separator.
                            self.last_unexpected_token_span = None;
                            match f(self) {
                                Ok(t) => {
                                    // Parsed successfully: report the missing
                                    // separator and keep going.
                                    expect_err
                                        .with_span_suggestion_short(
                                            sp,
                                            format!("missing `{token_str}`"),
                                            token_str,
                                            Applicability::MaybeIncorrect,
                                        )
                                        .emit();

                                    v.push(t);
                                    continue;
                                }
                                Err(e) => {
                                    // Merge the element error's notes into the
                                    // separator error, then cancel it.
                                    for xx in &e.children {
                                        expect_err.children.push(xx.clone());
                                    }
                                    e.cancel();
                                    if self.token == token::Colon {
                                        // Likely a struct-literal-like typo;
                                        // let the caller decide.
                                        return Err(expect_err);
                                    } else if let [exp] = closes_expected
                                        && exp.token_type == TokenType::CloseParen
                                    {
                                        return Err(expect_err);
                                    } else {
                                        expect_err.emit();
                                        break;
                                    }
                                }
                            }
                        }
                    }
                }
            }
            // A separator immediately followed by a closer is a trailing
            // separator — accept it when allowed.
            if sep.trailing_sep_allowed
                && self.expect_any_with_type(closes_expected, closes_not_expected)
            {
                trailing = Trailing::Yes;
                break;
            }

            let t = f(self)?;
            v.push(t);
        }

        Ok((v, trailing, recovered))
    }
1011
    /// Emits the "closure bodies that contain statements must be surrounded
    /// by braces" diagnostic and recovers by skipping the stray statements
    /// that followed the closure.
    fn recover_missing_braces_around_closure_body(
        &mut self,
        closure_spans: ClosureSpans,
        mut expect_err: Diag<'_>,
    ) -> PResult<'a, ()> {
        let initial_semicolon = self.token.span;

        // Skip the statements that were (incorrectly) parsed after the
        // closure body, so parsing can resume past them.
        while self.eat(exp!(Semi)) {
            let _ = self.parse_stmt_without_recovery(false, ForceCollect::No).unwrap_or_else(|e| {
                e.cancel();
                None
            });
        }

        expect_err
            .primary_message("closure bodies that contain statements must be surrounded by braces");

        let preceding_pipe_span = closure_spans.closing_pipe;
        let following_token_span = self.token.span;

        // First note: the semicolon that turned the closure into a statement.
        let mut first_note = MultiSpan::from(vec![initial_semicolon]);
        first_note.push_span_label(
            initial_semicolon,
            "this `;` turns the preceding closure into a statement",
        );
        first_note.push_span_label(
            closure_spans.body,
            "this expression is a statement because of the trailing semicolon",
        );
        expect_err.span_note(first_note, "statement found outside of a block");

        // Second note: what was parsed versus what was probably intended.
        let mut second_note = MultiSpan::from(vec![closure_spans.whole_closure]);
        second_note.push_span_label(closure_spans.whole_closure, "this is the parsed closure...");
        second_note.push_span_label(
            following_token_span,
            "...but likely you meant the closure to end here",
        );
        expect_err.span_note(second_note, "the closure body may be incorrectly delimited");

        // Re-point the primary spans at the closing pipe and the position
        // where parsing resumes.
        expect_err.span(vec![preceding_pipe_span, following_token_span]);

        let opening_suggestion_str = " {".to_string();
        let closing_suggestion_str = "}".to_string();

        expect_err.multipart_suggestion(
            "try adding braces",
            vec![
                (preceding_pipe_span.shrink_to_hi(), opening_suggestion_str),
                (following_token_span.shrink_to_lo(), closing_suggestion_str),
            ],
            Applicability::MaybeIncorrect,
        );

        expect_err.emit();

        Ok(())
    }
1069
    /// Parses a `sep`-separated sequence up to (but not including) `close`.
    fn parse_seq_to_before_end<T>(
        &mut self,
        close: ExpTokenPair<'_>,
        sep: SeqSep<'_>,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, Trailing, Recovered)> {
        self.parse_seq_to_before_tokens(&[close], &[], sep, f)
    }

    /// Like `parse_seq_to_before_end`, but also consumes the closing token.
    fn parse_seq_to_end<T>(
        &mut self,
        close: ExpTokenPair<'_>,
        sep: SeqSep<'_>,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
        let (val, trailing, recovered) = self.parse_seq_to_before_end(close, sep, f)?;
        if matches!(recovered, Recovered::No) && !self.eat(close) {
            // Without recovery, the close token should be next; delay an ICE
            // rather than crashing, since an error may already be in flight.
            self.dcx().span_delayed_bug(
                self.token.span,
                "recovered but `parse_seq_to_before_end` did not give us the close token",
            );
        }
        Ok((val, trailing))
    }

    /// Parses `open`, a `sep`-separated sequence, then `close`.
    fn parse_unspanned_seq<T>(
        &mut self,
        open: ExpTokenPair<'_>,
        close: ExpTokenPair<'_>,
        sep: SeqSep<'_>,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
        self.expect(open)?;
        self.parse_seq_to_end(close, sep, f)
    }

    /// Parses a comma-separated sequence between `open` and `close`,
    /// trailing comma permitted.
    fn parse_delim_comma_seq<T>(
        &mut self,
        open: ExpTokenPair<'_>,
        close: ExpTokenPair<'_>,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
        self.parse_unspanned_seq(open, close, SeqSep::trailing_allowed(exp!(Comma)), f)
    }

    /// Parses a parenthesized comma-separated sequence, trailing comma
    /// permitted.
    fn parse_paren_comma_seq<T>(
        &mut self,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
        self.parse_delim_comma_seq(exp!(OpenParen), exp!(CloseParen), f)
    }
1136
    /// Advances to `next`, recording the current token as `prev_token`,
    /// without consuming anything from the token cursor.
    fn bump_with(&mut self, next: (Token, Spacing)) {
        self.inlined_bump_with(next)
    }

    /// The guaranteed-inlined core of `bump_with`.
    #[inline(always)]
    fn inlined_bump_with(&mut self, (next_token, next_spacing): (Token, Spacing)) {
        self.prev_token = mem::replace(&mut self.token, next_token);
        self.token_spacing = next_spacing;

        // A new token position invalidates the expected-token set.
        self.expected_token_types.clear();
    }

    /// Advances to the next token from the token cursor.
    pub fn bump(&mut self) {
        let mut next = self.token_cursor.inlined_next();
        self.num_bump_calls += 1;
        // Any split performed by `break_and_eat` has now been fully consumed.
        self.break_last_token = 0;
        if next.0.span.is_dummy() {
            // Keep a real location for diagnostics while preserving the new
            // token's syntax context.
            let fallback_span = self.token.span;
            next.0.span = fallback_span.with_ctxt(next.0.span.ctxt());
        }
        // Skippable (invisible) delimiters must never surface as tokens.
        debug_assert!(!matches!(
            next.0.kind,
            token::OpenDelim(delim) | token::CloseDelim(delim) if delim.skip()
        ));
        self.inlined_bump_with(next)
    }
1173
    /// Applies `looker` to the token `dist` positions ahead without
    /// advancing the parser. `dist == 0` means the current token.
    pub fn look_ahead<R>(&self, dist: usize, looker: impl FnOnce(&Token) -> R) -> R {
        if dist == 0 {
            return looker(&self.token);
        }

        // Fast path for single-token lookahead: peek at the cursor directly
        // instead of cloning it. Falls through to the slow path when the
        // next tree is a skippable (invisible) delimiter.
        if dist == 1 {
            match self.token_cursor.curr.curr() {
                Some(tree) => {
                    match tree {
                        TokenTree::Token(token, _) => return looker(token),
                        &TokenTree::Delimited(dspan, _, delim, _) => {
                            if !delim.skip() {
                                return looker(&Token::new(token::OpenDelim(delim), dspan.open));
                            }
                        }
                    }
                }
                None => {
                    // End of the current stream: the next token is the close
                    // delimiter of the enclosing group, if visible.
                    if let Some(last) = self.token_cursor.stack.last()
                        && let Some(&TokenTree::Delimited(span, _, delim, _)) = last.curr()
                        && !delim.skip()
                    {
                        return looker(&Token::new(token::CloseDelim(delim), span.close));
                    }
                }
            }
        }

        // Slow path: clone the cursor and walk forward, skipping invisible
        // delimiters just like `bump` would.
        let mut cursor = self.token_cursor.clone();
        let mut i = 0;
        let mut token = Token::dummy();
        while i < dist {
            token = cursor.next().0;
            if matches!(
                token.kind,
                token::OpenDelim(delim) | token::CloseDelim(delim) if delim.skip()
            ) {
                continue;
            }
            i += 1;
        }
        looker(&token)
    }

    /// Is the token `dist` positions ahead one of the keywords `kws`?
    pub(crate) fn is_keyword_ahead(&self, dist: usize, kws: &[Symbol]) -> bool {
        self.look_ahead(dist, |t| kws.iter().any(|&kw| t.is_keyword(kw)))
    }
1236
    /// Parses an optional `async`, `gen`, or `async gen` coroutine prefix.
    /// `gen` is only recognized when the following span is edition 2024+.
    fn parse_coroutine_kind(&mut self, case: Case) -> Option<CoroutineKind> {
        let span = self.token.uninterpolated_span();
        if self.eat_keyword_case(exp!(Async), case) {
            // `async gen` requires edition 2024; otherwise treat as plain
            // `async` and leave `gen` unconsumed.
            if self.token.uninterpolated_span().at_least_rust_2024()
                && self.eat_keyword_case(exp!(Gen), case)
            {
                let gen_span = self.prev_token.uninterpolated_span();
                Some(CoroutineKind::AsyncGen {
                    span: span.to(gen_span),
                    closure_id: DUMMY_NODE_ID,
                    return_impl_trait_id: DUMMY_NODE_ID,
                })
            } else {
                Some(CoroutineKind::Async {
                    span,
                    closure_id: DUMMY_NODE_ID,
                    return_impl_trait_id: DUMMY_NODE_ID,
                })
            }
        } else if self.token.uninterpolated_span().at_least_rust_2024()
            && self.eat_keyword_case(exp!(Gen), case)
        {
            Some(CoroutineKind::Gen {
                span,
                closure_id: DUMMY_NODE_ID,
                return_impl_trait_id: DUMMY_NODE_ID,
            })
        } else {
            None
        }
    }
1271
    /// Parses an optional `unsafe` or `safe` qualifier.
    fn parse_safety(&mut self, case: Case) -> Safety {
        if self.eat_keyword_case(exp!(Unsafe), case) {
            Safety::Unsafe(self.prev_token.uninterpolated_span())
        } else if self.eat_keyword_case(exp!(Safe), case) {
            Safety::Safe(self.prev_token.uninterpolated_span())
        } else {
            Safety::Default
        }
    }

    /// Parses an optional `const` qualifier (non-closure position).
    fn parse_constness(&mut self, case: Case) -> Const {
        self.parse_constness_(case, false)
    }

    /// Parses an optional `const` qualifier on a closure, gating the
    /// `const_closures` feature when present.
    fn parse_closure_constness(&mut self) -> Const {
        let constness = self.parse_constness_(Case::Sensitive, true);
        if let Const::Yes(span) = constness {
            self.psess.gated_spans.gate(sym::const_closures, span);
        }
        constness
    }

    /// Shared implementation of `parse_constness`/`parse_closure_constness`.
    fn parse_constness_(&mut self, case: Case, is_closure: bool) -> Const {
        // Only eat `const` when it matches the requested position (closure or
        // not) and does not actually begin a const block.
        if (self.check_const_closure() == is_closure)
            && !self
                .look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace) || t.is_whole_block())
            && self.eat_keyword_case(exp!(Const), case)
        {
            Const::Yes(self.prev_token.uninterpolated_span())
        } else {
            Const::No
        }
    }
1309
    /// Parses an inline const block `const { ... }`, starting at the `const`
    /// keyword. `pat` marks pattern position, which is feature-gated.
    fn parse_const_block(&mut self, span: Span, pat: bool) -> PResult<'a, P<Expr>> {
        if pat {
            self.psess.gated_spans.gate(sym::inline_const_pat, span);
        }
        self.expect_keyword(exp!(Const))?;
        let (attrs, blk) = self.parse_inner_attrs_and_block()?;
        let anon_const = AnonConst {
            id: DUMMY_NODE_ID,
            value: self.mk_expr(blk.span, ExprKind::Block(blk, None)),
        };
        let blk_span = anon_const.value.span;
        Ok(self.mk_expr_with_attrs(span.to(blk_span), ExprKind::ConstBlock(anon_const), attrs))
    }
1324
1325 fn parse_mutability(&mut self) -> Mutability {
1327 if self.eat_keyword(exp!(Mut)) { Mutability::Mut } else { Mutability::Not }
1328 }
1329
1330 fn parse_byref(&mut self) -> ByRef {
1332 if self.eat_keyword(exp!(Ref)) { ByRef::Yes(self.parse_mutability()) } else { ByRef::No }
1333 }
1334
1335 fn parse_const_or_mut(&mut self) -> Option<Mutability> {
1337 if self.eat_keyword(exp!(Mut)) {
1338 Some(Mutability::Mut)
1339 } else if self.eat_keyword(exp!(Const)) {
1340 Some(Mutability::Not)
1341 } else {
1342 None
1343 }
1344 }
1345
1346 fn parse_field_name(&mut self) -> PResult<'a, Ident> {
1347 if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = self.token.kind
1348 {
1349 if let Some(suffix) = suffix {
1350 self.expect_no_tuple_index_suffix(self.token.span, suffix);
1351 }
1352 self.bump();
1353 Ok(Ident::new(symbol, self.prev_token.span))
1354 } else {
1355 self.parse_ident_common(true)
1356 }
1357 }
1358
1359 fn parse_delim_args(&mut self) -> PResult<'a, P<DelimArgs>> {
1360 if let Some(args) = self.parse_delim_args_inner() {
1361 Ok(P(args))
1362 } else {
1363 self.unexpected_any()
1364 }
1365 }
1366
1367 fn parse_attr_args(&mut self) -> PResult<'a, AttrArgs> {
1368 Ok(if let Some(args) = self.parse_delim_args_inner() {
1369 AttrArgs::Delimited(args)
1370 } else if self.eat(exp!(Eq)) {
1371 let eq_span = self.prev_token.span;
1372 AttrArgs::Eq { eq_span, expr: self.parse_expr_force_collect()? }
1373 } else {
1374 AttrArgs::Empty
1375 })
1376 }
1377
1378 fn parse_delim_args_inner(&mut self) -> Option<DelimArgs> {
1379 let delimited = self.check(exp!(OpenParen))
1380 || self.check(exp!(OpenBracket))
1381 || self.check(exp!(OpenBrace));
1382
1383 delimited.then(|| {
1384 let TokenTree::Delimited(dspan, _, delim, tokens) = self.parse_token_tree() else {
1385 unreachable!()
1386 };
1387 DelimArgs { dspan, delim, tokens }
1388 })
1389 }
1390
    /// Parses a single token tree: a whole delimited group, or one ordinary
    /// token.
    pub fn parse_token_tree(&mut self) -> TokenTree {
        match self.token.kind {
            token::OpenDelim(..) => {
                // The open delimiter we sit on was produced by descending into
                // a `Delimited` tree, so the tree itself is still under the
                // cursor on top of the stack.
                let tree = self.token_cursor.stack.last().unwrap().curr().unwrap().clone();
                debug_assert_matches!(tree, TokenTree::Delimited(..));

                // Bump until the cursor stack shrinks back to the group's
                // parent depth, i.e. until the matching close delimiter.
                let target_depth = self.token_cursor.stack.len() - 1;
                loop {
                    self.bump();
                    if self.token_cursor.stack.len() == target_depth {
                        debug_assert_matches!(self.token.kind, token::CloseDelim(_));
                        break;
                    }
                }

                // Consume the close delimiter itself.
                self.bump();
                tree
            }
            token::CloseDelim(_) | token::Eof => unreachable!(),
            _ => {
                let prev_spacing = self.token_spacing;
                self.bump();
                TokenTree::Token(self.prev_token.clone(), prev_spacing)
            }
        }
    }
1428
1429 pub fn parse_tokens(&mut self) -> TokenStream {
1430 let mut result = Vec::new();
1431 loop {
1432 match self.token.kind {
1433 token::Eof | token::CloseDelim(..) => break,
1434 _ => result.push(self.parse_token_tree()),
1435 }
1436 }
1437 TokenStream::new(result)
1438 }
1439
1440 fn with_res<T>(&mut self, res: Restrictions, f: impl FnOnce(&mut Self) -> T) -> T {
1444 let old = self.restrictions;
1445 self.restrictions = res;
1446 let res = f(self);
1447 self.restrictions = old;
1448 res
1449 }
1450
    /// Parses a visibility qualifier: nothing (inherited), `pub`,
    /// `pub(crate)`, `pub(super)`, `pub(self)`, or `pub(in path)`.
    ///
    /// `fbt` records whether a type may follow; it controls whether an
    /// unrecognized `pub(...)` form is treated as an error to recover from.
    pub fn parse_visibility(&mut self, fbt: FollowedByType) -> PResult<'a, Visibility> {
        maybe_whole!(self, NtVis, |vis| vis.into_inner());

        if !self.eat_keyword(exp!(Pub)) {
            // No `pub`: inherited visibility, with an empty span just
            // before the current token.
            return Ok(Visibility {
                span: self.token.span.shrink_to_lo(),
                kind: VisibilityKind::Inherited,
                tokens: None,
            });
        }
        let lo = self.prev_token.span;

        if self.check(exp!(OpenParen)) {
            if self.is_keyword_ahead(1, &[kw::In]) {
                // `pub(in path)`: eat `(` and `in`, then the restriction path.
                self.bump(); self.bump(); let path = self.parse_path(PathStyle::Mod)?; self.expect(exp!(CloseParen))?; let vis = VisibilityKind::Restricted {
                    path: P(path),
                    id: ast::DUMMY_NODE_ID,
                    shorthand: false,
                };
                return Ok(Visibility {
                    span: lo.to(self.prev_token.span),
                    kind: vis,
                    tokens: None,
                });
            } else if self.look_ahead(2, |t| t == &token::CloseDelim(Delimiter::Parenthesis))
                && self.is_keyword_ahead(1, &[kw::Crate, kw::Super, kw::SelfLower])
            {
                // Shorthand forms `pub(crate)`, `pub(super)`, `pub(self)`:
                // eat `(`, then parse the single keyword as a path.
                self.bump(); let path = self.parse_path(PathStyle::Mod)?; self.expect(exp!(CloseParen))?; let vis = VisibilityKind::Restricted {
                    path: P(path),
                    id: ast::DUMMY_NODE_ID,
                    shorthand: true,
                };
                return Ok(Visibility {
                    span: lo.to(self.prev_token.span),
                    kind: vis,
                    tokens: None,
                });
            } else if let FollowedByType::No = fbt {
                // `pub(` followed by something else, in a position where no
                // type can follow (so the parens can't start a tuple type):
                // emit an error and consume the bad restriction.
                self.recover_incorrect_vis_restriction()?;
            }
        }

        // Plain `pub` (the `(` — if any — belongs to what follows).
        Ok(Visibility { span: lo, kind: VisibilityKind::Public, tokens: None })
    }
1520
1521 fn recover_incorrect_vis_restriction(&mut self) -> PResult<'a, ()> {
1523 self.bump(); let path = self.parse_path(PathStyle::Mod)?;
1525 self.expect(exp!(CloseParen))?; let path_str = pprust::path_to_string(&path);
1528 self.dcx()
1529 .emit_err(IncorrectVisibilityRestriction { span: path.span, inner_str: path_str });
1530
1531 Ok(())
1532 }
1533
1534 fn parse_extern(&mut self, case: Case) -> Extern {
1536 if self.eat_keyword_case(exp!(Extern), case) {
1537 let mut extern_span = self.prev_token.span;
1538 let abi = self.parse_abi();
1539 if let Some(abi) = abi {
1540 extern_span = extern_span.to(abi.span);
1541 }
1542 Extern::from_abi(abi, extern_span)
1543 } else {
1544 Extern::None
1545 }
1546 }
1547
1548 fn parse_abi(&mut self) -> Option<StrLit> {
1550 match self.parse_str_lit() {
1551 Ok(str_lit) => Some(str_lit),
1552 Err(Some(lit)) => match lit.kind {
1553 ast::LitKind::Err(_) => None,
1554 _ => {
1555 self.dcx().emit_err(NonStringAbiLiteral { span: lit.span });
1556 None
1557 }
1558 },
1559 Err(None) => None,
1560 }
1561 }
1562
    /// Runs `f` and unconditionally collects the tokens it consumed,
    /// without processing any attributes (an empty `AttrWrapper` is passed
    /// and the collected attributes are ignored).
    fn collect_tokens_no_attrs<R: HasAttrs + HasTokens>(
        &mut self,
        f: impl FnOnce(&mut Self) -> PResult<'a, R>,
    ) -> PResult<'a, R> {
        // `ForceCollect::Yes` ensures the tokens are captured even though
        // no attributes are present to require it.
        self.collect_tokens(None, AttrWrapper::empty(), ForceCollect::Yes, |this, _attrs| {
            Ok((f(this)?, Trailing::No, UsePreAttrPos::No))
        })
    }
1573
1574 fn check_path_sep_and_look_ahead(&mut self, looker: impl Fn(&Token) -> bool) -> bool {
1576 if self.check(exp!(PathSep)) {
1577 if self.may_recover() && self.look_ahead(1, |t| t.kind == token::Colon) {
1578 debug_assert!(!self.look_ahead(1, &looker), "Looker must not match on colon");
1579 self.look_ahead(2, looker)
1580 } else {
1581 self.look_ahead(1, looker)
1582 }
1583 } else {
1584 false
1585 }
1586 }
1587
1588 fn is_import_coupler(&mut self) -> bool {
1590 self.check_path_sep_and_look_ahead(|t| {
1591 matches!(t.kind, token::OpenDelim(Delimiter::Brace) | token::BinOp(token::Star))
1592 })
1593 }
1594
1595 #[allow(unused)]
1598 pub(crate) fn debug_lookahead(&self, lookahead: usize) -> impl fmt::Debug + '_ {
1599 fmt::from_fn(move |f| {
1600 let mut dbg_fmt = f.debug_struct("Parser"); dbg_fmt.field("prev_token", &self.prev_token);
1604 let mut tokens = vec![];
1605 for i in 0..lookahead {
1606 let tok = self.look_ahead(i, |tok| tok.kind.clone());
1607 let is_eof = tok == TokenKind::Eof;
1608 tokens.push(tok);
1609 if is_eof {
1610 break;
1612 }
1613 }
1614 dbg_fmt.field_with("tokens", |field| field.debug_list().entries(tokens).finish());
1615 dbg_fmt.field("approx_token_stream_pos", &self.num_bump_calls);
1616
1617 if let Some(subparser) = self.subparser_name {
1619 dbg_fmt.field("subparser_name", &subparser);
1620 }
1621 if let Recovery::Forbidden = self.recovery {
1622 dbg_fmt.field("recovery", &self.recovery);
1623 }
1624
1625 dbg_fmt.finish_non_exhaustive()
1627 })
1628 }
1629
    /// Clears the set of token types recorded as expected at the current
    /// position (used when building "expected X, found Y" diagnostics).
    pub fn clear_expected_token_types(&mut self) {
        self.expected_token_types.clear();
    }
1633
    /// An approximation of the parser's position in the token stream:
    /// the number of `bump` calls made so far.
    pub fn approx_token_stream_pos(&self) -> u32 {
        self.num_bump_calls
    }
1637}
1638
1639pub(crate) fn make_unclosed_delims_error(
1640 unmatched: UnmatchedDelim,
1641 psess: &ParseSess,
1642) -> Option<Diag<'_>> {
1643 let found_delim = unmatched.found_delim?;
1646 let mut spans = vec![unmatched.found_span];
1647 if let Some(sp) = unmatched.unclosed_span {
1648 spans.push(sp);
1649 };
1650 let err = psess.dcx().create_err(MismatchedClosingDelimiter {
1651 spans,
1652 delimiter: pprust::token_kind_to_string(&token::CloseDelim(found_delim)).to_string(),
1653 unmatched: unmatched.found_span,
1654 opening_candidate: unmatched.candidate_span,
1655 unclosed: unmatched.unclosed_span,
1656 });
1657 Some(err)
1658}
1659
// NOTE(review): the producers/consumers of `FlatToken` live outside this
// chunk; the variant descriptions below are drawn from their payload types —
// confirm against the token-collection code (`attr_wrapper`).
#[derive(Debug, Clone)]
enum FlatToken {
    /// A plain token together with its spacing.
    Token((Token, Spacing)),
    /// A grammar item (the attribute target) bundled with its attributes.
    AttrsTarget(AttrsTarget),
    /// A position carrying no token data.
    Empty,
}
1679
/// The result of parsing a nonterminal: either a token-level form
/// (a token tree, identifier, or lifetime) or a fully parsed AST
/// fragment wrapped in a [`Nonterminal`].
#[derive(Clone, Debug)]
pub enum ParseNtResult {
    Tt(TokenTree),
    Ident(Ident, IdentIsRaw),
    Lifetime(Ident, IdentIsRaw),

    /// A parsed AST fragment; `Arc` makes clones of the result cheap.
    Nt(Arc<Nonterminal>),
}