1pub mod attr;
2mod attr_wrapper;
3mod diagnostics;
4mod expr;
5mod generics;
6mod item;
7mod nonterminal;
8mod pat;
9mod path;
10mod stmt;
11pub mod token_type;
12mod ty;
13
14pub mod asm;
17pub mod cfg_select;
18
19use std::assert_matches::debug_assert_matches;
20use std::{fmt, mem, slice};
21
22use attr_wrapper::{AttrWrapper, UsePreAttrPos};
23pub use diagnostics::AttemptLocalParseRecovery;
24pub(crate) use expr::ForbiddenLetReason;
25pub use expr::LetChainsPolicy;
27pub(crate) use item::{FnContext, FnParseMode};
28pub use pat::{CommaRecoveryMode, RecoverColon, RecoverComma};
29pub use path::PathStyle;
30use rustc_ast::token::{
31 self, IdentIsRaw, InvisibleOrigin, MetaVarKind, NtExprKind, NtPatKind, Token, TokenKind,
32};
33use rustc_ast::tokenstream::{
34 ParserRange, ParserReplacement, Spacing, TokenCursor, TokenStream, TokenTree, TokenTreeCursor,
35};
36use rustc_ast::util::case::Case;
37use rustc_ast::{
38 self as ast, AnonConst, AttrArgs, AttrId, ByRef, Const, CoroutineKind, DUMMY_NODE_ID,
39 DelimArgs, Expr, ExprKind, Extern, HasAttrs, HasTokens, Mutability, Recovered, Safety, StrLit,
40 Visibility, VisibilityKind,
41};
42use rustc_ast_pretty::pprust;
43use rustc_data_structures::fx::FxHashMap;
44use rustc_errors::{Applicability, Diag, FatalError, MultiSpan, PResult};
45use rustc_index::interval::IntervalSet;
46use rustc_session::parse::ParseSess;
47use rustc_span::{Ident, Span, Symbol, kw, sym};
48use thin_vec::ThinVec;
49use token_type::TokenTypeSet;
50pub use token_type::{ExpKeywordPair, ExpTokenPair, TokenType};
51use tracing::debug;
52
53use crate::errors::{self, IncorrectVisibilityRestriction, NonStringAbiLiteral};
54use crate::exp;
55
56#[cfg(test)]
57mod tests;
58
59#[cfg(test)]
62mod tokenstream {
63 mod tests;
64}
65
bitflags::bitflags! {
    /// Restrictions applied while parsing, threaded through recursive calls
    /// via [`Parser::with_res`] (stored in `Parser::restrictions`).
    #[derive(Clone, Copy, Debug)]
    struct Restrictions: u8 {
        /// Parsing an expression in statement position.
        const STMT_EXPR = 1 << 0;
        /// Struct literals are not allowed here (e.g. in an `if` condition).
        const NO_STRUCT_LITERAL = 1 << 1;
        /// Parsing inside a const context (e.g. a const generic argument).
        const CONST_EXPR = 1 << 2;
        /// `let` expressions are permitted at this position.
        const ALLOW_LET = 1 << 3;
        /// Parsing the guard of a `match` arm.
        const IN_IF_GUARD = 1 << 4;
        /// Parsing a pattern rather than an expression.
        const IS_PAT = 1 << 5;
    }
}
126
/// How statement-level error recovery should treat separator tokens
/// (used by recovery routines in `diagnostics`/`stmt`, not shown here).
#[derive(Clone, Copy, PartialEq, Debug)]
enum SemiColonMode {
    /// Stop recovery when a `;` is reached.
    Break,
    /// Skip over `;` tokens while recovering.
    Ignore,
    /// Treat a `,` as the stopping separator (e.g. inside match arms).
    Comma,
}
133
/// How error recovery should behave with respect to block boundaries
/// (used by recovery routines elsewhere in the parser).
#[derive(Clone, Copy, PartialEq, Debug)]
enum BlockMode {
    /// Stop at a closing brace.
    Break,
    /// Skip past closing braces while recovering.
    Ignore,
}
139
/// Whether token capture/collection must happen regardless of whether the
/// parsed node has attributes that would otherwise require it.
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ForceCollect {
    Yes,
    No,
}
147
/// If the current token is a `ty` metavariable sequence followed by `::`
/// (i.e. the token just past the sequence's invisible close delimiter is a
/// path separator), reparse the type and attempt qualified-path recovery,
/// returning early from the *enclosing* function with its result. Only runs
/// when `$allow_qpath_recovery` holds and the parser permits recovery.
#[macro_export]
macro_rules! maybe_recover_from_interpolated_ty_qpath {
    ($self: expr, $allow_qpath_recovery: expr) => {
        if $allow_qpath_recovery
            && $self.may_recover()
            // Current token must be a `ty` metavariable sequence...
            && let Some(mv_kind) = $self.token.is_metavar_seq()
            && let token::MetaVarKind::Ty { .. } = mv_kind
            // ...followed by `::` after its close delimiter.
            && $self.check_noexpect_past_close_delim(&token::PathSep)
        {
            // Reparse the type from the metavariable sequence; this must
            // succeed since the sequence is known to contain a type.
            let ty = $self
                .eat_metavar_seq(mv_kind, |this| this.parse_ty_no_question_mark_recover())
                .expect("metavar seq ty");

            return $self.maybe_recover_from_bad_qpath_stage_2($self.prev_token.span, ty);
        }
    };
}
167
/// Whether the parser may attempt error recovery; checked via
/// [`Parser::may_recover`] and toggled with [`Parser::with_recovery`].
#[derive(Clone, Copy, Debug)]
pub enum Recovery {
    Allowed,
    Forbidden,
}
173
/// The Rust parser: a cursor over a token stream plus the state needed for
/// parsing, token capturing, and error recovery.
#[derive(Clone)]
pub struct Parser<'a> {
    pub psess: &'a ParseSess,
    /// The current token.
    pub token: Token,
    /// The spacing of the current token relative to the following one.
    token_spacing: Spacing,
    /// The token most recently bumped past.
    pub prev_token: Token,
    pub capture_cfg: bool,
    restrictions: Restrictions,
    /// Token types expected at the current position, accumulated by the
    /// various `check*`/`eat*` methods for "expected one of ..." errors.
    /// Cleared on every bump.
    expected_token_types: TokenTypeSet,
    token_cursor: TokenCursor,
    /// Number of `bump` calls so far; doubles as a parser position.
    num_bump_calls: u32,
    /// Incremented when `break_and_eat` splits a compound token (e.g. `>>`
    /// into `>` `>`); reset to 0 by the next `bump`.
    break_last_token: u32,
    /// `<` tokens eaten without a matching `>`, for angle-bracket diagnostics.
    unmatched_angle_bracket_count: u16,
    angle_bracket_nesting: u16,

    last_unexpected_token_span: Option<Span>,
    /// Name of the construct being parsed when this parser operates on a
    /// sub-stream of tokens; used in diagnostics.
    subparser_name: Option<&'static str>,
    capture_state: CaptureState,
    /// Spans of the most recently parsed closure, kept for
    /// missing-braces-around-closure-body recovery.
    current_closure: Option<ClosureSpans>,
    /// Whether error recovery is currently permitted.
    recovery: Recovery,
}
227
// Compile-time check that `Parser` stays at its expected size on common
// 64-bit targets, guarding against accidental growth.
#[cfg(all(target_pointer_width = "64", any(target_arch = "aarch64", target_arch = "x86_64")))]
rustc_data_structures::static_assert_size!(Parser<'_>, 288);
233
/// Spans recorded for a parsed closure, used by
/// `recover_missing_braces_around_closure_body`.
#[derive(Clone, Debug)]
struct ClosureSpans {
    /// The whole closure, header and body.
    whole_closure: Span,
    /// The `|` closing the parameter list.
    closing_pipe: Span,
    /// The closure body expression.
    body: Span,
}
241
/// Whether token capturing is currently in progress.
#[derive(Copy, Clone, Debug)]
enum Capturing {
    /// Not capturing tokens.
    No,
    /// Capturing tokens.
    Yes,
}
252
/// State used while capturing tokens for attribute targets
/// (see `attr_wrapper` for how captures are assembled).
#[derive(Clone, Debug)]
struct CaptureState {
    capturing: Capturing,
    /// Replacements to apply to captured parser ranges.
    parser_replacements: Vec<ParserReplacement>,
    /// Parser range covering each inner attribute, keyed by attribute id.
    inner_attr_parser_ranges: FxHashMap<AttrId, ParserRange>,
    /// Attribute ids already seen, to avoid processing one twice.
    seen_attrs: IntervalSet<AttrId>,
}
263
/// Separator specification for a parsed sequence, e.g. the comma in
/// `(a, b, c)`.
#[derive(Debug)]
struct SeqSep {
    /// The separator token, if the sequence has one.
    sep: Option<ExpTokenPair>,
    /// Whether a trailing separator before the closing token is allowed.
    trailing_sep_allowed: bool,
}
272
273impl SeqSep {
274 fn trailing_allowed(sep: ExpTokenPair) -> SeqSep {
275 SeqSep { sep: Some(sep), trailing_sep_allowed: true }
276 }
277
278 fn none() -> SeqSep {
279 SeqSep { sep: None, trailing_sep_allowed: false }
280 }
281}
282
/// Whether a visibility is syntactically followed by a type; when it is
/// not, [`Parser::parse_visibility`] may attempt `pub(...)` restriction
/// recovery.
#[derive(Debug)]
pub enum FollowedByType {
    Yes,
    No,
}
288
/// Whether a parsed sequence ended with a trailing separator.
#[derive(Copy, Clone, Debug)]
pub enum Trailing {
    No,
    Yes,
}
294
295impl From<bool> for Trailing {
296 fn from(b: bool) -> Trailing {
297 if b { Trailing::Yes } else { Trailing::No }
298 }
299}
300
/// The category of a token, used to phrase diagnostics like
/// "expected identifier, found keyword `fn`" (see [`token_descr`]).
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub(super) enum TokenDescription {
    ReservedIdentifier,
    Keyword,
    ReservedKeyword,
    DocComment,

    /// An invisible-delimiter-wrapped metavariable expansion.
    MetaVar(MetaVarKind),
}
314
315impl TokenDescription {
316 pub(super) fn from_token(token: &Token) -> Option<Self> {
317 match token.kind {
318 _ if token.is_special_ident() => Some(TokenDescription::ReservedIdentifier),
319 _ if token.is_used_keyword() => Some(TokenDescription::Keyword),
320 _ if token.is_unused_keyword() => Some(TokenDescription::ReservedKeyword),
321 token::DocComment(..) => Some(TokenDescription::DocComment),
322 token::OpenInvisible(InvisibleOrigin::MetaVar(kind)) => {
323 Some(TokenDescription::MetaVar(kind))
324 }
325 _ => None,
326 }
327 }
328}
329
/// Renders `token` for use in a diagnostic, e.g. "keyword `fn`",
/// "doc comment `/// x`", or plain "`;`", phrased by category.
pub fn token_descr(token: &Token) -> String {
    let s = pprust::token_to_string(token).to_string();

    match (TokenDescription::from_token(token), &token.kind) {
        (Some(TokenDescription::ReservedIdentifier), _) => format!("reserved identifier `{s}`"),
        (Some(TokenDescription::Keyword), _) => format!("keyword `{s}`"),
        (Some(TokenDescription::ReservedKeyword), _) => format!("reserved keyword `{s}`"),
        (Some(TokenDescription::DocComment), _) => format!("doc comment `{s}`"),
        (Some(TokenDescription::MetaVar(kind)), _) => format!("`{kind}` metavariable"),
        // Tokens with no special category but a notable kind.
        (None, TokenKind::NtIdent(..)) => format!("identifier `{s}`"),
        (None, TokenKind::NtLifetime(..)) => format!("lifetime `{s}`"),
        (None, _) => format!("`{s}`"),
    }
}
345
346impl<'a> Parser<'a> {
    /// Creates a parser over `stream`.
    ///
    /// `subparser_name` names the construct being parsed when this parser
    /// operates on a sub-stream of tokens; it is used in diagnostics.
    pub fn new(
        psess: &'a ParseSess,
        stream: TokenStream,
        subparser_name: Option<&'static str>,
    ) -> Self {
        let mut parser = Parser {
            psess,
            token: Token::dummy(),
            token_spacing: Spacing::Alone,
            prev_token: Token::dummy(),
            capture_cfg: false,
            restrictions: Restrictions::empty(),
            expected_token_types: TokenTypeSet::new(),
            token_cursor: TokenCursor { curr: TokenTreeCursor::new(stream), stack: Vec::new() },
            num_bump_calls: 0,
            break_last_token: 0,
            unmatched_angle_bracket_count: 0,
            angle_bracket_nesting: 0,
            last_unexpected_token_span: None,
            subparser_name,
            capture_state: CaptureState {
                capturing: Capturing::No,
                parser_replacements: Vec::new(),
                inner_attr_parser_ranges: Default::default(),
                seen_attrs: IntervalSet::new(u32::MAX as usize),
            },
            current_closure: None,
            recovery: Recovery::Allowed,
        };

        // Make `parser.token` point to the first token of the stream.
        parser.bump();

        // That initial bump should not count as a parser position change.
        parser.num_bump_calls = 0;

        parser
    }
387
388 #[inline]
389 pub fn recovery(mut self, recovery: Recovery) -> Self {
390 self.recovery = recovery;
391 self
392 }
393
394 #[inline]
395 fn with_recovery<T>(&mut self, recovery: Recovery, f: impl FnOnce(&mut Self) -> T) -> T {
396 let old = mem::replace(&mut self.recovery, recovery);
397 let res = f(self);
398 self.recovery = old;
399 res
400 }
401
402 #[inline]
410 fn may_recover(&self) -> bool {
411 matches!(self.recovery, Recovery::Allowed)
412 }
413
    /// Reports an "unexpected token" error at the current token and returns
    /// `Err`. The generic return type lets callers use it in any position.
    pub fn unexpected_any<T>(&mut self) -> PResult<'a, T> {
        match self.expect_one_of(&[], &[]) {
            Err(e) => Err(e),
            // With no expected tokens supplied, `expect_one_of` can only
            // return `Ok` by recovering; there is no `T` to produce here,
            // so abort compilation.
            Ok(_) => FatalError.raise(),
        }
    }
424
    /// Like [`Parser::unexpected_any`], fixed to the `()` return type for
    /// use in statement position.
    pub fn unexpected(&mut self) -> PResult<'a, ()> {
        self.unexpected_any()
    }
428
    /// Expects and consumes the token `exp.tok`. Signals an error (possibly
    /// recovering from it) if the current token differs.
    pub fn expect(&mut self, exp: ExpTokenPair) -> PResult<'a, Recovered> {
        // Fast path: no other token types were recorded as expected at this
        // position, so skip the general `expect_one_of` machinery.
        if self.expected_token_types.is_empty() {
            if self.token == exp.tok {
                self.bump();
                Ok(Recovered::No)
            } else {
                self.unexpected_try_recover(&exp.tok)
            }
        } else {
            self.expect_one_of(slice::from_ref(&exp), &[])
        }
    }
442
    /// Expects the next token to be one of `edible` (consumed on match) or
    /// `inedible` (matched but deliberately left in place for the caller).
    /// Otherwise attempts recovery or reports an error.
    fn expect_one_of(
        &mut self,
        edible: &[ExpTokenPair],
        inedible: &[ExpTokenPair],
    ) -> PResult<'a, Recovered> {
        if edible.iter().any(|exp| exp.tok == self.token.kind) {
            self.bump();
            Ok(Recovered::No)
        } else if inedible.iter().any(|exp| exp.tok == self.token.kind) {
            // Leave the token in place; the caller will consume it.
            Ok(Recovered::No)
        } else if self.token != token::Eof
            && self.last_unexpected_token_span == Some(self.token.span)
        {
            // We already errored on this exact token; avoid a duplicate
            // report and bail out of compilation instead.
            FatalError.raise();
        } else {
            self.expected_one_of_not_found(edible, inedible)
                .map(|error_guaranteed| Recovered::Yes(error_guaranteed))
        }
    }
466
    /// Parses an identifier, erroring (with recovery when permitted) if the
    /// current token is not a usable identifier.
    pub fn parse_ident(&mut self) -> PResult<'a, Ident> {
        self.parse_ident_common(self.may_recover())
    }
471
472 fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, Ident> {
473 let (ident, is_raw) = self.ident_or_err(recover)?;
474
475 if matches!(is_raw, IdentIsRaw::No) && ident.is_reserved() {
476 let err = self.expected_ident_found_err();
477 if recover {
478 err.emit();
479 } else {
480 return Err(err);
481 }
482 }
483 self.bump();
484 Ok(ident)
485 }
486
487 fn ident_or_err(&mut self, recover: bool) -> PResult<'a, (Ident, IdentIsRaw)> {
488 match self.token.ident() {
489 Some(ident) => Ok(ident),
490 None => self.expected_ident_found(recover),
491 }
492 }
493
494 #[inline]
499 pub fn check(&mut self, exp: ExpTokenPair) -> bool {
500 let is_present = self.token == exp.tok;
501 if !is_present {
502 self.expected_token_types.insert(exp.token_type);
503 }
504 is_present
505 }
506
    /// Like `check`, but does not record `tok` as an expected token type,
    /// so it never shows up in "expected one of ..." diagnostics.
    #[inline]
    #[must_use]
    fn check_noexpect(&self, tok: &TokenKind) -> bool {
        self.token == *tok
    }
512
    /// Like `check_noexpect`, but tests the token *after* the close
    /// delimiter of the delimited group the parser is currently inside.
    ///
    /// Clones the parent tree cursor and bumps it past the current group.
    /// Panics (`unwrap`) if the parser is not inside any delimited group.
    fn check_noexpect_past_close_delim(&self, tok: &TokenKind) -> bool {
        let mut tree_cursor = self.token_cursor.stack.last().unwrap().clone();
        tree_cursor.bump();
        matches!(
            tree_cursor.curr(),
            Some(TokenTree::Token(token::Token { kind, .. }, _)) if kind == tok
        )
    }
529
530 #[inline]
535 #[must_use]
536 fn eat_noexpect(&mut self, tok: &TokenKind) -> bool {
537 let is_present = self.check_noexpect(tok);
538 if is_present {
539 self.bump()
540 }
541 is_present
542 }
543
544 #[inline]
546 #[must_use]
547 pub fn eat(&mut self, exp: ExpTokenPair) -> bool {
548 let is_present = self.check(exp);
549 if is_present {
550 self.bump()
551 }
552 is_present
553 }
554
555 #[inline]
558 #[must_use]
559 fn check_keyword(&mut self, exp: ExpKeywordPair) -> bool {
560 let is_keyword = self.token.is_keyword(exp.kw);
561 if !is_keyword {
562 self.expected_token_types.insert(exp.token_type);
563 }
564 is_keyword
565 }
566
    /// Like `check_keyword`, but when `case == Case::Insensitive` also
    /// accepts a non-raw identifier that matches the keyword ignoring ASCII
    /// case. Does not consume anything and emits no error here.
    #[inline]
    #[must_use]
    fn check_keyword_case(&mut self, exp: ExpKeywordPair, case: Case) -> bool {
        if self.check_keyword(exp) {
            true
        } else if case == Case::Insensitive
            && let Some((ident, IdentIsRaw::No)) = self.token.ident()
            // Compare textually, ignoring ASCII case.
            && ident.as_str().eq_ignore_ascii_case(exp.kw.as_str())
        {
            true
        } else {
            false
        }
    }
582
583 #[inline]
587 #[must_use]
588 pub fn eat_keyword(&mut self, exp: ExpKeywordPair) -> bool {
589 let is_keyword = self.check_keyword(exp);
590 if is_keyword {
591 self.bump();
592 }
593 is_keyword
594 }
595
    /// Like `eat_keyword`, but when `case == Case::Insensitive` also eats a
    /// wrongly-cased (non-raw) spelling of the keyword, emitting a
    /// `KwBadCase` error describing the casing problem.
    #[inline]
    #[must_use]
    fn eat_keyword_case(&mut self, exp: ExpKeywordPair, case: Case) -> bool {
        if self.eat_keyword(exp) {
            true
        } else if case == Case::Insensitive
            && let Some((ident, IdentIsRaw::No)) = self.token.ident()
            && ident.as_str().eq_ignore_ascii_case(exp.kw.as_str())
        {
            // Classify the expected keyword's casing to phrase the error.
            let kw = exp.kw.as_str();
            let is_upper = kw.chars().all(char::is_uppercase);
            let is_lower = kw.chars().all(char::is_lowercase);

            let case = match (is_upper, is_lower) {
                (true, true) => {
                    // Only possible for an empty keyword, which cannot occur.
                    unreachable!("keyword that is both fully upper- and fully lowercase")
                }
                (true, false) => errors::Case::Upper,
                (false, true) => errors::Case::Lower,
                (false, false) => errors::Case::Mixed,
            };

            self.dcx().emit_err(errors::KwBadCase { span: ident.span, kw, case });
            self.bump();
            true
        } else {
            false
        }
    }
629
630 #[inline]
634 #[must_use]
635 pub fn eat_keyword_noexpect(&mut self, kw: Symbol) -> bool {
636 let is_keyword = self.token.is_keyword(kw);
637 if is_keyword {
638 self.bump();
639 }
640 is_keyword
641 }
642
643 pub fn expect_keyword(&mut self, exp: ExpKeywordPair) -> PResult<'a, ()> {
647 if !self.eat_keyword(exp) { self.unexpected() } else { Ok(()) }
648 }
649
    /// If the current token opens a metavariable sequence of exactly
    /// `mv_kind`, reparses its contents with `f`; returns `None` otherwise.
    pub fn eat_metavar_seq<T>(
        &mut self,
        mv_kind: MetaVarKind,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> Option<T> {
        self.eat_metavar_seq_with_matcher(|mvk| mvk == mv_kind, f)
    }
658
659 fn eat_metavar_seq_with_matcher<T>(
663 &mut self,
664 match_mv_kind: impl Fn(MetaVarKind) -> bool,
665 mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
666 ) -> Option<T> {
667 if let token::OpenInvisible(InvisibleOrigin::MetaVar(mv_kind)) = self.token.kind
668 && match_mv_kind(mv_kind)
669 {
670 self.bump();
671
672 let res = self.with_recovery(Recovery::Forbidden, |this| f(this));
676
677 let res = match res {
678 Ok(res) => res,
679 Err(err) => {
680 err.delay_as_bug();
682 return None;
683 }
684 };
685
686 if let token::CloseInvisible(InvisibleOrigin::MetaVar(mv_kind)) = self.token.kind
687 && match_mv_kind(mv_kind)
688 {
689 self.bump();
690 Some(res)
691 } else {
692 self.dcx()
696 .span_delayed_bug(self.token.span, "no close delim with reparsing {mv_kind:?}");
697 None
698 }
699 } else {
700 None
701 }
702 }
703
    /// Whether the current token is the keyword `kw` followed by a
    /// non-reserved identifier.
    fn is_kw_followed_by_ident(&self, kw: Symbol) -> bool {
        self.token.is_keyword(kw) && self.look_ahead(1, |t| t.is_non_reserved_ident())
    }
708
709 #[inline]
710 fn check_or_expected(&mut self, ok: bool, token_type: TokenType) -> bool {
711 if !ok {
712 self.expected_token_types.insert(token_type);
713 }
714 ok
715 }
716
    /// Checks for an identifier, recording `TokenType::Ident` on mismatch.
    fn check_ident(&mut self) -> bool {
        self.check_or_expected(self.token.is_ident(), TokenType::Ident)
    }

    /// Checks for the start of a path, recording `TokenType::Path` on mismatch.
    fn check_path(&mut self) -> bool {
        self.check_or_expected(self.token.is_path_start(), TokenType::Path)
    }

    /// Checks for the start of a type, recording `TokenType::Type` on mismatch.
    fn check_type(&mut self) -> bool {
        self.check_or_expected(self.token.can_begin_type(), TokenType::Type)
    }

    /// Checks for the start of a const argument, recording `TokenType::Const`
    /// on mismatch.
    fn check_const_arg(&mut self) -> bool {
        self.check_or_expected(self.token.can_begin_const_arg(), TokenType::Const)
    }
732
    /// Whether the upcoming tokens look like the start of a `const`
    /// closure: `const` followed by `move`/`use`/`static`, `||`, or `|`.
    fn check_const_closure(&self) -> bool {
        self.is_keyword_ahead(0, &[kw::Const])
            && self.look_ahead(1, |t| match &t.kind {
                // Raw `r#move`/`r#use`/`r#static` do not count.
                token::Ident(kw::Move | kw::Use | kw::Static, IdentIsRaw::No)
                | token::OrOr
                | token::Or => true,
                _ => false,
            })
    }
743
    /// Whether the token `dist` tokens ahead begins an inline const
    /// expression: `const` followed by `{` (or a `block` metavariable).
    fn check_inline_const(&self, dist: usize) -> bool {
        self.is_keyword_ahead(dist, &[kw::Const])
            && self.look_ahead(dist + 1, |t| match &t.kind {
                token::OpenBrace => true,
                token::OpenInvisible(InvisibleOrigin::MetaVar(MetaVarKind::Block)) => true,
                _ => false,
            })
    }
752
    /// Checks for a `+`-like token (one that can act as `+`), recording
    /// `TokenType::Plus` on mismatch.
    #[inline]
    fn check_plus(&mut self) -> bool {
        self.check_or_expected(self.token.is_like_plus(), TokenType::Plus)
    }
759
    /// Eats the expected token `exp.tok` if possible, splitting a compound
    /// token when necessary (e.g. eating `>` out of `>>`, leaving the
    /// second `>` as the current token). Returns whether `exp.tok` was eaten.
    fn break_and_eat(&mut self, exp: ExpTokenPair) -> bool {
        if self.token == exp.tok {
            self.bump();
            return true;
        }
        match self.token.kind.break_two_token_op(1) {
            Some((first, second)) if first == exp.tok => {
                // Split the span: `first` covers the first character,
                // `second` the remainder of the compound token.
                let first_span = self.psess.source_map().start_point(self.token.span);
                let second_span = self.token.span.with_lo(first_span.hi());
                self.token = Token::new(first, first_span);
                // Record the split so token capturing can reconstruct the
                // original compound token; reset to 0 by the next `bump`.
                self.break_last_token += 1;
                // Keep the compound token's spacing for the second half.
                self.bump_with((Token::new(second, second_span), self.token_spacing));
                true
            }
            _ => {
                self.expected_token_types.insert(exp.token_type);
                false
            }
        }
    }
791
    /// Eats `+`, possibly splitting it out of a compound token (e.g. `+=`).
    fn eat_plus(&mut self) -> bool {
        self.break_and_eat(exp!(Plus))
    }

    /// Expects and consumes `&`, splitting it out of `&&` if needed;
    /// errors otherwise.
    fn expect_and(&mut self) -> PResult<'a, ()> {
        if self.break_and_eat(exp!(And)) { Ok(()) } else { self.unexpected() }
    }

    /// Expects and consumes `|`, splitting it out of `||` if needed;
    /// errors otherwise.
    fn expect_or(&mut self) -> PResult<'a, ()> {
        if self.break_and_eat(exp!(Or)) { Ok(()) } else { self.unexpected() }
    }
808
    /// Eats `<`, possibly splitting it out of a compound token (e.g. `<<`),
    /// and tracks it in `unmatched_angle_bracket_count` for diagnostics.
    fn eat_lt(&mut self) -> bool {
        let ate = self.break_and_eat(exp!(Lt));
        if ate {
            // One more `<` is now awaiting its matching `>`.
            self.unmatched_angle_bracket_count += 1;
            debug!("eat_lt: (increment) count={:?}", self.unmatched_angle_bracket_count);
        }
        ate
    }

    /// Expects and consumes `<`; errors otherwise.
    fn expect_lt(&mut self) -> PResult<'a, ()> {
        if self.eat_lt() { Ok(()) } else { self.unexpected() }
    }

    /// Expects and consumes `>`, splitting it out of a compound token
    /// (e.g. `>>`) if needed; errors otherwise.
    fn expect_gt(&mut self) -> PResult<'a, ()> {
        if self.break_and_eat(exp!(Gt)) {
            // Balance the bookkeeping done by `eat_lt`, guarding against
            // underflow when a stray `>` is consumed.
            if self.unmatched_angle_bracket_count > 0 {
                self.unmatched_angle_bracket_count -= 1;
                debug!("expect_gt: (decrement) count={:?}", self.unmatched_angle_bracket_count);
            }
            Ok(())
        } else {
            self.unexpected()
        }
    }
840
    /// Whether the current token matches any of `closes_expected`
    /// (each recorded as an expected token type) or any of
    /// `closes_not_expected` (not recorded).
    fn expect_any_with_type(
        &mut self,
        closes_expected: &[ExpTokenPair],
        closes_not_expected: &[&TokenKind],
    ) -> bool {
        closes_expected.iter().any(|&close| self.check(close))
            || closes_not_expected.iter().any(|k| self.check_noexpect(k))
    }
850
    /// Parses a sequence of elements with `f`, separated per `sep`, until
    /// one of the closing tokens is reached (the close token itself is not
    /// consumed). Returns the elements, whether the sequence ended with a
    /// trailing separator, and whether separator recovery occurred.
    fn parse_seq_to_before_tokens<T>(
        &mut self,
        closes_expected: &[ExpTokenPair],
        closes_not_expected: &[&TokenKind],
        sep: SeqSep,
        mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, Trailing, Recovered)> {
        let mut first = true;
        let mut recovered = Recovered::No;
        let mut trailing = Trailing::No;
        let mut v = ThinVec::new();

        while !self.expect_any_with_type(closes_expected, closes_not_expected) {
            // Stop at any close delimiter or EOF, even an unexpected one.
            if self.token.kind.is_close_delim_or_eof() {
                break;
            }
            if let Some(exp) = sep.sep {
                if first {
                    // No separator before the first element.
                    first = false;
                } else {
                    match self.expect(exp) {
                        Ok(Recovered::No) => {
                            self.current_closure.take();
                        }
                        Ok(Recovered::Yes(guar)) => {
                            self.current_closure.take();
                            recovered = Recovered::Yes(guar);
                            break;
                        }
                        Err(mut expect_err) => {
                            let sp = self.prev_token.span.shrink_to_hi();
                            let token_str = pprust::token_kind_to_string(&exp.tok);

                            match self.current_closure.take() {
                                // A `;` right after a closure likely means the
                                // closure body was missing braces; recover.
                                Some(closure_spans) if self.token == TokenKind::Semi => {
                                    self.recover_missing_braces_around_closure_body(
                                        closure_spans,
                                        expect_err,
                                    )?;

                                    continue;
                                }

                                _ => {
                                    // A token similar to the separator (e.g.
                                    // `;` for `,`) was probably a typo: skip it.
                                    if exp.tok.similar_tokens().contains(&self.token.kind) {
                                        self.bump();
                                    }
                                }
                            }

                            // `ident..` in an array pattern probably meant
                            // `ident @ ..`; suggest that and stop.
                            if self.prev_token.is_ident() && self.token == token::DotDot {
                                let msg = format!(
                                    "if you meant to bind the contents of the rest of the array \
                                     pattern into `{}`, use `@`",
                                    pprust::token_to_string(&self.prev_token)
                                );
                                expect_err
                                    .with_span_suggestion_verbose(
                                        self.prev_token.span.shrink_to_hi().until(self.token.span),
                                        msg,
                                        " @ ",
                                        Applicability::MaybeIncorrect,
                                    )
                                    .emit();
                                break;
                            }

                            // Try parsing the next element as if the
                            // separator were merely missing.
                            self.last_unexpected_token_span = None;
                            match f(self) {
                                Ok(t) => {
                                    // Parsing succeeded: the separator really
                                    // was missing; suggest inserting it.
                                    expect_err
                                        .with_span_suggestion_short(
                                            sp,
                                            format!("missing `{token_str}`"),
                                            token_str,
                                            Applicability::MaybeIncorrect,
                                        )
                                        .emit();

                                    v.push(t);
                                    continue;
                                }
                                Err(e) => {
                                    // Keep the element error's sub-diagnostics
                                    // on the separator error, then drop it.
                                    for xx in &e.children {
                                        expect_err.children.push(xx.clone());
                                    }
                                    e.cancel();
                                    if self.token == token::Colon {
                                        // Propagate so outer recovery (e.g.
                                        // struct-literal paths) can run.
                                        return Err(expect_err);
                                    } else if let [exp] = closes_expected
                                        && exp.token_type == TokenType::CloseParen
                                    {
                                        return Err(expect_err);
                                    } else {
                                        expect_err.emit();
                                        break;
                                    }
                                }
                            }
                        }
                    }
                }
            }
            // A close token right after a separator is a trailing separator.
            if sep.trailing_sep_allowed
                && self.expect_any_with_type(closes_expected, closes_not_expected)
            {
                trailing = Trailing::Yes;
                break;
            }

            let t = f(self)?;
            v.push(t);
        }

        Ok((v, trailing, recovered))
    }
991
    /// Recovers from a closure body containing statements without braces,
    /// e.g. `|x| x + 1; do_more(x)`. Skips the statements following the
    /// `;`, then turns `expect_err` into a rich "add braces" diagnostic.
    fn recover_missing_braces_around_closure_body(
        &mut self,
        closure_spans: ClosureSpans,
        mut expect_err: Diag<'_>,
    ) -> PResult<'a, ()> {
        let initial_semicolon = self.token.span;

        // Consume the statements that were (incorrectly) placed after the
        // closure, ignoring any errors they produce.
        while self.eat(exp!(Semi)) {
            let _ = self
                .parse_stmt_without_recovery(false, ForceCollect::No, false)
                .unwrap_or_else(|e| {
                    e.cancel();
                    None
                });
        }

        expect_err
            .primary_message("closure bodies that contain statements must be surrounded by braces");

        let preceding_pipe_span = closure_spans.closing_pipe;
        let following_token_span = self.token.span;

        // First note: why the parse went wrong (the `;` ended the closure).
        let mut first_note = MultiSpan::from(vec![initial_semicolon]);
        first_note.push_span_label(
            initial_semicolon,
            "this `;` turns the preceding closure into a statement",
        );
        first_note.push_span_label(
            closure_spans.body,
            "this expression is a statement because of the trailing semicolon",
        );
        expect_err.span_note(first_note, "statement found outside of a block");

        // Second note: what was parsed versus what was probably intended.
        let mut second_note = MultiSpan::from(vec![closure_spans.whole_closure]);
        second_note.push_span_label(closure_spans.whole_closure, "this is the parsed closure...");
        second_note.push_span_label(
            following_token_span,
            "...but likely you meant the closure to end here",
        );
        expect_err.span_note(second_note, "the closure body may be incorrectly delimited");

        // Point the primary spans at the suggested brace positions.
        expect_err.span(vec![preceding_pipe_span, following_token_span]);

        let opening_suggestion_str = " {".to_string();
        let closing_suggestion_str = "}".to_string();

        expect_err.multipart_suggestion(
            "try adding braces",
            vec![
                (preceding_pipe_span.shrink_to_hi(), opening_suggestion_str),
                (following_token_span.shrink_to_lo(), closing_suggestion_str),
            ],
            Applicability::MaybeIncorrect,
        );

        expect_err.emit();

        Ok(())
    }
1051
    /// Parses a `sep`-separated sequence up to (not consuming) the single
    /// close token `close`.
    fn parse_seq_to_before_end<T>(
        &mut self,
        close: ExpTokenPair,
        sep: SeqSep,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, Trailing, Recovered)> {
        self.parse_seq_to_before_tokens(&[close], &[], sep, f)
    }

    /// Parses a `sep`-separated sequence and consumes the closing token.
    fn parse_seq_to_end<T>(
        &mut self,
        close: ExpTokenPair,
        sep: SeqSep,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
        let (val, trailing, recovered) = self.parse_seq_to_before_end(close, sep, f)?;
        // Without recovery, the loop above must have stopped at the close
        // token; failing to eat it here indicates a parser bug.
        if matches!(recovered, Recovered::No) && !self.eat(close) {
            self.dcx().span_delayed_bug(
                self.token.span,
                "recovered but `parse_seq_to_before_end` did not give us the close token",
            );
        }
        Ok((val, trailing))
    }

    /// Parses a sequence delimited by `open` and `close`, consuming both
    /// delimiters.
    fn parse_unspanned_seq<T>(
        &mut self,
        open: ExpTokenPair,
        close: ExpTokenPair,
        sep: SeqSep,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
        self.expect(open)?;
        self.parse_seq_to_end(close, sep, f)
    }

    /// Parses a comma-separated, delimited sequence; trailing commas allowed.
    fn parse_delim_comma_seq<T>(
        &mut self,
        open: ExpTokenPair,
        close: ExpTokenPair,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
        self.parse_unspanned_seq(open, close, SeqSep::trailing_allowed(exp!(Comma)), f)
    }

    /// Parses a comma-separated sequence in parentheses, e.g. `(a, b, c)`.
    pub fn parse_paren_comma_seq<T>(
        &mut self,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
        self.parse_delim_comma_seq(exp!(OpenParen), exp!(CloseParen), f)
    }
1118
    /// Advances to `next`, a token that did not come from the normal token
    /// cursor (e.g. the second half of a split compound token).
    fn bump_with(&mut self, next: (Token, Spacing)) {
        self.inlined_bump_with(next)
    }

    /// `bump_with`, always inlined for the hot path in `bump`.
    #[inline(always)]
    fn inlined_bump_with(&mut self, (next_token, next_spacing): (Token, Spacing)) {
        // The current token becomes the previous one.
        self.prev_token = mem::replace(&mut self.token, next_token);
        self.token_spacing = next_spacing;

        // Expected token types only apply at a single position.
        self.expected_token_types.clear();
    }
1134
    /// Advances the parser by one token.
    pub fn bump(&mut self) {
        let mut next = self.token_cursor.inlined_next();
        self.num_bump_calls += 1;
        // Any split compound token is fully consumed once we advance.
        self.break_last_token = 0;
        if next.0.span.is_dummy() {
            // Use the current position for diagnostics while keeping the
            // incoming token's syntax context.
            let fallback_span = self.token.span;
            next.0.span = fallback_span.with_ctxt(next.0.span.ctxt());
        }
        // Skippable invisible delimiters must never escape the token cursor.
        debug_assert!(!matches!(
            next.0.kind,
            token::OpenInvisible(origin) | token::CloseInvisible(origin) if origin.skip()
        ));
        self.inlined_bump_with(next)
    }
1155
    /// Applies `looker` to the token `dist` tokens ahead of the current one
    /// without advancing the parser. `dist == 0` is the current token.
    pub fn look_ahead<R>(&self, dist: usize, looker: impl FnOnce(&Token) -> R) -> R {
        if dist == 0 {
            return looker(&self.token);
        }

        // Fast path for `dist == 1`: peek directly at the cursor instead of
        // cloning it.
        if dist == 1 {
            match self.token_cursor.curr.curr() {
                Some(tree) => {
                    match tree {
                        TokenTree::Token(token, _) => return looker(token),
                        &TokenTree::Delimited(dspan, _, delim, _) => {
                            // A non-skipped delimiter acts as its open token.
                            if !delim.skip() {
                                return looker(&Token::new(delim.as_open_token_kind(), dspan.open));
                            }
                        }
                    }
                }
                None => {
                    // At the end of the current group: the next token is the
                    // enclosing group's close delimiter (if not skipped).
                    if let Some(last) = self.token_cursor.stack.last()
                        && let Some(&TokenTree::Delimited(span, _, delim, _)) = last.curr()
                        && !delim.skip()
                    {
                        return looker(&Token::new(delim.as_close_token_kind(), span.close));
                    }
                }
            }
        }

        // Slow path: clone the cursor and step forward, skipping invisible
        // delimiters that are marked as skippable.
        let mut cursor = self.token_cursor.clone();
        let mut i = 0;
        let mut token = Token::dummy();
        while i < dist {
            token = cursor.next().0;
            if matches!(
                token.kind,
                token::OpenInvisible(origin) | token::CloseInvisible(origin) if origin.skip()
            ) {
                continue;
            }
            i += 1;
        }
        looker(&token)
    }
1213
    /// Applies `looker` to the token *tree* `dist` trees ahead within the
    /// current group, or returns `None` past its end. `dist` must be ≥ 1.
    pub fn tree_look_ahead<R>(
        &self,
        dist: usize,
        looker: impl FnOnce(&TokenTree) -> R,
    ) -> Option<R> {
        assert_ne!(dist, 0);
        self.token_cursor.curr.look_ahead(dist - 1).map(looker)
    }

    /// Whether the token `dist` tokens ahead is one of the keywords `kws`.
    pub(crate) fn is_keyword_ahead(&self, dist: usize, kws: &[Symbol]) -> bool {
        self.look_ahead(dist, |t| kws.iter().any(|&kw| t.is_keyword(kw)))
    }
1229
    /// Parses an optional coroutine qualifier: `async`, `gen`, or
    /// `async gen`. `gen` is only recognized on Rust 2024 or later.
    fn parse_coroutine_kind(&mut self, case: Case) -> Option<CoroutineKind> {
        let span = self.token_uninterpolated_span();
        if self.eat_keyword_case(exp!(Async), case) {
            // `gen` after `async` only counts on edition 2024+.
            if self.token_uninterpolated_span().at_least_rust_2024()
                && self.eat_keyword_case(exp!(Gen), case)
            {
                let gen_span = self.prev_token_uninterpolated_span();
                Some(CoroutineKind::AsyncGen {
                    span: span.to(gen_span),
                    closure_id: DUMMY_NODE_ID,
                    return_impl_trait_id: DUMMY_NODE_ID,
                })
            } else {
                Some(CoroutineKind::Async {
                    span,
                    closure_id: DUMMY_NODE_ID,
                    return_impl_trait_id: DUMMY_NODE_ID,
                })
            }
        } else if self.token_uninterpolated_span().at_least_rust_2024()
            && self.eat_keyword_case(exp!(Gen), case)
        {
            Some(CoroutineKind::Gen {
                span,
                closure_id: DUMMY_NODE_ID,
                return_impl_trait_id: DUMMY_NODE_ID,
            })
        } else {
            None
        }
    }
1264
    /// Parses an optional safety qualifier: `unsafe`, `safe`, or nothing
    /// (`Safety::Default`).
    fn parse_safety(&mut self, case: Case) -> Safety {
        if self.eat_keyword_case(exp!(Unsafe), case) {
            Safety::Unsafe(self.prev_token_uninterpolated_span())
        } else if self.eat_keyword_case(exp!(Safe), case) {
            Safety::Safe(self.prev_token_uninterpolated_span())
        } else {
            Safety::Default
        }
    }
1275
    /// Parses an optional `const` qualifier in non-closure position.
    fn parse_constness(&mut self, case: Case) -> Const {
        self.parse_constness_(case, false)
    }

    /// Parses an optional `const` qualifier on a closure, gating the
    /// unstable `const_closures` feature when one is found.
    fn parse_closure_constness(&mut self) -> Const {
        let constness = self.parse_constness_(Case::Sensitive, true);
        if let Const::Yes(span) = constness {
            self.psess.gated_spans.gate(sym::const_closures, span);
        }
        constness
    }

    /// Shared implementation for the two `const` parsers above.
    fn parse_constness_(&mut self, case: Case, is_closure: bool) -> Const {
        // Only eat `const` when the closure-lookahead agrees with the
        // caller's context, and never when a block follows (that would be an
        // inline const expression, not a qualifier).
        if (self.check_const_closure() == is_closure)
            && !self.look_ahead(1, |t| *t == token::OpenBrace || t.is_metavar_block())
            && self.eat_keyword_case(exp!(Const), case)
        {
            Const::Yes(self.prev_token_uninterpolated_span())
        } else {
            Const::No
        }
    }
1301
    /// Parses an inline const expression `const { ... }` starting at the
    /// `const` keyword. When `pat` is set (pattern position), emits an
    /// error since const blocks cannot be used as patterns.
    fn parse_const_block(&mut self, span: Span, pat: bool) -> PResult<'a, Box<Expr>> {
        self.expect_keyword(exp!(Const))?;
        let (attrs, blk) = self.parse_inner_attrs_and_block(None)?;
        let anon_const = AnonConst {
            id: DUMMY_NODE_ID,
            value: self.mk_expr(blk.span, ExprKind::Block(blk, None)),
        };
        let blk_span = anon_const.value.span;
        let kind = if pat {
            // Const blocks are expressions only; in pattern position,
            // produce an error expression carrying the emitted guarantee.
            let guar = self
                .dcx()
                .struct_span_err(blk_span, "const blocks cannot be used as patterns")
                .with_help(
                    "use a named `const`-item or an `if`-guard (`x if x == const { ... }`) instead",
                )
                .emit();
            ExprKind::Err(guar)
        } else {
            ExprKind::ConstBlock(anon_const)
        };
        Ok(self.mk_expr_with_attrs(span.to(blk_span), kind, attrs))
    }
1325
    /// Parses an optional `mut` qualifier.
    fn parse_mutability(&mut self) -> Mutability {
        if self.eat_keyword(exp!(Mut)) { Mutability::Mut } else { Mutability::Not }
    }

    /// Parses an optional `ref` binding mode, including any pinnedness and
    /// mutability qualifiers that follow it.
    fn parse_byref(&mut self) -> ByRef {
        if self.eat_keyword(exp!(Ref)) {
            let (pinnedness, mutability) = self.parse_pin_and_mut();
            ByRef::Yes(pinnedness, mutability)
        } else {
            ByRef::No
        }
    }

    /// Parses `mut` or `const` (as in raw pointers: `*mut T` / `*const T`),
    /// returning `None` when neither keyword is present.
    fn parse_const_or_mut(&mut self) -> Option<Mutability> {
        if self.eat_keyword(exp!(Mut)) {
            Some(Mutability::Mut)
        } else if self.eat_keyword(exp!(Const)) {
            Some(Mutability::Not)
        } else {
            None
        }
    }
1351
    /// Parses a field name: either an integer literal (a tuple index such
    /// as `0` in `x.0`, erroring on any literal suffix) or an identifier.
    fn parse_field_name(&mut self) -> PResult<'a, Ident> {
        if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = self.token.kind
        {
            // Tuple indices must be bare integers; `x.0u8` is an error.
            if let Some(suffix) = suffix {
                self.dcx().emit_err(errors::InvalidLiteralSuffixOnTupleIndex {
                    span: self.token.span,
                    suffix,
                });
            }
            self.bump();
            Ok(Ident::new(symbol, self.prev_token.span))
        } else {
            self.parse_ident_common(true)
        }
    }
1367
    /// Parses delimited macro-call-style arguments, e.g. `(...)`, `[...]`,
    /// or `{...}`; errors when the current token opens no delimiter.
    fn parse_delim_args(&mut self) -> PResult<'a, Box<DelimArgs>> {
        if let Some(args) = self.parse_delim_args_inner() {
            Ok(Box::new(args))
        } else {
            self.unexpected_any()
        }
    }

    /// Parses attribute arguments: a delimited group, `= expr`, or nothing.
    fn parse_attr_args(&mut self) -> PResult<'a, AttrArgs> {
        Ok(if let Some(args) = self.parse_delim_args_inner() {
            AttrArgs::Delimited(args)
        } else if self.eat(exp!(Eq)) {
            // `#[attr = expr]` form; the expression's tokens are collected.
            let eq_span = self.prev_token.span;
            let expr = self.parse_expr_force_collect()?;
            AttrArgs::Eq { eq_span, expr }
        } else {
            AttrArgs::Empty
        })
    }

    /// Parses one delimited token tree as `DelimArgs`, or returns `None`
    /// when the current token opens no delimiter.
    fn parse_delim_args_inner(&mut self) -> Option<DelimArgs> {
        let delimited = self.check(exp!(OpenParen))
            || self.check(exp!(OpenBracket))
            || self.check(exp!(OpenBrace));

        delimited.then(|| {
            // We just checked for an open delimiter, so `parse_token_tree`
            // must return a delimited tree.
            let TokenTree::Delimited(dspan, _, delim, tokens) = self.parse_token_tree() else {
                unreachable!()
            };
            DelimArgs { dspan, delim, tokens }
        })
    }
1400
1401 pub fn parse_token_tree(&mut self) -> TokenTree {
1403 if self.token.kind.open_delim().is_some() {
1404 let tree = self.token_cursor.stack.last().unwrap().curr().unwrap().clone();
1407 debug_assert_matches!(tree, TokenTree::Delimited(..));
1408
1409 let target_depth = self.token_cursor.stack.len() - 1;
1415
1416 if let Capturing::No = self.capture_state.capturing {
1417 self.token_cursor.curr.bump_to_end();
1422 self.bump();
1423 debug_assert_eq!(self.token_cursor.stack.len(), target_depth);
1424 } else {
1425 loop {
1426 self.bump();
1429 if self.token_cursor.stack.len() == target_depth {
1430 break;
1431 }
1432 }
1433 }
1434 debug_assert!(self.token.kind.close_delim().is_some());
1435
1436 self.bump();
1438 tree
1439 } else {
1440 assert!(!self.token.kind.is_close_delim_or_eof());
1441 let prev_spacing = self.token_spacing;
1442 self.bump();
1443 TokenTree::Token(self.prev_token, prev_spacing)
1444 }
1445 }
1446
1447 pub fn parse_tokens(&mut self) -> TokenStream {
1448 let mut result = Vec::new();
1449 loop {
1450 if self.token.kind.is_close_delim_or_eof() {
1451 break;
1452 } else {
1453 result.push(self.parse_token_tree());
1454 }
1455 }
1456 TokenStream::new(result)
1457 }
1458
1459 fn with_res<T>(&mut self, res: Restrictions, f: impl FnOnce(&mut Self) -> T) -> T {
1463 let old = self.restrictions;
1464 self.restrictions = res;
1465 let res = f(self);
1466 self.restrictions = old;
1467 res
1468 }
1469
    /// Parses a visibility qualifier: nothing (inherited), `pub`, or a
    /// restricted form — `pub(crate)`, `pub(super)`, `pub(self)`, or
    /// `pub(in path)`.
    ///
    /// `fbt` indicates whether a `(` after `pub` might instead start a tuple
    /// type (e.g. the field `pub (u32, u32)`); if so, we must not attempt
    /// error recovery on an unrecognized restriction.
    pub fn parse_visibility(&mut self, fbt: FollowedByType) -> PResult<'a, Visibility> {
        // A `$vis` metavariable expands to an already-parsed visibility;
        // reuse it directly.
        if let Some(vis) = self
            .eat_metavar_seq(MetaVarKind::Vis, |this| this.parse_visibility(FollowedByType::Yes))
        {
            return Ok(vis);
        }

        if !self.eat_keyword(exp!(Pub)) {
            // No `pub` keyword: inherited visibility, with an empty span just
            // before the current token.
            return Ok(Visibility {
                span: self.token.span.shrink_to_lo(),
                kind: VisibilityKind::Inherited,
                tokens: None,
            });
        }
        let lo = self.prev_token.span;

        if self.check(exp!(OpenParen)) {
            if self.is_keyword_ahead(1, &[kw::In]) {
                // `pub(in path)`: consume `(` and `in`, then parse the path.
                self.bump(); // `(`
                self.bump(); // `in`
                let path = self.parse_path(PathStyle::Mod)?;
                self.expect(exp!(CloseParen))?; // `)`
                let vis = VisibilityKind::Restricted {
                    path: Box::new(path),
                    id: ast::DUMMY_NODE_ID,
                    shorthand: false,
                };
                return Ok(Visibility {
                    span: lo.to(self.prev_token.span),
                    kind: vis,
                    tokens: None,
                });
            } else if self.look_ahead(2, |t| t == &token::CloseParen)
                && self.is_keyword_ahead(1, &[kw::Crate, kw::Super, kw::SelfLower])
            {
                // Shorthand `pub(crate)`, `pub(super)`, or `pub(self)`:
                // exactly one keyword between the parens.
                self.bump(); // `(`
                let path = self.parse_path(PathStyle::Mod)?;
                self.expect(exp!(CloseParen))?; // `)`
                let vis = VisibilityKind::Restricted {
                    path: Box::new(path),
                    id: ast::DUMMY_NODE_ID,
                    shorthand: true,
                };
                return Ok(Visibility {
                    span: lo.to(self.prev_token.span),
                    kind: vis,
                    tokens: None,
                });
            } else if let FollowedByType::No = fbt {
                // The `(` can't begin a tuple type here, so an unrecognized
                // restriction like `pub(something)` is safe to recover from;
                // the error is emitted and we fall through to plain `pub`.
                self.recover_incorrect_vis_restriction()?;
            }
        }

        Ok(Visibility { span: lo, kind: VisibilityKind::Public, tokens: None })
    }
1543
1544 fn recover_incorrect_vis_restriction(&mut self) -> PResult<'a, ()> {
1546 self.bump(); let path = self.parse_path(PathStyle::Mod)?;
1548 self.expect(exp!(CloseParen))?; let path_str = pprust::path_to_string(&path);
1551 self.dcx()
1552 .emit_err(IncorrectVisibilityRestriction { span: path.span, inner_str: path_str });
1553
1554 Ok(())
1555 }
1556
1557 fn parse_extern(&mut self, case: Case) -> Extern {
1559 if self.eat_keyword_case(exp!(Extern), case) {
1560 let mut extern_span = self.prev_token.span;
1561 let abi = self.parse_abi();
1562 if let Some(abi) = abi {
1563 extern_span = extern_span.to(abi.span);
1564 }
1565 Extern::from_abi(abi, extern_span)
1566 } else {
1567 Extern::None
1568 }
1569 }
1570
1571 fn parse_abi(&mut self) -> Option<StrLit> {
1573 match self.parse_str_lit() {
1574 Ok(str_lit) => Some(str_lit),
1575 Err(Some(lit)) => match lit.kind {
1576 ast::LitKind::Err(_) => None,
1577 _ => {
1578 self.dcx().emit_err(NonStringAbiLiteral { span: lit.span });
1579 None
1580 }
1581 },
1582 Err(None) => None,
1583 }
1584 }
1585
    /// Runs `f` while force-collecting the tokens it consumes, with no
    /// attribute handling: an empty `AttrWrapper` is passed through and no
    /// trailing token is recorded.
    fn collect_tokens_no_attrs<R: HasAttrs + HasTokens>(
        &mut self,
        f: impl FnOnce(&mut Self) -> PResult<'a, R>,
    ) -> PResult<'a, R> {
        // The attributes handed back by `collect_tokens` are ignored — there
        // are none, since we start from `AttrWrapper::empty()`.
        self.collect_tokens(None, AttrWrapper::empty(), ForceCollect::Yes, |this, _attrs| {
            Ok((f(this)?, Trailing::No, UsePreAttrPos::No))
        })
    }
1596
1597 fn check_path_sep_and_look_ahead(&mut self, looker: impl Fn(&Token) -> bool) -> bool {
1599 if self.check(exp!(PathSep)) {
1600 if self.may_recover() && self.look_ahead(1, |t| t.kind == token::Colon) {
1601 debug_assert!(!self.look_ahead(1, &looker), "Looker must not match on colon");
1602 self.look_ahead(2, looker)
1603 } else {
1604 self.look_ahead(1, looker)
1605 }
1606 } else {
1607 false
1608 }
1609 }
1610
1611 fn is_import_coupler(&mut self) -> bool {
1613 self.check_path_sep_and_look_ahead(|t| matches!(t.kind, token::OpenBrace | token::Star))
1614 }
1615
1616 #[allow(unused)]
1619 pub(crate) fn debug_lookahead(&self, lookahead: usize) -> impl fmt::Debug {
1620 fmt::from_fn(move |f| {
1621 let mut dbg_fmt = f.debug_struct("Parser"); dbg_fmt.field("prev_token", &self.prev_token);
1625 let mut tokens = vec![];
1626 for i in 0..lookahead {
1627 let tok = self.look_ahead(i, |tok| tok.kind);
1628 let is_eof = tok == TokenKind::Eof;
1629 tokens.push(tok);
1630 if is_eof {
1631 break;
1633 }
1634 }
1635 dbg_fmt.field_with("tokens", |field| field.debug_list().entries(tokens).finish());
1636 dbg_fmt.field("approx_token_stream_pos", &self.num_bump_calls);
1637
1638 if let Some(subparser) = self.subparser_name {
1640 dbg_fmt.field("subparser_name", &subparser);
1641 }
1642 if let Recovery::Forbidden = self.recovery {
1643 dbg_fmt.field("recovery", &self.recovery);
1644 }
1645
1646 dbg_fmt.finish_non_exhaustive()
1648 })
1649 }
1650
    /// Clears the set of token types recorded as expected at the current
    /// position (used for "expected X, found Y" diagnostics).
    pub fn clear_expected_token_types(&mut self) {
        self.expected_token_types.clear();
    }
1654
    /// An approximate position in the token stream: the number of `bump`
    /// calls made so far.
    pub fn approx_token_stream_pos(&self) -> u32 {
        self.num_bump_calls
    }
1658
    /// The span of the current token, looking through any interpolation
    /// wrapper to the underlying token where applicable.
    pub fn token_uninterpolated_span(&self) -> Span {
        match &self.token.kind {
            // Interpolated ident/lifetime: use the underlying ident's span.
            token::NtIdent(ident, _) | token::NtLifetime(ident, _) => ident.span,
            // Invisible open delimiter from a metavariable: use the span of
            // the first token inside it.
            token::OpenInvisible(InvisibleOrigin::MetaVar(_)) => self.look_ahead(1, |t| t.span),
            _ => self.token.span,
        }
    }
1672
    /// Like [`Parser::token_uninterpolated_span`], but for the previous token.
    pub fn prev_token_uninterpolated_span(&self) -> Span {
        match &self.prev_token.kind {
            // Interpolated ident/lifetime: use the underlying ident's span.
            token::NtIdent(ident, _) | token::NtLifetime(ident, _) => ident.span,
            // The previous token was an invisible metavariable open delimiter;
            // `look_ahead(0)` is the current token, i.e. the first token
            // inside that delimiter.
            token::OpenInvisible(InvisibleOrigin::MetaVar(_)) => self.look_ahead(0, |t| t.span),
            _ => self.prev_token.span,
        }
    }
1681}
1682
/// The result of parsing a nonterminal (macro metavariable fragment): one
/// variant per supported fragment kind, mostly wrapping the corresponding
/// boxed AST node.
#[derive(Clone, Debug)]
pub enum ParseNtResult {
    /// A raw token tree, for fragments kept as tokens rather than AST.
    Tt(TokenTree),
    /// An identifier, with whether it was written in raw form (`r#ident`).
    Ident(Ident, IdentIsRaw),
    /// A lifetime, with whether it was written in raw form.
    Lifetime(Ident, IdentIsRaw),
    Item(Box<ast::Item>),
    Block(Box<ast::Block>),
    Stmt(Box<ast::Stmt>),
    /// A pattern, plus which flavor of pattern parsing produced it.
    Pat(Box<ast::Pat>, NtPatKind),
    /// An expression, plus which flavor of expression parsing produced it.
    Expr(Box<ast::Expr>, NtExprKind),
    /// A literal, represented as an expression node.
    Literal(Box<ast::Expr>),
    Ty(Box<ast::Ty>),
    /// An attribute's meta item.
    Meta(Box<ast::AttrItem>),
    Path(Box<ast::Path>),
    Vis(Box<ast::Visibility>),
}