1pub mod attr;
2mod attr_wrapper;
3mod diagnostics;
4mod expr;
5mod generics;
6mod item;
7mod nonterminal;
8mod pat;
9mod path;
10mod stmt;
11pub mod token_type;
12mod ty;
13
14pub mod asm;
17pub mod cfg_select;
18
19use std::assert_matches::debug_assert_matches;
20use std::{fmt, mem, slice};
21
22use attr_wrapper::{AttrWrapper, UsePreAttrPos};
23pub use diagnostics::AttemptLocalParseRecovery;
24pub(crate) use expr::ForbiddenLetReason;
25pub use expr::LetChainsPolicy;
27pub(crate) use item::{FnContext, FnParseMode};
28pub use pat::{CommaRecoveryMode, RecoverColon, RecoverComma};
29pub use path::PathStyle;
30use rustc_ast::token::{
31 self, IdentIsRaw, InvisibleOrigin, MetaVarKind, NtExprKind, NtPatKind, Token, TokenKind,
32};
33use rustc_ast::tokenstream::{
34 ParserRange, ParserReplacement, Spacing, TokenCursor, TokenStream, TokenTree, TokenTreeCursor,
35};
36use rustc_ast::util::case::Case;
37use rustc_ast::{
38 self as ast, AnonConst, AttrArgs, AttrId, BlockCheckMode, ByRef, Const, CoroutineKind,
39 DUMMY_NODE_ID, DelimArgs, Expr, ExprKind, Extern, HasAttrs, HasTokens, MgcaDisambiguation,
40 Mutability, Recovered, Safety, StrLit, Visibility, VisibilityKind,
41};
42use rustc_ast_pretty::pprust;
43use rustc_data_structures::fx::FxHashMap;
44use rustc_errors::{Applicability, Diag, FatalError, MultiSpan, PResult};
45use rustc_index::interval::IntervalSet;
46use rustc_session::parse::ParseSess;
47use rustc_span::{Ident, Span, Symbol, kw, sym};
48use thin_vec::ThinVec;
49use token_type::TokenTypeSet;
50pub use token_type::{ExpKeywordPair, ExpTokenPair, TokenType};
51use tracing::debug;
52
53use crate::errors::{self, IncorrectVisibilityRestriction, NonStringAbiLiteral};
54use crate::exp;
55
56#[cfg(test)]
57mod tests;
58
59#[cfg(test)]
62mod tokenstream {
63 mod tests;
64}
65
bitflags::bitflags! {
    /// Restrictions applied while parsing expressions, limiting which grammar
    /// productions are permitted in the current context.
    #[derive(Clone, Copy, Debug)]
    struct Restrictions: u8 {
        /// The expression is in statement position.
        const STMT_EXPR = 1 << 0;
        /// Struct literals are not allowed (e.g. in an `if` condition).
        const NO_STRUCT_LITERAL = 1 << 1;
        /// Parsing within a const expression context.
        const CONST_EXPR = 1 << 2;
        /// `let` expressions are permitted here.
        const ALLOW_LET = 1 << 3;
        /// Currently inside a match-arm `if` guard.
        const IN_IF_GUARD = 1 << 4;
        /// Currently parsing a pattern.
        const IS_PAT = 1 << 5;
    }
}
126
/// How error recovery should react when it encounters a `;`.
/// (Consumed by the recovery code in the sibling modules, not visible here.)
#[derive(Clone, Copy, PartialEq, Debug)]
enum SemiColonMode {
    Break,
    Ignore,
    Comma,
}
133
/// How error recovery should react when it encounters a block.
/// (Consumed by the recovery code in the sibling modules, not visible here.)
#[derive(Clone, Copy, PartialEq, Debug)]
enum BlockMode {
    Break,
    Ignore,
}
139
/// Whether to force collection of tokens for an AST node, even when
/// collection would not otherwise happen.
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ForceCollect {
    Yes,
    No,
}
147
/// If the current token is a `ty` metavariable sequence followed by `::`,
/// recovers by reparsing the type and delegating to
/// `maybe_recover_from_bad_qpath_stage_2`, returning early from the enclosing
/// function with its result. Otherwise does nothing.
#[macro_export]
macro_rules! maybe_recover_from_interpolated_ty_qpath {
    ($self: expr, $allow_qpath_recovery: expr) => {
        if $allow_qpath_recovery
            && $self.may_recover()
            && let Some(mv_kind) = $self.token.is_metavar_seq()
            && let token::MetaVarKind::Ty { .. } = mv_kind
            && $self.check_noexpect_past_close_delim(&token::PathSep)
        {
            // Reparse the metavariable's contents as an actual type.
            let ty = $self
                .eat_metavar_seq(mv_kind, |this| this.parse_ty_no_question_mark_recover())
                .expect("metavar seq ty");

            return $self.maybe_recover_from_bad_qpath_stage_2($self.prev_token.span, ty);
        }
    };
}
167
/// Whether the parser is permitted to perform error recovery.
/// Forbidden when reparsing metavariable sequences (see
/// `eat_metavar_seq_with_matcher`).
#[derive(Clone, Copy, Debug)]
pub enum Recovery {
    Allowed,
    Forbidden,
}
173
/// The parser state: the current/previous tokens, the token-stream cursor,
/// and bookkeeping for diagnostics, recovery, and token capture.
#[derive(Clone)]
pub struct Parser<'a> {
    pub psess: &'a ParseSess,
    /// The current token.
    pub token: Token,
    /// The spacing of the current token.
    token_spacing: Spacing,
    /// The previous token.
    pub prev_token: Token,
    // NOTE(review): presumably controls token capture under `cfg` — confirm.
    pub capture_cfg: bool,
    restrictions: Restrictions,
    /// Token types recorded as expected at the current position, used to
    /// build "expected one of ..." diagnostics.
    expected_token_types: TokenTypeSet,
    token_cursor: TokenCursor,
    /// Number of calls to `bump`; reset to 0 in `new` after the initial bump.
    num_bump_calls: u32,
    /// Incremented each time a compound token is split (see `break_and_eat`);
    /// reset to 0 on every `bump`.
    break_last_token: u32,
    /// Count of `<` consumed without a matching `>` (see `eat_lt`/`expect_gt`).
    unmatched_angle_bracket_count: u16,
    angle_bracket_nesting: u16,

    /// Span at which an unexpected-token error was last reported; used by
    /// `expect_one_of` to abort instead of cascading at the same position.
    last_unexpected_token_span: Option<Span>,
    /// Name of the subparser (if any), used in diagnostics.
    subparser_name: Option<&'static str>,
    /// State for token capture.
    capture_state: CaptureState,
    /// Spans of the most recently parsed closure, used for recovery
    /// (see `recover_missing_braces_around_closure_body`).
    current_closure: Option<ClosureSpans>,
    /// Whether error recovery is currently allowed.
    recovery: Recovery,
}
227
// `Parser` is cloned and passed around a lot; guard its size on the common
// 64-bit targets so it doesn't grow unintentionally.
#[cfg(all(target_pointer_width = "64", any(target_arch = "aarch64", target_arch = "x86_64")))]
rustc_data_structures::static_assert_size!(Parser<'_>, 288);
233
/// Spans recorded for the most recently parsed closure, used for error
/// recovery (see `recover_missing_braces_around_closure_body`).
#[derive(Clone, Debug)]
struct ClosureSpans {
    /// The whole closure expression.
    whole_closure: Span,
    /// The `|` closing the parameter list.
    closing_pipe: Span,
    /// The closure body expression.
    body: Span,
}
241
/// Whether token capture is currently active (see `CaptureState`).
#[derive(Copy, Clone, Debug)]
enum Capturing {
    /// Not capturing tokens.
    No,
    /// Capturing tokens.
    Yes,
}
252
/// Bookkeeping state for capturing the token stream of parsed AST nodes
/// (driven by `AttrWrapper`/`attr_wrapper`).
#[derive(Clone, Debug)]
struct CaptureState {
    capturing: Capturing,
    /// Replacements to apply to the captured stream.
    parser_replacements: Vec<ParserReplacement>,
    /// The parser range covering each inner attribute, keyed by attribute id.
    inner_attr_parser_ranges: FxHashMap<AttrId, ParserRange>,
    /// Attribute ids already observed during capture.
    seen_attrs: IntervalSet<AttrId>,
}
263
/// Describes the separator expected between the items of a parsed sequence.
#[derive(Debug)]
struct SeqSep {
    /// The separator token, if any.
    sep: Option<ExpTokenPair>,
    /// Whether a trailing separator before the close token is allowed.
    trailing_sep_allowed: bool,
}
272
273impl SeqSep {
274 fn trailing_allowed(sep: ExpTokenPair) -> SeqSep {
275 SeqSep { sep: Some(sep), trailing_sep_allowed: true }
276 }
277
278 fn none() -> SeqSep {
279 SeqSep { sep: None, trailing_sep_allowed: false }
280 }
281}
282
/// Passed to `parse_visibility` to indicate whether the visibility is
/// immediately followed by a type (affects parsing of `pub(...)`).
#[derive(Debug)]
pub enum FollowedByType {
    Yes,
    No,
}
288
/// Whether a parsed sequence ended with a trailing separator.
#[derive(Copy, Clone, Debug)]
pub enum Trailing {
    No,
    Yes,
}
294
295impl From<bool> for Trailing {
296 fn from(b: bool) -> Trailing {
297 if b { Trailing::Yes } else { Trailing::No }
298 }
299}
300
/// A coarse classification of a token, used when rendering tokens in
/// diagnostics (see `token_descr`).
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub(super) enum TokenDescription {
    ReservedIdentifier,
    Keyword,
    ReservedKeyword,
    DocComment,

    /// An invisible-delimited metavariable sequence of the given kind.
    MetaVar(MetaVarKind),
}
314
impl TokenDescription {
    /// Classifies `token`, returning `None` for ordinary tokens.
    /// The guard arms must stay first: they take precedence over the
    /// kind-based arms regardless of the token's kind.
    pub(super) fn from_token(token: &Token) -> Option<Self> {
        match token.kind {
            _ if token.is_special_ident() => Some(TokenDescription::ReservedIdentifier),
            _ if token.is_used_keyword() => Some(TokenDescription::Keyword),
            _ if token.is_unused_keyword() => Some(TokenDescription::ReservedKeyword),
            token::DocComment(..) => Some(TokenDescription::DocComment),
            token::OpenInvisible(InvisibleOrigin::MetaVar(kind)) => {
                Some(TokenDescription::MetaVar(kind))
            }
            _ => None,
        }
    }
}
329
330pub fn token_descr(token: &Token) -> String {
331 let s = pprust::token_to_string(token).to_string();
332
333 match (TokenDescription::from_token(token), &token.kind) {
334 (Some(TokenDescription::ReservedIdentifier), _) => format!("reserved identifier `{s}`"),
335 (Some(TokenDescription::Keyword), _) => format!("keyword `{s}`"),
336 (Some(TokenDescription::ReservedKeyword), _) => format!("reserved keyword `{s}`"),
337 (Some(TokenDescription::DocComment), _) => format!("doc comment `{s}`"),
338 (Some(TokenDescription::MetaVar(kind)), _) => format!("`{kind}` metavariable"),
340 (None, TokenKind::NtIdent(..)) => format!("identifier `{s}`"),
341 (None, TokenKind::NtLifetime(..)) => format!("lifetime `{s}`"),
342 (None, _) => format!("`{s}`"),
343 }
344}
345
346impl<'a> Parser<'a> {
    /// Creates a parser over `stream`. `subparser_name`, if present, names
    /// this subparser for use in diagnostics.
    pub fn new(
        psess: &'a ParseSess,
        stream: TokenStream,
        subparser_name: Option<&'static str>,
    ) -> Self {
        let mut parser = Parser {
            psess,
            token: Token::dummy(),
            token_spacing: Spacing::Alone,
            prev_token: Token::dummy(),
            capture_cfg: false,
            restrictions: Restrictions::empty(),
            expected_token_types: TokenTypeSet::new(),
            token_cursor: TokenCursor { curr: TokenTreeCursor::new(stream), stack: Vec::new() },
            num_bump_calls: 0,
            break_last_token: 0,
            unmatched_angle_bracket_count: 0,
            angle_bracket_nesting: 0,
            last_unexpected_token_span: None,
            subparser_name,
            capture_state: CaptureState {
                capturing: Capturing::No,
                parser_replacements: Vec::new(),
                inner_attr_parser_ranges: Default::default(),
                seen_attrs: IntervalSet::new(u32::MAX as usize),
            },
            current_closure: None,
            recovery: Recovery::Allowed,
        };

        // Advance from the dummy token onto the first real token.
        parser.bump();

        // Undo the count from the initial bump so token positions used by
        // capture are 0-indexed.
        parser.num_bump_calls = 0;

        parser
    }
387
    /// Builder-style setter for the recovery mode; consumes and returns `self`.
    #[inline]
    pub fn recovery(mut self, recovery: Recovery) -> Self {
        self.recovery = recovery;
        self
    }
393
394 #[inline]
395 fn with_recovery<T>(&mut self, recovery: Recovery, f: impl FnOnce(&mut Self) -> T) -> T {
396 let old = mem::replace(&mut self.recovery, recovery);
397 let res = f(self);
398 self.recovery = old;
399 res
400 }
401
402 #[inline]
410 fn may_recover(&self) -> bool {
411 matches!(self.recovery, Recovery::Allowed)
412 }
413
    /// Reports the current token as unexpected, returning the error.
    pub fn unexpected_any<T>(&mut self) -> PResult<'a, T> {
        match self.expect_one_of(&[], &[]) {
            Err(e) => Err(e),
            // With nothing expected, successful recovery is contradictory;
            // abort rather than continue in a broken state.
            Ok(_) => FatalError.raise(),
        }
    }
424
    /// `unexpected_any` with its return type fixed to `()`.
    pub fn unexpected(&mut self) -> PResult<'a, ()> {
        self.unexpected_any()
    }
428
    /// Expects and consumes `exp`; signals an error if the next token is not it.
    pub fn expect(&mut self, exp: ExpTokenPair) -> PResult<'a, Recovered> {
        if self.expected_token_types.is_empty() {
            // Fast path: no other expectations were recorded, so a mismatch
            // can use the simple recovery routine.
            if self.token == exp.tok {
                self.bump();
                Ok(Recovered::No)
            } else {
                self.unexpected_try_recover(&exp.tok)
            }
        } else {
            // Other expectations were recorded; go through the full
            // "expected one of ..." path.
            self.expect_one_of(slice::from_ref(&exp), &[])
        }
    }
442
    /// Expects one of `edible` (consumed on match) or `inedible` (matched
    /// but left in the input); signals an error otherwise.
    fn expect_one_of(
        &mut self,
        edible: &[ExpTokenPair],
        inedible: &[ExpTokenPair],
    ) -> PResult<'a, Recovered> {
        if edible.iter().any(|exp| exp.tok == self.token.kind) {
            self.bump();
            Ok(Recovered::No)
        } else if inedible.iter().any(|exp| exp.tok == self.token.kind) {
            // Matched, but deliberately not consumed.
            Ok(Recovered::No)
        } else if self.token != token::Eof
            && self.last_unexpected_token_span == Some(self.token.span)
        {
            // We already reported an unexpected token at this exact span;
            // abort instead of producing an error cascade.
            FatalError.raise();
        } else {
            self.expected_one_of_not_found(edible, inedible)
                .map(|error_guaranteed| Recovered::Yes(error_guaranteed))
        }
    }
466
    /// Parses an identifier, recovering (when allowed) if the token is not one.
    pub fn parse_ident(&mut self) -> PResult<'a, Ident> {
        self.parse_ident_common(self.may_recover())
    }
471
    /// Parses an identifier. A non-raw reserved word is an error: emitted
    /// immediately when `recover` is set, returned to the caller otherwise.
    fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, Ident> {
        let (ident, is_raw) = self.ident_or_err(recover)?;

        if matches!(is_raw, IdentIsRaw::No) && ident.is_reserved() {
            let err = self.expected_ident_found_err();
            if recover {
                err.emit();
            } else {
                return Err(err);
            }
        }
        self.bump();
        Ok(ident)
    }
486
487 fn ident_or_err(&mut self, recover: bool) -> PResult<'a, (Ident, IdentIsRaw)> {
488 match self.token.ident() {
489 Some(ident) => Ok(ident),
490 None => self.expected_ident_found(recover),
491 }
492 }
493
494 #[inline]
499 pub fn check(&mut self, exp: ExpTokenPair) -> bool {
500 let is_present = self.token == exp.tok;
501 if !is_present {
502 self.expected_token_types.insert(exp.token_type);
503 }
504 is_present
505 }
506
    /// Like `check`, but without recording the token as expected (it will
    /// not appear in "expected one of ..." diagnostics).
    #[inline]
    #[must_use]
    fn check_noexpect(&self, tok: &TokenKind) -> bool {
        self.token == *tok
    }
512
    /// Checks whether the token immediately *after* the current delimited
    /// sequence's closing delimiter is `tok`, by peeking the parent cursor
    /// one tree past its current position.
    ///
    /// NOTE(review): `unwrap` panics if the cursor stack is empty, i.e. when
    /// not inside a delimited sequence — callers must guarantee that.
    fn check_noexpect_past_close_delim(&self, tok: &TokenKind) -> bool {
        let mut tree_cursor = self.token_cursor.stack.last().unwrap().clone();
        tree_cursor.bump();
        matches!(
            tree_cursor.curr(),
            Some(TokenTree::Token(token::Token { kind, .. }, _)) if kind == tok
        )
    }
529
530 #[inline]
535 #[must_use]
536 fn eat_noexpect(&mut self, tok: &TokenKind) -> bool {
537 let is_present = self.check_noexpect(tok);
538 if is_present {
539 self.bump()
540 }
541 is_present
542 }
543
544 #[inline]
546 #[must_use]
547 pub fn eat(&mut self, exp: ExpTokenPair) -> bool {
548 let is_present = self.check(exp);
549 if is_present {
550 self.bump()
551 }
552 is_present
553 }
554
555 #[inline]
558 #[must_use]
559 fn check_keyword(&mut self, exp: ExpKeywordPair) -> bool {
560 let is_keyword = self.token.is_keyword(exp.kw);
561 if !is_keyword {
562 self.expected_token_types.insert(exp.token_type);
563 }
564 is_keyword
565 }
566
    /// Like `check_keyword`, but when `case` is `Insensitive` also accepts a
    /// wrong-case, non-raw spelling of the keyword. No diagnostic is emitted
    /// here (contrast `eat_keyword_case`).
    #[inline]
    #[must_use]
    fn check_keyword_case(&mut self, exp: ExpKeywordPair, case: Case) -> bool {
        if self.check_keyword(exp) {
            true
        } else if case == Case::Insensitive
            && let Some((ident, IdentIsRaw::No)) = self.token.ident()
            && ident.as_str().eq_ignore_ascii_case(exp.kw.as_str())
        {
            true
        } else {
            false
        }
    }
582
583 #[inline]
587 #[must_use]
588 pub fn eat_keyword(&mut self, exp: ExpKeywordPair) -> bool {
589 let is_keyword = self.check_keyword(exp);
590 if is_keyword {
591 self.bump();
592 }
593 is_keyword
594 }
595
    /// Like `eat_keyword`, but when `case` is `Insensitive` also eats a
    /// wrong-case, non-raw spelling of the keyword, emitting a `KwBadCase`
    /// error describing whether it was upper/lower/mixed case.
    #[inline]
    #[must_use]
    fn eat_keyword_case(&mut self, exp: ExpKeywordPair, case: Case) -> bool {
        if self.eat_keyword(exp) {
            true
        } else if case == Case::Insensitive
            && let Some((ident, IdentIsRaw::No)) = self.token.ident()
            && ident.as_str().eq_ignore_ascii_case(exp.kw.as_str())
        {
            let kw = exp.kw.as_str();
            let is_upper = kw.chars().all(char::is_uppercase);
            let is_lower = kw.chars().all(char::is_lowercase);

            // Classify the *expected* keyword's casing for the diagnostic.
            let case = match (is_upper, is_lower) {
                (true, true) => {
                    unreachable!("keyword that is both fully upper- and fully lowercase")
                }
                (true, false) => errors::Case::Upper,
                (false, true) => errors::Case::Lower,
                (false, false) => errors::Case::Mixed,
            };

            self.dcx().emit_err(errors::KwBadCase { span: ident.span, kw, case });
            self.bump();
            true
        } else {
            false
        }
    }
629
    /// Consumes the keyword `kw` if present, without recording it as
    /// expected. Returns whether it was consumed.
    #[inline]
    #[must_use]
    pub fn eat_keyword_noexpect(&mut self, kw: Symbol) -> bool {
        let is_keyword = self.token.is_keyword(kw);
        if is_keyword {
            self.bump();
        }
        is_keyword
    }
642
643 pub fn expect_keyword(&mut self, exp: ExpKeywordPair) -> PResult<'a, ()> {
647 if !self.eat_keyword(exp) { self.unexpected() } else { Ok(()) }
648 }
649
    /// Convenience wrapper over `eat_metavar_seq_with_matcher` matching a
    /// single metavariable kind.
    pub fn eat_metavar_seq<T>(
        &mut self,
        mv_kind: MetaVarKind,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> Option<T> {
        self.eat_metavar_seq_with_matcher(|mvk| mvk == mv_kind, f)
    }
658
659 fn eat_metavar_seq_with_matcher<T>(
663 &mut self,
664 match_mv_kind: impl Fn(MetaVarKind) -> bool,
665 mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
666 ) -> Option<T> {
667 if let token::OpenInvisible(InvisibleOrigin::MetaVar(mv_kind)) = self.token.kind
668 && match_mv_kind(mv_kind)
669 {
670 self.bump();
671
672 let res = self.with_recovery(Recovery::Forbidden, |this| f(this));
676
677 let res = match res {
678 Ok(res) => res,
679 Err(err) => {
680 err.delay_as_bug();
682 return None;
683 }
684 };
685
686 if let token::CloseInvisible(InvisibleOrigin::MetaVar(mv_kind)) = self.token.kind
687 && match_mv_kind(mv_kind)
688 {
689 self.bump();
690 Some(res)
691 } else {
692 self.dcx()
696 .span_delayed_bug(self.token.span, "no close delim with reparsing {mv_kind:?}");
697 None
698 }
699 } else {
700 None
701 }
702 }
703
    /// Whether the current token is keyword `kw` and the next token is a
    /// non-reserved identifier.
    fn is_kw_followed_by_ident(&self, kw: Symbol) -> bool {
        self.token.is_keyword(kw) && self.look_ahead(1, |t| t.is_non_reserved_ident())
    }
708
709 #[inline]
710 fn check_or_expected(&mut self, ok: bool, token_type: TokenType) -> bool {
711 if !ok {
712 self.expected_token_types.insert(token_type);
713 }
714 ok
715 }
716
    /// Whether the current token is an identifier (recorded as expected if not).
    fn check_ident(&mut self) -> bool {
        self.check_or_expected(self.token.is_ident(), TokenType::Ident)
    }
720
    /// Whether the current token can start a path (recorded as expected if not).
    fn check_path(&mut self) -> bool {
        self.check_or_expected(self.token.is_path_start(), TokenType::Path)
    }
724
    /// Whether the current token can begin a type (recorded as expected if not).
    fn check_type(&mut self) -> bool {
        self.check_or_expected(self.token.can_begin_type(), TokenType::Type)
    }
728
    /// Whether the current token can begin a const generic argument,
    /// including the `const { ... }` (mGCA) block form.
    fn check_const_arg(&mut self) -> bool {
        let is_mcg_arg = self.check_or_expected(self.token.can_begin_const_arg(), TokenType::Const);
        // `const` followed by `{` is the mGCA const-block argument form.
        let is_mgca_arg = self.is_keyword_ahead(0, &[kw::Const])
            && self.look_ahead(1, |t| *t == token::OpenBrace);
        is_mcg_arg || is_mgca_arg
    }
735
    /// Whether the tokens form the start of a `const` closure: `const`
    /// followed by `move`/`use`/`static`, `||`, or `|`.
    fn check_const_closure(&self) -> bool {
        self.is_keyword_ahead(0, &[kw::Const])
            && self.look_ahead(1, |t| match &t.kind {
                token::Ident(kw::Move | kw::Use | kw::Static, IdentIsRaw::No)
                | token::OrOr
                | token::Or => true,
                _ => false,
            })
    }
746
    /// Whether, `dist` tokens ahead, there is an inline const expression:
    /// `const` followed by a `{` (or a `block` metavariable).
    fn check_inline_const(&self, dist: usize) -> bool {
        self.is_keyword_ahead(dist, &[kw::Const])
            && self.look_ahead(dist + 1, |t| match &t.kind {
                token::OpenBrace => true,
                token::OpenInvisible(InvisibleOrigin::MetaVar(MetaVarKind::Block)) => true,
                _ => false,
            })
    }
755
    /// Whether the current token is plus-like (`+`, `+=`, ...); recorded as
    /// expected if not.
    #[inline]
    fn check_plus(&mut self) -> bool {
        self.check_or_expected(self.token.is_like_plus(), TokenType::Plus)
    }
762
    /// Eats the expected token if present. If the current token is a
    /// compound token whose first half is `exp.tok` (e.g. `>` inside `>>`),
    /// splits it: the first half is consumed and the second half becomes the
    /// current token. Returns whether anything was eaten.
    fn break_and_eat(&mut self, exp: ExpTokenPair) -> bool {
        if self.token == exp.tok {
            self.bump();
            return true;
        }
        match self.token.kind.break_two_token_op(1) {
            Some((first, second)) if first == exp.tok => {
                // Split the compound token's span between the two halves.
                let first_span = self.psess.source_map().start_point(self.token.span);
                let second_span = self.token.span.with_lo(first_span.hi());
                self.token = Token::new(first, first_span);
                // Record the artificial split (see the `break_last_token` field).
                self.break_last_token += 1;
                // The synthesized second half inherits the original spacing.
                self.bump_with((Token::new(second, second_span), self.token_spacing));
                true
            }
            _ => {
                self.expected_token_types.insert(exp.token_type);
                false
            }
        }
    }
794
    /// Eats `+`, possibly splitting it off a compound token such as `+=`.
    fn eat_plus(&mut self) -> bool {
        self.break_and_eat(exp!(Plus))
    }
799
    /// Expects `&`, possibly splitting it off a compound token such as `&&`.
    fn expect_and(&mut self) -> PResult<'a, ()> {
        if self.break_and_eat(exp!(And)) { Ok(()) } else { self.unexpected() }
    }
805
    /// Expects `|`, possibly splitting it off a compound token such as `||`.
    fn expect_or(&mut self) -> PResult<'a, ()> {
        if self.break_and_eat(exp!(Or)) { Ok(()) } else { self.unexpected() }
    }
811
    /// Eats `<`, possibly splitting it off a compound token (e.g. `<<`),
    /// and tracks the unmatched angle-bracket count for recovery.
    fn eat_lt(&mut self) -> bool {
        let ate = self.break_and_eat(exp!(Lt));
        if ate {
            self.unmatched_angle_bracket_count += 1;
            debug!("eat_lt: (increment) count={:?}", self.unmatched_angle_bracket_count);
        }
        ate
    }
822
    /// Expects `<`, signalling an error if absent (see `eat_lt`).
    fn expect_lt(&mut self) -> PResult<'a, ()> {
        if self.eat_lt() { Ok(()) } else { self.unexpected() }
    }
828
    /// Expects `>`, possibly splitting it off a compound token (e.g. `>>`),
    /// and decrements the unmatched angle-bracket count.
    fn expect_gt(&mut self) -> PResult<'a, ()> {
        if self.break_and_eat(exp!(Gt)) {
            // Guard against underflow: a stray `>` may appear without `<`.
            if self.unmatched_angle_bracket_count > 0 {
                self.unmatched_angle_bracket_count -= 1;
                debug!("expect_gt: (decrement) count={:?}", self.unmatched_angle_bracket_count);
            }
            Ok(())
        } else {
            self.unexpected()
        }
    }
843
    /// Whether the current token matches any of `closes_expected` (recorded
    /// as expected on mismatch) or `closes_not_expected` (never recorded).
    fn expect_any_with_type(
        &mut self,
        closes_expected: &[ExpTokenPair],
        closes_not_expected: &[&TokenKind],
    ) -> bool {
        closes_expected.iter().any(|&close| self.check(close))
            || closes_not_expected.iter().any(|k| self.check_noexpect(k))
    }
853
    /// Parses a sequence of `f`-parsed items, separated by `sep`, stopping
    /// just before any of the close tokens (which are not consumed). Returns
    /// the items, whether a trailing separator was present, and whether any
    /// error recovery happened.
    fn parse_seq_to_before_tokens<T>(
        &mut self,
        closes_expected: &[ExpTokenPair],
        closes_not_expected: &[&TokenKind],
        sep: SeqSep,
        mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, Trailing, Recovered)> {
        let mut first = true;
        let mut recovered = Recovered::No;
        let mut trailing = Trailing::No;
        let mut v = ThinVec::new();

        while !self.expect_any_with_type(closes_expected, closes_not_expected) {
            // Stop at any closing delimiter or EOF, even an unexpected one.
            if self.token.kind.is_close_delim_or_eof() {
                break;
            }
            if let Some(exp) = sep.sep {
                if first {
                    // No separator before the first element.
                    first = false;
                } else {
                    match self.expect(exp) {
                        Ok(Recovered::No) => {
                            self.current_closure.take();
                        }
                        Ok(Recovered::Yes(guar)) => {
                            self.current_closure.take();
                            recovered = Recovered::Yes(guar);
                            break;
                        }
                        Err(mut expect_err) => {
                            let sp = self.prev_token.span.shrink_to_hi();
                            let token_str = pprust::token_kind_to_string(&exp.tok);

                            match self.current_closure.take() {
                                // A `;` right after a closure suggests the
                                // closure body was missing braces.
                                Some(closure_spans) if self.token == TokenKind::Semi => {
                                    self.recover_missing_braces_around_closure_body(
                                        closure_spans,
                                        expect_err,
                                    )?;

                                    continue;
                                }

                                _ => {
                                    // Attempt to keep parsing if it was a
                                    // similar separator (e.g. `;` for `,`).
                                    if exp.tok.similar_tokens().contains(&self.token.kind) {
                                        self.bump();
                                    }
                                }
                            }

                            // `ident ..` in a pattern likely meant `ident @ ..`.
                            if self.prev_token.is_ident() && self.token == token::DotDot {
                                let msg = format!(
                                    "if you meant to bind the contents of the rest of the array \
                                     pattern into `{}`, use `@`",
                                    pprust::token_to_string(&self.prev_token)
                                );
                                expect_err
                                    .with_span_suggestion_verbose(
                                        self.prev_token.span.shrink_to_hi().until(self.token.span),
                                        msg,
                                        " @ ",
                                        Applicability::MaybeIncorrect,
                                    )
                                    .emit();
                                break;
                            }

                            // Try to parse the next element anyway; if that
                            // works, the user merely forgot the separator.
                            self.last_unexpected_token_span = None;
                            match f(self) {
                                Ok(t) => {
                                    expect_err
                                        .with_span_suggestion_short(
                                            sp,
                                            format!("missing `{token_str}`"),
                                            token_str,
                                            Applicability::MaybeIncorrect,
                                        )
                                        .emit();

                                    v.push(t);
                                    continue;
                                }
                                Err(e) => {
                                    // Merge the element error's sub-diagnostics
                                    // into the separator error, then drop it.
                                    for xx in &e.children {
                                        expect_err.children.push(xx.clone());
                                    }
                                    e.cancel();
                                    if self.token == token::Colon {
                                        // Propagate so the caller can recover.
                                        return Err(expect_err);
                                    } else if let [exp] = closes_expected
                                        && exp.token_type == TokenType::CloseParen
                                    {
                                        return Err(expect_err);
                                    } else {
                                        expect_err.emit();
                                        break;
                                    }
                                }
                            }
                        }
                    }
                }
            }
            // After a separator, a close token means a trailing separator.
            if sep.trailing_sep_allowed
                && self.expect_any_with_type(closes_expected, closes_not_expected)
            {
                trailing = Trailing::Yes;
                break;
            }

            let t = f(self)?;
            v.push(t);
        }

        Ok((v, trailing, recovered))
    }
994
    /// Error recovery for a closure body containing statements but missing
    /// braces: skips the stray statements, then turns `expect_err` into a
    /// detailed diagnostic with notes and a braces suggestion, and emits it.
    fn recover_missing_braces_around_closure_body(
        &mut self,
        closure_spans: ClosureSpans,
        mut expect_err: Diag<'_>,
    ) -> PResult<'a, ()> {
        let initial_semicolon = self.token.span;

        // Consume the `;`-separated statements that followed the closure,
        // cancelling any errors they produce.
        while self.eat(exp!(Semi)) {
            let _ = self
                .parse_stmt_without_recovery(false, ForceCollect::No, false)
                .unwrap_or_else(|e| {
                    e.cancel();
                    None
                });
        }

        expect_err
            .primary_message("closure bodies that contain statements must be surrounded by braces");

        let preceding_pipe_span = closure_spans.closing_pipe;
        let following_token_span = self.token.span;

        // First note: why the closure parse stopped at the semicolon.
        let mut first_note = MultiSpan::from(vec![initial_semicolon]);
        first_note.push_span_label(
            initial_semicolon,
            "this `;` turns the preceding closure into a statement",
        );
        first_note.push_span_label(
            closure_spans.body,
            "this expression is a statement because of the trailing semicolon",
        );
        expect_err.span_note(first_note, "statement found outside of a block");

        // Second note: what was parsed vs. what was probably intended.
        let mut second_note = MultiSpan::from(vec![closure_spans.whole_closure]);
        second_note.push_span_label(closure_spans.whole_closure, "this is the parsed closure...");
        second_note.push_span_label(
            following_token_span,
            "...but likely you meant the closure to end here",
        );
        expect_err.span_note(second_note, "the closure body may be incorrectly delimited");

        // Point the primary span at the pipe and at where the body likely ends.
        expect_err.span(vec![preceding_pipe_span, following_token_span]);

        let opening_suggestion_str = " {".to_string();
        let closing_suggestion_str = "}".to_string();

        expect_err.multipart_suggestion(
            "try adding braces",
            vec![
                (preceding_pipe_span.shrink_to_hi(), opening_suggestion_str),
                (following_token_span.shrink_to_lo(), closing_suggestion_str),
            ],
            Applicability::MaybeIncorrect,
        );

        expect_err.emit();

        Ok(())
    }
1054
    /// Parses a `sep`-separated sequence, stopping just before `close`
    /// (which is not consumed).
    fn parse_seq_to_before_end<T>(
        &mut self,
        close: ExpTokenPair,
        sep: SeqSep,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, Trailing, Recovered)> {
        self.parse_seq_to_before_tokens(&[close], &[], sep, f)
    }
1066
    /// Parses a `sep`-separated sequence and consumes the `close` token.
    fn parse_seq_to_end<T>(
        &mut self,
        close: ExpTokenPair,
        sep: SeqSep,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
        let (val, trailing, recovered) = self.parse_seq_to_before_end(close, sep, f)?;
        // When no recovery happened, the close token must be next; anything
        // else is an internal inconsistency, delayed as a bug.
        if matches!(recovered, Recovered::No) && !self.eat(close) {
            self.dcx().span_delayed_bug(
                self.token.span,
                "recovered but `parse_seq_to_before_end` did not give us the close token",
            );
        }
        Ok((val, trailing))
    }
1085
    /// Parses `open`, then a `sep`-separated sequence, then `close`.
    fn parse_unspanned_seq<T>(
        &mut self,
        open: ExpTokenPair,
        close: ExpTokenPair,
        sep: SeqSep,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
        self.expect(open)?;
        self.parse_seq_to_end(close, sep, f)
    }
1099
    /// Parses a comma-separated, delimited sequence with a trailing comma allowed.
    fn parse_delim_comma_seq<T>(
        &mut self,
        open: ExpTokenPair,
        close: ExpTokenPair,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
        self.parse_unspanned_seq(open, close, SeqSep::trailing_allowed(exp!(Comma)), f)
    }
1111
    /// Parses a parenthesized, comma-separated sequence with a trailing
    /// comma allowed.
    pub fn parse_paren_comma_seq<T>(
        &mut self,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
        self.parse_delim_comma_seq(exp!(OpenParen), exp!(CloseParen), f)
    }
1121
    /// Advances to a caller-supplied token (non-inlined wrapper over
    /// `inlined_bump_with`).
    fn bump_with(&mut self, next: (Token, Spacing)) {
        self.inlined_bump_with(next)
    }
1126
    /// Installs `next_token` as the current token, moving the old current
    /// token into `prev_token`, and clears the expected-token set.
    #[inline(always)]
    fn inlined_bump_with(&mut self, (next_token, next_spacing): (Token, Spacing)) {
        self.prev_token = mem::replace(&mut self.token, next_token);
        self.token_spacing = next_spacing;

        self.expected_token_types.clear();
    }
1137
    /// Advances the parser by one token.
    pub fn bump(&mut self) {
        let mut next = self.token_cursor.inlined_next();
        self.num_bump_calls += 1;
        // Any token split recorded by `break_and_eat` is no longer "last".
        self.break_last_token = 0;
        if next.0.span.is_dummy() {
            // Inherit the current token's position so diagnostics on a
            // dummy-spanned token still point somewhere sensible.
            let fallback_span = self.token.span;
            next.0.span = fallback_span.with_ctxt(next.0.span.ctxt());
        }
        // Skippable invisible delimiters must never surface as real tokens.
        debug_assert!(!matches!(
            next.0.kind,
            token::OpenInvisible(origin) | token::CloseInvisible(origin) if origin.skip()
        ));
        self.inlined_bump_with(next)
    }
1158
    /// Applies `looker` to the token `dist` positions ahead of the current
    /// one. `dist == 0` is the current token; `dist == 1` has a fast path
    /// that avoids cloning the cursor.
    pub fn look_ahead<R>(&self, dist: usize, looker: impl FnOnce(&Token) -> R) -> R {
        if dist == 0 {
            return looker(&self.token);
        }

        if dist == 1 {
            // Fast path: peek the cursor directly without cloning it.
            match self.token_cursor.curr.curr() {
                Some(tree) => {
                    match tree {
                        TokenTree::Token(token, _) => return looker(token),
                        &TokenTree::Delimited(dspan, _, delim, _) => {
                            // A delimited group looks like its open token,
                            // unless the delimiter is skippable.
                            if !delim.skip() {
                                return looker(&Token::new(delim.as_open_token_kind(), dspan.open));
                            }
                        }
                    }
                }
                None => {
                    // At the end of this group: the next token is the
                    // enclosing group's close delimiter (if not skippable).
                    if let Some(last) = self.token_cursor.stack.last()
                        && let Some(&TokenTree::Delimited(span, _, delim, _)) = last.curr()
                        && !delim.skip()
                    {
                        return looker(&Token::new(delim.as_close_token_kind(), span.close));
                    }
                }
            }
        }

        // Slow path: clone the cursor and step forward, skipping skippable
        // invisible delimiters, which don't count toward `dist`.
        let mut cursor = self.token_cursor.clone();
        let mut i = 0;
        let mut token = Token::dummy();
        while i < dist {
            token = cursor.next().0;
            if matches!(
                token.kind,
                token::OpenInvisible(origin) | token::CloseInvisible(origin) if origin.skip()
            ) {
                continue;
            }
            i += 1;
        }
        looker(&token)
    }
1216
    /// Applies `looker` to the token *tree* `dist` positions ahead within
    /// the current group, or `None` if the group ends before then.
    pub fn tree_look_ahead<R>(
        &self,
        dist: usize,
        looker: impl FnOnce(&TokenTree) -> R,
    ) -> Option<R> {
        assert_ne!(dist, 0);
        self.token_cursor.curr.look_ahead(dist - 1).map(looker)
    }
1227
    /// Whether the token `dist` positions ahead is any of the keywords `kws`.
    pub(crate) fn is_keyword_ahead(&self, dist: usize, kws: &[Symbol]) -> bool {
        self.look_ahead(dist, |t| kws.iter().any(|&kw| t.is_keyword(kw)))
    }
1232
    /// Parses an optional coroutine qualifier: `async`, `gen`, or
    /// `async gen`. `gen` is only recognized from Rust 2024 onwards.
    fn parse_coroutine_kind(&mut self, case: Case) -> Option<CoroutineKind> {
        let span = self.token_uninterpolated_span();
        if self.eat_keyword_case(exp!(Async), case) {
            // `async` may be followed by `gen` (2024+) to form `async gen`.
            if self.token_uninterpolated_span().at_least_rust_2024()
                && self.eat_keyword_case(exp!(Gen), case)
            {
                let gen_span = self.prev_token_uninterpolated_span();
                Some(CoroutineKind::AsyncGen {
                    span: span.to(gen_span),
                    closure_id: DUMMY_NODE_ID,
                    return_impl_trait_id: DUMMY_NODE_ID,
                })
            } else {
                Some(CoroutineKind::Async {
                    span,
                    closure_id: DUMMY_NODE_ID,
                    return_impl_trait_id: DUMMY_NODE_ID,
                })
            }
        } else if self.token_uninterpolated_span().at_least_rust_2024()
            && self.eat_keyword_case(exp!(Gen), case)
        {
            Some(CoroutineKind::Gen {
                span,
                closure_id: DUMMY_NODE_ID,
                return_impl_trait_id: DUMMY_NODE_ID,
            })
        } else {
            None
        }
    }
1267
    /// Parses an optional safety qualifier: `unsafe`, `safe`, or nothing.
    fn parse_safety(&mut self, case: Case) -> Safety {
        if self.eat_keyword_case(exp!(Unsafe), case) {
            Safety::Unsafe(self.prev_token_uninterpolated_span())
        } else if self.eat_keyword_case(exp!(Safe), case) {
            Safety::Safe(self.prev_token_uninterpolated_span())
        } else {
            Safety::Default
        }
    }
1278
    /// Parses an optional `const` qualifier (non-closure position).
    fn parse_constness(&mut self, case: Case) -> Const {
        self.parse_constness_(case, false)
    }
1283
    /// Parses an optional `const` qualifier on a closure, feature-gating it
    /// under `const_closures`.
    fn parse_closure_constness(&mut self) -> Const {
        let constness = self.parse_constness_(Case::Sensitive, true);
        if let Const::Yes(span) = constness {
            self.psess.gated_spans.gate(sym::const_closures, span);
        }
        constness
    }
1292
    /// Shared implementation of `parse_constness`/`parse_closure_constness`.
    /// Requires the const-closure lookahead to agree with `is_closure`, and
    /// refuses `const` directly followed by a block (that's an inline const).
    fn parse_constness_(&mut self, case: Case, is_closure: bool) -> Const {
        if (self.check_const_closure() == is_closure)
            && !self.look_ahead(1, |t| *t == token::OpenBrace || t.is_metavar_block())
            && self.eat_keyword_case(exp!(Const), case)
        {
            Const::Yes(self.prev_token_uninterpolated_span())
        } else {
            Const::No
        }
    }
1304
1305 fn parse_mgca_const_block(&mut self, gate_syntax: bool) -> PResult<'a, AnonConst> {
1306 self.expect_keyword(exp!(Const))?;
1307 let kw_span = self.token.span;
1308 let value = self.parse_expr_block(None, self.token.span, BlockCheckMode::Default)?;
1309 if gate_syntax {
1310 self.psess.gated_spans.gate(sym::min_generic_const_args, kw_span.to(value.span));
1311 }
1312 Ok(AnonConst {
1313 id: ast::DUMMY_NODE_ID,
1314 value,
1315 mgca_disambiguation: MgcaDisambiguation::AnonConst,
1316 })
1317 }
1318
    /// Parses an inline const expression `const { ... }` starting at `span`.
    /// When `pat` is set (pattern position), emits an error: const blocks
    /// cannot be used as patterns.
    fn parse_const_block(&mut self, span: Span, pat: bool) -> PResult<'a, Box<Expr>> {
        self.expect_keyword(exp!(Const))?;
        let (attrs, blk) = self.parse_inner_attrs_and_block(None)?;
        let anon_const = AnonConst {
            id: DUMMY_NODE_ID,
            value: self.mk_expr(blk.span, ExprKind::Block(blk, None)),
            mgca_disambiguation: MgcaDisambiguation::AnonConst,
        };
        let blk_span = anon_const.value.span;
        let kind = if pat {
            let guar = self
                .dcx()
                .struct_span_err(blk_span, "const blocks cannot be used as patterns")
                .with_help(
                    "use a named `const`-item or an `if`-guard (`x if x == const { ... }`) instead",
                )
                .emit();
            ExprKind::Err(guar)
        } else {
            ExprKind::ConstBlock(anon_const)
        };
        Ok(self.mk_expr_with_attrs(span.to(blk_span), kind, attrs))
    }
1343
1344 fn parse_mutability(&mut self) -> Mutability {
1346 if self.eat_keyword(exp!(Mut)) { Mutability::Mut } else { Mutability::Not }
1347 }
1348
    /// Parses an optional `ref` binding mode, including any following
    /// pinnedness/mutability qualifiers.
    fn parse_byref(&mut self) -> ByRef {
        if self.eat_keyword(exp!(Ref)) {
            let (pinnedness, mutability) = self.parse_pin_and_mut();
            ByRef::Yes(pinnedness, mutability)
        } else {
            ByRef::No
        }
    }
1358
1359 fn parse_const_or_mut(&mut self) -> Option<Mutability> {
1361 if self.eat_keyword(exp!(Mut)) {
1362 Some(Mutability::Mut)
1363 } else if self.eat_keyword(exp!(Const)) {
1364 Some(Mutability::Not)
1365 } else {
1366 None
1367 }
1368 }
1369
    /// Parses a field name: either an integer literal (tuple index, with any
    /// suffix reported as an error) or an identifier.
    fn parse_field_name(&mut self) -> PResult<'a, Ident> {
        if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = self.token.kind
        {
            if let Some(suffix) = suffix {
                // `x.0u32` is invalid; report but continue with the index.
                self.dcx().emit_err(errors::InvalidLiteralSuffixOnTupleIndex {
                    span: self.token.span,
                    suffix,
                });
            }
            self.bump();
            Ok(Ident::new(symbol, self.prev_token.span))
        } else {
            self.parse_ident_common(true)
        }
    }
1385
1386 fn parse_delim_args(&mut self) -> PResult<'a, Box<DelimArgs>> {
1387 if let Some(args) = self.parse_delim_args_inner() {
1388 Ok(Box::new(args))
1389 } else {
1390 self.unexpected_any()
1391 }
1392 }
1393
    /// Parses attribute arguments: delimited (`#[attr(...)]`),
    /// `= expr` (`#[attr = expr]`), or empty (`#[attr]`).
    fn parse_attr_args(&mut self) -> PResult<'a, AttrArgs> {
        Ok(if let Some(args) = self.parse_delim_args_inner() {
            AttrArgs::Delimited(args)
        } else if self.eat(exp!(Eq)) {
            let eq_span = self.prev_token.span;
            let expr = self.parse_expr_force_collect()?;
            AttrArgs::Eq { eq_span, expr }
        } else {
            AttrArgs::Empty
        })
    }
1405
    /// If the current token opens `(`, `[`, or `{`, consumes the whole
    /// delimited tree and returns it as `DelimArgs`; otherwise `None`.
    fn parse_delim_args_inner(&mut self) -> Option<DelimArgs> {
        let delimited = self.check(exp!(OpenParen))
            || self.check(exp!(OpenBracket))
            || self.check(exp!(OpenBrace));

        delimited.then(|| {
            // We just checked for an open delimiter, so this must be a
            // delimited tree.
            let TokenTree::Delimited(dspan, _, delim, tokens) = self.parse_token_tree() else {
                unreachable!()
            };
            DelimArgs { dspan, delim, tokens }
        })
    }
1418
1419 pub fn parse_token_tree(&mut self) -> TokenTree {
1421 if self.token.kind.open_delim().is_some() {
1422 let tree = self.token_cursor.stack.last().unwrap().curr().unwrap().clone();
1425 debug_assert_matches!(tree, TokenTree::Delimited(..));
1426
1427 let target_depth = self.token_cursor.stack.len() - 1;
1433
1434 if let Capturing::No = self.capture_state.capturing {
1435 self.token_cursor.curr.bump_to_end();
1440 self.bump();
1441 debug_assert_eq!(self.token_cursor.stack.len(), target_depth);
1442 } else {
1443 loop {
1444 self.bump();
1447 if self.token_cursor.stack.len() == target_depth {
1448 break;
1449 }
1450 }
1451 }
1452 debug_assert!(self.token.kind.close_delim().is_some());
1453
1454 self.bump();
1456 tree
1457 } else {
1458 assert!(!self.token.kind.is_close_delim_or_eof());
1459 let prev_spacing = self.token_spacing;
1460 self.bump();
1461 TokenTree::Token(self.prev_token, prev_spacing)
1462 }
1463 }
1464
1465 pub fn parse_tokens(&mut self) -> TokenStream {
1466 let mut result = Vec::new();
1467 loop {
1468 if self.token.kind.is_close_delim_or_eof() {
1469 break;
1470 } else {
1471 result.push(self.parse_token_tree());
1472 }
1473 }
1474 TokenStream::new(result)
1475 }
1476
1477 fn with_res<T>(&mut self, res: Restrictions, f: impl FnOnce(&mut Self) -> T) -> T {
1481 let old = self.restrictions;
1482 self.restrictions = res;
1483 let res = f(self);
1484 self.restrictions = old;
1485 res
1486 }
1487
    /// Parses a visibility: nothing (inherited), `pub`, `pub(crate)`,
    /// `pub(self)`, `pub(super)`, or `pub(in path)`. `fbt` says whether a
    /// type may follow, which limits the recovery attempted on `pub(...)`.
    pub fn parse_visibility(&mut self, fbt: FollowedByType) -> PResult<'a, Visibility> {
        // A `vis` metavariable expansion already contains a parsed visibility.
        if let Some(vis) = self
            .eat_metavar_seq(MetaVarKind::Vis, |this| this.parse_visibility(FollowedByType::Yes))
        {
            return Ok(vis);
        }

        if !self.eat_keyword(exp!(Pub)) {
            // No `pub`: inherited (private) visibility with an empty span at
            // the current position.
            return Ok(Visibility {
                span: self.token.span.shrink_to_lo(),
                kind: VisibilityKind::Inherited,
                tokens: None,
            });
        }
        let lo = self.prev_token.span;

        if self.check(exp!(OpenParen)) {
            if self.is_keyword_ahead(1, &[kw::In]) {
                // `pub(in path)`: consume `(` and `in`, then parse the path.
                self.bump(); self.bump(); let path = self.parse_path(PathStyle::Mod)?; self.expect(exp!(CloseParen))?; let vis = VisibilityKind::Restricted {
                    path: Box::new(path),
                    id: ast::DUMMY_NODE_ID,
                    shorthand: false,
                };
                return Ok(Visibility {
                    span: lo.to(self.prev_token.span),
                    kind: vis,
                    tokens: None,
                });
            } else if self.look_ahead(2, |t| t == &token::CloseParen)
                && self.is_keyword_ahead(1, &[kw::Crate, kw::Super, kw::SelfLower])
            {
                // Shorthand `pub(crate)`, `pub(super)`, or `pub(self)`:
                // consume `(`, then parse the single-keyword path.
                self.bump(); let path = self.parse_path(PathStyle::Mod)?; self.expect(exp!(CloseParen))?; let vis = VisibilityKind::Restricted {
                    path: Box::new(path),
                    id: ast::DUMMY_NODE_ID,
                    shorthand: true,
                };
                return Ok(Visibility {
                    span: lo.to(self.prev_token.span),
                    kind: vis,
                    tokens: None,
                });
            } else if let FollowedByType::No = fbt {
                // No type can follow, so `pub(...)` here is an invalid
                // restriction such as `pub(foo)`: recover, emit an error, and
                // fall through to plain `pub` below.
                self.recover_incorrect_vis_restriction()?;
            }
            // When `fbt == Yes`, the `(` presumably begins a type (e.g. a
            // tuple type after `pub`), so we leave it alone — TODO confirm
            // against callers.
        }

        Ok(Visibility { span: lo, kind: VisibilityKind::Public, tokens: None })
    }
1561
1562 fn recover_incorrect_vis_restriction(&mut self) -> PResult<'a, ()> {
1564 self.bump(); let path = self.parse_path(PathStyle::Mod)?;
1566 self.expect(exp!(CloseParen))?; let path_str = pprust::path_to_string(&path);
1569 self.dcx()
1570 .emit_err(IncorrectVisibilityRestriction { span: path.span, inner_str: path_str });
1571
1572 Ok(())
1573 }
1574
1575 fn parse_extern(&mut self, case: Case) -> Extern {
1577 if self.eat_keyword_case(exp!(Extern), case) {
1578 let mut extern_span = self.prev_token.span;
1579 let abi = self.parse_abi();
1580 if let Some(abi) = abi {
1581 extern_span = extern_span.to(abi.span);
1582 }
1583 Extern::from_abi(abi, extern_span)
1584 } else {
1585 Extern::None
1586 }
1587 }
1588
1589 fn parse_abi(&mut self) -> Option<StrLit> {
1591 match self.parse_str_lit() {
1592 Ok(str_lit) => Some(str_lit),
1593 Err(Some(lit)) => match lit.kind {
1594 ast::LitKind::Err(_) => None,
1595 _ => {
1596 self.dcx().emit_err(NonStringAbiLiteral { span: lit.span });
1597 None
1598 }
1599 },
1600 Err(None) => None,
1601 }
1602 }
1603
    /// Runs `f` with token collection forced on, without handling any outer
    /// attributes, so the parsed node always has its tokens attached.
    fn collect_tokens_no_attrs<R: HasAttrs + HasTokens>(
        &mut self,
        f: impl FnOnce(&mut Self) -> PResult<'a, R>,
    ) -> PResult<'a, R> {
        // An empty `AttrWrapper` plus `ForceCollect::Yes` means collection
        // happens unconditionally, independent of any attributes.
        self.collect_tokens(None, AttrWrapper::empty(), ForceCollect::Yes, |this, _attrs| {
            Ok((f(this)?, Trailing::No, UsePreAttrPos::No))
        })
    }
1614
1615 fn check_path_sep_and_look_ahead(&mut self, looker: impl Fn(&Token) -> bool) -> bool {
1617 if self.check(exp!(PathSep)) {
1618 if self.may_recover() && self.look_ahead(1, |t| t.kind == token::Colon) {
1619 debug_assert!(!self.look_ahead(1, &looker), "Looker must not match on colon");
1620 self.look_ahead(2, looker)
1621 } else {
1622 self.look_ahead(1, looker)
1623 }
1624 } else {
1625 false
1626 }
1627 }
1628
1629 fn is_import_coupler(&mut self) -> bool {
1631 self.check_path_sep_and_look_ahead(|t| matches!(t.kind, token::OpenBrace | token::Star))
1632 }
1633
1634 #[allow(unused)]
1637 pub(crate) fn debug_lookahead(&self, lookahead: usize) -> impl fmt::Debug {
1638 fmt::from_fn(move |f| {
1639 let mut dbg_fmt = f.debug_struct("Parser"); dbg_fmt.field("prev_token", &self.prev_token);
1643 let mut tokens = vec![];
1644 for i in 0..lookahead {
1645 let tok = self.look_ahead(i, |tok| tok.kind);
1646 let is_eof = tok == TokenKind::Eof;
1647 tokens.push(tok);
1648 if is_eof {
1649 break;
1651 }
1652 }
1653 dbg_fmt.field_with("tokens", |field| field.debug_list().entries(tokens).finish());
1654 dbg_fmt.field("approx_token_stream_pos", &self.num_bump_calls);
1655
1656 if let Some(subparser) = self.subparser_name {
1658 dbg_fmt.field("subparser_name", &subparser);
1659 }
1660 if let Recovery::Forbidden = self.recovery {
1661 dbg_fmt.field("recovery", &self.recovery);
1662 }
1663
1664 dbg_fmt.finish_non_exhaustive()
1666 })
1667 }
1668
    /// Clears the set of token types recorded as "expected" for diagnostics.
    pub fn clear_expected_token_types(&mut self) {
        self.expected_token_types.clear();
    }
1672
    /// An approximate position in the token stream, measured as the number of
    /// `bump` calls performed so far.
    pub fn approx_token_stream_pos(&self) -> u32 {
        self.num_bump_calls
    }
1676
    /// The span of the current token, looking through `NtIdent`/`NtLifetime`
    /// tokens and invisible metavariable delimiters to the underlying span.
    pub fn token_uninterpolated_span(&self) -> Span {
        match &self.token.kind {
            token::NtIdent(ident, _) | token::NtLifetime(ident, _) => ident.span,
            // For an invisible open delimiter from a metavariable expansion,
            // use the span of the first token inside the delimited sequence.
            token::OpenInvisible(InvisibleOrigin::MetaVar(_)) => self.look_ahead(1, |t| t.span),
            _ => self.token.span,
        }
    }
1690
    /// Like `token_uninterpolated_span`, but for the previous token.
    pub fn prev_token_uninterpolated_span(&self) -> Span {
        match &self.prev_token.kind {
            token::NtIdent(ident, _) | token::NtLifetime(ident, _) => ident.span,
            // `look_ahead(0)` is the current token, i.e. the first token
            // inside the invisible delimiters opened by `prev_token`.
            token::OpenInvisible(InvisibleOrigin::MetaVar(_)) => self.look_ahead(0, |t| t.span),
            _ => self.prev_token.span,
        }
    }
1699}
1700
/// The result of parsing a nonterminal (macro metavariable) fragment: each
/// variant carries the parsed form for one fragment kind — a raw token tree,
/// an identifier/lifetime, or a boxed AST node (some with an extra kind tag,
/// e.g. `NtPatKind` / `NtExprKind`).
#[derive(Clone, Debug)]
pub enum ParseNtResult {
    Tt(TokenTree),
    Ident(Ident, IdentIsRaw),
    Lifetime(Ident, IdentIsRaw),
    Item(Box<ast::Item>),
    Block(Box<ast::Block>),
    Stmt(Box<ast::Stmt>),
    Pat(Box<ast::Pat>, NtPatKind),
    Expr(Box<ast::Expr>, NtExprKind),
    Literal(Box<ast::Expr>),
    Ty(Box<ast::Ty>),
    Meta(Box<ast::AttrItem>),
    Path(Box<ast::Path>),
    Vis(Box<ast::Visibility>),
}