// rustc_parse/parser/mod.rs

1pub mod attr;
2mod attr_wrapper;
3mod diagnostics;
4mod expr;
5mod generics;
6mod item;
7mod nonterminal;
8mod pat;
9mod path;
10mod stmt;
11pub mod token_type;
12mod ty;
13
14// Parsers for non-functionlike builtin macros are defined in rustc_parse so they can be used by
15// both rustc_builtin_macros and rustfmt.
16pub mod asm;
17pub mod cfg_select;
18
19use std::assert_matches::debug_assert_matches;
20use std::{fmt, mem, slice};
21
22use attr_wrapper::{AttrWrapper, UsePreAttrPos};
23pub use diagnostics::AttemptLocalParseRecovery;
24pub(crate) use expr::ForbiddenLetReason;
25// Public to use it for custom `if` expressions in rustfmt forks like https://github.com/tucant/rustfmt
26pub use expr::LetChainsPolicy;
27pub(crate) use item::{FnContext, FnParseMode};
28pub use pat::{CommaRecoveryMode, RecoverColon, RecoverComma};
29pub use path::PathStyle;
30use rustc_ast::token::{
31    self, IdentIsRaw, InvisibleOrigin, MetaVarKind, NtExprKind, NtPatKind, Token, TokenKind,
32};
33use rustc_ast::tokenstream::{
34    ParserRange, ParserReplacement, Spacing, TokenCursor, TokenStream, TokenTree, TokenTreeCursor,
35};
36use rustc_ast::util::case::Case;
37use rustc_ast::{
38    self as ast, AnonConst, AttrArgs, AttrId, BlockCheckMode, ByRef, Const, CoroutineKind,
39    DUMMY_NODE_ID, DelimArgs, Expr, ExprKind, Extern, HasAttrs, HasTokens, MgcaDisambiguation,
40    Mutability, Recovered, Safety, StrLit, Visibility, VisibilityKind,
41};
42use rustc_ast_pretty::pprust;
43use rustc_data_structures::fx::FxHashMap;
44use rustc_errors::{Applicability, Diag, FatalError, MultiSpan, PResult};
45use rustc_index::interval::IntervalSet;
46use rustc_session::parse::ParseSess;
47use rustc_span::{Ident, Span, Symbol, kw, sym};
48use thin_vec::ThinVec;
49use token_type::TokenTypeSet;
50pub use token_type::{ExpKeywordPair, ExpTokenPair, TokenType};
51use tracing::debug;
52
53use crate::errors::{self, IncorrectVisibilityRestriction, NonStringAbiLiteral};
54use crate::exp;
55
// Unit tests for the parser itself.
#[cfg(test)]
mod tests;

// Ideally, these tests would be in `rustc_ast`. But they depend on having a
// parser, so they are here.
#[cfg(test)]
mod tokenstream {
    mod tests;
}
65
bitflags::bitflags! {
    /// Restrictions applied while parsing.
    ///
    /// The parser maintains a bitset of restrictions it will honor while
    /// parsing. This is essentially used as a way of tracking state of what
    /// is being parsed and to change behavior based on that.
    ///
    /// Stored in the `restrictions` field of [`Parser`] as a `u8` bitset.
    #[derive(Clone, Copy, Debug)]
    struct Restrictions: u8 {
        /// Restricts expressions for use in statement position.
        ///
        /// When expressions are used in various places, like statements or
        /// match arms, this is used to stop parsing once certain tokens are
        /// reached.
        ///
        /// For example, `if true {} & 1` with `STMT_EXPR` in effect is parsed
        /// as two separate expression statements (`if` and a reference to 1).
        /// Otherwise it is parsed as a bitwise AND where `if` is on the left
        /// and 1 is on the right.
        const STMT_EXPR         = 1 << 0;
        /// Do not allow struct literals.
        ///
        /// There are several places in the grammar where we don't want to
        /// allow struct literals because they can require lookahead, or
        /// otherwise could be ambiguous or cause confusion. For example,
        /// `if Foo {} {}` isn't clear if it is `Foo{}` struct literal, or
        /// just `Foo` is the condition, followed by a consequent block,
        /// followed by an empty block.
        ///
        /// See [RFC 92](https://rust-lang.github.io/rfcs/0092-struct-grammar.html).
        const NO_STRUCT_LITERAL = 1 << 1;
        /// Used to provide better error messages for const generic arguments.
        ///
        /// An un-braced const generic argument is limited to a very small
        /// subset of expressions. This is used to detect the situation where
        /// an expression outside of that subset is used, and to suggest to
        /// wrap the expression in braces.
        const CONST_EXPR        = 1 << 2;
        /// Allows `let` expressions.
        ///
        /// `let pattern = scrutinee` is parsed as an expression, but it is
        /// only allowed in let chains (`if` and `while` conditions).
        /// Otherwise it is not an expression (note that `let` in statement
        /// positions is treated as a `StmtKind::Let` statement, which has a
        /// slightly different grammar).
        const ALLOW_LET         = 1 << 3;
        /// Used to detect a missing `=>` in a match guard.
        ///
        /// This is used for error handling in a match guard to give a better
        /// error message if the `=>` is missing. It is set when parsing the
        /// guard expression.
        const IN_IF_GUARD       = 1 << 4;
        /// Used to detect the incorrect use of expressions in patterns.
        ///
        /// This is used for error handling while parsing a pattern. During
        /// error recovery, this will be set to try to parse the pattern as an
        /// expression, but halts parsing the expression when reaching certain
        /// tokens like `=`.
        const IS_PAT            = 1 << 5;
    }
}
126
/// How statement-boundary error recovery should treat semicolons.
/// NOTE(review): variant semantics are inferred from the names; the recovery
/// routines that consume this live outside this file — confirm against
/// `diagnostics.rs`/`stmt.rs` use sites.
#[derive(Clone, Copy, PartialEq, Debug)]
enum SemiColonMode {
    Break,
    Ignore,
    Comma,
}

/// How error recovery should treat block (`{ ... }`) boundaries.
/// NOTE(review): variant semantics inferred from names; confirm at use sites.
#[derive(Clone, Copy, PartialEq, Debug)]
enum BlockMode {
    Break,
    Ignore,
}
139
/// Whether or not we should force collection of tokens for an AST node,
/// regardless of whether or not it has attributes.
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ForceCollect {
    /// Always collect tokens for this node.
    Yes,
    /// Only collect tokens if otherwise required (e.g. by attributes).
    No,
}
147
/// If the next tokens are ill-formed `$ty::` recover them as `<$ty>::`.
#[macro_export]
macro_rules! maybe_recover_from_interpolated_ty_qpath {
    ($self: expr, $allow_qpath_recovery: expr) => {
        // Only fire when the caller permits qpath recovery, the parser is in
        // recovery mode, and the current token is a pasted `ty` metavariable
        // whose invisible close delimiter is immediately followed by `::`.
        if $allow_qpath_recovery
            && $self.may_recover()
            && let Some(mv_kind) = $self.token.is_metavar_seq()
            && let token::MetaVarKind::Ty { .. } = mv_kind
            && $self.check_noexpect_past_close_delim(&token::PathSep)
        {
            // Reparse the type, then move to recovery.
            let ty = $self
                .eat_metavar_seq(mv_kind, |this| this.parse_ty_no_question_mark_recover())
                .expect("metavar seq ty");

            return $self.maybe_recover_from_bad_qpath_stage_2($self.prev_token.span, ty);
        }
    };
}
167
/// Whether the parser is allowed to perform error recovery.
///
/// Recovery is disabled while parsing macro arguments; see the `recovery`
/// field on [`Parser`].
#[derive(Clone, Copy, Debug)]
pub enum Recovery {
    Allowed,
    Forbidden,
}
173
#[derive(Clone)]
pub struct Parser<'a> {
    pub psess: &'a ParseSess,
    /// The current token.
    pub token: Token,
    /// The spacing for the current token.
    token_spacing: Spacing,
    /// The previous token.
    pub prev_token: Token,
    /// Whether to capture tokens stripped by `cfg` processing.
    /// NOTE(review): inferred from the name; confirm against `attr_wrapper.rs`.
    pub capture_cfg: bool,
    /// The parsing restrictions currently in effect; see [`Restrictions`].
    restrictions: Restrictions,
    /// Token types that were expected at the current position but not found,
    /// accumulated for diagnostics by `check` and related methods.
    expected_token_types: TokenTypeSet,
    /// Cursor into the token stream being parsed.
    token_cursor: TokenCursor,
    // The number of calls to `bump`, i.e. the position in the token stream.
    num_bump_calls: u32,
    // During parsing we may sometimes need to "unglue" a glued token into two
    // or three component tokens (e.g. `>>` into `>` and `>`, or `>>=` into `>`
    // and `>` and `=`), so the parser can consume them one at a time. This
    // process bypasses the normal capturing mechanism (e.g. `num_bump_calls`
    // will not be incremented), since the "unglued" tokens due not exist in
    // the original `TokenStream`.
    //
    // If we end up consuming all the component tokens, this is not an issue,
    // because we'll end up capturing the single "glued" token.
    //
    // However, sometimes we may want to capture not all of the original
    // token. For example, capturing the `Vec<u8>` in `Option<Vec<u8>>`
    // requires us to unglue the trailing `>>` token. The `break_last_token`
    // field is used to track these tokens. They get appended to the captured
    // stream when we evaluate a `LazyAttrTokenStream`.
    //
    // This value is always 0, 1, or 2. It can only reach 2 when splitting
    // `>>=` or `<<=`.
    break_last_token: u32,
    /// This field is used to keep track of how many left angle brackets we have seen. This is
    /// required in order to detect extra leading left angle brackets (`<` characters) and error
    /// appropriately.
    ///
    /// See the comments in the `parse_path_segment` function for more details.
    unmatched_angle_bracket_count: u16,
    /// Depth of angle-bracket nesting during path parsing.
    /// NOTE(review): inferred from the name; confirm in `parse_path_segment`.
    angle_bracket_nesting: u16,

    /// Span of the last token reported as unexpected; `expect_one_of` raises a
    /// fatal error instead of erroring on the same token a second time.
    last_unexpected_token_span: Option<Span>,
    /// If present, this `Parser` is not parsing Rust code but rather a macro call.
    subparser_name: Option<&'static str>,
    /// State for token capturing; see [`CaptureState`].
    capture_state: CaptureState,
    /// This allows us to recover when the user forget to add braces around
    /// multiple statements in the closure body.
    current_closure: Option<ClosureSpans>,
    /// Whether the parser is allowed to do recovery.
    /// This is disabled when parsing macro arguments, see #103534
    recovery: Recovery,
}
227
// This type is used a lot, e.g. it's cloned when matching many declarative macro rules with
// nonterminals. Make sure it doesn't unintentionally get bigger. We only check a few arches
// though, because `TokenTypeSet(u128)` alignment varies on others, changing the total size.
#[cfg(all(target_pointer_width = "64", any(target_arch = "aarch64", target_arch = "x86_64")))]
rustc_data_structures::static_assert_size!(Parser<'_>, 288);

/// Stores span information about a closure.
#[derive(Clone, Debug)]
struct ClosureSpans {
    /// Span of the entire closure expression.
    whole_closure: Span,
    /// Span of the `|` that closes the parameter list.
    closing_pipe: Span,
    /// Span of the closure body expression.
    body: Span,
}
241
/// Controls how we capture tokens. Capturing can be expensive,
/// so we try to avoid performing capturing in cases where
/// we will never need an `AttrTokenStream`.
#[derive(Copy, Clone, Debug)]
enum Capturing {
    /// We aren't performing any capturing - this is the default mode.
    No,
    /// We are capturing tokens
    Yes,
}

// This state is used by `Parser::collect_tokens`.
#[derive(Clone, Debug)]
struct CaptureState {
    /// Whether token capturing is currently enabled.
    capturing: Capturing,
    /// Pending replacements to apply to captured token ranges.
    /// NOTE(review): semantics inferred from the type name; confirm against
    /// `collect_tokens` in `attr_wrapper.rs`.
    parser_replacements: Vec<ParserReplacement>,
    /// Maps each inner attribute to the parser range it occupies.
    /// NOTE(review): inferred from field name/type; confirm at use sites.
    inner_attr_parser_ranges: FxHashMap<AttrId, ParserRange>,
    // `IntervalSet` is good for perf because attrs are mostly added to this
    // set in contiguous ranges.
    seen_attrs: IntervalSet<AttrId>,
}
263
264/// A sequence separator.
265#[derive(Debug)]
266struct SeqSep {
267    /// The separator token.
268    sep: Option<ExpTokenPair>,
269    /// `true` if a trailing separator is allowed.
270    trailing_sep_allowed: bool,
271}
272
273impl SeqSep {
274    fn trailing_allowed(sep: ExpTokenPair) -> SeqSep {
275        SeqSep { sep: Some(sep), trailing_sep_allowed: true }
276    }
277
278    fn none() -> SeqSep {
279        SeqSep { sep: None, trailing_sep_allowed: false }
280    }
281}
282
/// Whether a (visibility-like) item position is followed by a type.
/// NOTE(review): inferred from the name; the consumers live outside this
/// file — confirm against visibility parsing call sites.
#[derive(Debug)]
pub enum FollowedByType {
    Yes,
    No,
}
288
/// Whether a parsed sequence ended with a trailing separator.
#[derive(Copy, Clone, Debug)]
pub enum Trailing {
    No,
    Yes,
}

impl From<bool> for Trailing {
    fn from(b: bool) -> Trailing {
        match b {
            true => Trailing::Yes,
            false => Trailing::No,
        }
    }
}
300
/// A description of a token's category, used when rendering tokens in
/// diagnostics (see `token_descr`).
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub(super) enum TokenDescription {
    ReservedIdentifier,
    Keyword,
    ReservedKeyword,
    DocComment,

    // Expanded metavariables are wrapped in invisible delimiters which aren't
    // pretty-printed. In error messages we must handle these specially
    // otherwise we get confusing things in messages like "expected `(`, found
    // ``". It's better to say e.g. "expected `(`, found type metavariable".
    MetaVar(MetaVarKind),
}
314
315impl TokenDescription {
316    pub(super) fn from_token(token: &Token) -> Option<Self> {
317        match token.kind {
318            _ if token.is_special_ident() => Some(TokenDescription::ReservedIdentifier),
319            _ if token.is_used_keyword() => Some(TokenDescription::Keyword),
320            _ if token.is_unused_keyword() => Some(TokenDescription::ReservedKeyword),
321            token::DocComment(..) => Some(TokenDescription::DocComment),
322            token::OpenInvisible(InvisibleOrigin::MetaVar(kind)) => {
323                Some(TokenDescription::MetaVar(kind))
324            }
325            _ => None,
326        }
327    }
328}
329
/// Renders `token` for use in diagnostics, e.g. "keyword `fn`",
/// "doc comment `/// x`", or plain "`+`".
pub fn token_descr(token: &Token) -> String {
    let s = pprust::token_to_string(token).to_string();

    match (TokenDescription::from_token(token), &token.kind) {
        (Some(TokenDescription::ReservedIdentifier), _) => format!("reserved identifier `{s}`"),
        (Some(TokenDescription::Keyword), _) => format!("keyword `{s}`"),
        (Some(TokenDescription::ReservedKeyword), _) => format!("reserved keyword `{s}`"),
        (Some(TokenDescription::DocComment), _) => format!("doc comment `{s}`"),
        // Deliberately doesn't print `s`, which is empty.
        (Some(TokenDescription::MetaVar(kind)), _) => format!("`{kind}` metavariable"),
        (None, TokenKind::NtIdent(..)) => format!("identifier `{s}`"),
        (None, TokenKind::NtLifetime(..)) => format!("lifetime `{s}`"),
        (None, _) => format!("`{s}`"),
    }
}
345
346impl<'a> Parser<'a> {
    /// Creates a new `Parser` over `stream`, advanced to its first token.
    /// `subparser_name` is set when parsing a macro call rather than Rust code.
    pub fn new(
        psess: &'a ParseSess,
        stream: TokenStream,
        subparser_name: Option<&'static str>,
    ) -> Self {
        let mut parser = Parser {
            psess,
            token: Token::dummy(),
            token_spacing: Spacing::Alone,
            prev_token: Token::dummy(),
            capture_cfg: false,
            restrictions: Restrictions::empty(),
            expected_token_types: TokenTypeSet::new(),
            token_cursor: TokenCursor { curr: TokenTreeCursor::new(stream), stack: Vec::new() },
            num_bump_calls: 0,
            break_last_token: 0,
            unmatched_angle_bracket_count: 0,
            angle_bracket_nesting: 0,
            last_unexpected_token_span: None,
            subparser_name,
            capture_state: CaptureState {
                capturing: Capturing::No,
                parser_replacements: Vec::new(),
                inner_attr_parser_ranges: Default::default(),
                seen_attrs: IntervalSet::new(u32::MAX as usize),
            },
            current_closure: None,
            recovery: Recovery::Allowed,
        };

        // Make parser point to the first token.
        parser.bump();

        // Change this from 1 back to 0 after the bump. This eases debugging of
        // `Parser::collect_tokens` because 0-indexed token positions are nicer
        // than 1-indexed token positions.
        parser.num_bump_calls = 0;

        parser
    }
387
    /// Sets the parser's recovery mode, builder-style (consumes and returns
    /// `self`).
    #[inline]
    pub fn recovery(mut self, recovery: Recovery) -> Self {
        self.recovery = recovery;
        self
    }

    /// Runs `f` with the given recovery mode in effect, restoring the
    /// previous mode afterwards.
    #[inline]
    fn with_recovery<T>(&mut self, recovery: Recovery, f: impl FnOnce(&mut Self) -> T) -> T {
        let old = mem::replace(&mut self.recovery, recovery);
        let res = f(self);
        self.recovery = old;
        res
    }
401
402    /// Whether the parser is allowed to recover from broken code.
403    ///
404    /// If this returns false, recovering broken code into valid code (especially if this recovery does lookahead)
405    /// is not allowed. All recovery done by the parser must be gated behind this check.
406    ///
407    /// Technically, this only needs to restrict eager recovery by doing lookahead at more tokens.
408    /// But making the distinction is very subtle, and simply forbidding all recovery is a lot simpler to uphold.
409    #[inline]
410    fn may_recover(&self) -> bool {
411        matches!(self.recovery, Recovery::Allowed)
412    }
413
    /// Version of [`unexpected`](Parser::unexpected) that "returns" any type in the `Ok`
    /// (both those functions never return "Ok", and so can lie like that in the type).
    pub fn unexpected_any<T>(&mut self) -> PResult<'a, T> {
        // With empty edible/inedible sets, `expect_one_of` always reports the
        // current token as unexpected.
        match self.expect_one_of(&[], &[]) {
            Err(e) => Err(e),
            // We can get `Ok(true)` from `recover_closing_delimiter`
            // which is called in `expected_one_of_not_found`.
            Ok(_) => FatalError.raise(),
        }
    }

    /// Reports the current token as unexpected; never returns `Ok`.
    pub fn unexpected(&mut self) -> PResult<'a, ()> {
        self.unexpected_any()
    }
428
    /// Expects and consumes the token `t`. Signals an error if the next token is not `t`.
    pub fn expect(&mut self, exp: ExpTokenPair) -> PResult<'a, Recovered> {
        if self.expected_token_types.is_empty() {
            // Fast path: no other expectations are pending, so a mismatch can
            // go straight to single-token recovery.
            if self.token == exp.tok {
                self.bump();
                Ok(Recovered::No)
            } else {
                self.unexpected_try_recover(&exp.tok)
            }
        } else {
            // Other expectations were recorded; report them all together.
            self.expect_one_of(slice::from_ref(&exp), &[])
        }
    }
442
    /// Expect next token to be edible or inedible token. If edible,
    /// then consume it; if inedible, then return without consuming
    /// anything. Signal a fatal error if next token is unexpected.
    fn expect_one_of(
        &mut self,
        edible: &[ExpTokenPair],
        inedible: &[ExpTokenPair],
    ) -> PResult<'a, Recovered> {
        if edible.iter().any(|exp| exp.tok == self.token.kind) {
            self.bump();
            Ok(Recovered::No)
        } else if inedible.iter().any(|exp| exp.tok == self.token.kind) {
            // leave it in the input
            Ok(Recovered::No)
        } else if self.token != token::Eof
            && self.last_unexpected_token_span == Some(self.token.span)
        {
            // We already errored on this exact token; abort rather than
            // report (and attempt recovery from) it a second time.
            FatalError.raise();
        } else {
            self.expected_one_of_not_found(edible, inedible)
                .map(|error_guaranteed| Recovered::Yes(error_guaranteed))
        }
    }
466
    // Public for rustfmt usage.
    /// Parses an identifier, recovering (when recovery is allowed) if it is
    /// a reserved word used as an identifier.
    pub fn parse_ident(&mut self) -> PResult<'a, Ident> {
        self.parse_ident_common(self.may_recover())
    }
471
472    fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, Ident> {
473        let (ident, is_raw) = self.ident_or_err(recover)?;
474
475        if matches!(is_raw, IdentIsRaw::No) && ident.is_reserved() {
476            let err = self.expected_ident_found_err();
477            if recover {
478                err.emit();
479            } else {
480                return Err(err);
481            }
482        }
483        self.bump();
484        Ok(ident)
485    }
486
487    fn ident_or_err(&mut self, recover: bool) -> PResult<'a, (Ident, IdentIsRaw)> {
488        match self.token.ident() {
489            Some(ident) => Ok(ident),
490            None => self.expected_ident_found(recover),
491        }
492    }
493
494    /// Checks if the next token is `tok`, and returns `true` if so.
495    ///
496    /// This method will automatically add `tok` to `expected_token_types` if `tok` is not
497    /// encountered.
498    #[inline]
499    pub fn check(&mut self, exp: ExpTokenPair) -> bool {
500        let is_present = self.token == exp.tok;
501        if !is_present {
502            self.expected_token_types.insert(exp.token_type);
503        }
504        is_present
505    }
506
    /// Like `check`, but without recording an expectation for diagnostics.
    #[inline]
    #[must_use]
    fn check_noexpect(&self, tok: &TokenKind) -> bool {
        self.token == *tok
    }

    // Check the first token after the delimiter that closes the current
    // delimited sequence. (Panics if used in the outermost token stream, which
    // has no delimiters.) It uses a clone of the relevant tree cursor to skip
    // past the entire `TokenTree::Delimited` in a single step, avoiding the
    // need for unbounded token lookahead.
    //
    // Primarily used when `self.token` matches `OpenInvisible(_))`, to look
    // ahead through the current metavar expansion.
    fn check_noexpect_past_close_delim(&self, tok: &TokenKind) -> bool {
        // `unwrap` panics in the outermost stream, as documented above.
        let mut tree_cursor = self.token_cursor.stack.last().unwrap().clone();
        tree_cursor.bump();
        matches!(
            tree_cursor.curr(),
            Some(TokenTree::Token(token::Token { kind, .. }, _)) if kind == tok
        )
    }
529
530    /// Consumes a token 'tok' if it exists. Returns whether the given token was present.
531    ///
532    /// the main purpose of this function is to reduce the cluttering of the suggestions list
533    /// which using the normal eat method could introduce in some cases.
534    #[inline]
535    #[must_use]
536    fn eat_noexpect(&mut self, tok: &TokenKind) -> bool {
537        let is_present = self.check_noexpect(tok);
538        if is_present {
539            self.bump()
540        }
541        is_present
542    }
543
544    /// Consumes a token 'tok' if it exists. Returns whether the given token was present.
545    #[inline]
546    #[must_use]
547    pub fn eat(&mut self, exp: ExpTokenPair) -> bool {
548        let is_present = self.check(exp);
549        if is_present {
550            self.bump()
551        }
552        is_present
553    }
554
555    /// If the next token is the given keyword, returns `true` without eating it.
556    /// An expectation is also added for diagnostics purposes.
557    #[inline]
558    #[must_use]
559    fn check_keyword(&mut self, exp: ExpKeywordPair) -> bool {
560        let is_keyword = self.token.is_keyword(exp.kw);
561        if !is_keyword {
562            self.expected_token_types.insert(exp.token_type);
563        }
564        is_keyword
565    }
566
567    #[inline]
568    #[must_use]
569    fn check_keyword_case(&mut self, exp: ExpKeywordPair, case: Case) -> bool {
570        if self.check_keyword(exp) {
571            true
572        } else if case == Case::Insensitive
573            && let Some((ident, IdentIsRaw::No)) = self.token.ident()
574            // Do an ASCII case-insensitive match, because all keywords are ASCII.
575            && ident.as_str().eq_ignore_ascii_case(exp.kw.as_str())
576        {
577            true
578        } else {
579            false
580        }
581    }
582
583    /// If the next token is the given keyword, eats it and returns `true`.
584    /// Otherwise, returns `false`. An expectation is also added for diagnostics purposes.
585    // Public for rustc_builtin_macros and rustfmt usage.
586    #[inline]
587    #[must_use]
588    pub fn eat_keyword(&mut self, exp: ExpKeywordPair) -> bool {
589        let is_keyword = self.check_keyword(exp);
590        if is_keyword {
591            self.bump();
592        }
593        is_keyword
594    }
595
    /// Eats a keyword, optionally ignoring the case.
    /// If the case differs (and is ignored) an error is issued.
    /// This is useful for recovery.
    #[inline]
    #[must_use]
    fn eat_keyword_case(&mut self, exp: ExpKeywordPair, case: Case) -> bool {
        if self.eat_keyword(exp) {
            true
        } else if case == Case::Insensitive
            && let Some((ident, IdentIsRaw::No)) = self.token.ident()
            // Do an ASCII case-insensitive match, because all keywords are ASCII.
            && ident.as_str().eq_ignore_ascii_case(exp.kw.as_str())
        {
            // Classify how the keyword itself is cased so the error can
            // describe the user's mismatch precisely.
            let kw = exp.kw.as_str();
            let is_upper = kw.chars().all(char::is_uppercase);
            let is_lower = kw.chars().all(char::is_lowercase);

            let case = match (is_upper, is_lower) {
                (true, true) => {
                    unreachable!("keyword that is both fully upper- and fully lowercase")
                }
                (true, false) => errors::Case::Upper,
                (false, true) => errors::Case::Lower,
                (false, false) => errors::Case::Mixed,
            };

            // Recover: report the bad casing but consume the token anyway.
            self.dcx().emit_err(errors::KwBadCase { span: ident.span, kw, case });
            self.bump();
            true
        } else {
            false
        }
    }
629
630    /// If the next token is the given keyword, eats it and returns `true`.
631    /// Otherwise, returns `false`. No expectation is added.
632    // Public for rustc_builtin_macros usage.
633    #[inline]
634    #[must_use]
635    pub fn eat_keyword_noexpect(&mut self, kw: Symbol) -> bool {
636        let is_keyword = self.token.is_keyword(kw);
637        if is_keyword {
638            self.bump();
639        }
640        is_keyword
641    }
642
643    /// If the given word is not a keyword, signals an error.
644    /// If the next token is not the given word, signals an error.
645    /// Otherwise, eats it.
646    pub fn expect_keyword(&mut self, exp: ExpKeywordPair) -> PResult<'a, ()> {
647        if !self.eat_keyword(exp) { self.unexpected() } else { Ok(()) }
648    }
649
    /// Consume a sequence produced by a metavar expansion, if present.
    ///
    /// `f` reparses the pasted tokens; returns `None` if the current token is
    /// not an invisible open delimiter for exactly `mv_kind`.
    pub fn eat_metavar_seq<T>(
        &mut self,
        mv_kind: MetaVarKind,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> Option<T> {
        self.eat_metavar_seq_with_matcher(|mvk| mvk == mv_kind, f)
    }
658
659    /// A slightly more general form of `eat_metavar_seq`, for use with the
660    /// `MetaVarKind` variants that have parameters, where an exact match isn't
661    /// desired.
662    fn eat_metavar_seq_with_matcher<T>(
663        &mut self,
664        match_mv_kind: impl Fn(MetaVarKind) -> bool,
665        mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
666    ) -> Option<T> {
667        if let token::OpenInvisible(InvisibleOrigin::MetaVar(mv_kind)) = self.token.kind
668            && match_mv_kind(mv_kind)
669        {
670            self.bump();
671
672            // Recovery is disabled when parsing macro arguments, so it must
673            // also be disabled when reparsing pasted macro arguments,
674            // otherwise we get inconsistent results (e.g. #137874).
675            let res = self.with_recovery(Recovery::Forbidden, |this| f(this));
676
677            let res = match res {
678                Ok(res) => res,
679                Err(err) => {
680                    // This can occur in unusual error cases, e.g. #139445.
681                    err.delay_as_bug();
682                    return None;
683                }
684            };
685
686            if let token::CloseInvisible(InvisibleOrigin::MetaVar(mv_kind)) = self.token.kind
687                && match_mv_kind(mv_kind)
688            {
689                self.bump();
690                Some(res)
691            } else {
692                // This can occur when invalid syntax is passed to a decl macro. E.g. see #139248,
693                // where the reparse attempt of an invalid expr consumed the trailing invisible
694                // delimiter.
695                self.dcx()
696                    .span_delayed_bug(self.token.span, "no close delim with reparsing {mv_kind:?}");
697                None
698            }
699        } else {
700            None
701        }
702    }
703
704    /// Is the given keyword `kw` followed by a non-reserved identifier?
705    fn is_kw_followed_by_ident(&self, kw: Symbol) -> bool {
706        self.token.is_keyword(kw) && self.look_ahead(1, |t| t.is_non_reserved_ident())
707    }
708
709    #[inline]
710    fn check_or_expected(&mut self, ok: bool, token_type: TokenType) -> bool {
711        if !ok {
712            self.expected_token_types.insert(token_type);
713        }
714        ok
715    }
716
    /// Is the current token an identifier? Records an expectation otherwise.
    fn check_ident(&mut self) -> bool {
        self.check_or_expected(self.token.is_ident(), TokenType::Ident)
    }

    /// Can the current token start a path? Records an expectation otherwise.
    fn check_path(&mut self) -> bool {
        self.check_or_expected(self.token.is_path_start(), TokenType::Path)
    }

    /// Can the current token start a type? Records an expectation otherwise.
    fn check_type(&mut self) -> bool {
        self.check_or_expected(self.token.can_begin_type(), TokenType::Type)
    }

    /// Can the current position start a const generic argument?
    fn check_const_arg(&mut self) -> bool {
        let is_mcg_arg = self.check_or_expected(self.token.can_begin_const_arg(), TokenType::Const);
        // Also accept `const { ... }` as a const argument.
        // NOTE(review): presumably this is the min_generic_const_args form
        // (cf. the `MgcaDisambiguation` import) — confirm at call sites.
        let is_mgca_arg = self.is_keyword_ahead(0, &[kw::Const])
            && self.look_ahead(1, |t| *t == token::OpenBrace);
        is_mcg_arg || is_mgca_arg
    }
735
    /// Is the current position the start of a `const` closure, i.e. `const`
    /// followed by `move`/`use`/`static` or the `|`/`||` of a closure head?
    fn check_const_closure(&self) -> bool {
        self.is_keyword_ahead(0, &[kw::Const])
            && self.look_ahead(1, |t| match &t.kind {
                // async closures do not work with const closures, so we do not parse that here.
                token::Ident(kw::Move | kw::Use | kw::Static, IdentIsRaw::No)
                | token::OrOr
                | token::Or => true,
                _ => false,
            })
    }

    /// Is the token `dist` tokens ahead the start of an inline `const` block:
    /// `const {` (or `const` followed by a pasted block metavariable)?
    fn check_inline_const(&self, dist: usize) -> bool {
        self.is_keyword_ahead(dist, &[kw::Const])
            && self.look_ahead(dist + 1, |t| match &t.kind {
                token::OpenBrace => true,
                token::OpenInvisible(InvisibleOrigin::MetaVar(MetaVarKind::Block)) => true,
                _ => false,
            })
    }
755
    /// Checks to see if the next token is either `+` or `+=`.
    /// Otherwise returns `false`. Records a `+` expectation on failure.
    #[inline]
    fn check_plus(&mut self) -> bool {
        self.check_or_expected(self.token.is_like_plus(), TokenType::Plus)
    }
762
    /// Eats the expected token if it's present possibly breaking
    /// compound tokens like multi-character operators in process.
    /// Returns `true` if the token was eaten.
    fn break_and_eat(&mut self, exp: ExpTokenPair) -> bool {
        if self.token == exp.tok {
            self.bump();
            return true;
        }
        // Try to split off the first character of a compound token (e.g. `+`
        // out of `+=`) when it matches the expected token.
        match self.token.kind.break_two_token_op(1) {
            Some((first, second)) if first == exp.tok => {
                let first_span = self.psess.source_map().start_point(self.token.span);
                let second_span = self.token.span.with_lo(first_span.hi());
                self.token = Token::new(first, first_span);
                // Keep track of this token - if we end token capturing now,
                // we'll want to append this token to the captured stream.
                //
                // If we consume any additional tokens, then this token
                // is not needed (we'll capture the entire 'glued' token),
                // and `bump` will set this field to 0.
                self.break_last_token += 1;
                // Use the spacing of the glued token as the spacing of the
                // unglued second token.
                self.bump_with((Token::new(second, second_span), self.token_spacing));
                true
            }
            _ => {
                self.expected_token_types.insert(exp.token_type);
                false
            }
        }
    }
794
795    /// Eats `+` possibly breaking tokens like `+=` in process.
796    fn eat_plus(&mut self) -> bool {
797        self.break_and_eat(exp!(Plus))
798    }
799
800    /// Eats `&` possibly breaking tokens like `&&` in process.
801    /// Signals an error if `&` is not eaten.
802    fn expect_and(&mut self) -> PResult<'a, ()> {
803        if self.break_and_eat(exp!(And)) { Ok(()) } else { self.unexpected() }
804    }
805
806    /// Eats `|` possibly breaking tokens like `||` in process.
807    /// Signals an error if `|` was not eaten.
808    fn expect_or(&mut self) -> PResult<'a, ()> {
809        if self.break_and_eat(exp!(Or)) { Ok(()) } else { self.unexpected() }
810    }
811
812    /// Eats `<` possibly breaking tokens like `<<` in process.
813    fn eat_lt(&mut self) -> bool {
814        let ate = self.break_and_eat(exp!(Lt));
815        if ate {
816            // See doc comment for `unmatched_angle_bracket_count`.
817            self.unmatched_angle_bracket_count += 1;
818            debug!("eat_lt: (increment) count={:?}", self.unmatched_angle_bracket_count);
819        }
820        ate
821    }
822
823    /// Eats `<` possibly breaking tokens like `<<` in process.
824    /// Signals an error if `<` was not eaten.
825    fn expect_lt(&mut self) -> PResult<'a, ()> {
826        if self.eat_lt() { Ok(()) } else { self.unexpected() }
827    }
828
829    /// Eats `>` possibly breaking tokens like `>>` in process.
830    /// Signals an error if `>` was not eaten.
831    fn expect_gt(&mut self) -> PResult<'a, ()> {
832        if self.break_and_eat(exp!(Gt)) {
833            // See doc comment for `unmatched_angle_bracket_count`.
834            if self.unmatched_angle_bracket_count > 0 {
835                self.unmatched_angle_bracket_count -= 1;
836                debug!("expect_gt: (decrement) count={:?}", self.unmatched_angle_bracket_count);
837            }
838            Ok(())
839        } else {
840            self.unexpected()
841        }
842    }
843
844    /// Checks if the next token is contained within `closes`, and returns `true` if so.
845    fn expect_any_with_type(
846        &mut self,
847        closes_expected: &[ExpTokenPair],
848        closes_not_expected: &[&TokenKind],
849    ) -> bool {
850        closes_expected.iter().any(|&close| self.check(close))
851            || closes_not_expected.iter().any(|k| self.check_noexpect(k))
852    }
853
    /// Parses a sequence until the specified delimiters. The function
    /// `f` must consume tokens until reaching the next separator or
    /// closing bracket.
    ///
    /// Returns the parsed elements, whether a trailing separator was seen,
    /// and whether error recovery took place while parsing separators.
    /// Stops *before* the close token; the caller eats it.
    fn parse_seq_to_before_tokens<T>(
        &mut self,
        closes_expected: &[ExpTokenPair],
        closes_not_expected: &[&TokenKind],
        sep: SeqSep,
        mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, Trailing, Recovered)> {
        let mut first = true;
        let mut recovered = Recovered::No;
        let mut trailing = Trailing::No;
        let mut v = ThinVec::new();

        while !self.expect_any_with_type(closes_expected, closes_not_expected) {
            // Also stop at any close delimiter or EOF, so a missing close
            // token doesn't make us consume the rest of the stream.
            if self.token.kind.is_close_delim_or_eof() {
                break;
            }
            if let Some(exp) = sep.sep {
                if first {
                    // no separator for the first element
                    first = false;
                } else {
                    // check for separator
                    match self.expect(exp) {
                        Ok(Recovered::No) => {
                            self.current_closure.take();
                        }
                        Ok(Recovered::Yes(guar)) => {
                            // `expect` recovered for us; stop the sequence here.
                            self.current_closure.take();
                            recovered = Recovered::Yes(guar);
                            break;
                        }
                        Err(mut expect_err) => {
                            let sp = self.prev_token.span.shrink_to_hi();
                            let token_str = pprust::token_kind_to_string(&exp.tok);

                            match self.current_closure.take() {
                                Some(closure_spans) if self.token == TokenKind::Semi => {
                                    // Finding a semicolon instead of a comma
                                    // after a closure body indicates that the
                                    // closure body may be a block but the user
                                    // forgot to put braces around its
                                    // statements.

                                    self.recover_missing_braces_around_closure_body(
                                        closure_spans,
                                        expect_err,
                                    )?;

                                    continue;
                                }

                                _ => {
                                    // Attempt to keep parsing if it was a similar separator.
                                    if exp.tok.similar_tokens().contains(&self.token.kind) {
                                        self.bump();
                                    }
                                }
                            }

                            // If this was a missing `@` in a binding pattern
                            // bail with a suggestion
                            // https://github.com/rust-lang/rust/issues/72373
                            if self.prev_token.is_ident() && self.token == token::DotDot {
                                let msg = format!(
                                    "if you meant to bind the contents of the rest of the array \
                                     pattern into `{}`, use `@`",
                                    pprust::token_to_string(&self.prev_token)
                                );
                                expect_err
                                    .with_span_suggestion_verbose(
                                        self.prev_token.span.shrink_to_hi().until(self.token.span),
                                        msg,
                                        " @ ",
                                        Applicability::MaybeIncorrect,
                                    )
                                    .emit();
                                break;
                            }

                            // Attempt to keep parsing if it was an omitted separator.
                            self.last_unexpected_token_span = None;
                            match f(self) {
                                Ok(t) => {
                                    // Parsed successfully, therefore most probably the code only
                                    // misses a separator.
                                    expect_err
                                        .with_span_suggestion_short(
                                            sp,
                                            format!("missing `{token_str}`"),
                                            token_str,
                                            Applicability::MaybeIncorrect,
                                        )
                                        .emit();

                                    v.push(t);
                                    continue;
                                }
                                Err(e) => {
                                    // Parsing failed, therefore it must be something more serious
                                    // than just a missing separator.
                                    for xx in &e.children {
                                        // Propagate the help message from sub error `e` to main
                                        // error `expect_err`.
                                        expect_err.children.push(xx.clone());
                                    }
                                    e.cancel();
                                    if self.token == token::Colon {
                                        // We will try to recover in
                                        // `maybe_recover_struct_lit_bad_delims`.
                                        return Err(expect_err);
                                    } else if let [exp] = closes_expected
                                        && exp.token_type == TokenType::CloseParen
                                    {
                                        return Err(expect_err);
                                    } else {
                                        expect_err.emit();
                                        break;
                                    }
                                }
                            }
                        }
                    }
                }
            }
            // A close token right after a separator means the separator was a
            // trailing one; record that and stop (if the style allows it).
            if sep.trailing_sep_allowed
                && self.expect_any_with_type(closes_expected, closes_not_expected)
            {
                trailing = Trailing::Yes;
                break;
            }

            let t = f(self)?;
            v.push(t);
        }

        Ok((v, trailing, recovered))
    }
994
    /// Emits a structured error for a closure whose multi-statement body is
    /// missing braces (detected when a `;` follows the closure body where a
    /// separator was expected), suggesting where to insert `{` and `}`.
    ///
    /// Consumes the stray statements following the closure so the caller can
    /// continue parsing the surrounding sequence.
    fn recover_missing_braces_around_closure_body(
        &mut self,
        closure_spans: ClosureSpans,
        mut expect_err: Diag<'_>,
    ) -> PResult<'a, ()> {
        // Span of the `;` that ended the (mis-parsed) closure body.
        let initial_semicolon = self.token.span;

        // Swallow the statements that were meant to be inside the closure
        // body, cancelling any errors they produce — we only want to emit the
        // missing-braces diagnostic below.
        while self.eat(exp!(Semi)) {
            let _ = self
                .parse_stmt_without_recovery(false, ForceCollect::No, false)
                .unwrap_or_else(|e| {
                    e.cancel();
                    None
                });
        }

        // Repurpose the pending "expected separator" error as the
        // missing-braces diagnostic.
        expect_err
            .primary_message("closure bodies that contain statements must be surrounded by braces");

        let preceding_pipe_span = closure_spans.closing_pipe;
        let following_token_span = self.token.span;

        let mut first_note = MultiSpan::from(vec![initial_semicolon]);
        first_note.push_span_label(
            initial_semicolon,
            "this `;` turns the preceding closure into a statement",
        );
        first_note.push_span_label(
            closure_spans.body,
            "this expression is a statement because of the trailing semicolon",
        );
        expect_err.span_note(first_note, "statement found outside of a block");

        let mut second_note = MultiSpan::from(vec![closure_spans.whole_closure]);
        second_note.push_span_label(closure_spans.whole_closure, "this is the parsed closure...");
        second_note.push_span_label(
            following_token_span,
            "...but likely you meant the closure to end here",
        );
        expect_err.span_note(second_note, "the closure body may be incorrectly delimited");

        // Point the primary spans at the closing `|` and at where the body
        // probably should have ended.
        expect_err.span(vec![preceding_pipe_span, following_token_span]);

        let opening_suggestion_str = " {".to_string();
        let closing_suggestion_str = "}".to_string();

        expect_err.multipart_suggestion(
            "try adding braces",
            vec![
                (preceding_pipe_span.shrink_to_hi(), opening_suggestion_str),
                (following_token_span.shrink_to_lo(), closing_suggestion_str),
            ],
            Applicability::MaybeIncorrect,
        );

        expect_err.emit();

        Ok(())
    }
1054
1055    /// Parses a sequence, not including the delimiters. The function
1056    /// `f` must consume tokens until reaching the next separator or
1057    /// closing bracket.
1058    fn parse_seq_to_before_end<T>(
1059        &mut self,
1060        close: ExpTokenPair,
1061        sep: SeqSep,
1062        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
1063    ) -> PResult<'a, (ThinVec<T>, Trailing, Recovered)> {
1064        self.parse_seq_to_before_tokens(&[close], &[], sep, f)
1065    }
1066
1067    /// Parses a sequence, including only the closing delimiter. The function
1068    /// `f` must consume tokens until reaching the next separator or
1069    /// closing bracket.
1070    fn parse_seq_to_end<T>(
1071        &mut self,
1072        close: ExpTokenPair,
1073        sep: SeqSep,
1074        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
1075    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
1076        let (val, trailing, recovered) = self.parse_seq_to_before_end(close, sep, f)?;
1077        if matches!(recovered, Recovered::No) && !self.eat(close) {
1078            self.dcx().span_delayed_bug(
1079                self.token.span,
1080                "recovered but `parse_seq_to_before_end` did not give us the close token",
1081            );
1082        }
1083        Ok((val, trailing))
1084    }
1085
1086    /// Parses a sequence, including both delimiters. The function
1087    /// `f` must consume tokens until reaching the next separator or
1088    /// closing bracket.
1089    fn parse_unspanned_seq<T>(
1090        &mut self,
1091        open: ExpTokenPair,
1092        close: ExpTokenPair,
1093        sep: SeqSep,
1094        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
1095    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
1096        self.expect(open)?;
1097        self.parse_seq_to_end(close, sep, f)
1098    }
1099
1100    /// Parses a comma-separated sequence, including both delimiters.
1101    /// The function `f` must consume tokens until reaching the next separator or
1102    /// closing bracket.
1103    fn parse_delim_comma_seq<T>(
1104        &mut self,
1105        open: ExpTokenPair,
1106        close: ExpTokenPair,
1107        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
1108    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
1109        self.parse_unspanned_seq(open, close, SeqSep::trailing_allowed(exp!(Comma)), f)
1110    }
1111
1112    /// Parses a comma-separated sequence delimited by parentheses (e.g. `(x, y)`).
1113    /// The function `f` must consume tokens until reaching the next separator or
1114    /// closing bracket.
1115    pub fn parse_paren_comma_seq<T>(
1116        &mut self,
1117        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
1118    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
1119        self.parse_delim_comma_seq(exp!(OpenParen), exp!(CloseParen), f)
1120    }
1121
1122    /// Advance the parser by one token using provided token as the next one.
1123    fn bump_with(&mut self, next: (Token, Spacing)) {
1124        self.inlined_bump_with(next)
1125    }
1126
1127    /// This always-inlined version should only be used on hot code paths.
1128    #[inline(always)]
1129    fn inlined_bump_with(&mut self, (next_token, next_spacing): (Token, Spacing)) {
1130        // Update the current and previous tokens.
1131        self.prev_token = mem::replace(&mut self.token, next_token);
1132        self.token_spacing = next_spacing;
1133
1134        // Diagnostics.
1135        self.expected_token_types.clear();
1136    }
1137
    /// Advance the parser by one token, pulling the next token from the
    /// underlying token cursor.
    pub fn bump(&mut self) {
        // Note: destructuring here would give nicer code, but it was found in #96210 to be slower
        // than `.0`/`.1` access.
        let mut next = self.token_cursor.inlined_next();
        self.num_bump_calls += 1;
        // We got a token from the underlying cursor and no longer need to
        // worry about an unglued token. See `break_and_eat` for more details.
        self.break_last_token = 0;
        if next.0.span.is_dummy() {
            // Tweak the location for better diagnostics, but keep syntactic context intact.
            let fallback_span = self.token.span;
            next.0.span = fallback_span.with_ctxt(next.0.span.ctxt());
        }
        // The cursor is expected to have already skipped invisible delimiters
        // with a skippable origin; seeing one here would be a cursor bug.
        debug_assert!(!matches!(
            next.0.kind,
            token::OpenInvisible(origin) | token::CloseInvisible(origin) if origin.skip()
        ));
        self.inlined_bump_with(next)
    }
1158
    /// Look-ahead `dist` tokens of `self.token` and get access to that token there.
    /// When `dist == 0` then the current token is looked at. `Eof` will be
    /// returned if the look-ahead is any distance past the end of the tokens.
    ///
    /// `dist == 0` and `dist == 1` take fast paths that avoid cloning the
    /// token cursor; larger distances fall back to walking a cloned cursor.
    pub fn look_ahead<R>(&self, dist: usize, looker: impl FnOnce(&Token) -> R) -> R {
        if dist == 0 {
            return looker(&self.token);
        }

        // Typically around 98% of the `dist > 0` cases have `dist == 1`, so we
        // have a fast special case for that.
        if dist == 1 {
            // The index is zero because the tree cursor's index always points
            // to the next token to be gotten.
            match self.token_cursor.curr.curr() {
                Some(tree) => {
                    // Indexing stayed within the current token tree.
                    match tree {
                        TokenTree::Token(token, _) => return looker(token),
                        &TokenTree::Delimited(dspan, _, delim, _) => {
                            // A delimited group: the next token is its open
                            // delimiter — unless the delimiter is invisible
                            // and skipped, in which case fall through to the
                            // slow path below.
                            if !delim.skip() {
                                return looker(&Token::new(delim.as_open_token_kind(), dspan.open));
                            }
                        }
                    }
                }
                None => {
                    // The tree cursor lookahead went (one) past the end of the
                    // current token tree. Try to return a close delimiter.
                    if let Some(last) = self.token_cursor.stack.last()
                        && let Some(&TokenTree::Delimited(span, _, delim, _)) = last.curr()
                        && !delim.skip()
                    {
                        // We are not in the outermost token stream, so we have
                        // delimiters. Also, those delimiters are not skipped.
                        return looker(&Token::new(delim.as_close_token_kind(), span.close));
                    }
                }
            }
        }

        // Just clone the token cursor and use `next`, skipping delimiters as
        // necessary. Slow but simple.
        let mut cursor = self.token_cursor.clone();
        let mut i = 0;
        let mut token = Token::dummy();
        while i < dist {
            token = cursor.next().0;
            // Skippable invisible delimiters don't count towards `dist`.
            if matches!(
                token.kind,
                token::OpenInvisible(origin) | token::CloseInvisible(origin) if origin.skip()
            ) {
                continue;
            }
            i += 1;
        }
        looker(&token)
    }
1216
1217    /// Like `lookahead`, but skips over token trees rather than tokens. Useful
1218    /// when looking past possible metavariable pasting sites.
1219    pub fn tree_look_ahead<R>(
1220        &self,
1221        dist: usize,
1222        looker: impl FnOnce(&TokenTree) -> R,
1223    ) -> Option<R> {
1224        assert_ne!(dist, 0);
1225        self.token_cursor.curr.look_ahead(dist - 1).map(looker)
1226    }
1227
1228    /// Returns whether any of the given keywords are `dist` tokens ahead of the current one.
1229    pub(crate) fn is_keyword_ahead(&self, dist: usize, kws: &[Symbol]) -> bool {
1230        self.look_ahead(dist, |t| kws.iter().any(|&kw| t.is_keyword(kw)))
1231    }
1232
    /// Parses a coroutine kind: `async`, `gen`, `async gen` (the latter two
    /// only in Rust 2024 and later), or nothing.
    fn parse_coroutine_kind(&mut self, case: Case) -> Option<CoroutineKind> {
        let span = self.token_uninterpolated_span();
        if self.eat_keyword_case(exp!(Async), case) {
            // FIXME(gen_blocks): Do we want to unconditionally parse `gen` and then
            // error if edition <= 2024, like we do with async and edition <= 2018?
            if self.token_uninterpolated_span().at_least_rust_2024()
                && self.eat_keyword_case(exp!(Gen), case)
            {
                // `async gen`: span covers both keywords.
                let gen_span = self.prev_token_uninterpolated_span();
                Some(CoroutineKind::AsyncGen {
                    span: span.to(gen_span),
                    closure_id: DUMMY_NODE_ID,
                    return_impl_trait_id: DUMMY_NODE_ID,
                })
            } else {
                Some(CoroutineKind::Async {
                    span,
                    closure_id: DUMMY_NODE_ID,
                    return_impl_trait_id: DUMMY_NODE_ID,
                })
            }
        } else if self.token_uninterpolated_span().at_least_rust_2024()
            && self.eat_keyword_case(exp!(Gen), case)
        {
            Some(CoroutineKind::Gen {
                span,
                closure_id: DUMMY_NODE_ID,
                return_impl_trait_id: DUMMY_NODE_ID,
            })
        } else {
            None
        }
    }
1267
1268    /// Parses fn unsafety: `unsafe`, `safe` or nothing.
1269    fn parse_safety(&mut self, case: Case) -> Safety {
1270        if self.eat_keyword_case(exp!(Unsafe), case) {
1271            Safety::Unsafe(self.prev_token_uninterpolated_span())
1272        } else if self.eat_keyword_case(exp!(Safe), case) {
1273            Safety::Safe(self.prev_token_uninterpolated_span())
1274        } else {
1275            Safety::Default
1276        }
1277    }
1278
1279    /// Parses constness: `const` or nothing.
1280    fn parse_constness(&mut self, case: Case) -> Const {
1281        self.parse_constness_(case, false)
1282    }
1283
1284    /// Parses constness for closures (case sensitive, feature-gated)
1285    fn parse_closure_constness(&mut self) -> Const {
1286        let constness = self.parse_constness_(Case::Sensitive, true);
1287        if let Const::Yes(span) = constness {
1288            self.psess.gated_spans.gate(sym::const_closures, span);
1289        }
1290        constness
1291    }
1292
    /// Shared implementation for `parse_constness`/`parse_closure_constness`.
    ///
    /// The `check_const_closure() == is_closure` comparison means: when
    /// parsing an item, refuse `const` that looks like a const closure; when
    /// parsing a closure, only accept `const` that does. The lookahead refuses
    /// a `const` followed by a block (a const block expression).
    fn parse_constness_(&mut self, case: Case, is_closure: bool) -> Const {
        // Avoid const blocks and const closures to be parsed as const items
        if (self.check_const_closure() == is_closure)
            && !self.look_ahead(1, |t| *t == token::OpenBrace || t.is_metavar_block())
            && self.eat_keyword_case(exp!(Const), case)
        {
            Const::Yes(self.prev_token_uninterpolated_span())
        } else {
            Const::No
        }
    }
1304
1305    fn parse_mgca_const_block(&mut self, gate_syntax: bool) -> PResult<'a, AnonConst> {
1306        self.expect_keyword(exp!(Const))?;
1307        let kw_span = self.token.span;
1308        let value = self.parse_expr_block(None, self.token.span, BlockCheckMode::Default)?;
1309        if gate_syntax {
1310            self.psess.gated_spans.gate(sym::min_generic_const_args, kw_span.to(value.span));
1311        }
1312        Ok(AnonConst {
1313            id: ast::DUMMY_NODE_ID,
1314            value,
1315            mgca_disambiguation: MgcaDisambiguation::AnonConst,
1316        })
1317    }
1318
    /// Parses inline const expressions (`const { ... }`), starting at the
    /// `const` keyword. `span` is the lo span of the whole expression; `pat`
    /// indicates pattern position, where const blocks are rejected with an
    /// error.
    fn parse_const_block(&mut self, span: Span, pat: bool) -> PResult<'a, Box<Expr>> {
        self.expect_keyword(exp!(Const))?;
        let (attrs, blk) = self.parse_inner_attrs_and_block(None)?;
        // Wrap the block in an anonymous const, as for other const contexts.
        let anon_const = AnonConst {
            id: DUMMY_NODE_ID,
            value: self.mk_expr(blk.span, ExprKind::Block(blk, None)),
            mgca_disambiguation: MgcaDisambiguation::AnonConst,
        };
        let blk_span = anon_const.value.span;
        let kind = if pat {
            // Const blocks are not valid patterns: emit an error and produce
            // an error expression so parsing can continue.
            let guar = self
                .dcx()
                .struct_span_err(blk_span, "const blocks cannot be used as patterns")
                .with_help(
                    "use a named `const`-item or an `if`-guard (`x if x == const { ... }`) instead",
                )
                .emit();
            ExprKind::Err(guar)
        } else {
            ExprKind::ConstBlock(anon_const)
        };
        Ok(self.mk_expr_with_attrs(span.to(blk_span), kind, attrs))
    }
1343
1344    /// Parses mutability (`mut` or nothing).
1345    fn parse_mutability(&mut self) -> Mutability {
1346        if self.eat_keyword(exp!(Mut)) { Mutability::Mut } else { Mutability::Not }
1347    }
1348
1349    /// Parses reference binding mode (`ref`, `ref mut`, `ref pin const`, `ref pin mut`, or nothing).
1350    fn parse_byref(&mut self) -> ByRef {
1351        if self.eat_keyword(exp!(Ref)) {
1352            let (pinnedness, mutability) = self.parse_pin_and_mut();
1353            ByRef::Yes(pinnedness, mutability)
1354        } else {
1355            ByRef::No
1356        }
1357    }
1358
1359    /// Possibly parses mutability (`const` or `mut`).
1360    fn parse_const_or_mut(&mut self) -> Option<Mutability> {
1361        if self.eat_keyword(exp!(Mut)) {
1362            Some(Mutability::Mut)
1363        } else if self.eat_keyword(exp!(Const)) {
1364            Some(Mutability::Not)
1365        } else {
1366            None
1367        }
1368    }
1369
1370    fn parse_field_name(&mut self) -> PResult<'a, Ident> {
1371        if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = self.token.kind
1372        {
1373            if let Some(suffix) = suffix {
1374                self.dcx().emit_err(errors::InvalidLiteralSuffixOnTupleIndex {
1375                    span: self.token.span,
1376                    suffix,
1377                });
1378            }
1379            self.bump();
1380            Ok(Ident::new(symbol, self.prev_token.span))
1381        } else {
1382            self.parse_ident_common(true)
1383        }
1384    }
1385
1386    fn parse_delim_args(&mut self) -> PResult<'a, Box<DelimArgs>> {
1387        if let Some(args) = self.parse_delim_args_inner() {
1388            Ok(Box::new(args))
1389        } else {
1390            self.unexpected_any()
1391        }
1392    }
1393
1394    fn parse_attr_args(&mut self) -> PResult<'a, AttrArgs> {
1395        Ok(if let Some(args) = self.parse_delim_args_inner() {
1396            AttrArgs::Delimited(args)
1397        } else if self.eat(exp!(Eq)) {
1398            let eq_span = self.prev_token.span;
1399            let expr = self.parse_expr_force_collect()?;
1400            AttrArgs::Eq { eq_span, expr }
1401        } else {
1402            AttrArgs::Empty
1403        })
1404    }
1405
1406    fn parse_delim_args_inner(&mut self) -> Option<DelimArgs> {
1407        let delimited = self.check(exp!(OpenParen))
1408            || self.check(exp!(OpenBracket))
1409            || self.check(exp!(OpenBrace));
1410
1411        delimited.then(|| {
1412            let TokenTree::Delimited(dspan, _, delim, tokens) = self.parse_token_tree() else {
1413                unreachable!()
1414            };
1415            DelimArgs { dspan, delim, tokens }
1416        })
1417    }
1418
    /// Parses a single token tree from the input.
    ///
    /// For a delimited group, returns the whole `TokenTree::Delimited` (by
    /// cloning it from the cursor stack) and advances past it including the
    /// close delimiter; otherwise returns the single current token.
    pub fn parse_token_tree(&mut self) -> TokenTree {
        if self.token.kind.open_delim().is_some() {
            // Clone the `TokenTree::Delimited` that we are currently
            // within. That's what we are going to return.
            let tree = self.token_cursor.stack.last().unwrap().curr().unwrap().clone();
            debug_assert_matches!(tree, TokenTree::Delimited(..));

            // Advance the token cursor through the entire delimited
            // sequence. After getting the `OpenDelim` we are *within* the
            // delimited sequence, i.e. at depth `d`. After getting the
            // matching `CloseDelim` we are *after* the delimited sequence,
            // i.e. at depth `d - 1`.
            let target_depth = self.token_cursor.stack.len() - 1;

            if let Capturing::No = self.capture_state.capturing {
                // We are not capturing tokens, so skip to the end of the
                // delimited sequence. This is a perf win when dealing with
                // declarative macros that pass large `tt` fragments through
                // multiple rules, as seen in the uom-0.37.0 crate.
                self.token_cursor.curr.bump_to_end();
                self.bump();
                debug_assert_eq!(self.token_cursor.stack.len(), target_depth);
            } else {
                loop {
                    // Advance one token at a time, so `TokenCursor::next()`
                    // can capture these tokens if necessary.
                    self.bump();
                    if self.token_cursor.stack.len() == target_depth {
                        break;
                    }
                }
            }
            debug_assert!(self.token.kind.close_delim().is_some());

            // Consume close delimiter
            self.bump();
            tree
        } else {
            // Single-token case; close delimiters and EOF must be handled by
            // the caller before calling this.
            assert!(!self.token.kind.is_close_delim_or_eof());
            let prev_spacing = self.token_spacing;
            self.bump();
            TokenTree::Token(self.prev_token, prev_spacing)
        }
    }
1464
1465    pub fn parse_tokens(&mut self) -> TokenStream {
1466        let mut result = Vec::new();
1467        loop {
1468            if self.token.kind.is_close_delim_or_eof() {
1469                break;
1470            } else {
1471                result.push(self.parse_token_tree());
1472            }
1473        }
1474        TokenStream::new(result)
1475    }
1476
1477    /// Evaluates the closure with restrictions in place.
1478    ///
1479    /// Afters the closure is evaluated, restrictions are reset.
1480    fn with_res<T>(&mut self, res: Restrictions, f: impl FnOnce(&mut Self) -> T) -> T {
1481        let old = self.restrictions;
1482        self.restrictions = res;
1483        let res = f(self);
1484        self.restrictions = old;
1485        res
1486    }
1487
    /// Parses `pub` and `pub(in path)` plus shortcuts `pub(crate)` for `pub(in crate)`, `pub(self)`
    /// for `pub(in self)` and `pub(super)` for `pub(in super)`.
    /// If the following element can't be a tuple (i.e., it's a function definition), then
    /// it's not a tuple struct field, and the contents within the parentheses aren't valid,
    /// so emit a proper diagnostic.
    // Public for rustfmt usage.
    pub fn parse_visibility(&mut self, fbt: FollowedByType) -> PResult<'a, Visibility> {
        // A `vis` metavariable expands to an already-parsed visibility; reuse it directly.
        if let Some(vis) = self
            .eat_metavar_seq(MetaVarKind::Vis, |this| this.parse_visibility(FollowedByType::Yes))
        {
            return Ok(vis);
        }

        if !self.eat_keyword(exp!(Pub)) {
            // We need a span for our `Spanned<VisibilityKind>`, but there's inherently no
            // keyword to grab a span from for inherited visibility; an empty span at the
            // beginning of the current token would seem to be the "Schelling span".
            return Ok(Visibility {
                span: self.token.span.shrink_to_lo(),
                kind: VisibilityKind::Inherited,
                tokens: None,
            });
        }
        let lo = self.prev_token.span;

        if self.check(exp!(OpenParen)) {
            // We don't `self.bump()` the `(` yet because this might be a struct definition where
            // `()` or a tuple might be allowed. For example, `struct Struct(pub (), pub (usize));`.
            // Because of this, we only `bump` the `(` if we're assured it is appropriate to do so
            // by the following tokens.
            if self.is_keyword_ahead(1, &[kw::In]) {
                // Parse `pub(in path)`.
                self.bump(); // `(`
                self.bump(); // `in`
                let path = self.parse_path(PathStyle::Mod)?; // `path`
                self.expect(exp!(CloseParen))?; // `)`
                let vis = VisibilityKind::Restricted {
                    path: Box::new(path),
                    id: ast::DUMMY_NODE_ID,
                    shorthand: false,
                };
                return Ok(Visibility {
                    span: lo.to(self.prev_token.span),
                    kind: vis,
                    tokens: None,
                });
            } else if self.look_ahead(2, |t| t == &token::CloseParen)
                && self.is_keyword_ahead(1, &[kw::Crate, kw::Super, kw::SelfLower])
            {
                // Parse `pub(crate)`, `pub(self)`, or `pub(super)`.
                self.bump(); // `(`
                let path = self.parse_path(PathStyle::Mod)?; // `crate`/`super`/`self`
                self.expect(exp!(CloseParen))?; // `)`
                let vis = VisibilityKind::Restricted {
                    path: Box::new(path),
                    id: ast::DUMMY_NODE_ID,
                    shorthand: true,
                };
                return Ok(Visibility {
                    span: lo.to(self.prev_token.span),
                    kind: vis,
                    tokens: None,
                });
            } else if let FollowedByType::No = fbt {
                // Provide this diagnostic if a type cannot follow;
                // in particular, if this is not a tuple struct.
                self.recover_incorrect_vis_restriction()?;
                // Emit diagnostic, but continue with public visibility.
            }
        }

        Ok(Visibility { span: lo, kind: VisibilityKind::Public, tokens: None })
    }
1561
1562    /// Recovery for e.g. `pub(something) fn ...` or `struct X { pub(something) y: Z }`
1563    fn recover_incorrect_vis_restriction(&mut self) -> PResult<'a, ()> {
1564        self.bump(); // `(`
1565        let path = self.parse_path(PathStyle::Mod)?;
1566        self.expect(exp!(CloseParen))?; // `)`
1567
1568        let path_str = pprust::path_to_string(&path);
1569        self.dcx()
1570            .emit_err(IncorrectVisibilityRestriction { span: path.span, inner_str: path_str });
1571
1572        Ok(())
1573    }
1574
1575    /// Parses `extern string_literal?`.
1576    fn parse_extern(&mut self, case: Case) -> Extern {
1577        if self.eat_keyword_case(exp!(Extern), case) {
1578            let mut extern_span = self.prev_token.span;
1579            let abi = self.parse_abi();
1580            if let Some(abi) = abi {
1581                extern_span = extern_span.to(abi.span);
1582            }
1583            Extern::from_abi(abi, extern_span)
1584        } else {
1585            Extern::None
1586        }
1587    }
1588
1589    /// Parses a string literal as an ABI spec.
1590    fn parse_abi(&mut self) -> Option<StrLit> {
1591        match self.parse_str_lit() {
1592            Ok(str_lit) => Some(str_lit),
1593            Err(Some(lit)) => match lit.kind {
1594                ast::LitKind::Err(_) => None,
1595                _ => {
1596                    self.dcx().emit_err(NonStringAbiLiteral { span: lit.span });
1597                    None
1598                }
1599            },
1600            Err(None) => None,
1601        }
1602    }
1603
    /// Runs `f` while force-collecting the tokens it consumes, with an empty
    /// attribute wrapper (i.e. no outer attributes are in play).
    fn collect_tokens_no_attrs<R: HasAttrs + HasTokens>(
        &mut self,
        f: impl FnOnce(&mut Self) -> PResult<'a, R>,
    ) -> PResult<'a, R> {
        // The only reason to call `collect_tokens_no_attrs` is if you want tokens, so use
        // `ForceCollect::Yes`
        self.collect_tokens(None, AttrWrapper::empty(), ForceCollect::Yes, |this, _attrs| {
            Ok((f(this)?, Trailing::No, UsePreAttrPos::No))
        })
    }
1614
1615    /// Checks for `::` or, potentially, `:::` and then look ahead after it.
1616    fn check_path_sep_and_look_ahead(&mut self, looker: impl Fn(&Token) -> bool) -> bool {
1617        if self.check(exp!(PathSep)) {
1618            if self.may_recover() && self.look_ahead(1, |t| t.kind == token::Colon) {
1619                debug_assert!(!self.look_ahead(1, &looker), "Looker must not match on colon");
1620                self.look_ahead(2, looker)
1621            } else {
1622                self.look_ahead(1, looker)
1623            }
1624        } else {
1625            false
1626        }
1627    }
1628
1629    /// `::{` or `::*`
1630    fn is_import_coupler(&mut self) -> bool {
1631        self.check_path_sep_and_look_ahead(|t| matches!(t.kind, token::OpenBrace | token::Star))
1632    }
1633
1634    // Debug view of the parser's token stream, up to `{lookahead}` tokens.
1635    // Only used when debugging.
1636    #[allow(unused)]
1637    pub(crate) fn debug_lookahead(&self, lookahead: usize) -> impl fmt::Debug {
1638        fmt::from_fn(move |f| {
1639            let mut dbg_fmt = f.debug_struct("Parser"); // or at least, one view of
1640
1641            // we don't need N spans, but we want at least one, so print all of prev_token
1642            dbg_fmt.field("prev_token", &self.prev_token);
1643            let mut tokens = vec![];
1644            for i in 0..lookahead {
1645                let tok = self.look_ahead(i, |tok| tok.kind);
1646                let is_eof = tok == TokenKind::Eof;
1647                tokens.push(tok);
1648                if is_eof {
1649                    // Don't look ahead past EOF.
1650                    break;
1651                }
1652            }
1653            dbg_fmt.field_with("tokens", |field| field.debug_list().entries(tokens).finish());
1654            dbg_fmt.field("approx_token_stream_pos", &self.num_bump_calls);
1655
1656            // some fields are interesting for certain values, as they relate to macro parsing
1657            if let Some(subparser) = self.subparser_name {
1658                dbg_fmt.field("subparser_name", &subparser);
1659            }
1660            if let Recovery::Forbidden = self.recovery {
1661                dbg_fmt.field("recovery", &self.recovery);
1662            }
1663
1664            // imply there's "more to know" than this view
1665            dbg_fmt.finish_non_exhaustive()
1666        })
1667    }
1668
    /// Empties the set of token types that have been recorded as expected at the
    /// current position (used for diagnostics).
    pub fn clear_expected_token_types(&mut self) {
        self.expected_token_types.clear();
    }
1672
    /// Approximate position within the token stream, measured as the number of
    /// `bump` calls made so far.
    pub fn approx_token_stream_pos(&self) -> u32 {
        self.num_bump_calls
    }
1676
1677    /// For interpolated `self.token`, returns a span of the fragment to which
1678    /// the interpolated token refers. For all other tokens this is just a
1679    /// regular span. It is particularly important to use this for identifiers
1680    /// and lifetimes for which spans affect name resolution and edition
1681    /// checks. Note that keywords are also identifiers, so they should use
1682    /// this if they keep spans or perform edition checks.
1683    pub fn token_uninterpolated_span(&self) -> Span {
1684        match &self.token.kind {
1685            token::NtIdent(ident, _) | token::NtLifetime(ident, _) => ident.span,
1686            token::OpenInvisible(InvisibleOrigin::MetaVar(_)) => self.look_ahead(1, |t| t.span),
1687            _ => self.token.span,
1688        }
1689    }
1690
1691    /// Like `token_uninterpolated_span`, but works on `self.prev_token`.
1692    pub fn prev_token_uninterpolated_span(&self) -> Span {
1693        match &self.prev_token.kind {
1694            token::NtIdent(ident, _) | token::NtLifetime(ident, _) => ident.span,
1695            token::OpenInvisible(InvisibleOrigin::MetaVar(_)) => self.look_ahead(0, |t| t.span),
1696            _ => self.prev_token.span,
1697        }
1698    }
1699}
1700
// Metavar captures of various kinds.
#[derive(Clone, Debug)]
pub enum ParseNtResult {
    /// A single token tree.
    Tt(TokenTree),
    /// An identifier, plus whether it was written as a raw identifier.
    Ident(Ident, IdentIsRaw),
    /// A lifetime, plus whether its identifier part was raw.
    Lifetime(Ident, IdentIsRaw),
    /// A parsed item.
    Item(Box<ast::Item>),
    /// A parsed block.
    Block(Box<ast::Block>),
    /// A parsed statement.
    Stmt(Box<ast::Stmt>),
    /// A parsed pattern, plus the kind of pattern fragment that captured it.
    Pat(Box<ast::Pat>, NtPatKind),
    /// A parsed expression, plus the kind of expression fragment that captured it.
    Expr(Box<ast::Expr>, NtExprKind),
    /// A literal expression.
    Literal(Box<ast::Expr>),
    /// A parsed type.
    Ty(Box<ast::Ty>),
    /// A parsed attribute item.
    Meta(Box<ast::AttrItem>),
    /// A parsed path.
    Path(Box<ast::Path>),
    /// A parsed visibility.
    Vis(Box<ast::Visibility>),
}
1717}