// rustc_parse/parser/mod.rs

1pub mod attr;
2mod attr_wrapper;
3mod diagnostics;
4mod expr;
5mod generics;
6mod item;
7mod nonterminal;
8mod pat;
9mod path;
10mod stmt;
11pub mod token_type;
12mod ty;
13
14use std::assert_matches::debug_assert_matches;
15use std::ops::Range;
16use std::sync::Arc;
17use std::{fmt, mem, slice};
18
19use attr_wrapper::{AttrWrapper, UsePreAttrPos};
20pub use diagnostics::AttemptLocalParseRecovery;
21pub(crate) use expr::ForbiddenLetReason;
22pub(crate) use item::FnParseMode;
23pub use pat::{CommaRecoveryMode, RecoverColon, RecoverComma};
24use path::PathStyle;
25use rustc_ast::ptr::P;
26use rustc_ast::token::{
27    self, Delimiter, IdentIsRaw, InvisibleOrigin, MetaVarKind, Nonterminal, Token, TokenKind,
28};
29use rustc_ast::tokenstream::{AttrsTarget, Spacing, TokenStream, TokenTree};
30use rustc_ast::util::case::Case;
31use rustc_ast::{
32    self as ast, AnonConst, AttrArgs, AttrId, ByRef, Const, CoroutineKind, DUMMY_NODE_ID,
33    DelimArgs, Expr, ExprKind, Extern, HasAttrs, HasTokens, Mutability, Recovered, Safety, StrLit,
34    Visibility, VisibilityKind,
35};
36use rustc_ast_pretty::pprust;
37use rustc_data_structures::fx::FxHashMap;
38use rustc_errors::{Applicability, Diag, FatalError, MultiSpan, PResult};
39use rustc_index::interval::IntervalSet;
40use rustc_session::parse::ParseSess;
41use rustc_span::{DUMMY_SP, Ident, Span, Symbol, kw, sym};
42use thin_vec::ThinVec;
43use token_type::TokenTypeSet;
44pub use token_type::{ExpKeywordPair, ExpTokenPair, TokenType};
45use tracing::debug;
46
47use crate::errors::{
48    self, IncorrectVisibilityRestriction, MismatchedClosingDelimiter, NonStringAbiLiteral,
49};
50use crate::exp;
51use crate::lexer::UnmatchedDelim;
52
// Unit tests for the parser itself.
#[cfg(test)]
mod tests;

// Ideally, these tests would be in `rustc_ast`. But they depend on having a
// parser, so they are here.
#[cfg(test)]
mod tokenstream {
    mod tests;
}
#[cfg(test)]
mod mut_visit {
    mod tests;
}
66
bitflags::bitflags! {
    /// Restrictions that apply while parsing certain positions (stored in
    /// `Parser::restrictions`). The flag names describe the construct being
    /// allowed/forbidden; the exact semantics live at the use sites in the
    /// expression/statement parsers.
    #[derive(Clone, Copy, Debug)]
    struct Restrictions: u8 {
        const STMT_EXPR         = 1 << 0;
        const NO_STRUCT_LITERAL = 1 << 1;
        const CONST_EXPR        = 1 << 2;
        const ALLOW_LET         = 1 << 3;
        const IN_IF_GUARD       = 1 << 4;
        const IS_PAT            = 1 << 5;
    }
}
78
/// Strategy used during error recovery when scanning forward for a
/// statement-ending token. Variant semantics are defined by the recovery
/// routines in `diagnostics`, outside this chunk.
#[derive(Clone, Copy, PartialEq, Debug)]
enum SemiColonMode {
    Break,
    Ignore,
    Comma,
}
85
/// Strategy used during error recovery when encountering a block boundary.
/// Variant semantics are defined by the recovery routines in `diagnostics`,
/// outside this chunk.
#[derive(Clone, Copy, PartialEq, Debug)]
enum BlockMode {
    Break,
    Ignore,
}
91
/// Whether or not we should force collection of tokens for an AST node,
/// regardless of whether or not it has attributes.
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ForceCollect {
    Yes,
    No,
}
99
/// If the current token is an interpolated (pre-parsed) nonterminal of kind
/// `token::$constructor`, consumes it and early-returns `Ok($e)` from the
/// enclosing function, with the cloned nonterminal payload bound to `$x`.
/// Otherwise does nothing.
#[macro_export]
macro_rules! maybe_whole {
    ($p:expr, $constructor:ident, |$x:ident| $e:expr) => {
        if let token::Interpolated(nt) = &$p.token.kind
            && let token::$constructor(x) = &**nt
        {
            // The clone may go unused by `$e`, hence `unused_mut` is allowed.
            #[allow(unused_mut)]
            let mut $x = x.clone();
            $p.bump();
            return Ok($e);
        }
    };
}
113
/// If the next tokens are ill-formed `$ty::` recover them as `<$ty>::`.
///
/// Only fires when recovery is both requested by the caller and permitted by
/// the parser (`may_recover`), and the current token is an interpolated type
/// nonterminal followed by `::`.
#[macro_export]
macro_rules! maybe_recover_from_interpolated_ty_qpath {
    ($self: expr, $allow_qpath_recovery: expr) => {
        if $allow_qpath_recovery
            && $self.may_recover()
            && $self.look_ahead(1, |t| t == &token::PathSep)
            && let token::Interpolated(nt) = &$self.token.kind
            && let token::NtTy(ty) = &**nt
        {
            let ty = ty.clone();
            $self.bump();
            return $self.maybe_recover_from_bad_qpath_stage_2($self.prev_token.span, ty);
        }
    };
}
130
/// Whether the parser may recover from parse errors. Recovery is forbidden
/// when parsing macro arguments (see the `Parser::recovery` field).
#[derive(Clone, Copy, Debug)]
pub enum Recovery {
    Allowed,
    Forbidden,
}
136
/// The Rust parser. `Clone` because speculative parses (e.g. matching
/// declarative macro rules against nonterminals) fork and discard parser
/// state — see the size assertion below.
#[derive(Clone)]
pub struct Parser<'a> {
    pub psess: &'a ParseSess,
    /// The current token.
    pub token: Token,
    /// The spacing for the current token.
    token_spacing: Spacing,
    /// The previous token.
    pub prev_token: Token,
    /// NOTE(review): presumably whether `cfg`/`cfg_attr` tokens should be
    /// captured during token collection — confirm against `attr_wrapper`.
    pub capture_cfg: bool,
    /// Restrictions in effect at the current parsing position (see the
    /// `Restrictions` bitflags above).
    restrictions: Restrictions,
    /// The set of token types expected at the current position, used to
    /// build "expected one of ..." diagnostics. `check`-style methods insert
    /// into this set when their token is absent.
    expected_token_types: TokenTypeSet,
    /// Yields the linear token sequence that this parser consumes.
    token_cursor: TokenCursor,
    // The number of calls to `bump`, i.e. the position in the token stream.
    num_bump_calls: u32,
    // During parsing we may sometimes need to "unglue" a glued token into two
    // or three component tokens (e.g. `>>` into `>` and `>`, or `>>=` into `>`
    // and `>` and `=`), so the parser can consume them one at a time. This
    // process bypasses the normal capturing mechanism (e.g. `num_bump_calls`
    // will not be incremented), since the "unglued" tokens due not exist in
    // the original `TokenStream`.
    //
    // If we end up consuming all the component tokens, this is not an issue,
    // because we'll end up capturing the single "glued" token.
    //
    // However, sometimes we may want to capture not all of the original
    // token. For example, capturing the `Vec<u8>` in `Option<Vec<u8>>`
    // requires us to unglue the trailing `>>` token. The `break_last_token`
    // field is used to track these tokens. They get appended to the captured
    // stream when we evaluate a `LazyAttrTokenStream`.
    //
    // This value is always 0, 1, or 2. It can only reach 2 when splitting
    // `>>=` or `<<=`.
    break_last_token: u32,
    /// This field is used to keep track of how many left angle brackets we have seen. This is
    /// required in order to detect extra leading left angle brackets (`<` characters) and error
    /// appropriately.
    ///
    /// See the comments in the `parse_path_segment` function for more details.
    unmatched_angle_bracket_count: u16,
    /// NOTE(review): presumably the current depth of angle-bracket nesting,
    /// used to limit pathological nesting — confirm at use sites in `path`.
    angle_bracket_nesting: u16,

    /// The span of the last token reported as unexpected. `expect_one_of`
    /// raises a fatal error if the same token errors twice, to avoid loops.
    last_unexpected_token_span: Option<Span>,
    /// If present, this `Parser` is not parsing Rust code but rather a macro call.
    subparser_name: Option<&'static str>,
    /// State used by `Parser::collect_tokens` (see `CaptureState`).
    capture_state: CaptureState,
    /// This allows us to recover when the user forget to add braces around
    /// multiple statements in the closure body.
    current_closure: Option<ClosureSpans>,
    /// Whether the parser is allowed to do recovery.
    /// This is disabled when parsing macro arguments, see #103534
    recovery: Recovery,
}
190
// This type is used a lot, e.g. it's cloned when matching many declarative macro rules with
// nonterminals. Make sure it doesn't unintentionally get bigger. We only check a few arches
// though, because `TokenTypeSet(u128)` alignment varies on others, changing the total size.
#[cfg(all(target_pointer_width = "64", any(target_arch = "aarch64", target_arch = "x86_64")))]
rustc_data_structures::static_assert_size!(Parser<'_>, 288);
196
/// Stores span information about a closure, used to recover closures whose
/// bodies are missing braces (see `Parser::current_closure`).
#[derive(Clone, Debug)]
struct ClosureSpans {
    /// Span of the entire closure expression.
    whole_closure: Span,
    /// Span of the `|` closing the parameter list.
    closing_pipe: Span,
    /// Span of the closure body.
    body: Span,
}
204
/// A token range within a `Parser`'s full token stream.
#[derive(Clone, Debug)]
struct ParserRange(Range<u32>);

/// A token range within an individual AST node's (lazy) token stream, i.e.
/// relative to that node's first token. Distinct from `ParserRange` so the two
/// kinds of range can't be mixed up.
#[derive(Clone, Debug)]
struct NodeRange(Range<u32>);
214
/// Indicates a range of tokens that should be replaced by an `AttrsTarget`
/// (replacement) or be replaced by nothing (deletion). This is used in two
/// places during token collection.
///
/// 1. Replacement. During the parsing of an AST node that may have a
///    `#[derive]` attribute, when we parse a nested AST node that has `#[cfg]`
///    or `#[cfg_attr]`, we replace the entire inner AST node with
///    `FlatToken::AttrsTarget`. This lets us perform eager cfg-expansion on an
///    `AttrTokenStream`.
///
/// 2. Deletion. We delete inner attributes from all collected token streams,
///    and instead track them through the `attrs` field on the AST node. This
///    lets us manipulate them similarly to outer attributes. When we create a
///    `TokenStream`, the inner attributes are inserted into the proper place
///    in the token stream.
///
/// Each replacement starts off in `ParserReplacement` form but is converted to
/// `NodeReplacement` form when it is attached to a single AST node, via
/// `LazyAttrTokenStreamImpl`.
type ParserReplacement = (ParserRange, Option<AttrsTarget>);

/// See the comment on `ParserReplacement`.
type NodeReplacement = (NodeRange, Option<AttrsTarget>);
238
239impl NodeRange {
240    // Converts a range within a parser's tokens to a range within a
241    // node's tokens beginning at `start_pos`.
242    //
243    // For example, imagine a parser with 50 tokens in its token stream, a
244    // function that spans `ParserRange(20..40)` and an inner attribute within
245    // that function that spans `ParserRange(30..35)`. We would find the inner
246    // attribute's range within the function's tokens by subtracting 20, which
247    // is the position of the function's start token. This gives
248    // `NodeRange(10..15)`.
249    fn new(ParserRange(parser_range): ParserRange, start_pos: u32) -> NodeRange {
250        assert!(!parser_range.is_empty());
251        assert!(parser_range.start >= start_pos);
252        NodeRange((parser_range.start - start_pos)..(parser_range.end - start_pos))
253    }
254}
255
/// Controls how we capture tokens. Capturing can be expensive,
/// so we try to avoid performing capturing in cases where
/// we will never need an `AttrTokenStream`.
#[derive(Copy, Clone, Debug)]
enum Capturing {
    /// We aren't performing any capturing - this is the default mode.
    No,
    /// We are capturing tokens.
    Yes,
}
266
// This state is used by `Parser::collect_tokens`.
#[derive(Clone, Debug)]
struct CaptureState {
    /// Whether token capturing is currently active.
    capturing: Capturing,
    /// Pending replacements/deletions to apply to captured token streams.
    parser_replacements: Vec<ParserReplacement>,
    /// Maps each inner attribute to the parser-token range it occupies, so
    /// it can be deleted from collected streams.
    inner_attr_parser_ranges: FxHashMap<AttrId, ParserRange>,
    // `IntervalSet` is good for perf because attrs are mostly added to this
    // set in contiguous ranges.
    seen_attrs: IntervalSet<AttrId>,
}
277
/// A cursor over a single `TokenStream`, pointing at one of its token trees.
#[derive(Clone, Debug)]
struct TokenTreeCursor {
    stream: TokenStream,
    /// Points to the current token tree in the stream. In `TokenCursor::curr`,
    /// this can be any token tree. In `TokenCursor::stack`, this is always a
    /// `TokenTree::Delimited`.
    index: usize,
}
286
impl TokenTreeCursor {
    /// Creates a cursor positioned at the start of `stream`.
    #[inline]
    fn new(stream: TokenStream) -> Self {
        TokenTreeCursor { stream, index: 0 }
    }

    /// The token tree currently pointed at, or `None` if the cursor is one
    /// past the end of the stream.
    #[inline]
    fn curr(&self) -> Option<&TokenTree> {
        self.stream.get(self.index)
    }

    /// Advances the cursor to the next token tree.
    #[inline]
    fn bump(&mut self) {
        self.index += 1;
    }
}
303
/// A `TokenStream` cursor that produces `Token`s. It's a bit odd that
/// we (a) lex tokens into a nice tree structure (`TokenStream`), and then (b)
/// use this type to emit them as a linear sequence. But a linear sequence is
/// what the parser expects, for the most part.
#[derive(Clone, Debug)]
struct TokenCursor {
    // Cursor for the current (innermost) token stream. The index within the
    // cursor can point to any token tree in the stream (or one past the end).
    // The delimiters for this token stream are found in `self.stack.last()`;
    // if that is `None` we are in the outermost token stream which never has
    // delimiters.
    curr: TokenTreeCursor,

    // Token streams surrounding the current one. The index within each cursor
    // always points to a `TokenTree::Delimited`.
    stack: Vec<TokenTreeCursor>,
}
321
impl TokenCursor {
    /// Returns the next token and its spacing, descending into and climbing
    /// out of delimited groups as needed. Returns `Eof` forever once the
    /// outermost stream is exhausted.
    fn next(&mut self) -> (Token, Spacing) {
        self.inlined_next()
    }

    /// This always-inlined version should only be used on hot code paths.
    #[inline(always)]
    fn inlined_next(&mut self) -> (Token, Spacing) {
        loop {
            // FIXME: we currently don't return `Delimiter::Invisible` open/close delims. To fix
            // #67062 we will need to, whereupon the `delim != Delimiter::Invisible` conditions
            // below can be removed.
            if let Some(tree) = self.curr.curr() {
                match tree {
                    &TokenTree::Token(ref token, spacing) => {
                        // Delimiters are represented as `TokenTree::Delimited`,
                        // never as plain tokens within a stream.
                        debug_assert!(!matches!(
                            token.kind,
                            token::OpenDelim(_) | token::CloseDelim(_)
                        ));
                        let res = (token.clone(), spacing);
                        self.curr.bump();
                        return res;
                    }
                    &TokenTree::Delimited(sp, spacing, delim, ref tts) => {
                        // Descend into the delimited group; the parent cursor
                        // stays pointed at the `Delimited` so we can recover
                        // the closing delimiter on the way back out.
                        let trees = TokenTreeCursor::new(tts.clone());
                        self.stack.push(mem::replace(&mut self.curr, trees));
                        if !delim.skip() {
                            return (Token::new(token::OpenDelim(delim), sp.open), spacing.open);
                        }
                        // No open delimiter to return; continue on to the next iteration.
                    }
                };
            } else if let Some(parent) = self.stack.pop() {
                // We have exhausted this token stream. Move back to its parent token stream.
                let Some(&TokenTree::Delimited(span, spacing, delim, _)) = parent.curr() else {
                    panic!("parent should be Delimited")
                };
                self.curr = parent;
                self.curr.bump(); // move past the `Delimited`
                if !delim.skip() {
                    return (Token::new(token::CloseDelim(delim), span.close), spacing.close);
                }
                // No close delimiter to return; continue on to the next iteration.
            } else {
                // We have exhausted the outermost token stream. The use of
                // `Spacing::Alone` is arbitrary and immaterial, because the
                // `Eof` token's spacing is never used.
                return (Token::new(token::Eof, DUMMY_SP), Spacing::Alone);
            }
        }
    }
}
374
/// A sequence separator.
#[derive(Debug)]
struct SeqSep<'a> {
    /// The separator token.
    sep: Option<ExpTokenPair<'a>>,
    /// `true` if a trailing separator is allowed.
    trailing_sep_allowed: bool,
}
383
impl<'a> SeqSep<'a> {
    /// A sequence separated by `sep`, with an optional trailing separator.
    fn trailing_allowed(sep: ExpTokenPair<'a>) -> SeqSep<'a> {
        SeqSep { sep: Some(sep), trailing_sep_allowed: true }
    }

    /// A sequence with no separator at all.
    fn none() -> SeqSep<'a> {
        SeqSep { sep: None, trailing_sep_allowed: false }
    }
}
393
/// Whether the thing being parsed is followed by a type (e.g. after a colon).
/// NOTE(review): semantics inferred from the name — confirm at the use sites,
/// which are outside this chunk.
#[derive(Debug)]
pub enum FollowedByType {
    Yes,
    No,
}
399
/// Whether a parsed sequence ended with a trailing separator.
#[derive(Copy, Clone, Debug)]
enum Trailing {
    No,
    Yes,
}

impl From<bool> for Trailing {
    /// `true` maps to `Trailing::Yes`, `false` to `Trailing::No`.
    fn from(b: bool) -> Trailing {
        match b {
            true => Trailing::Yes,
            false => Trailing::No,
        }
    }
}
411
/// A classification of tokens that need special wording in error messages;
/// see `token_descr` below.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub(super) enum TokenDescription {
    ReservedIdentifier,
    Keyword,
    ReservedKeyword,
    DocComment,

    // Expanded metavariables are wrapped in invisible delimiters which aren't
    // pretty-printed. In error messages we must handle these specially
    // otherwise we get confusing things in messages like "expected `(`, found
    // ``". It's better to say e.g. "expected `(`, found type metavariable".
    MetaVar(MetaVarKind),
}
425
impl TokenDescription {
    /// Classifies `token`, or returns `None` for tokens that need no special
    /// description. Note that the identifier/keyword checks run before the
    /// match on `token.kind`, so they take priority.
    pub(super) fn from_token(token: &Token) -> Option<Self> {
        match token.kind {
            _ if token.is_special_ident() => Some(TokenDescription::ReservedIdentifier),
            _ if token.is_used_keyword() => Some(TokenDescription::Keyword),
            _ if token.is_unused_keyword() => Some(TokenDescription::ReservedKeyword),
            token::DocComment(..) => Some(TokenDescription::DocComment),
            token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(kind))) => {
                Some(TokenDescription::MetaVar(kind))
            }
            _ => None,
        }
    }
}
440
/// Renders a token for use in diagnostics, e.g. "keyword `fn`",
/// "doc comment `/// x`", "`+`", or "`ty` metavariable".
pub fn token_descr(token: &Token) -> String {
    let s = pprust::token_to_string(token).to_string();

    match (TokenDescription::from_token(token), &token.kind) {
        (Some(TokenDescription::ReservedIdentifier), _) => format!("reserved identifier `{s}`"),
        (Some(TokenDescription::Keyword), _) => format!("keyword `{s}`"),
        (Some(TokenDescription::ReservedKeyword), _) => format!("reserved keyword `{s}`"),
        (Some(TokenDescription::DocComment), _) => format!("doc comment `{s}`"),
        // Deliberately doesn't print `s`, which is empty.
        (Some(TokenDescription::MetaVar(kind)), _) => format!("`{kind}` metavariable"),
        (None, TokenKind::NtIdent(..)) => format!("identifier `{s}`"),
        (None, TokenKind::NtLifetime(..)) => format!("lifetime `{s}`"),
        (None, TokenKind::Interpolated(node)) => format!("{} `{s}`", node.descr()),
        (None, _) => format!("`{s}`"),
    }
}
457
458impl<'a> Parser<'a> {
    /// Creates a parser over `stream`. `subparser_name` is `Some` when this
    /// parser is parsing a macro call rather than ordinary Rust source.
    pub fn new(
        psess: &'a ParseSess,
        stream: TokenStream,
        subparser_name: Option<&'static str>,
    ) -> Self {
        let mut parser = Parser {
            psess,
            // `token`/`prev_token` start as dummies; the `bump` below loads
            // the first real token.
            token: Token::dummy(),
            token_spacing: Spacing::Alone,
            prev_token: Token::dummy(),
            capture_cfg: false,
            restrictions: Restrictions::empty(),
            expected_token_types: TokenTypeSet::new(),
            token_cursor: TokenCursor { curr: TokenTreeCursor::new(stream), stack: Vec::new() },
            num_bump_calls: 0,
            break_last_token: 0,
            unmatched_angle_bracket_count: 0,
            angle_bracket_nesting: 0,
            last_unexpected_token_span: None,
            subparser_name,
            capture_state: CaptureState {
                capturing: Capturing::No,
                parser_replacements: Vec::new(),
                inner_attr_parser_ranges: Default::default(),
                seen_attrs: IntervalSet::new(u32::MAX as usize),
            },
            current_closure: None,
            recovery: Recovery::Allowed,
        };

        // Make parser point to the first token.
        parser.bump();

        // Change this from 1 back to 0 after the bump. This eases debugging of
        // `Parser::collect_tokens` because 0-indexed token positions are nicer
        // than 1-indexed token positions.
        parser.num_bump_calls = 0;

        parser
    }
499
    /// Builder-style setter for the recovery mode; consumes and returns the
    /// parser so it can be chained after `Parser::new`.
    #[inline]
    pub fn recovery(mut self, recovery: Recovery) -> Self {
        self.recovery = recovery;
        self
    }
505
506    /// Whether the parser is allowed to recover from broken code.
507    ///
508    /// If this returns false, recovering broken code into valid code (especially if this recovery does lookahead)
509    /// is not allowed. All recovery done by the parser must be gated behind this check.
510    ///
511    /// Technically, this only needs to restrict eager recovery by doing lookahead at more tokens.
512    /// But making the distinction is very subtle, and simply forbidding all recovery is a lot simpler to uphold.
513    #[inline]
514    fn may_recover(&self) -> bool {
515        matches!(self.recovery, Recovery::Allowed)
516    }
517
518    /// Version of [`unexpected`](Parser::unexpected) that "returns" any type in the `Ok`
519    /// (both those functions never return "Ok", and so can lie like that in the type).
520    pub fn unexpected_any<T>(&mut self) -> PResult<'a, T> {
521        match self.expect_one_of(&[], &[]) {
522            Err(e) => Err(e),
523            // We can get `Ok(true)` from `recover_closing_delimiter`
524            // which is called in `expected_one_of_not_found`.
525            Ok(_) => FatalError.raise(),
526        }
527    }
528
    /// Reports the current token as unexpected; never returns `Ok`.
    pub fn unexpected(&mut self) -> PResult<'a, ()> {
        self.unexpected_any()
    }
532
    /// Expects and consumes the token `t`. Signals an error if the next token is not `t`.
    pub fn expect(&mut self, exp: ExpTokenPair<'_>) -> PResult<'a, Recovered> {
        if self.expected_token_types.is_empty() {
            // Fast path: nothing else was expected here, so we can match
            // directly without going through `expect_one_of`.
            if self.token == *exp.tok {
                self.bump();
                Ok(Recovered::No)
            } else {
                self.unexpected_try_recover(exp.tok)
            }
        } else {
            // Other expectations were recorded; let `expect_one_of` fold them
            // into the diagnostic.
            self.expect_one_of(slice::from_ref(&exp), &[])
        }
    }
546
    /// Expect next token to be edible or inedible token. If edible,
    /// then consume it; if inedible, then return without consuming
    /// anything. Signal a fatal error if next token is unexpected.
    fn expect_one_of(
        &mut self,
        edible: &[ExpTokenPair<'_>],
        inedible: &[ExpTokenPair<'_>],
    ) -> PResult<'a, Recovered> {
        if edible.iter().any(|exp| exp.tok == &self.token.kind) {
            self.bump();
            Ok(Recovered::No)
        } else if inedible.iter().any(|exp| exp.tok == &self.token.kind) {
            // leave it in the input
            Ok(Recovered::No)
        } else if self.token != token::Eof
            && self.last_unexpected_token_span == Some(self.token.span)
        {
            // We already errored on this exact token; erroring again would
            // loop, so bail out fatally instead.
            FatalError.raise();
        } else {
            self.expected_one_of_not_found(edible, inedible)
                .map(|error_guaranteed| Recovered::Yes(error_guaranteed))
        }
    }
570
    /// Parses an identifier, recovering (emitting but not returning an error)
    /// if it is a reserved word.
    // Public for rustfmt usage.
    pub fn parse_ident(&mut self) -> PResult<'a, Ident> {
        self.parse_ident_common(true)
    }
575
576    fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, Ident> {
577        let (ident, is_raw) = self.ident_or_err(recover)?;
578
579        if matches!(is_raw, IdentIsRaw::No) && ident.is_reserved() {
580            let err = self.expected_ident_found_err();
581            if recover {
582                err.emit();
583            } else {
584                return Err(err);
585            }
586        }
587        self.bump();
588        Ok(ident)
589    }
590
591    fn ident_or_err(&mut self, recover: bool) -> PResult<'a, (Ident, IdentIsRaw)> {
592        match self.token.ident() {
593            Some(ident) => Ok(ident),
594            None => self.expected_ident_found(recover),
595        }
596    }
597
    /// Checks if the next token is `tok`, and returns `true` if so.
    ///
    /// This method will automatically add `tok` to `expected_token_types` if `tok` is not
    /// encountered, so it later appears in "expected one of ..." diagnostics.
    #[inline]
    fn check(&mut self, exp: ExpTokenPair<'_>) -> bool {
        let is_present = self.token == *exp.tok;
        if !is_present {
            self.expected_token_types.insert(exp.token_type);
        }
        is_present
    }
610
    /// Like `check`, but does not record `tok` in `expected_token_types`, so
    /// it never shows up in "expected one of ..." diagnostics.
    #[inline]
    #[must_use]
    fn check_noexpect(&self, tok: &TokenKind) -> bool {
        self.token == *tok
    }
616
617    /// Consumes a token 'tok' if it exists. Returns whether the given token was present.
618    ///
619    /// the main purpose of this function is to reduce the cluttering of the suggestions list
620    /// which using the normal eat method could introduce in some cases.
621    #[inline]
622    #[must_use]
623    fn eat_noexpect(&mut self, tok: &TokenKind) -> bool {
624        let is_present = self.check_noexpect(tok);
625        if is_present {
626            self.bump()
627        }
628        is_present
629    }
630
631    /// Consumes a token 'tok' if it exists. Returns whether the given token was present.
632    #[inline]
633    #[must_use]
634    pub fn eat(&mut self, exp: ExpTokenPair<'_>) -> bool {
635        let is_present = self.check(exp);
636        if is_present {
637            self.bump()
638        }
639        is_present
640    }
641
    /// If the next token is the given keyword, returns `true` without eating it.
    /// An expectation is also added for diagnostics purposes.
    #[inline]
    #[must_use]
    fn check_keyword(&mut self, exp: ExpKeywordPair) -> bool {
        let is_keyword = self.token.is_keyword(exp.kw);
        if !is_keyword {
            self.expected_token_types.insert(exp.token_type);
        }
        is_keyword
    }
653
654    #[inline]
655    #[must_use]
656    fn check_keyword_case(&mut self, exp: ExpKeywordPair, case: Case) -> bool {
657        if self.check_keyword(exp) {
658            true
659        } else if case == Case::Insensitive
660            && let Some((ident, IdentIsRaw::No)) = self.token.ident()
661            // Do an ASCII case-insensitive match, because all keywords are ASCII.
662            && ident.as_str().eq_ignore_ascii_case(exp.kw.as_str())
663        {
664            true
665        } else {
666            false
667        }
668    }
669
670    /// If the next token is the given keyword, eats it and returns `true`.
671    /// Otherwise, returns `false`. An expectation is also added for diagnostics purposes.
672    // Public for rustc_builtin_macros and rustfmt usage.
673    #[inline]
674    #[must_use]
675    pub fn eat_keyword(&mut self, exp: ExpKeywordPair) -> bool {
676        let is_keyword = self.check_keyword(exp);
677        if is_keyword {
678            self.bump();
679        }
680        is_keyword
681    }
682
    /// Eats a keyword, optionally ignoring the case.
    /// If the case differs (and is ignored) an error is issued.
    /// This is useful for recovery.
    #[inline]
    #[must_use]
    fn eat_keyword_case(&mut self, exp: ExpKeywordPair, case: Case) -> bool {
        if self.eat_keyword(exp) {
            true
        } else if case == Case::Insensitive
            && let Some((ident, IdentIsRaw::No)) = self.token.ident()
            // Do an ASCII case-insensitive match, because all keywords are ASCII.
            && ident.as_str().eq_ignore_ascii_case(exp.kw.as_str())
        {
            // Wrong case: recover by accepting it, but tell the user.
            self.dcx().emit_err(errors::KwBadCase { span: ident.span, kw: exp.kw.as_str() });
            self.bump();
            true
        } else {
            false
        }
    }
703
704    /// If the next token is the given keyword, eats it and returns `true`.
705    /// Otherwise, returns `false`. No expectation is added.
706    // Public for rustc_builtin_macros usage.
707    #[inline]
708    #[must_use]
709    pub fn eat_keyword_noexpect(&mut self, kw: Symbol) -> bool {
710        let is_keyword = self.token.is_keyword(kw);
711        if is_keyword {
712            self.bump();
713        }
714        is_keyword
715    }
716
717    /// If the given word is not a keyword, signals an error.
718    /// If the next token is not the given word, signals an error.
719    /// Otherwise, eats it.
720    pub fn expect_keyword(&mut self, exp: ExpKeywordPair) -> PResult<'a, ()> {
721        if !self.eat_keyword(exp) { self.unexpected() } else { Ok(()) }
722    }
723
724    /// Is the given keyword `kw` followed by a non-reserved identifier?
725    fn is_kw_followed_by_ident(&self, kw: Symbol) -> bool {
726        self.token.is_keyword(kw) && self.look_ahead(1, |t| t.is_ident() && !t.is_reserved_ident())
727    }
728
729    #[inline]
730    fn check_or_expected(&mut self, ok: bool, token_type: TokenType) -> bool {
731        if !ok {
732            self.expected_token_types.insert(token_type);
733        }
734        ok
735    }
736
    /// Checks for an identifier, recording `Ident` as expected otherwise.
    fn check_ident(&mut self) -> bool {
        self.check_or_expected(self.token.is_ident(), TokenType::Ident)
    }

    /// Checks for the start of a path, recording `Path` as expected otherwise.
    fn check_path(&mut self) -> bool {
        self.check_or_expected(self.token.is_path_start(), TokenType::Path)
    }

    /// Checks whether a type can begin here, recording `Type` as expected otherwise.
    fn check_type(&mut self) -> bool {
        self.check_or_expected(self.token.can_begin_type(), TokenType::Type)
    }

    /// Checks whether a const argument can begin here, recording `Const` as expected otherwise.
    fn check_const_arg(&mut self) -> bool {
        self.check_or_expected(self.token.can_begin_const_arg(), TokenType::Const)
    }
752
    /// Checks whether the next tokens begin a const closure: `const` followed
    /// by `move`, `static`, `|`, or `||`.
    fn check_const_closure(&self) -> bool {
        self.is_keyword_ahead(0, &[kw::Const])
            && self.look_ahead(1, |t| match &t.kind {
                // async closures do not work with const closures, so we do not parse that here.
                token::Ident(kw::Move | kw::Static, _) | token::OrOr | token::BinOp(token::Or) => {
                    true
                }
                _ => false,
            })
    }
763
    /// Checks whether an inline const (`const { ... }`) begins `dist` tokens
    /// ahead: `const` followed by a brace-delimited block (possibly an
    /// interpolated block nonterminal).
    fn check_inline_const(&self, dist: usize) -> bool {
        self.is_keyword_ahead(dist, &[kw::Const])
            && self.look_ahead(dist + 1, |t| match &t.kind {
                token::Interpolated(nt) => matches!(&**nt, token::NtBlock(..)),
                token::OpenDelim(Delimiter::Brace) => true,
                _ => false,
            })
    }
772
    /// Checks to see if the next token is either `+` or `+=`.
    /// Otherwise returns `false`, recording `Plus` as expected.
    #[inline]
    fn check_plus(&mut self) -> bool {
        self.check_or_expected(self.token.is_like_plus(), TokenType::Plus)
    }
779
    /// Eats the expected token if it's present possibly breaking
    /// compound tokens like multi-character operators in process.
    /// Returns `true` if the token was eaten.
    fn break_and_eat(&mut self, exp: ExpTokenPair<'_>) -> bool {
        // Exact match: just eat it.
        if self.token == *exp.tok {
            self.bump();
            return true;
        }
        // Otherwise, try splitting the current token after its first
        // character (e.g. `>>` into `>` + `>`).
        match self.token.kind.break_two_token_op(1) {
            Some((first, second)) if first == *exp.tok => {
                let first_span = self.psess.source_map().start_point(self.token.span);
                let second_span = self.token.span.with_lo(first_span.hi());
                self.token = Token::new(first, first_span);
                // Keep track of this token - if we end token capturing now,
                // we'll want to append this token to the captured stream.
                //
                // If we consume any additional tokens, then this token
                // is not needed (we'll capture the entire 'glued' token),
                // and `bump` will set this field to 0.
                self.break_last_token += 1;
                // Use the spacing of the glued token as the spacing of the
                // unglued second token.
                self.bump_with((Token::new(second, second_span), self.token_spacing));
                true
            }
            _ => {
                // Not present and not splittable: record the expectation.
                self.expected_token_types.insert(exp.token_type);
                false
            }
        }
    }
811
    /// Eats `+` possibly breaking tokens like `+=` in process.
    fn eat_plus(&mut self) -> bool {
        self.break_and_eat(exp!(Plus))
    }

    /// Eats `&` possibly breaking tokens like `&&` in process.
    /// Signals an error if `&` is not eaten.
    fn expect_and(&mut self) -> PResult<'a, ()> {
        if self.break_and_eat(exp!(And)) { Ok(()) } else { self.unexpected() }
    }

    /// Eats `|` possibly breaking tokens like `||` in process.
    /// Signals an error if `|` was not eaten.
    fn expect_or(&mut self) -> PResult<'a, ()> {
        if self.break_and_eat(exp!(Or)) { Ok(()) } else { self.unexpected() }
    }
828
829    /// Eats `<` possibly breaking tokens like `<<` in process.
830    fn eat_lt(&mut self) -> bool {
831        let ate = self.break_and_eat(exp!(Lt));
832        if ate {
833            // See doc comment for `unmatched_angle_bracket_count`.
834            self.unmatched_angle_bracket_count += 1;
835            debug!("eat_lt: (increment) count={:?}", self.unmatched_angle_bracket_count);
836        }
837        ate
838    }
839
840    /// Eats `<` possibly breaking tokens like `<<` in process.
841    /// Signals an error if `<` was not eaten.
842    fn expect_lt(&mut self) -> PResult<'a, ()> {
843        if self.eat_lt() { Ok(()) } else { self.unexpected() }
844    }
845
846    /// Eats `>` possibly breaking tokens like `>>` in process.
847    /// Signals an error if `>` was not eaten.
848    fn expect_gt(&mut self) -> PResult<'a, ()> {
849        if self.break_and_eat(exp!(Gt)) {
850            // See doc comment for `unmatched_angle_bracket_count`.
851            if self.unmatched_angle_bracket_count > 0 {
852                self.unmatched_angle_bracket_count -= 1;
853                debug!("expect_gt: (decrement) count={:?}", self.unmatched_angle_bracket_count);
854            }
855            Ok(())
856        } else {
857            self.unexpected()
858        }
859    }
860
861    /// Checks if the next token is contained within `closes`, and returns `true` if so.
862    fn expect_any_with_type(
863        &mut self,
864        closes_expected: &[ExpTokenPair<'_>],
865        closes_not_expected: &[&TokenKind],
866    ) -> bool {
867        closes_expected.iter().any(|&close| self.check(close))
868            || closes_not_expected.iter().any(|k| self.check_noexpect(k))
869    }
870
    /// Parses a sequence until the specified delimiters. The function
    /// `f` must consume tokens until reaching the next separator or
    /// closing bracket.
    ///
    /// Returns the parsed elements, whether a trailing separator was seen,
    /// and whether error recovery took place while expecting a separator.
    fn parse_seq_to_before_tokens<T>(
        &mut self,
        closes_expected: &[ExpTokenPair<'_>],
        closes_not_expected: &[&TokenKind],
        sep: SeqSep<'_>,
        mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, Trailing, Recovered)> {
        let mut first = true;
        let mut recovered = Recovered::No;
        let mut trailing = Trailing::No;
        let mut v = ThinVec::new();

        // Loop until we see one of the requested closing tokens.
        while !self.expect_any_with_type(closes_expected, closes_not_expected) {
            // Stop at any closing delimiter or EOF even if it wasn't
            // requested, rather than consuming past the enclosing scope.
            if let token::CloseDelim(..) | token::Eof = self.token.kind {
                break;
            }
            if let Some(exp) = sep.sep {
                if first {
                    // no separator for the first element
                    first = false;
                } else {
                    // check for separator
                    match self.expect(exp) {
                        Ok(Recovered::No) => {
                            self.current_closure.take();
                        }
                        Ok(Recovered::Yes(guar)) => {
                            self.current_closure.take();
                            recovered = Recovered::Yes(guar);
                            break;
                        }
                        Err(mut expect_err) => {
                            let sp = self.prev_token.span.shrink_to_hi();
                            let token_str = pprust::token_kind_to_string(exp.tok);

                            match self.current_closure.take() {
                                Some(closure_spans) if self.token == TokenKind::Semi => {
                                    // Finding a semicolon instead of a comma
                                    // after a closure body indicates that the
                                    // closure body may be a block but the user
                                    // forgot to put braces around its
                                    // statements.

                                    self.recover_missing_braces_around_closure_body(
                                        closure_spans,
                                        expect_err,
                                    )?;

                                    continue;
                                }

                                _ => {
                                    // Attempt to keep parsing if it was a similar separator.
                                    if exp.tok.similar_tokens().contains(&self.token.kind) {
                                        self.bump();
                                    }
                                }
                            }

                            // If this was a missing `@` in a binding pattern
                            // bail with a suggestion
                            // https://github.com/rust-lang/rust/issues/72373
                            if self.prev_token.is_ident() && self.token == token::DotDot {
                                let msg = format!(
                                    "if you meant to bind the contents of the rest of the array \
                                     pattern into `{}`, use `@`",
                                    pprust::token_to_string(&self.prev_token)
                                );
                                expect_err
                                    .with_span_suggestion_verbose(
                                        self.prev_token.span.shrink_to_hi().until(self.token.span),
                                        msg,
                                        " @ ",
                                        Applicability::MaybeIncorrect,
                                    )
                                    .emit();
                                break;
                            }

                            // Attempt to keep parsing if it was an omitted separator.
                            self.last_unexpected_token_span = None;
                            match f(self) {
                                Ok(t) => {
                                    // Parsed successfully, therefore most probably the code only
                                    // misses a separator.
                                    expect_err
                                        .with_span_suggestion_short(
                                            sp,
                                            format!("missing `{token_str}`"),
                                            token_str,
                                            Applicability::MaybeIncorrect,
                                        )
                                        .emit();

                                    v.push(t);
                                    continue;
                                }
                                Err(e) => {
                                    // Parsing failed, therefore it must be something more serious
                                    // than just a missing separator.
                                    for xx in &e.children {
                                        // Propagate the help message from sub error `e` to main
                                        // error `expect_err`.
                                        expect_err.children.push(xx.clone());
                                    }
                                    e.cancel();
                                    if self.token == token::Colon {
                                        // We will try to recover in
                                        // `maybe_recover_struct_lit_bad_delims`.
                                        return Err(expect_err);
                                    } else if let [exp] = closes_expected
                                        && exp.token_type == TokenType::CloseParen
                                    {
                                        return Err(expect_err);
                                    } else {
                                        expect_err.emit();
                                        break;
                                    }
                                }
                            }
                        }
                    }
                }
            }
            // A close token right after a separator means the separator was
            // trailing; accept it only if the sequence allows that.
            if sep.trailing_sep_allowed
                && self.expect_any_with_type(closes_expected, closes_not_expected)
            {
                trailing = Trailing::Yes;
                break;
            }

            let t = f(self)?;
            v.push(t);
        }

        Ok((v, trailing, recovered))
    }
1011
    /// Recovery for a closure body that was terminated by `;` where a `,` was
    /// expected: the user likely wrote a multi-statement closure body without
    /// braces. Skips past the stray statements, then repurposes `expect_err`
    /// into a diagnostic that suggests wrapping the body in `{ ... }`.
    fn recover_missing_braces_around_closure_body(
        &mut self,
        closure_spans: ClosureSpans,
        mut expect_err: Diag<'_>,
    ) -> PResult<'a, ()> {
        let initial_semicolon = self.token.span;

        // Consume the `;` and any statements that follow, so parsing can
        // resume at a sensible point. Errors in those statements are
        // swallowed; we only want to find where the closure likely ends.
        while self.eat(exp!(Semi)) {
            let _ = self.parse_stmt_without_recovery(false, ForceCollect::No).unwrap_or_else(|e| {
                e.cancel();
                None
            });
        }

        expect_err
            .primary_message("closure bodies that contain statements must be surrounded by braces");

        let preceding_pipe_span = closure_spans.closing_pipe;
        let following_token_span = self.token.span;

        // First note: explain why the parsed closure stopped at the `;`.
        let mut first_note = MultiSpan::from(vec![initial_semicolon]);
        first_note.push_span_label(
            initial_semicolon,
            "this `;` turns the preceding closure into a statement",
        );
        first_note.push_span_label(
            closure_spans.body,
            "this expression is a statement because of the trailing semicolon",
        );
        expect_err.span_note(first_note, "statement found outside of a block");

        // Second note: contrast what was parsed with what was probably meant.
        let mut second_note = MultiSpan::from(vec![closure_spans.whole_closure]);
        second_note.push_span_label(closure_spans.whole_closure, "this is the parsed closure...");
        second_note.push_span_label(
            following_token_span,
            "...but likely you meant the closure to end here",
        );
        expect_err.span_note(second_note, "the closure body may be incorrectly delimited");

        expect_err.span(vec![preceding_pipe_span, following_token_span]);

        let opening_suggestion_str = " {".to_string();
        let closing_suggestion_str = "}".to_string();

        // Suggest inserting `{` after the closing `|` and `}` before the
        // token where we ended up after skipping the stray statements.
        expect_err.multipart_suggestion(
            "try adding braces",
            vec![
                (preceding_pipe_span.shrink_to_hi(), opening_suggestion_str),
                (following_token_span.shrink_to_lo(), closing_suggestion_str),
            ],
            Applicability::MaybeIncorrect,
        );

        expect_err.emit();

        Ok(())
    }
1069
1070    /// Parses a sequence, not including the delimiters. The function
1071    /// `f` must consume tokens until reaching the next separator or
1072    /// closing bracket.
1073    fn parse_seq_to_before_end<T>(
1074        &mut self,
1075        close: ExpTokenPair<'_>,
1076        sep: SeqSep<'_>,
1077        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
1078    ) -> PResult<'a, (ThinVec<T>, Trailing, Recovered)> {
1079        self.parse_seq_to_before_tokens(&[close], &[], sep, f)
1080    }
1081
    /// Parses a sequence, including only the closing delimiter. The function
    /// `f` must consume tokens until reaching the next separator or
    /// closing bracket.
    fn parse_seq_to_end<T>(
        &mut self,
        close: ExpTokenPair<'_>,
        sep: SeqSep<'_>,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
        let (val, trailing, recovered) = self.parse_seq_to_before_end(close, sep, f)?;
        // If recovery happened, the close token may legitimately be absent.
        // Otherwise we must now be looking at it; failing to eat it is a
        // parser bug, delayed because an already-emitted error may excuse it.
        //
        // NOTE(review): this branch fires when we did *not* recover yet still
        // missed the close token, so the "recovered but ..." wording of the
        // message reads inverted — confirm the intended phrasing.
        if matches!(recovered, Recovered::No) && !self.eat(close) {
            self.dcx().span_delayed_bug(
                self.token.span,
                "recovered but `parse_seq_to_before_end` did not give us the close token",
            );
        }
        Ok((val, trailing))
    }
1100
1101    /// Parses a sequence, including both delimiters. The function
1102    /// `f` must consume tokens until reaching the next separator or
1103    /// closing bracket.
1104    fn parse_unspanned_seq<T>(
1105        &mut self,
1106        open: ExpTokenPair<'_>,
1107        close: ExpTokenPair<'_>,
1108        sep: SeqSep<'_>,
1109        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
1110    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
1111        self.expect(open)?;
1112        self.parse_seq_to_end(close, sep, f)
1113    }
1114
1115    /// Parses a comma-separated sequence, including both delimiters.
1116    /// The function `f` must consume tokens until reaching the next separator or
1117    /// closing bracket.
1118    fn parse_delim_comma_seq<T>(
1119        &mut self,
1120        open: ExpTokenPair<'_>,
1121        close: ExpTokenPair<'_>,
1122        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
1123    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
1124        self.parse_unspanned_seq(open, close, SeqSep::trailing_allowed(exp!(Comma)), f)
1125    }
1126
1127    /// Parses a comma-separated sequence delimited by parentheses (e.g. `(x, y)`).
1128    /// The function `f` must consume tokens until reaching the next separator or
1129    /// closing bracket.
1130    fn parse_paren_comma_seq<T>(
1131        &mut self,
1132        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
1133    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
1134        self.parse_delim_comma_seq(exp!(OpenParen), exp!(CloseParen), f)
1135    }
1136
1137    /// Advance the parser by one token using provided token as the next one.
1138    fn bump_with(&mut self, next: (Token, Spacing)) {
1139        self.inlined_bump_with(next)
1140    }
1141
    /// This always-inlined version should only be used on hot code paths.
    ///
    /// Moves the current token into `prev_token` and installs `next_token`
    /// in its place, then resets the diagnostics bookkeeping.
    #[inline(always)]
    fn inlined_bump_with(&mut self, (next_token, next_spacing): (Token, Spacing)) {
        // Update the current and previous tokens.
        self.prev_token = mem::replace(&mut self.token, next_token);
        self.token_spacing = next_spacing;

        // Diagnostics: the set of expected token types only describes the
        // token we just moved past, so it is cleared on every advance.
        self.expected_token_types.clear();
    }
1152
    /// Advance the parser by one token, pulling the next token from the
    /// underlying token cursor.
    pub fn bump(&mut self) {
        // Note: destructuring here would give nicer code, but it was found in #96210 to be slower
        // than `.0`/`.1` access.
        let mut next = self.token_cursor.inlined_next();
        self.num_bump_calls += 1;
        // We got a token from the underlying cursor and no longer need to
        // worry about an unglued token. See `break_and_eat` for more details.
        self.break_last_token = 0;
        if next.0.span.is_dummy() {
            // Tweak the location for better diagnostics, but keep syntactic context intact.
            let fallback_span = self.token.span;
            next.0.span = fallback_span.with_ctxt(next.0.span.ctxt());
        }
        // The cursor is expected to have filtered out skipped (invisible)
        // delimiters already; seeing one here would be a cursor bug.
        debug_assert!(!matches!(
            next.0.kind,
            token::OpenDelim(delim) | token::CloseDelim(delim) if delim.skip()
        ));
        self.inlined_bump_with(next)
    }
1173
    /// Look-ahead `dist` tokens of `self.token` and get access to that token there.
    /// When `dist == 0` then the current token is looked at. `Eof` will be
    /// returned if the look-ahead is any distance past the end of the tokens.
    pub fn look_ahead<R>(&self, dist: usize, looker: impl FnOnce(&Token) -> R) -> R {
        if dist == 0 {
            return looker(&self.token);
        }

        // Typically around 98% of the `dist > 0` cases have `dist == 1`, so we
        // have a fast special case for that.
        if dist == 1 {
            // The index is zero because the tree cursor's index always points
            // to the next token to be gotten.
            match self.token_cursor.curr.curr() {
                Some(tree) => {
                    // Indexing stayed within the current token tree.
                    match tree {
                        TokenTree::Token(token, _) => return looker(token),
                        &TokenTree::Delimited(dspan, _, delim, _) => {
                            // A non-skipped delimiter is observable as its
                            // open token; a skipped one falls through to the
                            // slow path below.
                            if !delim.skip() {
                                return looker(&Token::new(token::OpenDelim(delim), dspan.open));
                            }
                        }
                    }
                }
                None => {
                    // The tree cursor lookahead went (one) past the end of the
                    // current token tree. Try to return a close delimiter.
                    if let Some(last) = self.token_cursor.stack.last()
                        && let Some(&TokenTree::Delimited(span, _, delim, _)) = last.curr()
                        && !delim.skip()
                    {
                        // We are not in the outermost token stream, so we have
                        // delimiters. Also, those delimiters are not skipped.
                        return looker(&Token::new(token::CloseDelim(delim), span.close));
                    }
                }
            }
        }

        // Just clone the token cursor and use `next`, skipping delimiters as
        // necessary. Slow but simple.
        let mut cursor = self.token_cursor.clone();
        let mut i = 0;
        let mut token = Token::dummy();
        while i < dist {
            token = cursor.next().0;
            // Skipped (invisible) delimiters do not count towards `dist`.
            if matches!(
                token.kind,
                token::OpenDelim(delim) | token::CloseDelim(delim) if delim.skip()
            ) {
                continue;
            }
            i += 1;
        }
        looker(&token)
    }
1231
1232    /// Returns whether any of the given keywords are `dist` tokens ahead of the current one.
1233    pub(crate) fn is_keyword_ahead(&self, dist: usize, kws: &[Symbol]) -> bool {
1234        self.look_ahead(dist, |t| kws.iter().any(|&kw| t.is_keyword(kw)))
1235    }
1236
    /// Parses a coroutine qualifier: `async`, `gen`, `async gen`, or nothing.
    /// (`gen` and `async gen` are only parsed from Rust 2024 onwards.)
    fn parse_coroutine_kind(&mut self, case: Case) -> Option<CoroutineKind> {
        // Capture the span before eating anything so the result covers the
        // whole qualifier.
        let span = self.token.uninterpolated_span();
        if self.eat_keyword_case(exp!(Async), case) {
            // FIXME(gen_blocks): Do we want to unconditionally parse `gen` and then
            // error if edition <= 2024, like we do with async and edition <= 2018?
            if self.token.uninterpolated_span().at_least_rust_2024()
                && self.eat_keyword_case(exp!(Gen), case)
            {
                let gen_span = self.prev_token.uninterpolated_span();
                Some(CoroutineKind::AsyncGen {
                    span: span.to(gen_span),
                    closure_id: DUMMY_NODE_ID,
                    return_impl_trait_id: DUMMY_NODE_ID,
                })
            } else {
                Some(CoroutineKind::Async {
                    span,
                    closure_id: DUMMY_NODE_ID,
                    return_impl_trait_id: DUMMY_NODE_ID,
                })
            }
        } else if self.token.uninterpolated_span().at_least_rust_2024()
            && self.eat_keyword_case(exp!(Gen), case)
        {
            Some(CoroutineKind::Gen {
                span,
                closure_id: DUMMY_NODE_ID,
                return_impl_trait_id: DUMMY_NODE_ID,
            })
        } else {
            None
        }
    }
1271
1272    /// Parses fn unsafety: `unsafe`, `safe` or nothing.
1273    fn parse_safety(&mut self, case: Case) -> Safety {
1274        if self.eat_keyword_case(exp!(Unsafe), case) {
1275            Safety::Unsafe(self.prev_token.uninterpolated_span())
1276        } else if self.eat_keyword_case(exp!(Safe), case) {
1277            Safety::Safe(self.prev_token.uninterpolated_span())
1278        } else {
1279            Safety::Default
1280        }
1281    }
1282
1283    /// Parses constness: `const` or nothing.
1284    fn parse_constness(&mut self, case: Case) -> Const {
1285        self.parse_constness_(case, false)
1286    }
1287
1288    /// Parses constness for closures (case sensitive, feature-gated)
1289    fn parse_closure_constness(&mut self) -> Const {
1290        let constness = self.parse_constness_(Case::Sensitive, true);
1291        if let Const::Yes(span) = constness {
1292            self.psess.gated_spans.gate(sym::const_closures, span);
1293        }
1294        constness
1295    }
1296
    /// Shared implementation of `parse_constness`/`parse_closure_constness`.
    /// Eats a `const` keyword unless doing so would misparse a const block
    /// or const closure as a const item (or vice versa).
    fn parse_constness_(&mut self, case: Case, is_closure: bool) -> Const {
        // Avoid const blocks and const closures to be parsed as const items
        if (self.check_const_closure() == is_closure)
            && !self
                .look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace) || t.is_whole_block())
            && self.eat_keyword_case(exp!(Const), case)
        {
            Const::Yes(self.prev_token.uninterpolated_span())
        } else {
            Const::No
        }
    }
1309
    /// Parses inline const expressions (`const { ... }`).
    ///
    /// `span` is where the construct began (used for the resulting expression
    /// span); `pat` indicates pattern position, where inline consts are
    /// feature-gated.
    fn parse_const_block(&mut self, span: Span, pat: bool) -> PResult<'a, P<Expr>> {
        if pat {
            self.psess.gated_spans.gate(sym::inline_const_pat, span);
        }
        self.expect_keyword(exp!(Const))?;
        let (attrs, blk) = self.parse_inner_attrs_and_block()?;
        // Wrap the block in an anonymous constant, as the AST requires.
        let anon_const = AnonConst {
            id: DUMMY_NODE_ID,
            value: self.mk_expr(blk.span, ExprKind::Block(blk, None)),
        };
        let blk_span = anon_const.value.span;
        Ok(self.mk_expr_with_attrs(span.to(blk_span), ExprKind::ConstBlock(anon_const), attrs))
    }
1324
1325    /// Parses mutability (`mut` or nothing).
1326    fn parse_mutability(&mut self) -> Mutability {
1327        if self.eat_keyword(exp!(Mut)) { Mutability::Mut } else { Mutability::Not }
1328    }
1329
1330    /// Parses reference binding mode (`ref`, `ref mut`, or nothing).
1331    fn parse_byref(&mut self) -> ByRef {
1332        if self.eat_keyword(exp!(Ref)) { ByRef::Yes(self.parse_mutability()) } else { ByRef::No }
1333    }
1334
1335    /// Possibly parses mutability (`const` or `mut`).
1336    fn parse_const_or_mut(&mut self) -> Option<Mutability> {
1337        if self.eat_keyword(exp!(Mut)) {
1338            Some(Mutability::Mut)
1339        } else if self.eat_keyword(exp!(Const)) {
1340            Some(Mutability::Not)
1341        } else {
1342            None
1343        }
1344    }
1345
1346    fn parse_field_name(&mut self) -> PResult<'a, Ident> {
1347        if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = self.token.kind
1348        {
1349            if let Some(suffix) = suffix {
1350                self.expect_no_tuple_index_suffix(self.token.span, suffix);
1351            }
1352            self.bump();
1353            Ok(Ident::new(symbol, self.prev_token.span))
1354        } else {
1355            self.parse_ident_common(true)
1356        }
1357    }
1358
1359    fn parse_delim_args(&mut self) -> PResult<'a, P<DelimArgs>> {
1360        if let Some(args) = self.parse_delim_args_inner() {
1361            Ok(P(args))
1362        } else {
1363            self.unexpected_any()
1364        }
1365    }
1366
1367    fn parse_attr_args(&mut self) -> PResult<'a, AttrArgs> {
1368        Ok(if let Some(args) = self.parse_delim_args_inner() {
1369            AttrArgs::Delimited(args)
1370        } else if self.eat(exp!(Eq)) {
1371            let eq_span = self.prev_token.span;
1372            AttrArgs::Eq { eq_span, expr: self.parse_expr_force_collect()? }
1373        } else {
1374            AttrArgs::Empty
1375        })
1376    }
1377
1378    fn parse_delim_args_inner(&mut self) -> Option<DelimArgs> {
1379        let delimited = self.check(exp!(OpenParen))
1380            || self.check(exp!(OpenBracket))
1381            || self.check(exp!(OpenBrace));
1382
1383        delimited.then(|| {
1384            let TokenTree::Delimited(dspan, _, delim, tokens) = self.parse_token_tree() else {
1385                unreachable!()
1386            };
1387            DelimArgs { dspan, delim, tokens }
1388        })
1389    }
1390
    /// Parses a single token tree from the input: either a whole delimited
    /// sequence (if the current token is an open delimiter) or the single
    /// current token. Must not be called at a close delimiter or EOF.
    pub fn parse_token_tree(&mut self) -> TokenTree {
        match self.token.kind {
            token::OpenDelim(..) => {
                // Clone the `TokenTree::Delimited` that we are currently
                // within. That's what we are going to return.
                let tree = self.token_cursor.stack.last().unwrap().curr().unwrap().clone();
                debug_assert_matches!(tree, TokenTree::Delimited(..));

                // Advance the token cursor through the entire delimited
                // sequence. After getting the `OpenDelim` we are *within* the
                // delimited sequence, i.e. at depth `d`. After getting the
                // matching `CloseDelim` we are *after* the delimited sequence,
                // i.e. at depth `d - 1`.
                let target_depth = self.token_cursor.stack.len() - 1;
                loop {
                    // Advance one token at a time, so `TokenCursor::next()`
                    // can capture these tokens if necessary.
                    self.bump();
                    if self.token_cursor.stack.len() == target_depth {
                        debug_assert_matches!(self.token.kind, token::CloseDelim(_));
                        break;
                    }
                }

                // Consume close delimiter
                self.bump();
                tree
            }
            token::CloseDelim(_) | token::Eof => unreachable!(),
            _ => {
                // A single non-delimiter token. Bump first so the token (with
                // its pre-bump spacing) can be taken from `prev_token`.
                let prev_spacing = self.token_spacing;
                self.bump();
                TokenTree::Token(self.prev_token.clone(), prev_spacing)
            }
        }
    }
1428
1429    pub fn parse_tokens(&mut self) -> TokenStream {
1430        let mut result = Vec::new();
1431        loop {
1432            match self.token.kind {
1433                token::Eof | token::CloseDelim(..) => break,
1434                _ => result.push(self.parse_token_tree()),
1435            }
1436        }
1437        TokenStream::new(result)
1438    }
1439
1440    /// Evaluates the closure with restrictions in place.
1441    ///
1442    /// Afters the closure is evaluated, restrictions are reset.
1443    fn with_res<T>(&mut self, res: Restrictions, f: impl FnOnce(&mut Self) -> T) -> T {
1444        let old = self.restrictions;
1445        self.restrictions = res;
1446        let res = f(self);
1447        self.restrictions = old;
1448        res
1449    }
1450
    /// Parses `pub` and `pub(in path)` plus shortcuts `pub(crate)` for `pub(in crate)`, `pub(self)`
    /// for `pub(in self)` and `pub(super)` for `pub(in super)`.
    /// If the following element can't be a tuple (i.e., it's a function definition), then
    /// it's not a tuple struct field), and the contents within the parentheses aren't valid,
    /// so emit a proper diagnostic.
    // Public for rustfmt usage.
    pub fn parse_visibility(&mut self, fbt: FollowedByType) -> PResult<'a, Visibility> {
        // An interpolated visibility (from a macro fragment) is taken as-is.
        maybe_whole!(self, NtVis, |vis| vis.into_inner());

        if !self.eat_keyword(exp!(Pub)) {
            // We need a span for our `Spanned<VisibilityKind>`, but there's inherently no
            // keyword to grab a span from for inherited visibility; an empty span at the
            // beginning of the current token would seem to be the "Schelling span".
            return Ok(Visibility {
                span: self.token.span.shrink_to_lo(),
                kind: VisibilityKind::Inherited,
                tokens: None,
            });
        }
        let lo = self.prev_token.span;

        if self.check(exp!(OpenParen)) {
            // We don't `self.bump()` the `(` yet because this might be a struct definition where
            // `()` or a tuple might be allowed. For example, `struct Struct(pub (), pub (usize));`.
            // Because of this, we only `bump` the `(` if we're assured it is appropriate to do so
            // by the following tokens.
            if self.is_keyword_ahead(1, &[kw::In]) {
                // Parse `pub(in path)`.
                self.bump(); // `(`
                self.bump(); // `in`
                let path = self.parse_path(PathStyle::Mod)?; // `path`
                self.expect(exp!(CloseParen))?; // `)`
                let vis = VisibilityKind::Restricted {
                    path: P(path),
                    id: ast::DUMMY_NODE_ID,
                    shorthand: false,
                };
                return Ok(Visibility {
                    span: lo.to(self.prev_token.span),
                    kind: vis,
                    tokens: None,
                });
            } else if self.look_ahead(2, |t| t == &token::CloseDelim(Delimiter::Parenthesis))
                && self.is_keyword_ahead(1, &[kw::Crate, kw::Super, kw::SelfLower])
            {
                // Parse `pub(crate)`, `pub(self)`, or `pub(super)`.
                self.bump(); // `(`
                let path = self.parse_path(PathStyle::Mod)?; // `crate`/`super`/`self`
                self.expect(exp!(CloseParen))?; // `)`
                let vis = VisibilityKind::Restricted {
                    path: P(path),
                    id: ast::DUMMY_NODE_ID,
                    shorthand: true,
                };
                return Ok(Visibility {
                    span: lo.to(self.prev_token.span),
                    kind: vis,
                    tokens: None,
                });
            } else if let FollowedByType::No = fbt {
                // Provide this diagnostic if a type cannot follow;
                // in particular, if this is not a tuple struct.
                self.recover_incorrect_vis_restriction()?;
                // Emit diagnostic, but continue with public visibility.
            }
        }

        // Plain `pub` (or the recovered case above).
        Ok(Visibility { span: lo, kind: VisibilityKind::Public, tokens: None })
    }
1520
1521    /// Recovery for e.g. `pub(something) fn ...` or `struct X { pub(something) y: Z }`
1522    fn recover_incorrect_vis_restriction(&mut self) -> PResult<'a, ()> {
1523        self.bump(); // `(`
1524        let path = self.parse_path(PathStyle::Mod)?;
1525        self.expect(exp!(CloseParen))?; // `)`
1526
1527        let path_str = pprust::path_to_string(&path);
1528        self.dcx()
1529            .emit_err(IncorrectVisibilityRestriction { span: path.span, inner_str: path_str });
1530
1531        Ok(())
1532    }
1533
1534    /// Parses `extern string_literal?`.
1535    fn parse_extern(&mut self, case: Case) -> Extern {
1536        if self.eat_keyword_case(exp!(Extern), case) {
1537            let mut extern_span = self.prev_token.span;
1538            let abi = self.parse_abi();
1539            if let Some(abi) = abi {
1540                extern_span = extern_span.to(abi.span);
1541            }
1542            Extern::from_abi(abi, extern_span)
1543        } else {
1544            Extern::None
1545        }
1546    }
1547
1548    /// Parses a string literal as an ABI spec.
1549    fn parse_abi(&mut self) -> Option<StrLit> {
1550        match self.parse_str_lit() {
1551            Ok(str_lit) => Some(str_lit),
1552            Err(Some(lit)) => match lit.kind {
1553                ast::LitKind::Err(_) => None,
1554                _ => {
1555                    self.dcx().emit_err(NonStringAbiLiteral { span: lit.span });
1556                    None
1557                }
1558            },
1559            Err(None) => None,
1560        }
1561    }
1562
1563    fn collect_tokens_no_attrs<R: HasAttrs + HasTokens>(
1564        &mut self,
1565        f: impl FnOnce(&mut Self) -> PResult<'a, R>,
1566    ) -> PResult<'a, R> {
1567        // The only reason to call `collect_tokens_no_attrs` is if you want tokens, so use
1568        // `ForceCollect::Yes`
1569        self.collect_tokens(None, AttrWrapper::empty(), ForceCollect::Yes, |this, _attrs| {
1570            Ok((f(this)?, Trailing::No, UsePreAttrPos::No))
1571        })
1572    }
1573
1574    /// Checks for `::` or, potentially, `:::` and then look ahead after it.
1575    fn check_path_sep_and_look_ahead(&mut self, looker: impl Fn(&Token) -> bool) -> bool {
1576        if self.check(exp!(PathSep)) {
1577            if self.may_recover() && self.look_ahead(1, |t| t.kind == token::Colon) {
1578                debug_assert!(!self.look_ahead(1, &looker), "Looker must not match on colon");
1579                self.look_ahead(2, looker)
1580            } else {
1581                self.look_ahead(1, looker)
1582            }
1583        } else {
1584            false
1585        }
1586    }
1587
1588    /// `::{` or `::*`
1589    fn is_import_coupler(&mut self) -> bool {
1590        self.check_path_sep_and_look_ahead(|t| {
1591            matches!(t.kind, token::OpenDelim(Delimiter::Brace) | token::BinOp(token::Star))
1592        })
1593    }
1594
1595    // Debug view of the parser's token stream, up to `{lookahead}` tokens.
1596    // Only used when debugging.
1597    #[allow(unused)]
1598    pub(crate) fn debug_lookahead(&self, lookahead: usize) -> impl fmt::Debug + '_ {
1599        fmt::from_fn(move |f| {
1600            let mut dbg_fmt = f.debug_struct("Parser"); // or at least, one view of
1601
1602            // we don't need N spans, but we want at least one, so print all of prev_token
1603            dbg_fmt.field("prev_token", &self.prev_token);
1604            let mut tokens = vec![];
1605            for i in 0..lookahead {
1606                let tok = self.look_ahead(i, |tok| tok.kind.clone());
1607                let is_eof = tok == TokenKind::Eof;
1608                tokens.push(tok);
1609                if is_eof {
1610                    // Don't look ahead past EOF.
1611                    break;
1612                }
1613            }
1614            dbg_fmt.field_with("tokens", |field| field.debug_list().entries(tokens).finish());
1615            dbg_fmt.field("approx_token_stream_pos", &self.num_bump_calls);
1616
1617            // some fields are interesting for certain values, as they relate to macro parsing
1618            if let Some(subparser) = self.subparser_name {
1619                dbg_fmt.field("subparser_name", &subparser);
1620            }
1621            if let Recovery::Forbidden = self.recovery {
1622                dbg_fmt.field("recovery", &self.recovery);
1623            }
1624
1625            // imply there's "more to know" than this view
1626            dbg_fmt.finish_non_exhaustive()
1627        })
1628    }
1629
    /// Clears the set of token types recorded for "expected one of ..." diagnostics.
    pub fn clear_expected_token_types(&mut self) {
        self.expected_token_types.clear();
    }
1633
    /// Approximate position in the token stream, measured as the number of
    /// `bump` calls made so far.
    pub fn approx_token_stream_pos(&self) -> u32 {
        self.num_bump_calls
    }
1637}
1638
1639pub(crate) fn make_unclosed_delims_error(
1640    unmatched: UnmatchedDelim,
1641    psess: &ParseSess,
1642) -> Option<Diag<'_>> {
1643    // `None` here means an `Eof` was found. We already emit those errors elsewhere, we add them to
1644    // `unmatched_delims` only for error recovery in the `Parser`.
1645    let found_delim = unmatched.found_delim?;
1646    let mut spans = vec![unmatched.found_span];
1647    if let Some(sp) = unmatched.unclosed_span {
1648        spans.push(sp);
1649    };
1650    let err = psess.dcx().create_err(MismatchedClosingDelimiter {
1651        spans,
1652        delimiter: pprust::token_kind_to_string(&token::CloseDelim(found_delim)).to_string(),
1653        unmatched: unmatched.found_span,
1654        opening_candidate: unmatched.candidate_span,
1655        unclosed: unmatched.unclosed_span,
1656    });
1657    Some(err)
1658}
1659
/// A helper struct used when building an `AttrTokenStream` from
/// a `LazyAttrTokenStream`. Both delimiter and non-delimited tokens
/// are stored as `FlatToken::Token`. A vector of `FlatToken`s
/// is then 'parsed' to build up an `AttrTokenStream` with nested
/// `AttrTokenTree::Delimited` tokens.
#[derive(Debug, Clone)]
enum FlatToken {
    /// A token - this holds both delimiter (e.g. '{' and '}')
    /// and non-delimiter tokens, together with their spacing.
    Token((Token, Spacing)),
    /// Holds the `AttrsTarget` for an AST node. The `AttrsTarget` is inserted
    /// directly into the constructed `AttrTokenStream` as an
    /// `AttrTokenTree::AttrsTarget`.
    AttrsTarget(AttrsTarget),
    /// A special 'empty' token that is ignored during the conversion
    /// to an `AttrTokenStream`. This is used to simplify the
    /// handling of replace ranges.
    Empty,
}
1679
// Metavar captures of various kinds.
#[derive(Clone, Debug)]
pub enum ParseNtResult {
    /// A captured token tree.
    Tt(TokenTree),
    /// A captured identifier, plus whether it was written in raw form (`r#ident`).
    Ident(Ident, IdentIsRaw),
    /// A captured lifetime, plus whether its identifier part was raw.
    Lifetime(Ident, IdentIsRaw),

    /// This case will eventually be removed, along with `Token::Interpolate`.
    Nt(Arc<Nonterminal>),
}