rustc_parse/parser/mod.rs

pub mod attr;
mod attr_wrapper;
mod diagnostics;
mod expr;
mod generics;
mod item;
mod nonterminal;
mod pat;
mod path;
mod stmt;
pub mod token_type;
mod ty;

use std::assert_matches::debug_assert_matches;
use std::ops::Range;
use std::sync::Arc;
use std::{fmt, mem, slice};

use attr_wrapper::{AttrWrapper, UsePreAttrPos};
pub use diagnostics::AttemptLocalParseRecovery;
pub(crate) use expr::ForbiddenLetReason;
pub(crate) use item::FnParseMode;
pub use pat::{CommaRecoveryMode, RecoverColon, RecoverComma};
use path::PathStyle;
use rustc_ast::ptr::P;
use rustc_ast::token::{
    self, Delimiter, IdentIsRaw, InvisibleOrigin, MetaVarKind, Nonterminal, NtPatKind, Token,
    TokenKind,
};
use rustc_ast::tokenstream::{AttrsTarget, Spacing, TokenStream, TokenTree};
use rustc_ast::util::case::Case;
use rustc_ast::{
    self as ast, AnonConst, AttrArgs, AttrId, ByRef, Const, CoroutineKind, DUMMY_NODE_ID,
    DelimArgs, Expr, ExprKind, Extern, HasAttrs, HasTokens, Mutability, Recovered, Safety, StrLit,
    Visibility, VisibilityKind,
};
use rustc_ast_pretty::pprust;
use rustc_data_structures::fx::FxHashMap;
use rustc_errors::{Applicability, Diag, FatalError, MultiSpan, PResult};
use rustc_index::interval::IntervalSet;
use rustc_session::parse::ParseSess;
use rustc_span::{DUMMY_SP, Ident, Span, Symbol, kw, sym};
use thin_vec::ThinVec;
use token_type::TokenTypeSet;
pub use token_type::{ExpKeywordPair, ExpTokenPair, TokenType};
use tracing::debug;

use crate::errors::{
    self, IncorrectVisibilityRestriction, MismatchedClosingDelimiter, NonStringAbiLiteral,
};
use crate::exp;
use crate::lexer::UnmatchedDelim;

#[cfg(test)]
mod tests;

// Ideally, these tests would be in `rustc_ast`. But they depend on having a
// parser, so they are here.
#[cfg(test)]
mod tokenstream {
    mod tests;
}
#[cfg(test)]
mod mut_visit {
    mod tests;
}

bitflags::bitflags! {
    #[derive(Clone, Copy, Debug)]
    struct Restrictions: u8 {
        const STMT_EXPR         = 1 << 0;
        const NO_STRUCT_LITERAL = 1 << 1;
        const CONST_EXPR        = 1 << 2;
        const ALLOW_LET         = 1 << 3;
        const IN_IF_GUARD       = 1 << 4;
        const IS_PAT            = 1 << 5;
    }
}
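
// A minimal sketch of how these flags compose (illustrative only): callers
// build a set with the usual `bitflags` operators and query it with
// `contains`, e.g. for an expression position that forbids struct literals
// but permits `let` chains:
//
//     let r = Restrictions::NO_STRUCT_LITERAL | Restrictions::ALLOW_LET;
//     assert!(r.contains(Restrictions::ALLOW_LET));
//     assert!(!r.contains(Restrictions::IN_IF_GUARD));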

#[derive(Clone, Copy, PartialEq, Debug)]
enum SemiColonMode {
    Break,
    Ignore,
    Comma,
}

#[derive(Clone, Copy, PartialEq, Debug)]
enum BlockMode {
    Break,
    Ignore,
}

/// Whether or not we should force collection of tokens for an AST node,
/// regardless of whether or not it has attributes.
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ForceCollect {
    Yes,
    No,
}

#[macro_export]
macro_rules! maybe_whole {
    ($p:expr, $constructor:ident, |$x:ident| $e:expr) => {
        if let token::Interpolated(nt) = &$p.token.kind
            && let token::$constructor(x) = &**nt
        {
            #[allow(unused_mut)]
            let mut $x = x.clone();
            $p.bump();
            return Ok($e);
        }
    };
}
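
// A minimal usage sketch for `maybe_whole!` (hypothetical parse method;
// `NtBlock` is one of the real `Nonterminal` variants): first try to reuse
// an already-interpolated AST node, then fall back to parsing raw tokens.
//
//     fn parse_block_sketch<'a>(p: &mut Parser<'a>) -> PResult<'a, P<ast::Block>> {
//         maybe_whole!(p, NtBlock, |block| block);
//         // ... otherwise parse the block from raw tokens ...
//     }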

/// If the next tokens are ill-formed `$ty::`, recover them as `<$ty>::`.
#[macro_export]
macro_rules! maybe_recover_from_interpolated_ty_qpath {
    ($self: expr, $allow_qpath_recovery: expr) => {
        if $allow_qpath_recovery
            && $self.may_recover()
            && let Some(mv_kind) = $self.token.is_metavar_seq()
            && let token::MetaVarKind::Ty { .. } = mv_kind
            && $self.check_noexpect_past_close_delim(&token::PathSep)
        {
            // Reparse the type, then move to recovery.
            let ty = $self
                .eat_metavar_seq(mv_kind, |this| this.parse_ty_no_question_mark_recover())
                .expect("metavar seq ty");

            return $self.maybe_recover_from_bad_qpath_stage_2($self.prev_token.span, ty);
        }
    };
}

#[derive(Clone, Copy, Debug)]
pub enum Recovery {
    Allowed,
    Forbidden,
}

#[derive(Clone)]
pub struct Parser<'a> {
    pub psess: &'a ParseSess,
    /// The current token.
    pub token: Token,
    /// The spacing for the current token.
    token_spacing: Spacing,
    /// The previous token.
    pub prev_token: Token,
    pub capture_cfg: bool,
    restrictions: Restrictions,
    expected_token_types: TokenTypeSet,
    token_cursor: TokenCursor,
    // The number of calls to `bump`, i.e. the position in the token stream.
    num_bump_calls: u32,
    // During parsing we may sometimes need to "unglue" a glued token into two
    // or three component tokens (e.g. `>>` into `>` and `>`, or `>>=` into `>`
    // and `>` and `=`), so the parser can consume them one at a time. This
    // process bypasses the normal capturing mechanism (e.g. `num_bump_calls`
    // will not be incremented), since the "unglued" tokens do not exist in
    // the original `TokenStream`.
    //
    // If we end up consuming all the component tokens, this is not an issue,
    // because we'll end up capturing the single "glued" token.
    //
    // However, sometimes we may want to capture only part of the original
    // token. For example, capturing the `Vec<u8>` in `Option<Vec<u8>>`
    // requires us to unglue the trailing `>>` token. The `break_last_token`
    // field is used to track these tokens. They get appended to the captured
    // stream when we evaluate a `LazyAttrTokenStream`.
    //
    // This value is always 0, 1, or 2. It can only reach 2 when splitting
    // `>>=` or `<<=`.
    break_last_token: u32,
    /// This field is used to keep track of how many left angle brackets we have seen. This is
    /// required in order to detect extra leading left angle brackets (`<` characters) and error
    /// appropriately.
    ///
    /// See the comments in the `parse_path_segment` function for more details.
    unmatched_angle_bracket_count: u16,
    angle_bracket_nesting: u16,

    last_unexpected_token_span: Option<Span>,
    /// If present, this `Parser` is not parsing Rust code but rather a macro call.
    subparser_name: Option<&'static str>,
    capture_state: CaptureState,
    /// This allows us to recover when the user forgets to add braces around
    /// multiple statements in the closure body.
    current_closure: Option<ClosureSpans>,
    /// Whether the parser is allowed to do recovery.
    /// This is disabled when parsing macro arguments, see #103534.
    recovery: Recovery,
}

// This type is used a lot, e.g. it's cloned when matching many declarative macro rules with
// nonterminals. Make sure it doesn't unintentionally get bigger. We only check a few arches
// though, because `TokenTypeSet(u128)` alignment varies on others, changing the total size.
#[cfg(all(target_pointer_width = "64", any(target_arch = "aarch64", target_arch = "x86_64")))]
rustc_data_structures::static_assert_size!(Parser<'_>, 288);

/// Stores span information about a closure.
#[derive(Clone, Debug)]
struct ClosureSpans {
    whole_closure: Span,
    closing_pipe: Span,
    body: Span,
}

/// A token range within a `Parser`'s full token stream.
#[derive(Clone, Debug)]
struct ParserRange(Range<u32>);

/// A token range within an individual AST node's (lazy) token stream, i.e.
/// relative to that node's first token. Distinct from `ParserRange` so the two
/// kinds of range can't be mixed up.
#[derive(Clone, Debug)]
struct NodeRange(Range<u32>);

/// Indicates a range of tokens that should be replaced by an `AttrsTarget`
/// (replacement) or be replaced by nothing (deletion). This is used in two
/// places during token collection.
///
/// 1. Replacement. During the parsing of an AST node that may have a
///    `#[derive]` attribute, when we parse a nested AST node that has `#[cfg]`
///    or `#[cfg_attr]`, we replace the entire inner AST node with
///    `FlatToken::AttrsTarget`. This lets us perform eager cfg-expansion on an
///    `AttrTokenStream`.
///
/// 2. Deletion. We delete inner attributes from all collected token streams,
///    and instead track them through the `attrs` field on the AST node. This
///    lets us manipulate them similarly to outer attributes. When we create a
///    `TokenStream`, the inner attributes are inserted into the proper place
///    in the token stream.
///
/// Each replacement starts off in `ParserReplacement` form but is converted to
/// `NodeReplacement` form when it is attached to a single AST node, via
/// `LazyAttrTokenStreamImpl`.
type ParserReplacement = (ParserRange, Option<AttrsTarget>);

/// See the comment on `ParserReplacement`.
type NodeReplacement = (NodeRange, Option<AttrsTarget>);

impl NodeRange {
    // Converts a range within a parser's tokens to a range within a
    // node's tokens beginning at `start_pos`.
    //
    // For example, imagine a parser with 50 tokens in its token stream, a
    // function that spans `ParserRange(20..40)` and an inner attribute within
    // that function that spans `ParserRange(30..35)`. We would find the inner
    // attribute's range within the function's tokens by subtracting 20, which
    // is the position of the function's start token. This gives
    // `NodeRange(10..15)`.
    fn new(ParserRange(parser_range): ParserRange, start_pos: u32) -> NodeRange {
        assert!(!parser_range.is_empty());
        assert!(parser_range.start >= start_pos);
        NodeRange((parser_range.start - start_pos)..(parser_range.end - start_pos))
    }
}
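
// A minimal sketch exercising the worked example from the comment above
// (hypothetical test module, written here purely for illustration):
#[cfg(test)]
mod node_range_example {
    use super::{NodeRange, ParserRange};

    #[test]
    fn rebases_against_start_pos() {
        // Inner attribute at parser positions 30..35, node starting at 20.
        let NodeRange(range) = NodeRange::new(ParserRange(30..35), 20);
        assert_eq!(range, 10..15);
    }
}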

/// Controls how we capture tokens. Capturing can be expensive,
/// so we try to avoid performing capturing in cases where
/// we will never need an `AttrTokenStream`.
#[derive(Copy, Clone, Debug)]
enum Capturing {
    /// We aren't performing any capturing - this is the default mode.
    No,
    /// We are capturing tokens.
    Yes,
}

// This state is used by `Parser::collect_tokens`.
#[derive(Clone, Debug)]
struct CaptureState {
    capturing: Capturing,
    parser_replacements: Vec<ParserReplacement>,
    inner_attr_parser_ranges: FxHashMap<AttrId, ParserRange>,
    // `IntervalSet` is good for perf because attrs are mostly added to this
    // set in contiguous ranges.
    seen_attrs: IntervalSet<AttrId>,
}

#[derive(Clone, Debug)]
struct TokenTreeCursor {
    stream: TokenStream,
    /// Points to the current token tree in the stream. In `TokenCursor::curr`,
    /// this can be any token tree. In `TokenCursor::stack`, this is always a
    /// `TokenTree::Delimited`.
    index: usize,
}

impl TokenTreeCursor {
    #[inline]
    fn new(stream: TokenStream) -> Self {
        TokenTreeCursor { stream, index: 0 }
    }

    #[inline]
    fn curr(&self) -> Option<&TokenTree> {
        self.stream.get(self.index)
    }

    #[inline]
    fn bump(&mut self) {
        self.index += 1;
    }
}

/// A `TokenStream` cursor that produces `Token`s. It's a bit odd that
/// we (a) lex tokens into a nice tree structure (`TokenStream`), and then (b)
/// use this type to emit them as a linear sequence. But a linear sequence is
/// what the parser expects, for the most part.
#[derive(Clone, Debug)]
struct TokenCursor {
    // Cursor for the current (innermost) token stream. The index within the
    // cursor can point to any token tree in the stream (or one past the end).
    // The delimiters for this token stream are found in `self.stack.last()`;
    // if that is `None` we are in the outermost token stream which never has
    // delimiters.
    curr: TokenTreeCursor,

    // Token streams surrounding the current one. The index within each cursor
    // always points to a `TokenTree::Delimited`.
    stack: Vec<TokenTreeCursor>,
}

impl TokenCursor {
    fn next(&mut self) -> (Token, Spacing) {
        self.inlined_next()
    }

    /// This always-inlined version should only be used on hot code paths.
    #[inline(always)]
    fn inlined_next(&mut self) -> (Token, Spacing) {
        loop {
            // FIXME: we currently don't return `Delimiter::Invisible` open/close delims. To fix
            // #67062 we will need to, whereupon the `delim != Delimiter::Invisible` conditions
            // below can be removed.
            if let Some(tree) = self.curr.curr() {
                match tree {
                    &TokenTree::Token(ref token, spacing) => {
                        debug_assert!(!matches!(
                            token.kind,
                            token::OpenDelim(_) | token::CloseDelim(_)
                        ));
                        let res = (token.clone(), spacing);
                        self.curr.bump();
                        return res;
                    }
                    &TokenTree::Delimited(sp, spacing, delim, ref tts) => {
                        let trees = TokenTreeCursor::new(tts.clone());
                        self.stack.push(mem::replace(&mut self.curr, trees));
                        if !delim.skip() {
                            return (Token::new(token::OpenDelim(delim), sp.open), spacing.open);
                        }
                        // No open delimiter to return; continue on to the next iteration.
                    }
                };
            } else if let Some(parent) = self.stack.pop() {
                // We have exhausted this token stream. Move back to its parent token stream.
                let Some(&TokenTree::Delimited(span, spacing, delim, _)) = parent.curr() else {
                    panic!("parent should be Delimited")
                };
                self.curr = parent;
                self.curr.bump(); // move past the `Delimited`
                if !delim.skip() {
                    return (Token::new(token::CloseDelim(delim), span.close), spacing.close);
                }
                // No close delimiter to return; continue on to the next iteration.
            } else {
                // We have exhausted the outermost token stream. The use of
                // `Spacing::Alone` is arbitrary and immaterial, because the
                // `Eof` token's spacing is never used.
                return (Token::new(token::Eof, DUMMY_SP), Spacing::Alone);
            }
        }
    }
}
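
// A minimal sketch of the flattening behavior: for a stream lexed from
// `a (b) c`, successive `next` calls yield the linear sequence
//
//     a  (  b  )  c  Eof  Eof  ...
//
// where `(` and `)` are synthesized from the `TokenTree::Delimited` node and
// `Eof` repeats forever once the outermost stream is exhausted.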

/// A sequence separator.
#[derive(Debug)]
struct SeqSep<'a> {
    /// The separator token.
    sep: Option<ExpTokenPair<'a>>,
    /// `true` if a trailing separator is allowed.
    trailing_sep_allowed: bool,
}

impl<'a> SeqSep<'a> {
    fn trailing_allowed(sep: ExpTokenPair<'a>) -> SeqSep<'a> {
        SeqSep { sep: Some(sep), trailing_sep_allowed: true }
    }

    fn none() -> SeqSep<'a> {
        SeqSep { sep: None, trailing_sep_allowed: false }
    }
}

#[derive(Debug)]
pub enum FollowedByType {
    Yes,
    No,
}

#[derive(Copy, Clone, Debug)]
enum Trailing {
    No,
    Yes,
}

impl From<bool> for Trailing {
    fn from(b: bool) -> Trailing {
        if b { Trailing::Yes } else { Trailing::No }
    }
}

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub(super) enum TokenDescription {
    ReservedIdentifier,
    Keyword,
    ReservedKeyword,
    DocComment,

    // Expanded metavariables are wrapped in invisible delimiters which aren't
    // pretty-printed. In error messages we must handle these specially,
    // otherwise we get confusing things in messages like "expected `(`, found
    // ``". It's better to say e.g. "expected `(`, found type metavariable".
    MetaVar(MetaVarKind),
}

impl TokenDescription {
    pub(super) fn from_token(token: &Token) -> Option<Self> {
        match token.kind {
            _ if token.is_special_ident() => Some(TokenDescription::ReservedIdentifier),
            _ if token.is_used_keyword() => Some(TokenDescription::Keyword),
            _ if token.is_unused_keyword() => Some(TokenDescription::ReservedKeyword),
            token::DocComment(..) => Some(TokenDescription::DocComment),
            token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(kind))) => {
                Some(TokenDescription::MetaVar(kind))
            }
            _ => None,
        }
    }
}

pub fn token_descr(token: &Token) -> String {
    let s = pprust::token_to_string(token).to_string();

    match (TokenDescription::from_token(token), &token.kind) {
        (Some(TokenDescription::ReservedIdentifier), _) => format!("reserved identifier `{s}`"),
        (Some(TokenDescription::Keyword), _) => format!("keyword `{s}`"),
        (Some(TokenDescription::ReservedKeyword), _) => format!("reserved keyword `{s}`"),
        (Some(TokenDescription::DocComment), _) => format!("doc comment `{s}`"),
        // Deliberately doesn't print `s`, which is empty.
        (Some(TokenDescription::MetaVar(kind)), _) => format!("`{kind}` metavariable"),
        (None, TokenKind::NtIdent(..)) => format!("identifier `{s}`"),
        (None, TokenKind::NtLifetime(..)) => format!("lifetime `{s}`"),
        (None, TokenKind::Interpolated(node)) => format!("{} `{s}`", node.descr()),
        (None, _) => format!("`{s}`"),
    }
}
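
// A minimal sketch of the strings `token_descr` produces, following the match
// arms above (assumed typical outputs, not an exhaustive list):
//
//     `fn`      -> "keyword `fn`"
//     `become`  -> "reserved keyword `become`"
//     `/// x`   -> "doc comment `/// x`"
//     `+`       -> "`+`"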

impl<'a> Parser<'a> {
    pub fn new(
        psess: &'a ParseSess,
        stream: TokenStream,
        subparser_name: Option<&'static str>,
    ) -> Self {
        let mut parser = Parser {
            psess,
            token: Token::dummy(),
            token_spacing: Spacing::Alone,
            prev_token: Token::dummy(),
            capture_cfg: false,
            restrictions: Restrictions::empty(),
            expected_token_types: TokenTypeSet::new(),
            token_cursor: TokenCursor { curr: TokenTreeCursor::new(stream), stack: Vec::new() },
            num_bump_calls: 0,
            break_last_token: 0,
            unmatched_angle_bracket_count: 0,
            angle_bracket_nesting: 0,
            last_unexpected_token_span: None,
            subparser_name,
            capture_state: CaptureState {
                capturing: Capturing::No,
                parser_replacements: Vec::new(),
                inner_attr_parser_ranges: Default::default(),
                seen_attrs: IntervalSet::new(u32::MAX as usize),
            },
            current_closure: None,
            recovery: Recovery::Allowed,
        };

        // Make parser point to the first token.
        parser.bump();

        // Change this from 1 back to 0 after the bump. This eases debugging of
        // `Parser::collect_tokens` because 0-indexed token positions are nicer
        // than 1-indexed token positions.
        parser.num_bump_calls = 0;

        parser
    }
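
    // A minimal usage sketch (assuming a `ParseSess` and a lexed
    // `TokenStream` are already in hand; in-tree callers usually go through
    // the constructors in `rustc_parse` rather than calling `Parser::new`
    // directly):
    //
    //     let mut p = Parser::new(&psess, stream, None);
    //     while p.token != token::Eof {
    //         // Inspect `p.token` here, then advance.
    //         p.bump();
    //     }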

    #[inline]
    pub fn recovery(mut self, recovery: Recovery) -> Self {
        self.recovery = recovery;
        self
    }

    /// Whether the parser is allowed to recover from broken code.
    ///
    /// If this returns false, recovering broken code into valid code (especially if this recovery does lookahead)
    /// is not allowed. All recovery done by the parser must be gated behind this check.
    ///
    /// Technically, this only needs to restrict eager recovery by doing lookahead at more tokens.
    /// But making the distinction is very subtle, and simply forbidding all recovery is a lot simpler to uphold.
    #[inline]
    fn may_recover(&self) -> bool {
        matches!(self.recovery, Recovery::Allowed)
    }

    /// Version of [`unexpected`](Parser::unexpected) that "returns" any type in the `Ok`
    /// variant (neither function ever actually returns `Ok`, so they can lie like that in the type).
    pub fn unexpected_any<T>(&mut self) -> PResult<'a, T> {
        match self.expect_one_of(&[], &[]) {
            Err(e) => Err(e),
            // We can get `Ok(true)` from `recover_closing_delimiter`
            // which is called in `expected_one_of_not_found`.
            Ok(_) => FatalError.raise(),
        }
    }

    pub fn unexpected(&mut self) -> PResult<'a, ()> {
        self.unexpected_any()
    }

    /// Expects and consumes the token `exp.tok`. Signals an error if the next token is not it.
    pub fn expect(&mut self, exp: ExpTokenPair<'_>) -> PResult<'a, Recovered> {
        if self.expected_token_types.is_empty() {
            if self.token == *exp.tok {
                self.bump();
                Ok(Recovered::No)
            } else {
                self.unexpected_try_recover(exp.tok)
            }
        } else {
            self.expect_one_of(slice::from_ref(&exp), &[])
        }
    }

    /// Expects the next token to be an edible or inedible token. If edible,
    /// consumes it; if inedible, returns without consuming anything. Signals
    /// a fatal error if the next token is unexpected.
    fn expect_one_of(
        &mut self,
        edible: &[ExpTokenPair<'_>],
        inedible: &[ExpTokenPair<'_>],
    ) -> PResult<'a, Recovered> {
        if edible.iter().any(|exp| exp.tok == &self.token.kind) {
            self.bump();
            Ok(Recovered::No)
        } else if inedible.iter().any(|exp| exp.tok == &self.token.kind) {
            // leave it in the input
            Ok(Recovered::No)
        } else if self.token != token::Eof
            && self.last_unexpected_token_span == Some(self.token.span)
        {
            FatalError.raise();
        } else {
            self.expected_one_of_not_found(edible, inedible)
                .map(|error_guaranteed| Recovered::Yes(error_guaranteed))
        }
    }

    // Public for rustfmt usage.
    pub fn parse_ident(&mut self) -> PResult<'a, Ident> {
        self.parse_ident_common(true)
    }

    fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, Ident> {
        let (ident, is_raw) = self.ident_or_err(recover)?;

        if matches!(is_raw, IdentIsRaw::No) && ident.is_reserved() {
            let err = self.expected_ident_found_err();
            if recover {
                err.emit();
            } else {
                return Err(err);
            }
        }
        self.bump();
        Ok(ident)
    }

    fn ident_or_err(&mut self, recover: bool) -> PResult<'a, (Ident, IdentIsRaw)> {
        match self.token.ident() {
            Some(ident) => Ok(ident),
            None => self.expected_ident_found(recover),
        }
    }

    /// Checks if the next token is `tok`, and returns `true` if so.
    ///
    /// This method will automatically add `tok` to `expected_token_types` if `tok` is not
    /// encountered.
    #[inline]
    fn check(&mut self, exp: ExpTokenPair<'_>) -> bool {
        let is_present = self.token == *exp.tok;
        if !is_present {
            self.expected_token_types.insert(exp.token_type);
        }
        is_present
    }

    #[inline]
    #[must_use]
    fn check_noexpect(&self, tok: &TokenKind) -> bool {
        self.token == *tok
    }

    // Check the first token after the delimiter that closes the current
    // delimited sequence. (Panics if used in the outermost token stream, which
    // has no delimiters.) It uses a clone of the relevant tree cursor to skip
    // past the entire `TokenTree::Delimited` in a single step, avoiding the
    // need for unbounded token lookahead.
    //
    // Primarily used when `self.token` matches
    // `OpenDelim(Delimiter::Invisible(_))`, to look ahead through the current
    // metavar expansion.
    fn check_noexpect_past_close_delim(&self, tok: &TokenKind) -> bool {
        let mut tree_cursor = self.token_cursor.stack.last().unwrap().clone();
        tree_cursor.bump();
        matches!(
            tree_cursor.curr(),
            Some(TokenTree::Token(token::Token { kind, .. }, _)) if kind == tok
        )
    }

    /// Consumes a token `tok` if it exists. Returns whether the given token was present.
    ///
    /// The main purpose of this function is to reduce the clutter of the suggestions list,
    /// which using the normal `eat` method could introduce in some cases.
    #[inline]
    #[must_use]
    fn eat_noexpect(&mut self, tok: &TokenKind) -> bool {
        let is_present = self.check_noexpect(tok);
        if is_present {
            self.bump()
        }
        is_present
    }

    /// Consumes a token `tok` if it exists. Returns whether the given token was present.
    #[inline]
    #[must_use]
    pub fn eat(&mut self, exp: ExpTokenPair<'_>) -> bool {
        let is_present = self.check(exp);
        if is_present {
            self.bump()
        }
        is_present
    }
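
    // A minimal sketch of the `check`/`eat` idiom, using the crate's `exp!`
    // macro as seen throughout this file: `eat` consumes the token when
    // present and records an expectation for diagnostics when it is not.
    //
    //     if self.eat(exp!(Comma)) {
    //         // A comma was consumed; parse the next element.
    //     }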

    /// If the next token is the given keyword, returns `true` without eating it.
    /// An expectation is also added for diagnostics purposes.
    #[inline]
    #[must_use]
    fn check_keyword(&mut self, exp: ExpKeywordPair) -> bool {
        let is_keyword = self.token.is_keyword(exp.kw);
        if !is_keyword {
            self.expected_token_types.insert(exp.token_type);
        }
        is_keyword
    }

    #[inline]
    #[must_use]
    fn check_keyword_case(&mut self, exp: ExpKeywordPair, case: Case) -> bool {
        if self.check_keyword(exp) {
            true
        } else if case == Case::Insensitive
            && let Some((ident, IdentIsRaw::No)) = self.token.ident()
            // Do an ASCII case-insensitive match, because all keywords are ASCII.
            && ident.as_str().eq_ignore_ascii_case(exp.kw.as_str())
        {
            true
        } else {
            false
        }
    }

    /// If the next token is the given keyword, eats it and returns `true`.
    /// Otherwise, returns `false`. An expectation is also added for diagnostics purposes.
    // Public for rustc_builtin_macros and rustfmt usage.
    #[inline]
    #[must_use]
    pub fn eat_keyword(&mut self, exp: ExpKeywordPair) -> bool {
        let is_keyword = self.check_keyword(exp);
        if is_keyword {
            self.bump();
        }
        is_keyword
    }

    /// Eats a keyword, optionally ignoring the case.
    /// If the case differs (and is ignored), an error is issued.
    /// This is useful for recovery.
    #[inline]
    #[must_use]
    fn eat_keyword_case(&mut self, exp: ExpKeywordPair, case: Case) -> bool {
        if self.eat_keyword(exp) {
            true
        } else if case == Case::Insensitive
            && let Some((ident, IdentIsRaw::No)) = self.token.ident()
            // Do an ASCII case-insensitive match, because all keywords are ASCII.
            && ident.as_str().eq_ignore_ascii_case(exp.kw.as_str())
        {
            self.dcx().emit_err(errors::KwBadCase { span: ident.span, kw: exp.kw.as_str() });
            self.bump();
            true
        } else {
            false
        }
    }

    /// If the next token is the given keyword, eats it and returns `true`.
    /// Otherwise, returns `false`. No expectation is added.
    // Public for rustc_builtin_macros usage.
    #[inline]
    #[must_use]
    pub fn eat_keyword_noexpect(&mut self, kw: Symbol) -> bool {
        let is_keyword = self.token.is_keyword(kw);
        if is_keyword {
            self.bump();
        }
        is_keyword
    }

    /// If the next token is the given keyword, eats it.
    /// Otherwise, signals an error.
    pub fn expect_keyword(&mut self, exp: ExpKeywordPair) -> PResult<'a, ()> {
        if !self.eat_keyword(exp) { self.unexpected() } else { Ok(()) }
    }

    /// Consume a sequence produced by a metavar expansion, if present.
    fn eat_metavar_seq<T>(
        &mut self,
        mv_kind: MetaVarKind,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> Option<T> {
        self.eat_metavar_seq_with_matcher(|mvk| mvk == mv_kind, f)
    }

    /// A slightly more general form of `eat_metavar_seq`, for use with the
    /// `MetaVarKind` variants that have parameters, where an exact match isn't
    /// desired.
    fn eat_metavar_seq_with_matcher<T>(
        &mut self,
        match_mv_kind: impl Fn(MetaVarKind) -> bool,
        mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> Option<T> {
        if let token::OpenDelim(delim) = self.token.kind
            && let Delimiter::Invisible(InvisibleOrigin::MetaVar(mv_kind)) = delim
            && match_mv_kind(mv_kind)
        {
            self.bump();
            // Note: `expect` doesn't interpolate its message, so build it
            // with `format!` first.
            let res = f(self).expect(&format!("failed to reparse {mv_kind:?}"));
            if let token::CloseDelim(delim) = self.token.kind
                && let Delimiter::Invisible(InvisibleOrigin::MetaVar(mv_kind)) = delim
                && match_mv_kind(mv_kind)
            {
                self.bump();
                Some(res)
            } else {
                panic!("no close delim when reparsing {mv_kind:?}");
            }
        } else {
            None
        }
    }

    /// Is the given keyword `kw` followed by a non-reserved identifier?
    fn is_kw_followed_by_ident(&self, kw: Symbol) -> bool {
        self.token.is_keyword(kw) && self.look_ahead(1, |t| t.is_ident() && !t.is_reserved_ident())
    }

    #[inline]
    fn check_or_expected(&mut self, ok: bool, token_type: TokenType) -> bool {
        if !ok {
            self.expected_token_types.insert(token_type);
        }
        ok
    }

    fn check_ident(&mut self) -> bool {
        self.check_or_expected(self.token.is_ident(), TokenType::Ident)
    }

    fn check_path(&mut self) -> bool {
        self.check_or_expected(self.token.is_path_start(), TokenType::Path)
    }

    fn check_type(&mut self) -> bool {
        self.check_or_expected(self.token.can_begin_type(), TokenType::Type)
    }

    fn check_const_arg(&mut self) -> bool {
        self.check_or_expected(self.token.can_begin_const_arg(), TokenType::Const)
    }

    fn check_const_closure(&self) -> bool {
        self.is_keyword_ahead(0, &[kw::Const])
            && self.look_ahead(1, |t| match &t.kind {
                // async closures do not work with const closures, so we do not parse that here.
                token::Ident(kw::Move | kw::Use | kw::Static, IdentIsRaw::No)
                | token::OrOr
                | token::Or => true,
                _ => false,
            })
    }

    fn check_inline_const(&self, dist: usize) -> bool {
        self.is_keyword_ahead(dist, &[kw::Const])
            && self.look_ahead(dist + 1, |t| match &t.kind {
                token::Interpolated(nt) => matches!(&**nt, token::NtBlock(..)),
                token::OpenDelim(Delimiter::Brace) => true,
                _ => false,
            })
    }

    /// Checks whether the next token is either `+` or `+=`, and returns `true` if so.
    #[inline]
    fn check_plus(&mut self) -> bool {
        self.check_or_expected(self.token.is_like_plus(), TokenType::Plus)
    }

    /// Eats the expected token if it's present, possibly breaking
    /// compound tokens like multi-character operators in the process.
    /// Returns `true` if the token was eaten.
    fn break_and_eat(&mut self, exp: ExpTokenPair<'_>) -> bool {
        if self.token == *exp.tok {
            self.bump();
            return true;
        }
        match self.token.kind.break_two_token_op(1) {
            Some((first, second)) if first == *exp.tok => {
                let first_span = self.psess.source_map().start_point(self.token.span);
                let second_span = self.token.span.with_lo(first_span.hi());
                self.token = Token::new(first, first_span);
                // Keep track of this token - if we end token capturing now,
                // we'll want to append this token to the captured stream.
                //
                // If we consume any additional tokens, then this token
                // is not needed (we'll capture the entire 'glued' token),
                // and `bump` will set this field to 0.
                self.break_last_token += 1;
                // Use the spacing of the glued token as the spacing of the
                // unglued second token.
                self.bump_with((Token::new(second, second_span), self.token_spacing));
                true
            }
            _ => {
                self.expected_token_types.insert(exp.token_type);
                false
            }
        }
    }
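
    // A minimal sketch of why unglueing matters: in `Option<Vec<u8>>` the
    // trailing `>>` is lexed as one glued token, so the generic lists are
    // closed with two `expect_gt` calls, the first of which uses
    // `break_and_eat` to split `>>` into `>` and `>`:
    //
    //     self.expect_gt()?; // closes `Vec<u8>`, leaving `>` behind
    //     self.expect_gt()?; // closes `Option<...>`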

    /// Eats `+`, possibly breaking tokens like `+=` in the process.
    fn eat_plus(&mut self) -> bool {
        self.break_and_eat(exp!(Plus))
    }

    /// Eats `&`, possibly breaking tokens like `&&` in the process.
    /// Signals an error if `&` is not eaten.
    fn expect_and(&mut self) -> PResult<'a, ()> {
        if self.break_and_eat(exp!(And)) { Ok(()) } else { self.unexpected() }
    }

    /// Eats `|`, possibly breaking tokens like `||` in the process.
    /// Signals an error if `|` was not eaten.
    fn expect_or(&mut self) -> PResult<'a, ()> {
        if self.break_and_eat(exp!(Or)) { Ok(()) } else { self.unexpected() }
    }

    /// Eats `<`, possibly breaking tokens like `<<` in the process.
    fn eat_lt(&mut self) -> bool {
        let ate = self.break_and_eat(exp!(Lt));
        if ate {
            // See doc comment for `unmatched_angle_bracket_count`.
            self.unmatched_angle_bracket_count += 1;
            debug!("eat_lt: (increment) count={:?}", self.unmatched_angle_bracket_count);
        }
        ate
    }

    /// Eats `<`, possibly breaking tokens like `<<` in the process.
    /// Signals an error if `<` was not eaten.
    fn expect_lt(&mut self) -> PResult<'a, ()> {
        if self.eat_lt() { Ok(()) } else { self.unexpected() }
    }

    /// Eats `>`, possibly breaking tokens like `>>` in the process.
    /// Signals an error if `>` was not eaten.
    fn expect_gt(&mut self) -> PResult<'a, ()> {
        if self.break_and_eat(exp!(Gt)) {
            // See doc comment for `unmatched_angle_bracket_count`.
            if self.unmatched_angle_bracket_count > 0 {
                self.unmatched_angle_bracket_count -= 1;
                debug!("expect_gt: (decrement) count={:?}", self.unmatched_angle_bracket_count);
            }
            Ok(())
        } else {
            self.unexpected()
        }
    }

    /// Checks if the next token is contained within `closes_expected` or
    /// `closes_not_expected`, and returns `true` if so.
    fn expect_any_with_type(
        &mut self,
        closes_expected: &[ExpTokenPair<'_>],
        closes_not_expected: &[&TokenKind],
    ) -> bool {
        closes_expected.iter().any(|&close| self.check(close))
            || closes_not_expected.iter().any(|k| self.check_noexpect(k))
    }

    /// Parses a sequence until the specified delimiters. The function
    /// `f` must consume tokens until reaching the next separator or
    /// closing bracket.
    fn parse_seq_to_before_tokens<T>(
        &mut self,
        closes_expected: &[ExpTokenPair<'_>],
        closes_not_expected: &[&TokenKind],
        sep: SeqSep<'_>,
        mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, Trailing, Recovered)> {
        let mut first = true;
        let mut recovered = Recovered::No;
        let mut trailing = Trailing::No;
        let mut v = ThinVec::new();

        while !self.expect_any_with_type(closes_expected, closes_not_expected) {
            if let token::CloseDelim(..) | token::Eof = self.token.kind {
                break;
            }
            if let Some(exp) = sep.sep {
                if first {
                    // no separator for the first element
                    first = false;
                } else {
                    // check for separator
                    match self.expect(exp) {
                        Ok(Recovered::No) => {
                            self.current_closure.take();
                        }
                        Ok(Recovered::Yes(guar)) => {
                            self.current_closure.take();
                            recovered = Recovered::Yes(guar);
                            break;
                        }
                        Err(mut expect_err) => {
                            let sp = self.prev_token.span.shrink_to_hi();
                            let token_str = pprust::token_kind_to_string(exp.tok);

                            match self.current_closure.take() {
                                Some(closure_spans) if self.token == TokenKind::Semi => {
                                    // Finding a semicolon instead of a comma
                                    // after a closure body indicates that the
                                    // closure body may be a block but the user
                                    // forgot to put braces around its
                                    // statements.

                                    self.recover_missing_braces_around_closure_body(
                                        closure_spans,
                                        expect_err,
                                    )?;

                                    continue;
                                }

                                _ => {
                                    // Attempt to keep parsing if it was a similar separator.
                                    if exp.tok.similar_tokens().contains(&self.token.kind) {
                                        self.bump();
                                    }
                                }
                            }

                            // If this was a missing `@` in a binding pattern,
                            // bail with a suggestion.
                            // https://github.com/rust-lang/rust/issues/72373
                            if self.prev_token.is_ident() && self.token == token::DotDot {
                                let msg = format!(
                                    "if you meant to bind the contents of the rest of the array \
                                     pattern into `{}`, use `@`",
                                    pprust::token_to_string(&self.prev_token)
                                );
                                expect_err
                                    .with_span_suggestion_verbose(
                                        self.prev_token.span.shrink_to_hi().until(self.token.span),
                                        msg,
                                        " @ ",
                                        Applicability::MaybeIncorrect,
                                    )
                                    .emit();
                                break;
                            }

                            // Attempt to keep parsing if it was an omitted separator.
                            self.last_unexpected_token_span = None;
                            match f(self) {
                                Ok(t) => {
                                    // Parsed successfully, therefore most probably the code only
                                    // misses a separator.
                                    expect_err
                                        .with_span_suggestion_short(
                                            sp,
                                            format!("missing `{token_str}`"),
                                            token_str,
                                            Applicability::MaybeIncorrect,
                                        )
                                        .emit();

                                    v.push(t);
                                    continue;
                                }
                                Err(e) => {
                                    // Parsing failed, therefore it must be something more serious
                                    // than just a missing separator.
                                    for xx in &e.children {
                                        // Propagate the help message from sub error `e` to main
                                        // error `expect_err`.
                                        expect_err.children.push(xx.clone());
                                    }
                                    e.cancel();
                                    if self.token == token::Colon {
                                        // We will try to recover in
                                        // `maybe_recover_struct_lit_bad_delims`.
                                        return Err(expect_err);
                                    } else if let [exp] = closes_expected
                                        && exp.token_type == TokenType::CloseParen
                                    {
                                        return Err(expect_err);
                                    } else {
                                        expect_err.emit();
                                        break;
                                    }
                                }
                            }
                        }
                    }
                }
            }
            if sep.trailing_sep_allowed
                && self.expect_any_with_type(closes_expected, closes_not_expected)
            {
                trailing = Trailing::Yes;
                break;
            }

            let t = f(self)?;
            v.push(t);
        }

        Ok((v, trailing, recovered))
    }

    fn recover_missing_braces_around_closure_body(
        &mut self,
        closure_spans: ClosureSpans,
        mut expect_err: Diag<'_>,
    ) -> PResult<'a, ()> {
        let initial_semicolon = self.token.span;

        while self.eat(exp!(Semi)) {
            let _ = self
                .parse_stmt_without_recovery(false, ForceCollect::No, false)
                .unwrap_or_else(|e| {
                    e.cancel();
                    None
                });
        }

        expect_err
            .primary_message("closure bodies that contain statements must be surrounded by braces");

        let preceding_pipe_span = closure_spans.closing_pipe;
        let following_token_span = self.token.span;

        let mut first_note = MultiSpan::from(vec![initial_semicolon]);
        first_note.push_span_label(
            initial_semicolon,
            "this `;` turns the preceding closure into a statement",
        );
        first_note.push_span_label(
            closure_spans.body,
            "this expression is a statement because of the trailing semicolon",
        );
        expect_err.span_note(first_note, "statement found outside of a block");

        let mut second_note = MultiSpan::from(vec![closure_spans.whole_closure]);
        second_note.push_span_label(closure_spans.whole_closure, "this is the parsed closure...");
        second_note.push_span_label(
            following_token_span,
            "...but likely you meant the closure to end here",
        );
        expect_err.span_note(second_note, "the closure body may be incorrectly delimited");

        expect_err.span(vec![preceding_pipe_span, following_token_span]);

        let opening_suggestion_str = " {".to_string();
        let closing_suggestion_str = "}".to_string();

        expect_err.multipart_suggestion(
            "try adding braces",
            vec![
                (preceding_pipe_span.shrink_to_hi(), opening_suggestion_str),
                (following_token_span.shrink_to_lo(), closing_suggestion_str),
            ],
            Applicability::MaybeIncorrect,
        );

        expect_err.emit();

        Ok(())
    }

    /// Parses a sequence, not including the delimiters. The function
    /// `f` must consume tokens until reaching the next separator or
    /// closing bracket.
    fn parse_seq_to_before_end<T>(
        &mut self,
        close: ExpTokenPair<'_>,
        sep: SeqSep<'_>,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, Trailing, Recovered)> {
        self.parse_seq_to_before_tokens(&[close], &[], sep, f)
    }

    /// Parses a sequence, including only the closing delimiter. The function
    /// `f` must consume tokens until reaching the next separator or
    /// closing bracket.
    fn parse_seq_to_end<T>(
        &mut self,
        close: ExpTokenPair<'_>,
        sep: SeqSep<'_>,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
        let (val, trailing, recovered) = self.parse_seq_to_before_end(close, sep, f)?;
        if matches!(recovered, Recovered::No) && !self.eat(close) {
            self.dcx().span_delayed_bug(
                self.token.span,
                "recovered but `parse_seq_to_before_end` did not give us the close token",
            );
        }
        Ok((val, trailing))
    }

    /// Parses a sequence, including both delimiters. The function
    /// `f` must consume tokens until reaching the next separator or
    /// closing bracket.
    fn parse_unspanned_seq<T>(
        &mut self,
        open: ExpTokenPair<'_>,
        close: ExpTokenPair<'_>,
        sep: SeqSep<'_>,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
        self.expect(open)?;
        self.parse_seq_to_end(close, sep, f)
    }

    /// Parses a comma-separated sequence, including both delimiters.
    /// The function `f` must consume tokens until reaching the next separator or
    /// closing bracket.
    fn parse_delim_comma_seq<T>(
        &mut self,
        open: ExpTokenPair<'_>,
        close: ExpTokenPair<'_>,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
        self.parse_unspanned_seq(open, close, SeqSep::trailing_allowed(exp!(Comma)), f)
    }

    /// Parses a comma-separated sequence delimited by parentheses (e.g. `(x, y)`).
    /// The function `f` must consume tokens until reaching the next separator or
    /// closing bracket.
    fn parse_paren_comma_seq<T>(
        &mut self,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
        self.parse_delim_comma_seq(exp!(OpenParen), exp!(CloseParen), f)
    }
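
    // A minimal sketch of the sequence helpers in action (hypothetical call
    // site; `parse_expr` is the parser's expression entry point defined in
    // `expr.rs`):
    //
    //     let (args, _trailing) = self.parse_paren_comma_seq(|p| p.parse_expr())?;
    //     // `args` now holds the comma-separated expressions between `(` and `)`.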

    /// Advances the parser by one token, using the provided token as the next one.
    fn bump_with(&mut self, next: (Token, Spacing)) {
        self.inlined_bump_with(next)
    }

    /// This always-inlined version should only be used on hot code paths.
    #[inline(always)]
    fn inlined_bump_with(&mut self, (next_token, next_spacing): (Token, Spacing)) {
        // Update the current and previous tokens.
        self.prev_token = mem::replace(&mut self.token, next_token);
        self.token_spacing = next_spacing;

        // Diagnostics.
        self.expected_token_types.clear();
    }

    /// Advances the parser by one token.
    pub fn bump(&mut self) {
        // Note: destructuring here would give nicer code, but it was found in #96210 to be slower
        // than `.0`/`.1` access.
        let mut next = self.token_cursor.inlined_next();
        self.num_bump_calls += 1;
        // We got a token from the underlying cursor and no longer need to
        // worry about an unglued token. See `break_and_eat` for more details.
        self.break_last_token = 0;
        if next.0.span.is_dummy() {
            // Tweak the location for better diagnostics, but keep syntactic context intact.
            let fallback_span = self.token.span;
            next.0.span = fallback_span.with_ctxt(next.0.span.ctxt());
        }
        debug_assert!(!matches!(
            next.0.kind,
            token::OpenDelim(delim) | token::CloseDelim(delim) if delim.skip()
        ));
        self.inlined_bump_with(next)
    }

    /// Looks ahead `dist` tokens past `self.token` and passes that token to `looker`.
    /// When `dist == 0`, the current token is examined. `Eof` will be
    /// returned if the look-ahead is any distance past the end of the tokens.
    pub fn look_ahead<R>(&self, dist: usize, looker: impl FnOnce(&Token) -> R) -> R {
        if dist == 0 {
            return looker(&self.token);
        }

        // Typically around 98% of the `dist > 0` cases have `dist == 1`, so we
        // have a fast special case for that.
        if dist == 1 {
            // The index is zero because the tree cursor's index always points
            // to the next token to be gotten.
            match self.token_cursor.curr.curr() {
                Some(tree) => {
                    // Indexing stayed within the current token tree.
                    match tree {
                        TokenTree::Token(token, _) => return looker(token),
                        &TokenTree::Delimited(dspan, _, delim, _) => {
                            if !delim.skip() {
                                return looker(&Token::new(token::OpenDelim(delim), dspan.open));
                            }
                        }
                    }
                }
                None => {
                    // The tree cursor lookahead went (one) past the end of the
                    // current token tree. Try to return a close delimiter.
                    if let Some(last) = self.token_cursor.stack.last()
                        && let Some(&TokenTree::Delimited(span, _, delim, _)) = last.curr()
                        && !delim.skip()
                    {
                        // We are not in the outermost token stream, so we have
                        // delimiters. Also, those delimiters are not skipped.
                        return looker(&Token::new(token::CloseDelim(delim), span.close));
                    }
                }
            }
        }

        // Just clone the token cursor and use `next`, skipping delimiters as
        // necessary. Slow but simple.
        let mut cursor = self.token_cursor.clone();
        let mut i = 0;
        let mut token = Token::dummy();
        while i < dist {
            token = cursor.next().0;
            if matches!(
                token.kind,
                token::OpenDelim(delim) | token::CloseDelim(delim) if delim.skip()
            ) {
                continue;
            }
            i += 1;
        }
        looker(&token)
    }
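
    // A minimal sketch of non-consuming lookahead, mirroring uses elsewhere
    // in this file: peek one token ahead without advancing the parser.
    //
    //     let brace_next = self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace));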

    /// Returns whether any of the given keywords are `dist` tokens ahead of the current one.
    pub(crate) fn is_keyword_ahead(&self, dist: usize, kws: &[Symbol]) -> bool {
        self.look_ahead(dist, |t| kws.iter().any(|&kw| t.is_keyword(kw)))
    }

    /// Parses a coroutine kind: `async`, `gen`, `async gen`, or nothing.
    fn parse_coroutine_kind(&mut self, case: Case) -> Option<CoroutineKind> {
        let span = self.token.uninterpolated_span();
        if self.eat_keyword_case(exp!(Async), case) {
            // FIXME(gen_blocks): Do we want to unconditionally parse `gen` and then
            // error if edition <= 2024, like we do with async and edition <= 2018?
            if self.token.uninterpolated_span().at_least_rust_2024()
                && self.eat_keyword_case(exp!(Gen), case)
            {
                let gen_span = self.prev_token.uninterpolated_span();
                Some(CoroutineKind::AsyncGen {
                    span: span.to(gen_span),
                    closure_id: DUMMY_NODE_ID,
                    return_impl_trait_id: DUMMY_NODE_ID,
                })
            } else {
                Some(CoroutineKind::Async {
                    span,
                    closure_id: DUMMY_NODE_ID,
                    return_impl_trait_id: DUMMY_NODE_ID,
                })
            }
        } else if self.token.uninterpolated_span().at_least_rust_2024()
            && self.eat_keyword_case(exp!(Gen), case)
        {
            Some(CoroutineKind::Gen {
                span,
                closure_id: DUMMY_NODE_ID,
                return_impl_trait_id: DUMMY_NODE_ID,
            })
        } else {
            None
        }
    }

    /// Parses fn unsafety: `unsafe`, `safe`, or nothing.
    fn parse_safety(&mut self, case: Case) -> Safety {
        if self.eat_keyword_case(exp!(Unsafe), case) {
            Safety::Unsafe(self.prev_token.uninterpolated_span())
        } else if self.eat_keyword_case(exp!(Safe), case) {
            Safety::Safe(self.prev_token.uninterpolated_span())
        } else {
            Safety::Default
        }
    }

    /// Parses constness: `const` or nothing.
    fn parse_constness(&mut self, case: Case) -> Const {
        self.parse_constness_(case, false)
    }

    /// Parses constness for closures (case sensitive, feature-gated).
    fn parse_closure_constness(&mut self) -> Const {
        let constness = self.parse_constness_(Case::Sensitive, true);
        if let Const::Yes(span) = constness {
            self.psess.gated_spans.gate(sym::const_closures, span);
        }
        constness
    }

    fn parse_constness_(&mut self, case: Case, is_closure: bool) -> Const {
        // Avoid parsing const blocks and const closures as const items.
        if (self.check_const_closure() == is_closure)
            && !self
                .look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace) || t.is_whole_block())
            && self.eat_keyword_case(exp!(Const), case)
        {
            Const::Yes(self.prev_token.uninterpolated_span())
        } else {
            Const::No
        }
    }
1370
1371    /// Parses inline const expressions.
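    /// For example (illustrative; inline const patterns are gated by
    /// `inline_const_pat`, as the `pat` branch below shows):
    /// ```ignore (illustrative)
    /// let x = const { 1 + 2 };       // inline const expression
    /// if let const { 3 } = x { .. }  // inline const pattern
    /// ```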
    fn parse_const_block(&mut self, span: Span, pat: bool) -> PResult<'a, P<Expr>> {
        if pat {
            self.psess.gated_spans.gate(sym::inline_const_pat, span);
        }
        self.expect_keyword(exp!(Const))?;
        let (attrs, blk) = self.parse_inner_attrs_and_block(None)?;
        let anon_const = AnonConst {
            id: DUMMY_NODE_ID,
            value: self.mk_expr(blk.span, ExprKind::Block(blk, None)),
        };
        let blk_span = anon_const.value.span;
        Ok(self.mk_expr_with_attrs(span.to(blk_span), ExprKind::ConstBlock(anon_const), attrs))
    }

    /// Parses mutability (`mut` or nothing).
    fn parse_mutability(&mut self) -> Mutability {
        if self.eat_keyword(exp!(Mut)) { Mutability::Mut } else { Mutability::Not }
    }

    /// Parses reference binding mode (`ref`, `ref mut`, or nothing).
    fn parse_byref(&mut self) -> ByRef {
        if self.eat_keyword(exp!(Ref)) { ByRef::Yes(self.parse_mutability()) } else { ByRef::No }
    }

    /// Possibly parses mutability (`const` or `mut`).
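    /// One place both keywords are accepted is raw pointer syntax
    /// (illustrative):
    /// ```ignore (illustrative)
    /// *const u8  // `Some(Mutability::Not)`
    /// *mut u8    // `Some(Mutability::Mut)`
    /// ```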
    fn parse_const_or_mut(&mut self) -> Option<Mutability> {
        if self.eat_keyword(exp!(Mut)) {
            Some(Mutability::Mut)
        } else if self.eat_keyword(exp!(Const)) {
            Some(Mutability::Not)
        } else {
            None
        }
    }

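    /// Parses a field name: either an identifier or an unsuffixed integer for
    /// tuple fields. For example (illustrative; `Coord` is a hypothetical
    /// tuple struct):
    /// ```ignore (illustrative)
    /// Foo { bar: 1 }    // `bar` parsed as an identifier
    /// Coord { 0: x }    // `0` parsed as a tuple-index field name
    /// Coord { 0u8: x }  // error: suffixed tuple indices are rejected
    /// ```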
    fn parse_field_name(&mut self) -> PResult<'a, Ident> {
        if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = self.token.kind
        {
            if let Some(suffix) = suffix {
                self.expect_no_tuple_index_suffix(self.token.span, suffix);
            }
            self.bump();
            Ok(Ident::new(symbol, self.prev_token.span))
        } else {
            self.parse_ident_common(true)
        }
    }

    fn parse_delim_args(&mut self) -> PResult<'a, P<DelimArgs>> {
        if let Some(args) = self.parse_delim_args_inner() {
            Ok(P(args))
        } else {
            self.unexpected_any()
        }
    }

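    /// Parses the arguments of an attribute: a delimited token tree, `= expr`,
    /// or nothing. For example (illustrative):
    /// ```ignore (illustrative)
    /// #[attr(a, b)]  // `AttrArgs::Delimited(..)`
    /// #[attr = "v"]  // `AttrArgs::Eq { .. }`
    /// #[attr]        // `AttrArgs::Empty`
    /// ```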
    fn parse_attr_args(&mut self) -> PResult<'a, AttrArgs> {
        Ok(if let Some(args) = self.parse_delim_args_inner() {
            AttrArgs::Delimited(args)
        } else if self.eat(exp!(Eq)) {
            let eq_span = self.prev_token.span;
            AttrArgs::Eq { eq_span, expr: self.parse_expr_force_collect()? }
        } else {
            AttrArgs::Empty
        })
    }

    fn parse_delim_args_inner(&mut self) -> Option<DelimArgs> {
        let delimited = self.check(exp!(OpenParen))
            || self.check(exp!(OpenBracket))
            || self.check(exp!(OpenBrace));

        delimited.then(|| {
            let TokenTree::Delimited(dspan, _, delim, tokens) = self.parse_token_tree() else {
                unreachable!()
            };
            DelimArgs { dspan, delim, tokens }
        })
    }

    /// Parses a single token tree from the input.
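    ///
    /// A token tree is either a single non-delimiter token or an entire
    /// delimited sequence. For example (illustrative), three successive calls
    /// on the input below yield three trees:
    /// ```ignore (illustrative)
    /// a (b c) d  // `a`, then `(b c)` as one tree, then `d`
    /// ```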
    pub fn parse_token_tree(&mut self) -> TokenTree {
        match self.token.kind {
            token::OpenDelim(..) => {
                // Clone the `TokenTree::Delimited` that we are currently
                // within. That's what we are going to return.
                let tree = self.token_cursor.stack.last().unwrap().curr().unwrap().clone();
                debug_assert_matches!(tree, TokenTree::Delimited(..));

                // Advance the token cursor through the entire delimited
                // sequence. After getting the `OpenDelim` we are *within* the
                // delimited sequence, i.e. at depth `d`. After getting the
                // matching `CloseDelim` we are *after* the delimited sequence,
                // i.e. at depth `d - 1`.
                let target_depth = self.token_cursor.stack.len() - 1;
                loop {
                    // Advance one token at a time, so `TokenCursor::next()`
                    // can capture these tokens if necessary.
                    self.bump();
                    if self.token_cursor.stack.len() == target_depth {
                        debug_assert_matches!(self.token.kind, token::CloseDelim(_));
                        break;
                    }
                }

                // Consume the close delimiter.
                self.bump();
                tree
            }
            token::CloseDelim(_) | token::Eof => unreachable!(),
            _ => {
                let prev_spacing = self.token_spacing;
                self.bump();
                TokenTree::Token(self.prev_token.clone(), prev_spacing)
            }
        }
    }

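    /// Parses token trees until end of input (EOF or the close delimiter of
    /// the enclosing sequence) and collects them into a `TokenStream`. For
    /// example (illustrative), inside `(a b (c))` this collects `a`, `b`, and
    /// `(c)`, stopping at the closing `)`.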
    pub fn parse_tokens(&mut self) -> TokenStream {
        let mut result = Vec::new();
        loop {
            match self.token.kind {
                token::Eof | token::CloseDelim(..) => break,
                _ => result.push(self.parse_token_tree()),
            }
        }
        TokenStream::new(result)
    }

    /// Evaluates the closure with restrictions in place.
    ///
    /// After the closure is evaluated, restrictions are reset.
    fn with_res<T>(&mut self, res: Restrictions, f: impl FnOnce(&mut Self) -> T) -> T {
        let old = self.restrictions;
        self.restrictions = res;
        let res = f(self);
        self.restrictions = old;
        res
    }

    /// Parses `pub` and `pub(in path)` plus shortcuts `pub(crate)` for `pub(in crate)`, `pub(self)`
    /// for `pub(in self)`, and `pub(super)` for `pub(in super)`.
    /// If the following element can't be a tuple (i.e., it's a function definition), then
    /// it's not a tuple struct field, and the contents within the parentheses aren't valid,
    /// so emit a proper diagnostic.
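    ///
    /// For example (illustrative):
    /// ```ignore (illustrative)
    /// pub fn a() {}               // `VisibilityKind::Public`
    /// pub(crate) fn b() {}        // restricted, shorthand form
    /// pub(in crate::m) fn c() {}  // restricted, explicit path
    /// fn d() {}                   // `VisibilityKind::Inherited`
    /// ```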
    // Public for rustfmt usage.
    pub fn parse_visibility(&mut self, fbt: FollowedByType) -> PResult<'a, Visibility> {
        if let Some(vis) = self
            .eat_metavar_seq(MetaVarKind::Vis, |this| this.parse_visibility(FollowedByType::Yes))
        {
            return Ok(vis);
        }

        if !self.eat_keyword(exp!(Pub)) {
            // We need a span for our `Spanned<VisibilityKind>`, but there's inherently no
            // keyword to grab a span from for inherited visibility; an empty span at the
            // beginning of the current token would seem to be the "Schelling span".
            return Ok(Visibility {
                span: self.token.span.shrink_to_lo(),
                kind: VisibilityKind::Inherited,
                tokens: None,
            });
        }
        let lo = self.prev_token.span;

        if self.check(exp!(OpenParen)) {
            // We don't `self.bump()` the `(` yet because this might be a struct definition where
            // `()` or a tuple might be allowed. For example, `struct Struct(pub (), pub (usize));`.
            // Because of this, we only `bump` the `(` if we're assured it is appropriate to do so
            // by the following tokens.
            if self.is_keyword_ahead(1, &[kw::In]) {
                // Parse `pub(in path)`.
                self.bump(); // `(`
                self.bump(); // `in`
                let path = self.parse_path(PathStyle::Mod)?; // `path`
                self.expect(exp!(CloseParen))?; // `)`
                let vis = VisibilityKind::Restricted {
                    path: P(path),
                    id: ast::DUMMY_NODE_ID,
                    shorthand: false,
                };
                return Ok(Visibility {
                    span: lo.to(self.prev_token.span),
                    kind: vis,
                    tokens: None,
                });
            } else if self.look_ahead(2, |t| t == &token::CloseDelim(Delimiter::Parenthesis))
                && self.is_keyword_ahead(1, &[kw::Crate, kw::Super, kw::SelfLower])
            {
                // Parse `pub(crate)`, `pub(self)`, or `pub(super)`.
                self.bump(); // `(`
                let path = self.parse_path(PathStyle::Mod)?; // `crate`/`super`/`self`
                self.expect(exp!(CloseParen))?; // `)`
                let vis = VisibilityKind::Restricted {
                    path: P(path),
                    id: ast::DUMMY_NODE_ID,
                    shorthand: true,
                };
                return Ok(Visibility {
                    span: lo.to(self.prev_token.span),
                    kind: vis,
                    tokens: None,
                });
            } else if let FollowedByType::No = fbt {
                // Provide this diagnostic if a type cannot follow;
                // in particular, if this is not a tuple struct.
                self.recover_incorrect_vis_restriction()?;
                // Emit diagnostic, but continue with public visibility.
            }
        }

        Ok(Visibility { span: lo, kind: VisibilityKind::Public, tokens: None })
    }

    /// Recovery for e.g. `pub(something) fn ...` or `struct X { pub(something) y: Z }`
    fn recover_incorrect_vis_restriction(&mut self) -> PResult<'a, ()> {
        self.bump(); // `(`
        let path = self.parse_path(PathStyle::Mod)?;
        self.expect(exp!(CloseParen))?; // `)`

        let path_str = pprust::path_to_string(&path);
        self.dcx()
            .emit_err(IncorrectVisibilityRestriction { span: path.span, inner_str: path_str });

        Ok(())
    }

    /// Parses `extern string_literal?`.
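    /// For example (illustrative):
    /// ```ignore (illustrative)
    /// extern "C" fn f() {}  // explicit ABI `"C"`
    /// extern fn g() {}      // implicit ABI (defaults to `"C"`)
    /// fn h() {}             // `Extern::None`
    /// ```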
    fn parse_extern(&mut self, case: Case) -> Extern {
        if self.eat_keyword_case(exp!(Extern), case) {
            let mut extern_span = self.prev_token.span;
            let abi = self.parse_abi();
            if let Some(abi) = abi {
                extern_span = extern_span.to(abi.span);
            }
            Extern::from_abi(abi, extern_span)
        } else {
            Extern::None
        }
    }

    /// Parses a string literal as an ABI spec.
    fn parse_abi(&mut self) -> Option<StrLit> {
        match self.parse_str_lit() {
            Ok(str_lit) => Some(str_lit),
            Err(Some(lit)) => match lit.kind {
                ast::LitKind::Err(_) => None,
                _ => {
                    self.dcx().emit_err(NonStringAbiLiteral { span: lit.span });
                    None
                }
            },
            Err(None) => None,
        }
    }

    fn collect_tokens_no_attrs<R: HasAttrs + HasTokens>(
        &mut self,
        f: impl FnOnce(&mut Self) -> PResult<'a, R>,
    ) -> PResult<'a, R> {
        // The only reason to call `collect_tokens_no_attrs` is if you want tokens, so use
        // `ForceCollect::Yes`.
        self.collect_tokens(None, AttrWrapper::empty(), ForceCollect::Yes, |this, _attrs| {
            Ok((f(this)?, Trailing::No, UsePreAttrPos::No))
        })
    }

    /// Checks for `::` or, potentially, `:::` and then looks ahead after it.
    fn check_path_sep_and_look_ahead(&mut self, looker: impl Fn(&Token) -> bool) -> bool {
        if self.check(exp!(PathSep)) {
            if self.may_recover() && self.look_ahead(1, |t| t.kind == token::Colon) {
                debug_assert!(!self.look_ahead(1, &looker), "Looker must not match on colon");
                self.look_ahead(2, looker)
            } else {
                self.look_ahead(1, looker)
            }
        } else {
            false
        }
    }

    /// `::{` or `::*`
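    ///
    /// For example (illustrative), this returns `true` at the `::` in
    /// `use foo::{bar, baz};` and in `use foo::*;`.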
    fn is_import_coupler(&mut self) -> bool {
        self.check_path_sep_and_look_ahead(|t| {
            matches!(t.kind, token::OpenDelim(Delimiter::Brace) | token::Star)
        })
    }

    // Debug view of the parser's token stream, up to `{lookahead}` tokens.
    // Only used when debugging.
    #[allow(unused)]
    pub(crate) fn debug_lookahead(&self, lookahead: usize) -> impl fmt::Debug {
        fmt::from_fn(move |f| {
            let mut dbg_fmt = f.debug_struct("Parser"); // or at least, one view of it

            // We don't need N spans, but we want at least one, so print all of `prev_token`.
            dbg_fmt.field("prev_token", &self.prev_token);
            let mut tokens = vec![];
            for i in 0..lookahead {
                let tok = self.look_ahead(i, |tok| tok.kind.clone());
                let is_eof = tok == TokenKind::Eof;
                tokens.push(tok);
                if is_eof {
                    // Don't look ahead past EOF.
                    break;
                }
            }
            dbg_fmt.field_with("tokens", |field| field.debug_list().entries(tokens).finish());
            dbg_fmt.field("approx_token_stream_pos", &self.num_bump_calls);

            // Some fields are interesting for certain values, as they relate to macro parsing.
            if let Some(subparser) = self.subparser_name {
                dbg_fmt.field("subparser_name", &subparser);
            }
            if let Recovery::Forbidden = self.recovery {
                dbg_fmt.field("recovery", &self.recovery);
            }

            // Imply there's "more to know" than this view.
            dbg_fmt.finish_non_exhaustive()
        })
    }

    pub fn clear_expected_token_types(&mut self) {
        self.expected_token_types.clear();
    }

    pub fn approx_token_stream_pos(&self) -> u32 {
        self.num_bump_calls
    }
}

pub(crate) fn make_unclosed_delims_error(
    unmatched: UnmatchedDelim,
    psess: &ParseSess,
) -> Option<Diag<'_>> {
    // `None` here means an `Eof` was found. We already emit those errors elsewhere; we add them to
    // `unmatched_delims` only for error recovery in the `Parser`.
    let found_delim = unmatched.found_delim?;
    let mut spans = vec![unmatched.found_span];
    if let Some(sp) = unmatched.unclosed_span {
        spans.push(sp);
    }
    let err = psess.dcx().create_err(MismatchedClosingDelimiter {
        spans,
        delimiter: pprust::token_kind_to_string(&token::CloseDelim(found_delim)).to_string(),
        unmatched: unmatched.found_span,
        opening_candidate: unmatched.candidate_span,
        unclosed: unmatched.unclosed_span,
    });
    Some(err)
}

/// A helper struct used when building an `AttrTokenStream` from
/// a `LazyAttrTokenStream`. Both delimited and non-delimited tokens
/// are stored as `FlatToken::Token`. A vector of `FlatToken`s
/// is then 'parsed' to build up an `AttrTokenStream` with nested
/// `AttrTokenTree::Delimited` tokens.
#[derive(Debug, Clone)]
enum FlatToken {
    /// A token. This holds both delimiter tokens (e.g. `{` and `}`)
    /// and non-delimiter tokens.
    Token((Token, Spacing)),
    /// Holds the `AttrsTarget` for an AST node. The `AttrsTarget` is inserted
    /// directly into the constructed `AttrTokenStream` as an
    /// `AttrTokenTree::AttrsTarget`.
    AttrsTarget(AttrsTarget),
    /// A special 'empty' token that is ignored during the conversion
    /// to an `AttrTokenStream`. This is used to simplify the
    /// handling of replace ranges.
    Empty,
}

/// Metavar captures of various kinds.
#[derive(Clone, Debug)]
pub enum ParseNtResult {
    Tt(TokenTree),
    Ident(Ident, IdentIsRaw),
    Lifetime(Ident, IdentIsRaw),
    Item(P<ast::Item>),
    Stmt(P<ast::Stmt>),
    Pat(P<ast::Pat>, NtPatKind),
    Ty(P<ast::Ty>),
    Meta(P<ast::AttrItem>),
    Path(P<ast::Path>),
    Vis(P<ast::Visibility>),

    /// This variant will eventually be removed, along with `Token::Interpolated`.
    Nt(Arc<Nonterminal>),
}