// rustc_parse/parser/attr_wrapper.rs

1use std::borrow::Cow;
2use std::{iter, mem};
3
4use rustc_ast::token::{Delimiter, Token, TokenKind};
5use rustc_ast::tokenstream::{
6    AttrTokenStream, AttrTokenTree, AttrsTarget, DelimSpacing, DelimSpan, LazyAttrTokenStream,
7    Spacing, ToAttrTokenStream,
8};
9use rustc_ast::{self as ast, AttrVec, Attribute, HasAttrs, HasTokens};
10use rustc_data_structures::fx::FxHashSet;
11use rustc_errors::PResult;
12use rustc_session::parse::ParseSess;
13use rustc_span::{DUMMY_SP, Span, sym};
14
15use super::{
16    Capturing, FlatToken, ForceCollect, NodeRange, NodeReplacement, Parser, ParserRange,
17    TokenCursor, Trailing,
18};
19
// When collecting tokens, this fully captures the start point. Usually it's
// just after outer attributes, but occasionally it's before.
#[derive(Clone, Debug)]
pub(super) struct CollectPos {
    // The token (and its spacing) at the collection start point.
    start_token: (Token, Spacing),
    // A clone of the parser's token cursor at the start point, used to lazily
    // replay the tokens later if a token stream is actually needed.
    cursor_snapshot: TokenCursor,
    // The value of `Parser::num_bump_calls` at the start point.
    start_pos: u32,
}
28
/// Returned by the callback passed to `Parser::collect_tokens` to indicate
/// whether collection should start at the position captured *before* the
/// outer attributes (`pre_attr_pos`) instead of the default (after them).
pub(super) enum UsePreAttrPos {
    No,
    Yes,
}
33
/// A wrapper type to ensure that the parser handles outer attributes correctly.
/// When we parse outer attributes, we need to ensure that we capture tokens
/// for the attribute target. This allows us to perform cfg-expansion on
/// a token stream before we invoke a derive proc-macro.
///
/// This wrapper prevents direct access to the underlying `ast::AttrVec`.
/// Parsing code can only get access to the underlying attributes
/// by passing an `AttrWrapper` to `collect_tokens`.
/// This makes it difficult to accidentally construct an AST node
/// (which stores an `ast::AttrVec`) without first collecting tokens.
///
/// This struct has its own module, to ensure that the parser code
/// cannot directly access the `attrs` field.
#[derive(Debug, Clone)]
pub(super) struct AttrWrapper {
    attrs: AttrVec,
    // The start of the outer attributes in the parser's token stream.
    // This lets us create a `NodeReplacement` for the entire attribute
    // target, including outer attributes. `None` if there are no outer
    // attributes.
    start_pos: Option<u32>,
}
56
57impl AttrWrapper {
58    pub(super) fn new(attrs: AttrVec, start_pos: u32) -> AttrWrapper {
59        AttrWrapper { attrs, start_pos: Some(start_pos) }
60    }
61
62    pub(super) fn empty() -> AttrWrapper {
63        AttrWrapper { attrs: AttrVec::new(), start_pos: None }
64    }
65
66    pub(super) fn take_for_recovery(self, psess: &ParseSess) -> AttrVec {
67        psess.dcx().span_delayed_bug(
68            self.attrs.get(0).map(|attr| attr.span).unwrap_or(DUMMY_SP),
69            "AttrVec is taken for recovery but no error is produced",
70        );
71
72        self.attrs
73    }
74
75    /// Prepend `self.attrs` to `attrs`.
76    // FIXME: require passing an NT to prevent misuse of this method
77    pub(super) fn prepend_to_nt_inner(mut self, attrs: &mut AttrVec) {
78        mem::swap(attrs, &mut self.attrs);
79        attrs.extend(self.attrs);
80    }
81
82    pub(super) fn is_empty(&self) -> bool {
83        self.attrs.is_empty()
84    }
85}
86
87/// Returns `true` if `attrs` contains a `cfg` or `cfg_attr` attribute
88fn has_cfg_or_cfg_attr(attrs: &[Attribute]) -> bool {
89    // NOTE: Builtin attributes like `cfg` and `cfg_attr` cannot be renamed via imports.
90    // Therefore, the absence of a literal `cfg` or `cfg_attr` guarantees that
91    // we don't need to do any eager expansion.
92    attrs.iter().any(|attr| {
93        attr.ident().is_some_and(|ident| ident.name == sym::cfg || ident.name == sym::cfg_attr)
94    })
95}
96
// From a value of this type we can reconstruct the `TokenStream` seen by the
// `f` callback passed to a call to `Parser::collect_tokens`, by
// replaying the getting of the tokens. This saves us producing a `TokenStream`
// if it is never needed, e.g. a captured `macro_rules!` argument that is never
// passed to a proc macro. In practice, token stream creation happens rarely
// compared to calls to `collect_tokens` (see some statistics in #78736) so we
// are doing as little up-front work as possible.
//
// This also makes `Parser` very cheap to clone, since
// there is no intermediate collection buffer to clone.
struct LazyAttrTokenStreamImpl {
    // The token (and spacing) at the collection start point.
    start_token: (Token, Spacing),
    // A clone of the token cursor at the collection start point; the tokens
    // are replayed from here on demand in `to_attr_token_stream`.
    cursor_snapshot: TokenCursor,
    // How many tokens to replay from the snapshot.
    num_calls: u32,
    // How many times the final token was "broken" apart (at most twice, e.g.
    // `>>=` broken into `>` + `>` + `=`); zero if it wasn't broken.
    break_last_token: u32,
    // Replacements (ranges relative to the collection start, plus optional
    // replacement targets) to apply when materializing the token stream.
    node_replacements: Box<[NodeReplacement]>,
}
114
impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
    fn to_attr_token_stream(&self) -> AttrTokenStream {
        // The token produced by the final call to `{,inlined_}next` was not
        // actually consumed by the callback. The combination of chaining the
        // initial token and using `take` produces the desired result - we
        // produce an empty `TokenStream` if no calls were made, and omit the
        // final token otherwise.
        let mut cursor_snapshot = self.cursor_snapshot.clone();
        let tokens = iter::once(FlatToken::Token(self.start_token.clone()))
            .chain(iter::repeat_with(|| FlatToken::Token(cursor_snapshot.next())))
            .take(self.num_calls as usize);

        if self.node_replacements.is_empty() {
            make_attr_token_stream(tokens, self.break_last_token)
        } else {
            let mut tokens: Vec<_> = tokens.collect();
            let mut node_replacements = self.node_replacements.to_vec();
            node_replacements.sort_by_key(|(range, _)| range.0.start);

            // Sanity check (debug builds only): after sorting by start
            // position, adjacent ranges must be either disjoint or properly
            // nested; partial overlap would corrupt the splicing below.
            #[cfg(debug_assertions)]
            for [(node_range, tokens), (next_node_range, next_tokens)] in
                node_replacements.array_windows()
            {
                assert!(
                    node_range.0.end <= next_node_range.0.start
                        || node_range.0.end >= next_node_range.0.end,
                    "Node ranges should be disjoint or nested: ({:?}, {:?}) ({:?}, {:?})",
                    node_range,
                    tokens,
                    next_node_range,
                    next_tokens,
                );
            }

            // Process the replace ranges, starting from the highest start
            // position and working our way back. If we have tokens like:
            //
            // `#[cfg(FALSE)] struct Foo { #[cfg(FALSE)] field: bool }`
            //
            // Then we will generate replace ranges for both
            // the `#[cfg(FALSE)] field: bool` and the entire
            // `#[cfg(FALSE)] struct Foo { #[cfg(FALSE)] field: bool }`
            //
            // By starting processing from the replace range with the greatest
            // start position, we ensure that any (outer) replace range which
            // encloses another (inner) replace range will fully overwrite the
            // inner range's replacement.
            for (node_range, target) in node_replacements.into_iter().rev() {
                assert!(
                    !node_range.0.is_empty(),
                    "Cannot replace an empty node range: {:?}",
                    node_range.0
                );

                // Replace the tokens in range with zero or one `FlatToken::AttrsTarget`s, plus
                // enough `FlatToken::Empty`s to fill up the rest of the range. This keeps the
                // total length of `tokens` constant throughout the replacement process, allowing
                // us to do all replacements without adjusting indices.
                let target_len = target.is_some() as usize;
                tokens.splice(
                    (node_range.0.start as usize)..(node_range.0.end as usize),
                    target.into_iter().map(|target| FlatToken::AttrsTarget(target)).chain(
                        iter::repeat(FlatToken::Empty).take(node_range.0.len() - target_len),
                    ),
                );
            }
            make_attr_token_stream(tokens.into_iter(), self.break_last_token)
        }
    }
}
185
impl<'a> Parser<'a> {
    /// Captures the current collection start point: the current token (and
    /// its spacing), a snapshot of the token cursor, and the current bump
    /// count.
    pub(super) fn collect_pos(&self) -> CollectPos {
        CollectPos {
            start_token: (self.token.clone(), self.token_spacing),
            cursor_snapshot: self.token_cursor.clone(),
            start_pos: self.num_bump_calls,
        }
    }

    /// Parses code with `f`. If appropriate, it records the tokens (in
    /// `LazyAttrTokenStream` form) that were parsed in the result, accessible
    /// via the `HasTokens` trait. The `Trailing` part of the callback's
    /// result indicates if an extra token should be captured, e.g. a comma or
    /// semicolon. The `UsePreAttrPos` part of the callback's result indicates
    /// if we should use `pre_attr_pos` as the collection start position (only
    /// required in a few cases).
    ///
    /// The `attrs` passed in are in `AttrWrapper` form, which is opaque. The
    /// `AttrVec` within is passed to `f`. See the comment on `AttrWrapper` for
    /// details.
    ///
    /// `pre_attr_pos` is the position before the outer attributes (or the node
    /// itself, if no outer attributes are present). It is only needed if `f`
    /// can return `UsePreAttrPos::Yes`.
    ///
    /// Note: If your callback consumes an opening delimiter (including the
    /// case where `self.token` is an opening delimiter on entry to this
    /// function), you must also consume the corresponding closing delimiter.
    /// E.g. you can consume `something ([{ }])` or `([{}])`, but not `([{}]`.
    /// This restriction isn't a problem in practice, because parsed AST items
    /// always have matching delimiters.
    ///
    /// The following example code will be used to explain things in comments
    /// below. It has an outer attribute and an inner attribute. Parsing it
    /// involves two calls to this method, one of which is indirectly
    /// recursive.
    /// ```ignore (fake attributes)
    /// #[cfg_eval]                         // token pos
    /// mod m {                             //   0.. 3
    ///     #[cfg_attr(cond1, attr1)]       //   3..12
    ///     fn g() {                        //  12..17
    ///         #![cfg_attr(cond2, attr2)]  //  17..27
    ///         let _x = 3;                 //  27..32
    ///     }                               //  32..33
    /// }                                   //  33..34
    /// ```
    pub(super) fn collect_tokens<R: HasAttrs + HasTokens>(
        &mut self,
        pre_attr_pos: Option<CollectPos>,
        attrs: AttrWrapper,
        force_collect: ForceCollect,
        f: impl FnOnce(&mut Self, AttrVec) -> PResult<'a, (R, Trailing, UsePreAttrPos)>,
    ) -> PResult<'a, R> {
        let possible_capture_mode = self.capture_cfg;

        // We must collect if anything could observe the collected tokens, i.e.
        // if any of the following conditions hold.
        // - We are force collecting tokens (because force collection requires
        //   tokens by definition).
        let needs_collection = matches!(force_collect, ForceCollect::Yes)
            // - Any of our outer attributes require tokens.
            || needs_tokens(&attrs.attrs)
            // - Our target supports custom inner attributes (custom
            //   inner attribute invocation might require token capturing).
            || R::SUPPORTS_CUSTOM_INNER_ATTRS
            // - We are in "possible capture mode" (which requires tokens if
            //   the parsed node has `#[cfg]` or `#[cfg_attr]` attributes).
            || possible_capture_mode;
        if !needs_collection {
            return Ok(f(self, attrs.attrs)?.0);
        }

        let mut collect_pos = self.collect_pos();
        let has_outer_attrs = !attrs.attrs.is_empty();
        let parser_replacements_start = self.capture_state.parser_replacements.len();

        // We set and restore `Capturing::Yes` on either side of the call to
        // `f`, so we can distinguish the outermost call to `collect_tokens`
        // (e.g. parsing `m` in the example above) from any inner (indirectly
        // recursive) calls (e.g. parsing `g` in the example above). This
        // distinction is used below and in `Parser::parse_inner_attributes`.
        let (mut ret, capture_trailing, use_pre_attr_pos) = {
            let prev_capturing = mem::replace(&mut self.capture_state.capturing, Capturing::Yes);
            let res = f(self, attrs.attrs);
            self.capture_state.capturing = prev_capturing;
            res?
        };

        // - `None`: Our target doesn't support tokens at all (e.g. `NtIdent`).
        // - `Some(None)`: Our target supports tokens and has none.
        // - `Some(Some(_))`: Our target already has tokens set (e.g. we've
        //   parsed something like `#[my_attr] $item`).
        let ret_can_hold_tokens = matches!(ret.tokens_mut(), Some(None));

        // Ignore any attributes we've previously processed. This happens when
        // an inner call to `collect_tokens` returns an AST node and then an
        // outer call ends up with the same AST node without any additional
        // wrapping layer.
        let mut seen_indices = FxHashSet::default();
        for (i, attr) in ret.attrs().iter().enumerate() {
            let is_unseen = self.capture_state.seen_attrs.insert(attr.id);
            if !is_unseen {
                seen_indices.insert(i);
            }
        }
        let ret_attrs: Cow<'_, [Attribute]> =
            if seen_indices.is_empty() {
                Cow::Borrowed(ret.attrs())
            } else {
                let ret_attrs =
                    ret.attrs()
                        .iter()
                        .enumerate()
                        .filter_map(|(i, attr)| {
                            if seen_indices.contains(&i) { None } else { Some(attr.clone()) }
                        })
                        .collect();
                Cow::Owned(ret_attrs)
            };

        // When we're not in "definite capture mode", then skip collecting and
        // return early if `ret` doesn't support tokens or already has some.
        //
        // Note that this check is independent of `force_collect`. There's no
        // need to collect tokens when we don't support tokens or already have
        // tokens.
        let definite_capture_mode = self.capture_cfg
            && matches!(self.capture_state.capturing, Capturing::Yes)
            && has_cfg_or_cfg_attr(&ret_attrs);
        if !definite_capture_mode && !ret_can_hold_tokens {
            return Ok(ret);
        }

        // This is similar to the `needs_collection` check at the start of this
        // function, but now that we've parsed an AST node we have complete
        // information available. (If we return early here that means the
        // setup, such as cloning the token cursor, was unnecessary. That's
        // hard to avoid.)
        //
        // We must collect if anything could observe the collected tokens, i.e.
        // if any of the following conditions hold.
        // - We are force collecting tokens.
        let needs_collection = matches!(force_collect, ForceCollect::Yes)
            // - Any of our outer *or* inner attributes require tokens.
            //   (`attrs.attrs` was just outer attributes, but `ret.attrs()` is
            //   outer and inner attributes. So this check is more precise than
            //   the earlier `needs_tokens` check, and we don't need to
            //   check `R::SUPPORTS_CUSTOM_INNER_ATTRS`.)
            || needs_tokens(&ret_attrs)
            // - We are in "definite capture mode", which requires that there
            //   are `#[cfg]` or `#[cfg_attr]` attributes. (During normal
            //   non-`capture_cfg` parsing, we don't need any special capturing
            //   for those attributes, because they're builtin.)
            || definite_capture_mode;
        if !needs_collection {
            return Ok(ret);
        }

        // Replace the post-attribute collection start position with the
        // pre-attribute position supplied, if `f` indicated it is necessary.
        // (The caller is responsible for providing a non-`None` `pre_attr_pos`
        // if this is a possibility.)
        if matches!(use_pre_attr_pos, UsePreAttrPos::Yes) {
            collect_pos = pre_attr_pos.unwrap();
        }

        let parser_replacements_end = self.capture_state.parser_replacements.len();

        assert!(
            !(self.break_last_token > 0 && matches!(capture_trailing, Trailing::Yes)),
            "Cannot have break_last_token > 0 and have trailing token"
        );
        assert!(self.break_last_token <= 2, "cannot break token more than twice");

        let end_pos = self.num_bump_calls
            + capture_trailing as u32
            // If we "broke" the last token (e.g. breaking a `>>` token once into `>` + `>`, or
            // breaking a `>>=` token twice into `>` + `>` + `=`), then extend the range of
            // captured tokens to include it, because the parser was not actually bumped past it.
            // (Even if we broke twice, it was still just one token originally, hence the `1`.)
            // When the `LazyAttrTokenStream` gets converted into an `AttrTokenStream`, we will
            // rebreak that final token once or twice.
            + if self.break_last_token == 0 { 0 } else { 1 };

        let num_calls = end_pos - collect_pos.start_pos;

        // Take the captured `ParserRange`s for any inner attributes that we parsed in
        // `Parser::parse_inner_attributes`, and pair them in a `ParserReplacement` with `None`,
        // which means the relevant tokens will be removed. (More details below.)
        let mut inner_attr_parser_replacements = Vec::new();
        for attr in ret_attrs.iter() {
            if attr.style == ast::AttrStyle::Inner {
                if let Some(inner_attr_parser_range) =
                    self.capture_state.inner_attr_parser_ranges.remove(&attr.id)
                {
                    inner_attr_parser_replacements.push((inner_attr_parser_range, None));
                } else {
                    self.dcx().span_delayed_bug(attr.span, "Missing token range for attribute");
                }
            }
        }

        // This is hot enough for `deep-vector` that checking the conditions for an empty iterator
        // is measurably faster than actually executing the iterator.
        let node_replacements: Box<[_]> = if parser_replacements_start == parser_replacements_end
            && inner_attr_parser_replacements.is_empty()
        {
            Box::new([])
        } else {
            // Grab any replace ranges that occur *inside* the current AST node. Convert them
            // from `ParserRange` form to `NodeRange` form. We will perform the actual
            // replacement only when we convert the `LazyAttrTokenStream` to an
            // `AttrTokenStream`.
            self.capture_state.parser_replacements
                [parser_replacements_start..parser_replacements_end]
                .iter()
                .cloned()
                .chain(inner_attr_parser_replacements)
                .map(|(parser_range, data)| {
                    (NodeRange::new(parser_range, collect_pos.start_pos), data)
                })
                .collect()
        };

        // What is the status here when parsing the example code at the top of this method?
        //
        // When parsing `g`:
        // - `start_pos..end_pos` is `12..33` (`fn g { ... }`, excluding the outer attr).
        // - `inner_attr_parser_replacements` has one entry (`ParserRange(17..27)`), to
        //   delete the inner attr's tokens.
        //   - This entry is converted to `NodeRange(5..15)` (relative to the `fn`) and put into
        //     the lazy tokens for `g`, i.e. deleting the inner attr from those tokens (if they get
        //     evaluated).
        //   - Those lazy tokens are also put into an `AttrsTarget` that is appended to `self`'s
        //     replace ranges at the bottom of this function, for processing when parsing `m`.
        // - `parser_replacements_start..parser_replacements_end` is empty.
        //
        // When parsing `m`:
        // - `start_pos..end_pos` is `0..34` (`mod m`, excluding the `#[cfg_eval]` attribute).
        // - `inner_attr_parser_replacements` is empty.
        // - `parser_replacements_start..parser_replacements_end` has one entry.
        //   - One `AttrsTarget` (added below when parsing `g`) to replace all of `g` (`3..33`,
        //     including its outer attribute), with:
        //     - `attrs`: includes the outer and the inner attr.
        //     - `tokens`: lazy tokens for `g` (with its inner attr deleted).

        let tokens = LazyAttrTokenStream::new(LazyAttrTokenStreamImpl {
            start_token: collect_pos.start_token,
            cursor_snapshot: collect_pos.cursor_snapshot,
            num_calls,
            break_last_token: self.break_last_token,
            node_replacements,
        });
        let mut tokens_used = false;

        // If in "definite capture mode" we need to register a replace range
        // for the `#[cfg]` and/or `#[cfg_attr]` attrs. This allows us to run
        // eager cfg-expansion on the captured token stream.
        if definite_capture_mode {
            assert!(self.break_last_token == 0, "Should not have unglued last token with cfg attr");

            // What is the status here when parsing the example code at the top of this method?
            //
            // When parsing `g`, we add one entry:
            // - The pushed entry (`ParserRange(3..33)`) has a new `AttrsTarget` with:
            //   - `attrs`: includes the outer and the inner attr.
            //   - `tokens`: lazy tokens for `g` (with its inner attr deleted).
            //
            // When parsing `m`, we do nothing here.

            // Set things up so that the entire AST node that we just parsed, including attributes,
            // will be replaced with `target` in the lazy token stream. This will allow us to
            // cfg-expand this AST node.
            let start_pos =
                if has_outer_attrs { attrs.start_pos.unwrap() } else { collect_pos.start_pos };
            let target =
                AttrsTarget { attrs: ret_attrs.iter().cloned().collect(), tokens: tokens.clone() };
            tokens_used = true;
            self.capture_state
                .parser_replacements
                .push((ParserRange(start_pos..end_pos), Some(target)));
        } else if matches!(self.capture_state.capturing, Capturing::No) {
            // Only clear the ranges once we've finished capturing entirely, i.e. we've finished
            // the outermost call to this method.
            self.capture_state.parser_replacements.clear();
            self.capture_state.inner_attr_parser_ranges.clear();
            self.capture_state.seen_attrs.clear();
        }

        // If we support tokens and don't already have them, store the newly captured tokens.
        if let Some(target_tokens @ None) = ret.tokens_mut() {
            tokens_used = true;
            *target_tokens = Some(tokens);
        }

        assert!(tokens_used); // check we didn't create `tokens` unnecessarily
        Ok(ret)
    }
}
485
/// Converts a flattened iterator of tokens (including open and close delimiter tokens) into an
/// `AttrTokenStream`, creating an `AttrTokenTree::Delimited` for each matching pair of open and
/// close delims.
fn make_attr_token_stream(
    iter: impl Iterator<Item = FlatToken>,
    break_last_token: u32,
) -> AttrTokenStream {
    #[derive(Debug)]
    struct FrameData {
        // This is `None` for the first frame, `Some` for all others.
        open_delim_sp: Option<(Delimiter, Span, Spacing)>,
        inner: Vec<AttrTokenTree>,
    }
    // The stack always has at least one element. Storing it separately makes for shorter code.
    let mut stack_top = FrameData { open_delim_sp: None, inner: vec![] };
    let mut stack_rest = vec![];
    for flat_token in iter {
        match flat_token {
            // An open delimiter starts a new frame; the old top is saved.
            FlatToken::Token((Token { kind: TokenKind::OpenDelim(delim), span }, spacing)) => {
                stack_rest.push(mem::replace(
                    &mut stack_top,
                    FrameData { open_delim_sp: Some((delim, span, spacing)), inner: vec![] },
                ));
            }
            // A close delimiter finishes the current frame, turning its
            // contents into a single `Delimited` tree pushed onto the frame
            // below it.
            FlatToken::Token((Token { kind: TokenKind::CloseDelim(delim), span }, spacing)) => {
                let frame_data = mem::replace(&mut stack_top, stack_rest.pop().unwrap());
                let (open_delim, open_sp, open_spacing) = frame_data.open_delim_sp.unwrap();
                assert!(
                    open_delim.eq_ignoring_invisible_origin(&delim),
                    "Mismatched open/close delims: open={open_delim:?} close={span:?}"
                );
                let dspan = DelimSpan::from_pair(open_sp, span);
                let dspacing = DelimSpacing::new(open_spacing, spacing);
                let stream = AttrTokenStream::new(frame_data.inner);
                let delimited = AttrTokenTree::Delimited(dspan, dspacing, delim, stream);
                stack_top.inner.push(delimited);
            }
            FlatToken::Token((token, spacing)) => {
                stack_top.inner.push(AttrTokenTree::Token(token, spacing))
            }
            FlatToken::AttrsTarget(target) => {
                stack_top.inner.push(AttrTokenTree::AttrsTarget(target))
            }
            // `Empty` tokens are padding left behind by replacements; drop them.
            FlatToken::Empty => {}
        }
    }

    // If the parser broke the final token (e.g. split `>>` into `>` + `>`),
    // re-break it here: replace the full token with just its first fragment,
    // whose span covers only the consumed leading bytes.
    if break_last_token > 0 {
        let last_token = stack_top.inner.pop().unwrap();
        if let AttrTokenTree::Token(last_token, spacing) = last_token {
            let (unglued, _) = last_token.kind.break_two_token_op(break_last_token).unwrap();

            // Tokens are always ASCII chars, so we can use byte arithmetic here.
            let mut first_span = last_token.span.shrink_to_lo();
            first_span =
                first_span.with_hi(first_span.lo() + rustc_span::BytePos(break_last_token));

            stack_top.inner.push(AttrTokenTree::Token(Token::new(unglued, first_span), spacing));
        } else {
            panic!("Unexpected last token {last_token:?}")
        }
    }
    AttrTokenStream::new(stack_top.inner)
}
550
551/// Tokens are needed if:
552/// - any non-single-segment attributes (other than doc comments) are present,
553///   e.g. `rustfmt::skip`; or
554/// - any `cfg_attr` attributes are present; or
555/// - any single-segment, non-builtin attributes are present, e.g. `derive`,
556///   `test`, `global_allocator`.
557fn needs_tokens(attrs: &[ast::Attribute]) -> bool {
558    attrs.iter().any(|attr| match attr.ident() {
559        None => !attr.is_doc_comment(),
560        Some(ident) => {
561            ident.name == sym::cfg_attr || !rustc_feature::is_builtin_attr_name(ident.name)
562        }
563    })
564}
565
// Some types are used a lot. Make sure they don't unintentionally get bigger.
// (Only checked on 64-bit targets, where the pointer-sized fields have their
// usual sizes.)
#[cfg(target_pointer_width = "64")]
mod size_asserts {
    use rustc_data_structures::static_assert_size;

    use super::*;
    // tidy-alphabetical-start
    static_assert_size!(LazyAttrTokenStreamImpl, 96);
    // tidy-alphabetical-end
}