// compiler/rustc_ast/src/tokenstream.rs
//! # Token Streams
//!
//! `TokenStream`s represent syntactic objects before they are converted into ASTs.
//! A `TokenStream` is, roughly speaking, a sequence of [`TokenTree`]s,
//! which are themselves a single [`Token`] or a `Delimited` subsequence of tokens.

use std::borrow::Cow;
use std::hash::Hash;
use std::ops::Range;
use std::sync::Arc;
use std::{cmp, fmt, iter, mem};

use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::sync;
use rustc_macros::{Decodable, Encodable, HashStable_Generic, Walkable};
use rustc_serialize::{Decodable, Encodable};
use rustc_span::{DUMMY_SP, Span, SpanDecoder, SpanEncoder, Symbol, sym};
use thin_vec::ThinVec;

use crate::ast::AttrStyle;
use crate::ast_traits::{HasAttrs, HasTokens};
use crate::token::{self, Delimiter, Token, TokenKind};
use crate::{AttrVec, Attribute};
24
25/// Part of a `TokenStream`.
26#[derive(#[automatically_derived]
impl ::core::fmt::Debug for TokenTree {
    #[inline]
    fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
        match self {
            TokenTree::Token(__self_0, __self_1) =>
                ::core::fmt::Formatter::debug_tuple_field2_finish(f, "Token",
                    __self_0, &__self_1),
            TokenTree::Delimited(__self_0, __self_1, __self_2, __self_3) =>
                ::core::fmt::Formatter::debug_tuple_field4_finish(f,
                    "Delimited", __self_0, __self_1, __self_2, &__self_3),
        }
    }
}Debug, #[automatically_derived]
impl ::core::clone::Clone for TokenTree {
    #[inline]
    fn clone(&self) -> TokenTree {
        match self {
            TokenTree::Token(__self_0, __self_1) =>
                TokenTree::Token(::core::clone::Clone::clone(__self_0),
                    ::core::clone::Clone::clone(__self_1)),
            TokenTree::Delimited(__self_0, __self_1, __self_2, __self_3) =>
                TokenTree::Delimited(::core::clone::Clone::clone(__self_0),
                    ::core::clone::Clone::clone(__self_1),
                    ::core::clone::Clone::clone(__self_2),
                    ::core::clone::Clone::clone(__self_3)),
        }
    }
}Clone, #[automatically_derived]
impl ::core::cmp::PartialEq for TokenTree {
    #[inline]
    fn eq(&self, other: &TokenTree) -> bool {
        let __self_discr = ::core::intrinsics::discriminant_value(self);
        let __arg1_discr = ::core::intrinsics::discriminant_value(other);
        __self_discr == __arg1_discr &&
            match (self, other) {
                (TokenTree::Token(__self_0, __self_1),
                    TokenTree::Token(__arg1_0, __arg1_1)) =>
                    __self_0 == __arg1_0 && __self_1 == __arg1_1,
                (TokenTree::Delimited(__self_0, __self_1, __self_2, __self_3),
                    TokenTree::Delimited(__arg1_0, __arg1_1, __arg1_2,
                    __arg1_3)) =>
                    __self_0 == __arg1_0 && __self_1 == __arg1_1 &&
                            __self_2 == __arg1_2 && __self_3 == __arg1_3,
                _ => unsafe { ::core::intrinsics::unreachable() }
            }
    }
}PartialEq, #[automatically_derived]
impl ::core::cmp::Eq for TokenTree {
    #[inline]
    #[doc(hidden)]
    #[coverage(off)]
    fn assert_receiver_is_total_eq(&self) {
        let _: ::core::cmp::AssertParamIsEq<Token>;
        let _: ::core::cmp::AssertParamIsEq<Spacing>;
        let _: ::core::cmp::AssertParamIsEq<DelimSpan>;
        let _: ::core::cmp::AssertParamIsEq<DelimSpacing>;
        let _: ::core::cmp::AssertParamIsEq<Delimiter>;
        let _: ::core::cmp::AssertParamIsEq<TokenStream>;
    }
}Eq, #[automatically_derived]
impl ::core::hash::Hash for TokenTree {
    #[inline]
    fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) {
        let __self_discr = ::core::intrinsics::discriminant_value(self);
        ::core::hash::Hash::hash(&__self_discr, state);
        match self {
            TokenTree::Token(__self_0, __self_1) => {
                ::core::hash::Hash::hash(__self_0, state);
                ::core::hash::Hash::hash(__self_1, state)
            }
            TokenTree::Delimited(__self_0, __self_1, __self_2, __self_3) => {
                ::core::hash::Hash::hash(__self_0, state);
                ::core::hash::Hash::hash(__self_1, state);
                ::core::hash::Hash::hash(__self_2, state);
                ::core::hash::Hash::hash(__self_3, state)
            }
        }
    }
}Hash, const _: () =
    {
        impl<__E: ::rustc_span::SpanEncoder> ::rustc_serialize::Encodable<__E>
            for TokenTree {
            fn encode(&self, __encoder: &mut __E) {
                let disc =
                    match *self {
                        TokenTree::Token(ref __binding_0, ref __binding_1) => {
                            0usize
                        }
                        TokenTree::Delimited(ref __binding_0, ref __binding_1,
                            ref __binding_2, ref __binding_3) => {
                            1usize
                        }
                    };
                ::rustc_serialize::Encoder::emit_u8(__encoder, disc as u8);
                match *self {
                    TokenTree::Token(ref __binding_0, ref __binding_1) => {
                        ::rustc_serialize::Encodable::<__E>::encode(__binding_0,
                            __encoder);
                        ::rustc_serialize::Encodable::<__E>::encode(__binding_1,
                            __encoder);
                    }
                    TokenTree::Delimited(ref __binding_0, ref __binding_1,
                        ref __binding_2, ref __binding_3) => {
                        ::rustc_serialize::Encodable::<__E>::encode(__binding_0,
                            __encoder);
                        ::rustc_serialize::Encodable::<__E>::encode(__binding_1,
                            __encoder);
                        ::rustc_serialize::Encodable::<__E>::encode(__binding_2,
                            __encoder);
                        ::rustc_serialize::Encodable::<__E>::encode(__binding_3,
                            __encoder);
                    }
                }
            }
        }
    };Encodable, const _: () =
    {
        impl<__D: ::rustc_span::SpanDecoder> ::rustc_serialize::Decodable<__D>
            for TokenTree {
            fn decode(__decoder: &mut __D) -> Self {
                match ::rustc_serialize::Decoder::read_u8(__decoder) as usize
                    {
                    0usize => {
                        TokenTree::Token(::rustc_serialize::Decodable::decode(__decoder),
                            ::rustc_serialize::Decodable::decode(__decoder))
                    }
                    1usize => {
                        TokenTree::Delimited(::rustc_serialize::Decodable::decode(__decoder),
                            ::rustc_serialize::Decodable::decode(__decoder),
                            ::rustc_serialize::Decodable::decode(__decoder),
                            ::rustc_serialize::Decodable::decode(__decoder))
                    }
                    n => {
                        ::core::panicking::panic_fmt(format_args!("invalid enum variant tag while decoding `TokenTree`, expected 0..2, actual {0}",
                                n));
                    }
                }
            }
        }
    };Decodable, const _: () =
    {
        impl<__CTX> ::rustc_data_structures::stable_hasher::HashStable<__CTX>
            for TokenTree where __CTX: crate::HashStableContext {
            #[inline]
            fn hash_stable(&self, __hcx: &mut __CTX,
                __hasher:
                    &mut ::rustc_data_structures::stable_hasher::StableHasher) {
                ::std::mem::discriminant(self).hash_stable(__hcx, __hasher);
                match *self {
                    TokenTree::Token(ref __binding_0, ref __binding_1) => {
                        { __binding_0.hash_stable(__hcx, __hasher); }
                        { __binding_1.hash_stable(__hcx, __hasher); }
                    }
                    TokenTree::Delimited(ref __binding_0, ref __binding_1,
                        ref __binding_2, ref __binding_3) => {
                        { __binding_0.hash_stable(__hcx, __hasher); }
                        { __binding_1.hash_stable(__hcx, __hasher); }
                        { __binding_2.hash_stable(__hcx, __hasher); }
                        { __binding_3.hash_stable(__hcx, __hasher); }
                    }
                }
            }
        }
    };HashStable_Generic)]
27pub enum TokenTree {
28    /// A single token. Should never be `OpenDelim` or `CloseDelim`, because
29    /// delimiters are implicitly represented by `Delimited`.
30    Token(Token, Spacing),
31    /// A delimited sequence of token trees.
32    Delimited(DelimSpan, DelimSpacing, Delimiter, TokenStream),
33}
34
35// Ensure all fields of `TokenTree` are `DynSend` and `DynSync`.
36fn _dummy()
37where
38    Token: sync::DynSend + sync::DynSync,
39    Spacing: sync::DynSend + sync::DynSync,
40    DelimSpan: sync::DynSend + sync::DynSync,
41    Delimiter: sync::DynSend + sync::DynSync,
42    TokenStream: sync::DynSend + sync::DynSync,
43{
44}
45
46impl TokenTree {
47    /// Checks if this `TokenTree` is equal to the other, regardless of span/spacing information.
48    pub fn eq_unspanned(&self, other: &TokenTree) -> bool {
49        match (self, other) {
50            (TokenTree::Token(token, _), TokenTree::Token(token2, _)) => token.kind == token2.kind,
51            (TokenTree::Delimited(.., delim, tts), TokenTree::Delimited(.., delim2, tts2)) => {
52                delim == delim2 && tts.iter().eq_by(tts2.iter(), |a, b| a.eq_unspanned(b))
53            }
54            _ => false,
55        }
56    }
57
58    /// Retrieves the `TokenTree`'s span.
59    pub fn span(&self) -> Span {
60        match self {
61            TokenTree::Token(token, _) => token.span,
62            TokenTree::Delimited(sp, ..) => sp.entire(),
63        }
64    }
65
66    /// Create a `TokenTree::Token` with alone spacing.
67    pub fn token_alone(kind: TokenKind, span: Span) -> TokenTree {
68        TokenTree::Token(Token::new(kind, span), Spacing::Alone)
69    }
70
71    /// Create a `TokenTree::Token` with joint spacing.
72    pub fn token_joint(kind: TokenKind, span: Span) -> TokenTree {
73        TokenTree::Token(Token::new(kind, span), Spacing::Joint)
74    }
75
76    /// Create a `TokenTree::Token` with joint-hidden spacing.
77    pub fn token_joint_hidden(kind: TokenKind, span: Span) -> TokenTree {
78        TokenTree::Token(Token::new(kind, span), Spacing::JointHidden)
79    }
80
81    pub fn uninterpolate(&self) -> Cow<'_, TokenTree> {
82        match self {
83            TokenTree::Token(token, spacing) => match token.uninterpolate() {
84                Cow::Owned(token) => Cow::Owned(TokenTree::Token(token, *spacing)),
85                Cow::Borrowed(_) => Cow::Borrowed(self),
86            },
87            _ => Cow::Borrowed(self),
88        }
89    }
90}
91
92/// A lazy version of [`AttrTokenStream`], which defers creation of an actual
93/// `AttrTokenStream` until it is needed.
94#[derive(#[automatically_derived]
impl ::core::clone::Clone for LazyAttrTokenStream {
    #[inline]
    fn clone(&self) -> LazyAttrTokenStream {
        LazyAttrTokenStream(::core::clone::Clone::clone(&self.0))
    }
}Clone)]
95pub struct LazyAttrTokenStream(Arc<LazyAttrTokenStreamInner>);
96
97impl LazyAttrTokenStream {
98    pub fn new_direct(stream: AttrTokenStream) -> LazyAttrTokenStream {
99        LazyAttrTokenStream(Arc::new(LazyAttrTokenStreamInner::Direct(stream)))
100    }
101
102    pub fn new_pending(
103        start_token: (Token, Spacing),
104        cursor_snapshot: TokenCursor,
105        num_calls: u32,
106        break_last_token: u32,
107        node_replacements: ThinVec<NodeReplacement>,
108    ) -> LazyAttrTokenStream {
109        LazyAttrTokenStream(Arc::new(LazyAttrTokenStreamInner::Pending {
110            start_token,
111            cursor_snapshot,
112            num_calls,
113            break_last_token,
114            node_replacements,
115        }))
116    }
117
118    pub fn to_attr_token_stream(&self) -> AttrTokenStream {
119        self.0.to_attr_token_stream()
120    }
121}
122
123impl fmt::Debug for LazyAttrTokenStream {
124    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
125        f.write_fmt(format_args!("LazyAttrTokenStream({0:?})",
        self.to_attr_token_stream()))write!(f, "LazyAttrTokenStream({:?})", self.to_attr_token_stream())
126    }
127}
128
129impl<S: SpanEncoder> Encodable<S> for LazyAttrTokenStream {
130    fn encode(&self, _s: &mut S) {
131        {
    ::core::panicking::panic_fmt(format_args!("Attempted to encode LazyAttrTokenStream"));
};panic!("Attempted to encode LazyAttrTokenStream");
132    }
133}
134
135impl<D: SpanDecoder> Decodable<D> for LazyAttrTokenStream {
136    fn decode(_d: &mut D) -> Self {
137        {
    ::core::panicking::panic_fmt(format_args!("Attempted to decode LazyAttrTokenStream"));
};panic!("Attempted to decode LazyAttrTokenStream");
138    }
139}
140
141impl<CTX> HashStable<CTX> for LazyAttrTokenStream {
142    fn hash_stable(&self, _hcx: &mut CTX, _hasher: &mut StableHasher) {
143        {
    ::core::panicking::panic_fmt(format_args!("Attempted to compute stable hash for LazyAttrTokenStream"));
};panic!("Attempted to compute stable hash for LazyAttrTokenStream");
144    }
145}
146
/// A token range within a `Parser`'s full token stream.
#[derive(Clone, Debug)]
pub struct ParserRange(pub Range<u32>);
150
/// A token range within an individual AST node's (lazy) token stream, i.e.
/// relative to that node's first token. Distinct from `ParserRange` so the two
/// kinds of range can't be mixed up.
#[derive(Clone, Debug)]
pub struct NodeRange(pub Range<u32>);
156
157/// Indicates a range of tokens that should be replaced by an `AttrsTarget`
158/// (replacement) or be replaced by nothing (deletion). This is used in two
159/// places during token collection.
160///
161/// 1. Replacement. During the parsing of an AST node that may have a
162///    `#[derive]` attribute, when we parse a nested AST node that has `#[cfg]`
163///    or `#[cfg_attr]`, we replace the entire inner AST node with
164///    `FlatToken::AttrsTarget`. This lets us perform eager cfg-expansion on an
165///    `AttrTokenStream`.
166///
167/// 2. Deletion. We delete inner attributes from all collected token streams,
168///    and instead track them through the `attrs` field on the AST node. This
169///    lets us manipulate them similarly to outer attributes. When we create a
170///    `TokenStream`, the inner attributes are inserted into the proper place
171///    in the token stream.
172///
173/// Each replacement starts off in `ParserReplacement` form but is converted to
174/// `NodeReplacement` form when it is attached to a single AST node, via
175/// `LazyAttrTokenStreamImpl`.
176pub type ParserReplacement = (ParserRange, Option<AttrsTarget>);
177
178/// See the comment on `ParserReplacement`.
179pub type NodeReplacement = (NodeRange, Option<AttrsTarget>);
180
181impl NodeRange {
182    // Converts a range within a parser's tokens to a range within a
183    // node's tokens beginning at `start_pos`.
184    //
185    // For example, imagine a parser with 50 tokens in its token stream, a
186    // function that spans `ParserRange(20..40)` and an inner attribute within
187    // that function that spans `ParserRange(30..35)`. We would find the inner
188    // attribute's range within the function's tokens by subtracting 20, which
189    // is the position of the function's start token. This gives
190    // `NodeRange(10..15)`.
191    pub fn new(ParserRange(parser_range): ParserRange, start_pos: u32) -> NodeRange {
192        if !!parser_range.is_empty() {
    ::core::panicking::panic("assertion failed: !parser_range.is_empty()")
};assert!(!parser_range.is_empty());
193        if !(parser_range.start >= start_pos) {
    ::core::panicking::panic("assertion failed: parser_range.start >= start_pos")
};assert!(parser_range.start >= start_pos);
194        NodeRange((parser_range.start - start_pos)..(parser_range.end - start_pos))
195    }
196}
197
198enum LazyAttrTokenStreamInner {
199    // The token stream has already been produced.
200    Direct(AttrTokenStream),
201
202    // From a value of this type we can reconstruct the `TokenStream` seen by
203    // the `f` callback passed to a call to `Parser::collect_tokens`, by
204    // replaying the getting of the tokens. This saves us producing a
205    // `TokenStream` if it is never needed, e.g. a captured `macro_rules!`
206    // argument that is never passed to a proc macro. In practice, token stream
207    // creation happens rarely compared to calls to `collect_tokens` (see some
208    // statistics in #78736) so we are doing as little up-front work as
209    // possible.
210    //
211    // This also makes `Parser` very cheap to clone, since there is no
212    // intermediate collection buffer to clone.
213    Pending {
214        start_token: (Token, Spacing),
215        cursor_snapshot: TokenCursor,
216        num_calls: u32,
217        break_last_token: u32,
218        node_replacements: ThinVec<NodeReplacement>,
219    },
220}
221
222impl LazyAttrTokenStreamInner {
223    fn to_attr_token_stream(&self) -> AttrTokenStream {
224        match self {
225            LazyAttrTokenStreamInner::Direct(stream) => stream.clone(),
226            LazyAttrTokenStreamInner::Pending {
227                start_token,
228                cursor_snapshot,
229                num_calls,
230                break_last_token,
231                node_replacements,
232            } => {
233                // The token produced by the final call to `{,inlined_}next` was not
234                // actually consumed by the callback. The combination of chaining the
235                // initial token and using `take` produces the desired result - we
236                // produce an empty `TokenStream` if no calls were made, and omit the
237                // final token otherwise.
238                let mut cursor_snapshot = cursor_snapshot.clone();
239                let tokens = iter::once(FlatToken::Token(*start_token))
240                    .chain(iter::repeat_with(|| FlatToken::Token(cursor_snapshot.next())))
241                    .take(*num_calls as usize);
242
243                if node_replacements.is_empty() {
244                    make_attr_token_stream(tokens, *break_last_token)
245                } else {
246                    let mut tokens: Vec<_> = tokens.collect();
247                    let mut node_replacements = node_replacements.to_vec();
248                    node_replacements.sort_by_key(|(range, _)| range.0.start);
249
250                    #[cfg(debug_assertions)]
251                    for [(node_range, tokens), (next_node_range, next_tokens)] in
252                        node_replacements.array_windows()
253                    {
254                        if !(node_range.0.end <= next_node_range.0.start ||
            node_range.0.end >= next_node_range.0.end) {
    {
        ::core::panicking::panic_fmt(format_args!("Node ranges should be disjoint or nested: ({0:?}, {1:?}) ({2:?}, {3:?})",
                node_range, tokens, next_node_range, next_tokens));
    }
};assert!(
255                            node_range.0.end <= next_node_range.0.start
256                                || node_range.0.end >= next_node_range.0.end,
257                            "Node ranges should be disjoint or nested: ({:?}, {:?}) ({:?}, {:?})",
258                            node_range,
259                            tokens,
260                            next_node_range,
261                            next_tokens,
262                        );
263                    }
264
265                    // Process the replace ranges, starting from the highest start
266                    // position and working our way back. If have tokens like:
267                    //
268                    // `#[cfg(FALSE)] struct Foo { #[cfg(FALSE)] field: bool }`
269                    //
270                    // Then we will generate replace ranges for both
271                    // the `#[cfg(FALSE)] field: bool` and the entire
272                    // `#[cfg(FALSE)] struct Foo { #[cfg(FALSE)] field: bool }`
273                    //
274                    // By starting processing from the replace range with the greatest
275                    // start position, we ensure that any (outer) replace range which
276                    // encloses another (inner) replace range will fully overwrite the
277                    // inner range's replacement.
278                    for (node_range, target) in node_replacements.into_iter().rev() {
279                        if !!node_range.0.is_empty() {
    {
        ::core::panicking::panic_fmt(format_args!("Cannot replace an empty node range: {0:?}",
                node_range.0));
    }
};assert!(
280                            !node_range.0.is_empty(),
281                            "Cannot replace an empty node range: {:?}",
282                            node_range.0
283                        );
284
285                        // Replace the tokens in range with zero or one `FlatToken::AttrsTarget`s,
286                        // plus enough `FlatToken::Empty`s to fill up the rest of the range. This
287                        // keeps the total length of `tokens` constant throughout the replacement
288                        // process, allowing us to do all replacements without adjusting indices.
289                        let target_len = target.is_some() as usize;
290                        tokens.splice(
291                            (node_range.0.start as usize)..(node_range.0.end as usize),
292                            target.into_iter().map(|target| FlatToken::AttrsTarget(target)).chain(
293                                iter::repeat(FlatToken::Empty)
294                                    .take(node_range.0.len() - target_len),
295                            ),
296                        );
297                    }
298                    make_attr_token_stream(tokens.into_iter(), *break_last_token)
299                }
300            }
301        }
302    }
303}
304
305/// A helper struct used when building an `AttrTokenStream` from
306/// a `LazyAttrTokenStream`. Both delimiter and non-delimited tokens
307/// are stored as `FlatToken::Token`. A vector of `FlatToken`s
308/// is then 'parsed' to build up an `AttrTokenStream` with nested
309/// `AttrTokenTree::Delimited` tokens.
310#[derive(#[automatically_derived]
impl ::core::fmt::Debug for FlatToken {
    #[inline]
    fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
        match self {
            FlatToken::Token(__self_0) =>
                ::core::fmt::Formatter::debug_tuple_field1_finish(f, "Token",
                    &__self_0),
            FlatToken::AttrsTarget(__self_0) =>
                ::core::fmt::Formatter::debug_tuple_field1_finish(f,
                    "AttrsTarget", &__self_0),
            FlatToken::Empty => ::core::fmt::Formatter::write_str(f, "Empty"),
        }
    }
}Debug, #[automatically_derived]
impl ::core::clone::Clone for FlatToken {
    #[inline]
    fn clone(&self) -> FlatToken {
        match self {
            FlatToken::Token(__self_0) =>
                FlatToken::Token(::core::clone::Clone::clone(__self_0)),
            FlatToken::AttrsTarget(__self_0) =>
                FlatToken::AttrsTarget(::core::clone::Clone::clone(__self_0)),
            FlatToken::Empty => FlatToken::Empty,
        }
    }
}Clone)]
311enum FlatToken {
312    /// A token - this holds both delimiter (e.g. '{' and '}')
313    /// and non-delimiter tokens
314    Token((Token, Spacing)),
315    /// Holds the `AttrsTarget` for an AST node. The `AttrsTarget` is inserted
316    /// directly into the constructed `AttrTokenStream` as an
317    /// `AttrTokenTree::AttrsTarget`.
318    AttrsTarget(AttrsTarget),
319    /// A special 'empty' token that is ignored during the conversion
320    /// to an `AttrTokenStream`. This is used to simplify the
321    /// handling of replace ranges.
322    Empty,
323}
324
325/// An `AttrTokenStream` is similar to a `TokenStream`, but with extra
326/// information about the tokens for attribute targets. This is used
327/// during expansion to perform early cfg-expansion, and to process attributes
328/// during proc-macro invocations.
329#[derive(#[automatically_derived]
impl ::core::clone::Clone for AttrTokenStream {
    #[inline]
    fn clone(&self) -> AttrTokenStream {
        AttrTokenStream(::core::clone::Clone::clone(&self.0))
    }
}Clone, #[automatically_derived]
impl ::core::fmt::Debug for AttrTokenStream {
    #[inline]
    fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
        ::core::fmt::Formatter::debug_tuple_field1_finish(f,
            "AttrTokenStream", &&self.0)
    }
}Debug, #[automatically_derived]
impl ::core::default::Default for AttrTokenStream {
    #[inline]
    fn default() -> AttrTokenStream {
        AttrTokenStream(::core::default::Default::default())
    }
}Default, const _: () =
    {
        impl<__E: ::rustc_span::SpanEncoder> ::rustc_serialize::Encodable<__E>
            for AttrTokenStream {
            fn encode(&self, __encoder: &mut __E) {
                match *self {
                    AttrTokenStream(ref __binding_0) => {
                        ::rustc_serialize::Encodable::<__E>::encode(__binding_0,
                            __encoder);
                    }
                }
            }
        }
    };Encodable, const _: () =
    {
        impl<__D: ::rustc_span::SpanDecoder> ::rustc_serialize::Decodable<__D>
            for AttrTokenStream {
            fn decode(__decoder: &mut __D) -> Self {
                AttrTokenStream(::rustc_serialize::Decodable::decode(__decoder))
            }
        }
    };Decodable)]
330pub struct AttrTokenStream(pub Arc<Vec<AttrTokenTree>>);
331
332/// Converts a flattened iterator of tokens (including open and close delimiter tokens) into an
333/// `AttrTokenStream`, creating an `AttrTokenTree::Delimited` for each matching pair of open and
334/// close delims.
335fn make_attr_token_stream(
336    iter: impl Iterator<Item = FlatToken>,
337    break_last_token: u32,
338) -> AttrTokenStream {
339    #[derive(#[automatically_derived]
impl ::core::fmt::Debug for FrameData {
    #[inline]
    fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
        ::core::fmt::Formatter::debug_struct_field2_finish(f, "FrameData",
            "open_delim_sp", &self.open_delim_sp, "inner", &&self.inner)
    }
}Debug)]
340    struct FrameData {
341        // This is `None` for the first frame, `Some` for all others.
342        open_delim_sp: Option<(Delimiter, Span, Spacing)>,
343        inner: Vec<AttrTokenTree>,
344    }
345    // The stack always has at least one element. Storing it separately makes for shorter code.
346    let mut stack_top = FrameData { open_delim_sp: None, inner: ::alloc::vec::Vec::new()vec![] };
347    let mut stack_rest = ::alloc::vec::Vec::new()vec![];
348    for flat_token in iter {
349        match flat_token {
350            FlatToken::Token((token @ Token { kind, span }, spacing)) => {
351                if let Some(delim) = kind.open_delim() {
352                    stack_rest.push(mem::replace(
353                        &mut stack_top,
354                        FrameData { open_delim_sp: Some((delim, span, spacing)), inner: ::alloc::vec::Vec::new()vec![] },
355                    ));
356                } else if let Some(delim) = kind.close_delim() {
357                    // If there's no matching opening delimiter, the token stream is malformed,
358                    // likely due to a improper delimiter positions in the source code.
359                    // It's not delimiter mismatch, and lexer can not detect it, so we just ignore it here.
360                    let Some(frame) = stack_rest.pop() else {
361                        return AttrTokenStream::new(stack_top.inner);
362                    };
363                    let frame_data = mem::replace(&mut stack_top, frame);
364                    let (open_delim, open_sp, open_spacing) = frame_data.open_delim_sp.unwrap();
365                    if !open_delim.eq_ignoring_invisible_origin(&delim) {
    {
        ::core::panicking::panic_fmt(format_args!("Mismatched open/close delims: open={0:?} close={1:?}",
                open_delim, span));
    }
};assert!(
366                        open_delim.eq_ignoring_invisible_origin(&delim),
367                        "Mismatched open/close delims: open={open_delim:?} close={span:?}"
368                    );
369                    let dspan = DelimSpan::from_pair(open_sp, span);
370                    let dspacing = DelimSpacing::new(open_spacing, spacing);
371                    let stream = AttrTokenStream::new(frame_data.inner);
372                    let delimited = AttrTokenTree::Delimited(dspan, dspacing, delim, stream);
373                    stack_top.inner.push(delimited);
374                } else {
375                    stack_top.inner.push(AttrTokenTree::Token(token, spacing))
376                }
377            }
378            FlatToken::AttrsTarget(target) => {
379                stack_top.inner.push(AttrTokenTree::AttrsTarget(target))
380            }
381            FlatToken::Empty => {}
382        }
383    }
384
385    if break_last_token > 0 {
386        let last_token = stack_top.inner.pop().unwrap();
387        if let AttrTokenTree::Token(last_token, spacing) = last_token {
388            let (unglued, _) = last_token.kind.break_two_token_op(break_last_token).unwrap();
389
390            // Tokens are always ASCII chars, so we can use byte arithmetic here.
391            let mut first_span = last_token.span.shrink_to_lo();
392            first_span =
393                first_span.with_hi(first_span.lo() + rustc_span::BytePos(break_last_token));
394
395            stack_top.inner.push(AttrTokenTree::Token(Token::new(unglued, first_span), spacing));
396        } else {
397            {
    ::core::panicking::panic_fmt(format_args!("Unexpected last token {0:?}",
            last_token));
}panic!("Unexpected last token {last_token:?}")
398        }
399    }
400    AttrTokenStream::new(stack_top.inner)
401}
402
403/// Like `TokenTree`, but for `AttrTokenStream`.
404#[derive(#[automatically_derived]
impl ::core::clone::Clone for AttrTokenTree {
    #[inline]
    fn clone(&self) -> AttrTokenTree {
        match self {
            AttrTokenTree::Token(__self_0, __self_1) =>
                AttrTokenTree::Token(::core::clone::Clone::clone(__self_0),
                    ::core::clone::Clone::clone(__self_1)),
            AttrTokenTree::Delimited(__self_0, __self_1, __self_2, __self_3)
                =>
                AttrTokenTree::Delimited(::core::clone::Clone::clone(__self_0),
                    ::core::clone::Clone::clone(__self_1),
                    ::core::clone::Clone::clone(__self_2),
                    ::core::clone::Clone::clone(__self_3)),
            AttrTokenTree::AttrsTarget(__self_0) =>
                AttrTokenTree::AttrsTarget(::core::clone::Clone::clone(__self_0)),
        }
    }
}Clone, #[automatically_derived]
impl ::core::fmt::Debug for AttrTokenTree {
    #[inline]
    fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
        match self {
            AttrTokenTree::Token(__self_0, __self_1) =>
                ::core::fmt::Formatter::debug_tuple_field2_finish(f, "Token",
                    __self_0, &__self_1),
            AttrTokenTree::Delimited(__self_0, __self_1, __self_2, __self_3)
                =>
                ::core::fmt::Formatter::debug_tuple_field4_finish(f,
                    "Delimited", __self_0, __self_1, __self_2, &__self_3),
            AttrTokenTree::AttrsTarget(__self_0) =>
                ::core::fmt::Formatter::debug_tuple_field1_finish(f,
                    "AttrsTarget", &__self_0),
        }
    }
}Debug, const _: () =
    {
        impl<__E: ::rustc_span::SpanEncoder> ::rustc_serialize::Encodable<__E>
            for AttrTokenTree {
            fn encode(&self, __encoder: &mut __E) {
                let disc =
                    match *self {
                        AttrTokenTree::Token(ref __binding_0, ref __binding_1) => {
                            0usize
                        }
                        AttrTokenTree::Delimited(ref __binding_0, ref __binding_1,
                            ref __binding_2, ref __binding_3) => {
                            1usize
                        }
                        AttrTokenTree::AttrsTarget(ref __binding_0) => { 2usize }
                    };
                ::rustc_serialize::Encoder::emit_u8(__encoder, disc as u8);
                match *self {
                    AttrTokenTree::Token(ref __binding_0, ref __binding_1) => {
                        ::rustc_serialize::Encodable::<__E>::encode(__binding_0,
                            __encoder);
                        ::rustc_serialize::Encodable::<__E>::encode(__binding_1,
                            __encoder);
                    }
                    AttrTokenTree::Delimited(ref __binding_0, ref __binding_1,
                        ref __binding_2, ref __binding_3) => {
                        ::rustc_serialize::Encodable::<__E>::encode(__binding_0,
                            __encoder);
                        ::rustc_serialize::Encodable::<__E>::encode(__binding_1,
                            __encoder);
                        ::rustc_serialize::Encodable::<__E>::encode(__binding_2,
                            __encoder);
                        ::rustc_serialize::Encodable::<__E>::encode(__binding_3,
                            __encoder);
                    }
                    AttrTokenTree::AttrsTarget(ref __binding_0) => {
                        ::rustc_serialize::Encodable::<__E>::encode(__binding_0,
                            __encoder);
                    }
                }
            }
        }
    };Encodable, const _: () =
    {
        impl<__D: ::rustc_span::SpanDecoder> ::rustc_serialize::Decodable<__D>
            for AttrTokenTree {
            fn decode(__decoder: &mut __D) -> Self {
                match ::rustc_serialize::Decoder::read_u8(__decoder) as usize
                    {
                    0usize => {
                        AttrTokenTree::Token(::rustc_serialize::Decodable::decode(__decoder),
                            ::rustc_serialize::Decodable::decode(__decoder))
                    }
                    1usize => {
                        AttrTokenTree::Delimited(::rustc_serialize::Decodable::decode(__decoder),
                            ::rustc_serialize::Decodable::decode(__decoder),
                            ::rustc_serialize::Decodable::decode(__decoder),
                            ::rustc_serialize::Decodable::decode(__decoder))
                    }
                    2usize => {
                        AttrTokenTree::AttrsTarget(::rustc_serialize::Decodable::decode(__decoder))
                    }
                    n => {
                        ::core::panicking::panic_fmt(format_args!("invalid enum variant tag while decoding `AttrTokenTree`, expected 0..3, actual {0}",
                                n));
                    }
                }
            }
        }
    };Decodable)]
405pub enum AttrTokenTree {
406    Token(Token, Spacing),
407    Delimited(DelimSpan, DelimSpacing, Delimiter, AttrTokenStream),
408    /// Stores the attributes for an attribute target,
409    /// along with the tokens for that attribute target.
410    /// See `AttrsTarget` for more information
411    AttrsTarget(AttrsTarget),
412}
413
414impl AttrTokenStream {
415    pub fn new(tokens: Vec<AttrTokenTree>) -> AttrTokenStream {
416        AttrTokenStream(Arc::new(tokens))
417    }
418
419    /// Converts this `AttrTokenStream` to a plain `Vec<TokenTree>`. During
420    /// conversion, any `AttrTokenTree::AttrsTarget` gets "flattened" back to a
421    /// `TokenStream`, as described in the comment on
422    /// `attrs_and_tokens_to_token_trees`.
423    pub fn to_token_trees(&self) -> Vec<TokenTree> {
424        let mut res = Vec::with_capacity(self.0.len());
425        for tree in self.0.iter() {
426            match tree {
427                AttrTokenTree::Token(inner, spacing) => {
428                    res.push(TokenTree::Token(inner.clone(), *spacing));
429                }
430                AttrTokenTree::Delimited(span, spacing, delim, stream) => {
431                    res.push(TokenTree::Delimited(
432                        *span,
433                        *spacing,
434                        *delim,
435                        TokenStream::new(stream.to_token_trees()),
436                    ))
437                }
438                AttrTokenTree::AttrsTarget(target) => {
439                    attrs_and_tokens_to_token_trees(&target.attrs, &target.tokens, &mut res);
440                }
441            }
442        }
443        res
444    }
445}
446
447// Converts multiple attributes and the tokens for a target AST node into token trees, and appends
448// them to `res`.
449//
450// Example: if the AST node is "fn f() { blah(); }", then:
451// - Simple if no attributes are present, e.g. "fn f() { blah(); }"
452// - Simple if only outer attribute are present, e.g. "#[outer1] #[outer2] fn f() { blah(); }"
453// - Trickier if inner attributes are present, because they must be moved within the AST node's
454//   tokens, e.g. "#[outer] fn f() { #![inner] blah() }"
455fn attrs_and_tokens_to_token_trees(
456    attrs: &[Attribute],
457    target_tokens: &LazyAttrTokenStream,
458    res: &mut Vec<TokenTree>,
459) {
460    let idx = attrs.partition_point(|attr| #[allow(non_exhaustive_omitted_patterns)] match attr.style {
    crate::AttrStyle::Outer => true,
    _ => false,
}matches!(attr.style, crate::AttrStyle::Outer));
461    let (outer_attrs, inner_attrs) = attrs.split_at(idx);
462
463    // Add outer attribute tokens.
464    for attr in outer_attrs {
465        res.extend(attr.token_trees());
466    }
467
468    // Add target AST node tokens.
469    res.extend(target_tokens.to_attr_token_stream().to_token_trees());
470
471    // Insert inner attribute tokens.
472    if !inner_attrs.is_empty() {
473        let found = insert_inner_attrs(inner_attrs, res);
474        if !found {
    {
        ::core::panicking::panic_fmt(format_args!("Failed to find trailing delimited group in: {0:?}",
                res));
    }
};assert!(found, "Failed to find trailing delimited group in: {res:?}");
475    }
476
477    // Inner attributes are only supported on blocks, functions, impls, and
478    // modules. All of these have their inner attributes placed at the
479    // beginning of the rightmost outermost braced group:
480    // e.g. `fn foo() { #![my_attr] }`. (Note: the braces may be within
481    // invisible delimiters.)
482    //
483    // Therefore, we can insert them back into the right location without
484    // needing to do any extra position tracking.
485    //
486    // Note: Outline modules are an exception - they can have attributes like
487    // `#![my_attr]` at the start of a file. Support for custom attributes in
488    // this position is not properly implemented - we always synthesize fake
489    // tokens, so we never reach this code.
490    fn insert_inner_attrs(inner_attrs: &[Attribute], tts: &mut Vec<TokenTree>) -> bool {
491        for tree in tts.iter_mut().rev() {
492            if let TokenTree::Delimited(span, spacing, Delimiter::Brace, stream) = tree {
493                // Found it: the rightmost, outermost braced group.
494                let mut tts = ::alloc::vec::Vec::new()vec![];
495                for inner_attr in inner_attrs {
496                    tts.extend(inner_attr.token_trees());
497                }
498                tts.extend(stream.0.iter().cloned());
499                let stream = TokenStream::new(tts);
500                *tree = TokenTree::Delimited(*span, *spacing, Delimiter::Brace, stream);
501                return true;
502            } else if let TokenTree::Delimited(span, spacing, Delimiter::Invisible(src), stream) =
503                tree
504            {
505                // Recurse inside invisible delimiters.
506                let mut vec: Vec<_> = stream.iter().cloned().collect();
507                if insert_inner_attrs(inner_attrs, &mut vec) {
508                    *tree = TokenTree::Delimited(
509                        *span,
510                        *spacing,
511                        Delimiter::Invisible(*src),
512                        TokenStream::new(vec),
513                    );
514                    return true;
515                }
516            }
517        }
518        false
519    }
520}
521
522/// Stores the tokens for an attribute target, along
523/// with its attributes.
524///
525/// This is constructed during parsing when we need to capture
526/// tokens, for `cfg` and `cfg_attr` attributes.
527///
528/// For example, `#[cfg(FALSE)] struct Foo {}` would
529/// have an `attrs` field containing the `#[cfg(FALSE)]` attr,
530/// and a `tokens` field storing the (unparsed) tokens `struct Foo {}`
531///
532/// The `cfg`/`cfg_attr` processing occurs in
533/// `StripUnconfigured::configure_tokens`.
534#[derive(#[automatically_derived]
impl ::core::clone::Clone for AttrsTarget {
    #[inline]
    fn clone(&self) -> AttrsTarget {
        AttrsTarget {
            attrs: ::core::clone::Clone::clone(&self.attrs),
            tokens: ::core::clone::Clone::clone(&self.tokens),
        }
    }
}Clone, #[automatically_derived]
impl ::core::fmt::Debug for AttrsTarget {
    #[inline]
    fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
        ::core::fmt::Formatter::debug_struct_field2_finish(f, "AttrsTarget",
            "attrs", &self.attrs, "tokens", &&self.tokens)
    }
}Debug, const _: () =
    {
        impl<__E: ::rustc_span::SpanEncoder> ::rustc_serialize::Encodable<__E>
            for AttrsTarget {
            fn encode(&self, __encoder: &mut __E) {
                match *self {
                    AttrsTarget {
                        attrs: ref __binding_0, tokens: ref __binding_1 } => {
                        ::rustc_serialize::Encodable::<__E>::encode(__binding_0,
                            __encoder);
                        ::rustc_serialize::Encodable::<__E>::encode(__binding_1,
                            __encoder);
                    }
                }
            }
        }
    };Encodable, const _: () =
    {
        impl<__D: ::rustc_span::SpanDecoder> ::rustc_serialize::Decodable<__D>
            for AttrsTarget {
            fn decode(__decoder: &mut __D) -> Self {
                AttrsTarget {
                    attrs: ::rustc_serialize::Decodable::decode(__decoder),
                    tokens: ::rustc_serialize::Decodable::decode(__decoder),
                }
            }
        }
    };Decodable)]
535pub struct AttrsTarget {
536    /// Attributes, both outer and inner.
537    /// These are stored in the original order that they were parsed in.
538    pub attrs: AttrVec,
539    /// The underlying tokens for the attribute target that `attrs`
540    /// are applied to
541    pub tokens: LazyAttrTokenStream,
542}
543
544/// Indicates whether a token can join with the following token to form a
545/// compound token. Used for conversions to `proc_macro::Spacing`. Also used to
546/// guide pretty-printing, which is where the `JointHidden` value (which isn't
547/// part of `proc_macro::Spacing`) comes in useful.
548#[derive(#[automatically_derived]
impl ::core::clone::Clone for Spacing {
    #[inline]
    fn clone(&self) -> Spacing { *self }
}Clone, #[automatically_derived]
impl ::core::marker::Copy for Spacing { }Copy, #[automatically_derived]
impl ::core::fmt::Debug for Spacing {
    #[inline]
    fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
        ::core::fmt::Formatter::write_str(f,
            match self {
                Spacing::Alone => "Alone",
                Spacing::Joint => "Joint",
                Spacing::JointHidden => "JointHidden",
            })
    }
}Debug, #[automatically_derived]
impl ::core::cmp::PartialEq for Spacing {
    #[inline]
    fn eq(&self, other: &Spacing) -> bool {
        let __self_discr = ::core::intrinsics::discriminant_value(self);
        let __arg1_discr = ::core::intrinsics::discriminant_value(other);
        __self_discr == __arg1_discr
    }
}PartialEq, #[automatically_derived]
impl ::core::cmp::Eq for Spacing {
    #[inline]
    #[doc(hidden)]
    #[coverage(off)]
    fn assert_receiver_is_total_eq(&self) {}
}Eq, #[automatically_derived]
impl ::core::hash::Hash for Spacing {
    #[inline]
    fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) {
        let __self_discr = ::core::intrinsics::discriminant_value(self);
        ::core::hash::Hash::hash(&__self_discr, state)
    }
}Hash, const _: () =
    {
        impl<__E: ::rustc_span::SpanEncoder> ::rustc_serialize::Encodable<__E>
            for Spacing {
            fn encode(&self, __encoder: &mut __E) {
                let disc =
                    match *self {
                        Spacing::Alone => { 0usize }
                        Spacing::Joint => { 1usize }
                        Spacing::JointHidden => { 2usize }
                    };
                ::rustc_serialize::Encoder::emit_u8(__encoder, disc as u8);
                match *self {
                    Spacing::Alone => {}
                    Spacing::Joint => {}
                    Spacing::JointHidden => {}
                }
            }
        }
    };Encodable, const _: () =
    {
        impl<__D: ::rustc_span::SpanDecoder> ::rustc_serialize::Decodable<__D>
            for Spacing {
            fn decode(__decoder: &mut __D) -> Self {
                match ::rustc_serialize::Decoder::read_u8(__decoder) as usize
                    {
                    0usize => { Spacing::Alone }
                    1usize => { Spacing::Joint }
                    2usize => { Spacing::JointHidden }
                    n => {
                        ::core::panicking::panic_fmt(format_args!("invalid enum variant tag while decoding `Spacing`, expected 0..3, actual {0}",
                                n));
                    }
                }
            }
        }
    };Decodable, const _: () =
    {
        impl<__CTX> ::rustc_data_structures::stable_hasher::HashStable<__CTX>
            for Spacing where __CTX: crate::HashStableContext {
            #[inline]
            fn hash_stable(&self, __hcx: &mut __CTX,
                __hasher:
                    &mut ::rustc_data_structures::stable_hasher::StableHasher) {
                ::std::mem::discriminant(self).hash_stable(__hcx, __hasher);
                match *self {
                    Spacing::Alone => {}
                    Spacing::Joint => {}
                    Spacing::JointHidden => {}
                }
            }
        }
    };HashStable_Generic)]
549pub enum Spacing {
550    /// The token cannot join with the following token to form a compound
551    /// token.
552    ///
553    /// In token streams parsed from source code, the compiler will use `Alone`
554    /// for any token immediately followed by whitespace, a non-doc comment, or
555    /// EOF.
556    ///
557    /// When constructing token streams within the compiler, use this for each
558    /// token that (a) should be pretty-printed with a space after it, or (b)
559    /// is the last token in the stream. (In the latter case the choice of
560    /// spacing doesn't matter because it is never used for the last token. We
561    /// arbitrarily use `Alone`.)
562    ///
563    /// Converts to `proc_macro::Spacing::Alone`, and
564    /// `proc_macro::Spacing::Alone` converts back to this.
565    Alone,
566
567    /// The token can join with the following token to form a compound token.
568    ///
569    /// In token streams parsed from source code, the compiler will use `Joint`
570    /// for any token immediately followed by punctuation (as determined by
571    /// `Token::is_punct`).
572    ///
573    /// When constructing token streams within the compiler, use this for each
574    /// token that (a) should be pretty-printed without a space after it, and
575    /// (b) is followed by a punctuation token.
576    ///
577    /// Converts to `proc_macro::Spacing::Joint`, and
578    /// `proc_macro::Spacing::Joint` converts back to this.
579    Joint,
580
581    /// The token can join with the following token to form a compound token,
582    /// but this will not be visible at the proc macro level. (This is what the
583    /// `Hidden` means; see below.)
584    ///
585    /// In token streams parsed from source code, the compiler will use
586    /// `JointHidden` for any token immediately followed by anything not
587    /// covered by the `Alone` and `Joint` cases: an identifier, lifetime,
588    /// literal, delimiter, doc comment.
589    ///
590    /// When constructing token streams, use this for each token that (a)
591    /// should be pretty-printed without a space after it, and (b) is followed
592    /// by a non-punctuation token.
593    ///
594    /// Converts to `proc_macro::Spacing::Alone`, but
595    /// `proc_macro::Spacing::Alone` converts back to `token::Spacing::Alone`.
596    /// Because of that, pretty-printing of `TokenStream`s produced by proc
597    /// macros is unavoidably uglier (with more whitespace between tokens) than
598    /// pretty-printing of `TokenStream`'s produced by other means (i.e. parsed
599    /// source code, internally constructed token streams, and token streams
600    /// produced by declarative macros).
601    JointHidden,
602}
603
604/// A `TokenStream` is an abstract sequence of tokens, organized into [`TokenTree`]s.
605#[derive(#[automatically_derived]
impl ::core::clone::Clone for TokenStream {
    #[inline]
    fn clone(&self) -> TokenStream {
        TokenStream(::core::clone::Clone::clone(&self.0))
    }
}Clone, #[automatically_derived]
impl ::core::fmt::Debug for TokenStream {
    #[inline]
    fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
        ::core::fmt::Formatter::debug_tuple_field1_finish(f, "TokenStream",
            &&self.0)
    }
}Debug, #[automatically_derived]
impl ::core::default::Default for TokenStream {
    #[inline]
    fn default() -> TokenStream {
        TokenStream(::core::default::Default::default())
    }
}Default, #[automatically_derived]
impl ::core::cmp::PartialEq for TokenStream {
    #[inline]
    fn eq(&self, other: &TokenStream) -> bool { self.0 == other.0 }
}PartialEq, #[automatically_derived]
impl ::core::cmp::Eq for TokenStream {
    #[inline]
    #[doc(hidden)]
    #[coverage(off)]
    fn assert_receiver_is_total_eq(&self) {
        let _: ::core::cmp::AssertParamIsEq<Arc<Vec<TokenTree>>>;
    }
}Eq, #[automatically_derived]
impl ::core::hash::Hash for TokenStream {
    #[inline]
    fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) {
        ::core::hash::Hash::hash(&self.0, state)
    }
}Hash, const _: () =
    {
        impl<__E: ::rustc_span::SpanEncoder> ::rustc_serialize::Encodable<__E>
            for TokenStream {
            fn encode(&self, __encoder: &mut __E) {
                match *self {
                    TokenStream(ref __binding_0) => {
                        ::rustc_serialize::Encodable::<__E>::encode(__binding_0,
                            __encoder);
                    }
                }
            }
        }
    };Encodable, const _: () =
    {
        impl<__D: ::rustc_span::SpanDecoder> ::rustc_serialize::Decodable<__D>
            for TokenStream {
            fn decode(__decoder: &mut __D) -> Self {
                TokenStream(::rustc_serialize::Decodable::decode(__decoder))
            }
        }
    };Decodable)]
606pub struct TokenStream(pub(crate) Arc<Vec<TokenTree>>);
607
608impl TokenStream {
609    pub fn new(tts: Vec<TokenTree>) -> TokenStream {
610        TokenStream(Arc::new(tts))
611    }
612
613    pub fn is_empty(&self) -> bool {
614        self.0.is_empty()
615    }
616
617    pub fn len(&self) -> usize {
618        self.0.len()
619    }
620
621    pub fn get(&self, index: usize) -> Option<&TokenTree> {
622        self.0.get(index)
623    }
624
625    pub fn iter(&self) -> TokenStreamIter<'_> {
626        TokenStreamIter::new(self)
627    }
628
629    /// Create a token stream containing a single token with alone spacing. The
630    /// spacing used for the final token in a constructed stream doesn't matter
631    /// because it's never used. In practice we arbitrarily use
632    /// `Spacing::Alone`.
633    pub fn token_alone(kind: TokenKind, span: Span) -> TokenStream {
634        TokenStream::new(<[_]>::into_vec(::alloc::boxed::box_new([TokenTree::token_alone(kind, span)]))vec![TokenTree::token_alone(kind, span)])
635    }
636
637    pub fn from_ast(node: &(impl HasAttrs + HasTokens + fmt::Debug)) -> TokenStream {
638        let tokens = node.tokens().unwrap_or_else(|| {
    ::core::panicking::panic_fmt(format_args!("missing tokens for node: {0:?}",
            node));
}panic!("missing tokens for node: {:?}", node));
639        let mut tts = ::alloc::vec::Vec::new()vec![];
640        attrs_and_tokens_to_token_trees(node.attrs(), tokens, &mut tts);
641        TokenStream::new(tts)
642    }
643
644    // If `vec` is not empty, try to glue `tt` onto its last token. The return
645    // value indicates if gluing took place.
646    fn try_glue_to_last(vec: &mut Vec<TokenTree>, tt: &TokenTree) -> bool {
647        if let Some(TokenTree::Token(last_tok, Spacing::Joint | Spacing::JointHidden)) = vec.last()
648            && let TokenTree::Token(tok, spacing) = tt
649            && let Some(glued_tok) = last_tok.glue(tok)
650        {
651            // ...then overwrite the last token tree in `vec` with the
652            // glued token, and skip the first token tree from `stream`.
653            *vec.last_mut().unwrap() = TokenTree::Token(glued_tok, *spacing);
654            true
655        } else {
656            false
657        }
658    }
659
660    /// Push `tt` onto the end of the stream, possibly gluing it to the last
661    /// token. Uses `make_mut` to maximize efficiency.
662    pub fn push_tree(&mut self, tt: TokenTree) {
663        let vec_mut = Arc::make_mut(&mut self.0);
664
665        if Self::try_glue_to_last(vec_mut, &tt) {
666            // nothing else to do
667        } else {
668            vec_mut.push(tt);
669        }
670    }
671
672    /// Push `stream` onto the end of the stream, possibly gluing the first
673    /// token tree to the last token. (No other token trees will be glued.)
674    /// Uses `make_mut` to maximize efficiency.
675    pub fn push_stream(&mut self, stream: TokenStream) {
676        let vec_mut = Arc::make_mut(&mut self.0);
677
678        let stream_iter = stream.0.iter().cloned();
679
680        if let Some(first) = stream.0.first()
681            && Self::try_glue_to_last(vec_mut, first)
682        {
683            // Now skip the first token tree from `stream`.
684            vec_mut.extend(stream_iter.skip(1));
685        } else {
686            // Append all of `stream`.
687            vec_mut.extend(stream_iter);
688        }
689    }
690
691    pub fn chunks(&self, chunk_size: usize) -> core::slice::Chunks<'_, TokenTree> {
692        self.0.chunks(chunk_size)
693    }
694
695    /// Desugar doc comments like `/// foo` in the stream into `#[doc =
696    /// r"foo"]`. Modifies the `TokenStream` via `Arc::make_mut`, but as little
697    /// as possible.
698    pub fn desugar_doc_comments(&mut self) {
699        if let Some(desugared_stream) = desugar_inner(self.clone()) {
700            *self = desugared_stream;
701        }
702
703        // The return value is `None` if nothing in `stream` changed.
704        fn desugar_inner(mut stream: TokenStream) -> Option<TokenStream> {
705            let mut i = 0;
706            let mut modified = false;
707            while let Some(tt) = stream.0.get(i) {
708                match tt {
709                    &TokenTree::Token(
710                        Token { kind: token::DocComment(_, attr_style, data), span },
711                        _spacing,
712                    ) => {
713                        let desugared = desugared_tts(attr_style, data, span);
714                        let desugared_len = desugared.len();
715                        Arc::make_mut(&mut stream.0).splice(i..i + 1, desugared);
716                        modified = true;
717                        i += desugared_len;
718                    }
719
720                    &TokenTree::Token(..) => i += 1,
721
722                    &TokenTree::Delimited(sp, spacing, delim, ref delim_stream) => {
723                        if let Some(desugared_delim_stream) = desugar_inner(delim_stream.clone()) {
724                            let new_tt =
725                                TokenTree::Delimited(sp, spacing, delim, desugared_delim_stream);
726                            Arc::make_mut(&mut stream.0)[i] = new_tt;
727                            modified = true;
728                        }
729                        i += 1;
730                    }
731                }
732            }
733            if modified { Some(stream) } else { None }
734        }
735
736        fn desugared_tts(attr_style: AttrStyle, data: Symbol, span: Span) -> Vec<TokenTree> {
737            // Searches for the occurrences of `"#*` and returns the minimum number of `#`s
738            // required to wrap the text. E.g.
739            // - `abc d` is wrapped as `r"abc d"` (num_of_hashes = 0)
740            // - `abc "d"` is wrapped as `r#"abc "d""#` (num_of_hashes = 1)
741            // - `abc "##d##"` is wrapped as `r###"abc ##"d"##"###` (num_of_hashes = 3)
742            let mut num_of_hashes = 0;
743            let mut count = 0;
744            for ch in data.as_str().chars() {
745                count = match ch {
746                    '"' => 1,
747                    '#' if count > 0 => count + 1,
748                    _ => 0,
749                };
750                num_of_hashes = cmp::max(num_of_hashes, count);
751            }
752
753            // `/// foo` becomes `[doc = r"foo"]`.
754            let delim_span = DelimSpan::from_single(span);
755            let body = TokenTree::Delimited(
756                delim_span,
757                DelimSpacing::new(Spacing::JointHidden, Spacing::Alone),
758                Delimiter::Bracket,
759                [
760                    TokenTree::token_alone(token::Ident(sym::doc, token::IdentIsRaw::No), span),
761                    TokenTree::token_alone(token::Eq, span),
762                    TokenTree::token_alone(
763                        TokenKind::lit(token::StrRaw(num_of_hashes), data, None),
764                        span,
765                    ),
766                ]
767                .into_iter()
768                .collect::<TokenStream>(),
769            );
770
771            if attr_style == AttrStyle::Inner {
772                <[_]>::into_vec(::alloc::boxed::box_new([TokenTree::token_joint(token::Pound,
                    span), TokenTree::token_joint_hidden(token::Bang, span),
                body]))vec![
773                    TokenTree::token_joint(token::Pound, span),
774                    TokenTree::token_joint_hidden(token::Bang, span),
775                    body,
776                ]
777            } else {
778                <[_]>::into_vec(::alloc::boxed::box_new([TokenTree::token_joint_hidden(token::Pound,
                    span), body]))vec![TokenTree::token_joint_hidden(token::Pound, span), body]
779            }
780        }
781    }
782
783    /// Given a `TokenStream` with a `Stream` of only two arguments, return a new `TokenStream`
784    /// separating the two arguments with a comma for diagnostic suggestions.
785    pub fn add_comma(&self) -> Option<(TokenStream, Span)> {
786        // Used to suggest if a user writes `foo!(a b);`
787        let mut suggestion = None;
788        let mut iter = self.0.iter().enumerate().peekable();
789        while let Some((pos, ts)) = iter.next() {
790            if let Some((_, next)) = iter.peek() {
791                let sp = match (&ts, &next) {
792                    (_, TokenTree::Token(Token { kind: token::Comma, .. }, _)) => continue,
793                    (
794                        TokenTree::Token(token_left, Spacing::Alone),
795                        TokenTree::Token(token_right, _),
796                    ) if (token_left.is_non_reserved_ident() || token_left.is_lit())
797                        && (token_right.is_non_reserved_ident() || token_right.is_lit()) =>
798                    {
799                        token_left.span
800                    }
801                    (TokenTree::Delimited(sp, ..), _) => sp.entire(),
802                    _ => continue,
803                };
804                let sp = sp.shrink_to_hi();
805                let comma = TokenTree::token_alone(token::Comma, sp);
806                suggestion = Some((pos, comma, sp));
807            }
808        }
809        if let Some((pos, comma, sp)) = suggestion {
810            let mut new_stream = Vec::with_capacity(self.0.len() + 1);
811            let parts = self.0.split_at(pos + 1);
812            new_stream.extend_from_slice(parts.0);
813            new_stream.push(comma);
814            new_stream.extend_from_slice(parts.1);
815            return Some((TokenStream::new(new_stream), sp));
816        }
817        None
818    }
819}
820
821impl FromIterator<TokenTree> for TokenStream {
822    fn from_iter<I: IntoIterator<Item = TokenTree>>(iter: I) -> Self {
823        TokenStream::new(iter.into_iter().collect::<Vec<TokenTree>>())
824    }
825}
826
827impl<CTX> HashStable<CTX> for TokenStream
828where
829    CTX: crate::HashStableContext,
830{
831    fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) {
832        for sub_tt in self.iter() {
833            sub_tt.hash_stable(hcx, hasher);
834        }
835    }
836}
837
838#[derive(#[automatically_derived]
impl<'t> ::core::clone::Clone for TokenStreamIter<'t> {
    #[inline]
    fn clone(&self) -> TokenStreamIter<'t> {
        TokenStreamIter {
            stream: ::core::clone::Clone::clone(&self.stream),
            index: ::core::clone::Clone::clone(&self.index),
        }
    }
}Clone)]
839pub struct TokenStreamIter<'t> {
840    stream: &'t TokenStream,
841    index: usize,
842}
843
844impl<'t> TokenStreamIter<'t> {
845    fn new(stream: &'t TokenStream) -> Self {
846        TokenStreamIter { stream, index: 0 }
847    }
848
849    // Peeking could be done via `Peekable`, but most iterators need peeking,
850    // and this is simple and avoids the need to use `peekable` and `Peekable`
851    // at all the use sites.
852    pub fn peek(&self) -> Option<&'t TokenTree> {
853        self.stream.0.get(self.index)
854    }
855}
856
857impl<'t> Iterator for TokenStreamIter<'t> {
858    type Item = &'t TokenTree;
859
860    fn next(&mut self) -> Option<&'t TokenTree> {
861        self.stream.0.get(self.index).map(|tree| {
862            self.index += 1;
863            tree
864        })
865    }
866}
867
868#[derive(#[automatically_derived]
impl ::core::clone::Clone for TokenTreeCursor {
    #[inline]
    fn clone(&self) -> TokenTreeCursor {
        TokenTreeCursor {
            stream: ::core::clone::Clone::clone(&self.stream),
            index: ::core::clone::Clone::clone(&self.index),
        }
    }
}Clone, #[automatically_derived]
impl ::core::fmt::Debug for TokenTreeCursor {
    #[inline]
    fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
        ::core::fmt::Formatter::debug_struct_field2_finish(f,
            "TokenTreeCursor", "stream", &self.stream, "index", &&self.index)
    }
}Debug)]
869pub struct TokenTreeCursor {
870    stream: TokenStream,
871    /// Points to the current token tree in the stream. In `TokenCursor::curr`,
872    /// this can be any token tree. In `TokenCursor::stack`, this is always a
873    /// `TokenTree::Delimited`.
874    index: usize,
875}
876
877impl TokenTreeCursor {
878    #[inline]
879    pub fn new(stream: TokenStream) -> Self {
880        TokenTreeCursor { stream, index: 0 }
881    }
882
883    #[inline]
884    pub fn curr(&self) -> Option<&TokenTree> {
885        self.stream.get(self.index)
886    }
887
888    pub fn look_ahead(&self, n: usize) -> Option<&TokenTree> {
889        self.stream.get(self.index + n)
890    }
891
892    #[inline]
893    pub fn bump(&mut self) {
894        self.index += 1;
895    }
896
897    // For skipping ahead in rare circumstances.
898    #[inline]
899    pub fn bump_to_end(&mut self) {
900        self.index = self.stream.len();
901    }
902}
903
904/// A `TokenStream` cursor that produces `Token`s. It's a bit odd that
905/// we (a) lex tokens into a nice tree structure (`TokenStream`), and then (b)
906/// use this type to emit them as a linear sequence. But a linear sequence is
907/// what the parser expects, for the most part.
908#[derive(#[automatically_derived]
impl ::core::clone::Clone for TokenCursor {
    #[inline]
    fn clone(&self) -> TokenCursor {
        TokenCursor {
            curr: ::core::clone::Clone::clone(&self.curr),
            stack: ::core::clone::Clone::clone(&self.stack),
        }
    }
}Clone, #[automatically_derived]
impl ::core::fmt::Debug for TokenCursor {
    #[inline]
    fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
        ::core::fmt::Formatter::debug_struct_field2_finish(f, "TokenCursor",
            "curr", &self.curr, "stack", &&self.stack)
    }
}Debug)]
909pub struct TokenCursor {
910    // Cursor for the current (innermost) token stream. The index within the
911    // cursor can point to any token tree in the stream (or one past the end).
912    // The delimiters for this token stream are found in `self.stack.last()`;
913    // if that is `None` we are in the outermost token stream which never has
914    // delimiters.
915    pub curr: TokenTreeCursor,
916
917    // Token streams surrounding the current one. The index within each cursor
918    // always points to a `TokenTree::Delimited`.
919    pub stack: Vec<TokenTreeCursor>,
920}
921
922impl TokenCursor {
923    pub fn next(&mut self) -> (Token, Spacing) {
924        self.inlined_next()
925    }
926
927    /// This always-inlined version should only be used on hot code paths.
928    #[inline(always)]
929    pub fn inlined_next(&mut self) -> (Token, Spacing) {
930        loop {
931            // FIXME: we currently don't return `Delimiter::Invisible` open/close delims. To fix
932            // #67062 we will need to, whereupon the `delim != Delimiter::Invisible` conditions
933            // below can be removed.
934            if let Some(tree) = self.curr.curr() {
935                match tree {
936                    &TokenTree::Token(token, spacing) => {
937                        if true {
    if !!token.kind.is_delim() {
        ::core::panicking::panic("assertion failed: !token.kind.is_delim()")
    };
};debug_assert!(!token.kind.is_delim());
938                        let res = (token, spacing);
939                        self.curr.bump();
940                        return res;
941                    }
942                    &TokenTree::Delimited(sp, spacing, delim, ref tts) => {
943                        let trees = TokenTreeCursor::new(tts.clone());
944                        self.stack.push(mem::replace(&mut self.curr, trees));
945                        if !delim.skip() {
946                            return (Token::new(delim.as_open_token_kind(), sp.open), spacing.open);
947                        }
948                        // No open delimiter to return; continue on to the next iteration.
949                    }
950                };
951            } else if let Some(parent) = self.stack.pop() {
952                // We have exhausted this token stream. Move back to its parent token stream.
953                let Some(&TokenTree::Delimited(span, spacing, delim, _)) = parent.curr() else {
954                    { ::core::panicking::panic_fmt(format_args!("parent should be Delimited")); }panic!("parent should be Delimited")
955                };
956                self.curr = parent;
957                self.curr.bump(); // move past the `Delimited`
958                if !delim.skip() {
959                    return (Token::new(delim.as_close_token_kind(), span.close), spacing.close);
960                }
961                // No close delimiter to return; continue on to the next iteration.
962            } else {
963                // We have exhausted the outermost token stream. The use of
964                // `Spacing::Alone` is arbitrary and immaterial, because the
965                // `Eof` token's spacing is never used.
966                return (Token::new(token::Eof, DUMMY_SP), Spacing::Alone);
967            }
968        }
969    }
970}
971
972#[derive(#[automatically_derived]
impl ::core::fmt::Debug for DelimSpan {
    #[inline]
    fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
        ::core::fmt::Formatter::debug_struct_field2_finish(f, "DelimSpan",
            "open", &self.open, "close", &&self.close)
    }
}Debug, #[automatically_derived]
impl ::core::marker::Copy for DelimSpan { }Copy, #[automatically_derived]
impl ::core::clone::Clone for DelimSpan {
    #[inline]
    fn clone(&self) -> DelimSpan {
        let _: ::core::clone::AssertParamIsClone<Span>;
        *self
    }
}Clone, #[automatically_derived]
impl ::core::cmp::PartialEq for DelimSpan {
    #[inline]
    fn eq(&self, other: &DelimSpan) -> bool {
        self.open == other.open && self.close == other.close
    }
}PartialEq, #[automatically_derived]
impl ::core::cmp::Eq for DelimSpan {
    #[inline]
    #[doc(hidden)]
    #[coverage(off)]
    fn assert_receiver_is_total_eq(&self) {
        let _: ::core::cmp::AssertParamIsEq<Span>;
    }
}Eq, #[automatically_derived]
impl ::core::hash::Hash for DelimSpan {
    #[inline]
    fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) {
        ::core::hash::Hash::hash(&self.open, state);
        ::core::hash::Hash::hash(&self.close, state)
    }
}Hash)]
973#[derive(const _: () =
    {
        impl<__E: ::rustc_span::SpanEncoder> ::rustc_serialize::Encodable<__E>
            for DelimSpan {
            fn encode(&self, __encoder: &mut __E) {
                match *self {
                    DelimSpan { open: ref __binding_0, close: ref __binding_1 }
                        => {
                        ::rustc_serialize::Encodable::<__E>::encode(__binding_0,
                            __encoder);
                        ::rustc_serialize::Encodable::<__E>::encode(__binding_1,
                            __encoder);
                    }
                }
            }
        }
    };Encodable, const _: () =
    {
        impl<__D: ::rustc_span::SpanDecoder> ::rustc_serialize::Decodable<__D>
            for DelimSpan {
            fn decode(__decoder: &mut __D) -> Self {
                DelimSpan {
                    open: ::rustc_serialize::Decodable::decode(__decoder),
                    close: ::rustc_serialize::Decodable::decode(__decoder),
                }
            }
        }
    };Decodable, const _: () =
    {
        impl<__CTX> ::rustc_data_structures::stable_hasher::HashStable<__CTX>
            for DelimSpan where __CTX: crate::HashStableContext {
            #[inline]
            fn hash_stable(&self, __hcx: &mut __CTX,
                __hasher:
                    &mut ::rustc_data_structures::stable_hasher::StableHasher) {
                match *self {
                    DelimSpan { open: ref __binding_0, close: ref __binding_1 }
                        => {
                        { __binding_0.hash_stable(__hcx, __hasher); }
                        { __binding_1.hash_stable(__hcx, __hasher); }
                    }
                }
            }
        }
    };HashStable_Generic, const _: () =
    {
        impl<'__ast, __V> crate::visit::Walkable<'__ast, __V> for DelimSpan
            where __V: crate::visit::Visitor<'__ast> {
            fn walk_ref(&'__ast self, __visitor: &mut __V) -> __V::Result {
                match *self {
                    DelimSpan { open: ref __binding_0, close: ref __binding_1 }
                        => {
                        {
                            match ::rustc_ast_ir::visit::VisitorResult::branch(crate::visit::Visitable::visit(__binding_0,
                                        __visitor, ())) {
                                core::ops::ControlFlow::Continue(()) =>
                                    (),
                                    #[allow(unreachable_code)]
                                    core::ops::ControlFlow::Break(r) => {
                                    return ::rustc_ast_ir::visit::VisitorResult::from_residual(r);
                                }
                            }
                        }
                        {
                            match ::rustc_ast_ir::visit::VisitorResult::branch(crate::visit::Visitable::visit(__binding_1,
                                        __visitor, ())) {
                                core::ops::ControlFlow::Continue(()) =>
                                    (),
                                    #[allow(unreachable_code)]
                                    core::ops::ControlFlow::Break(r) => {
                                    return ::rustc_ast_ir::visit::VisitorResult::from_residual(r);
                                }
                            }
                        }
                    }
                }
                <__V::Result as rustc_ast_ir::visit::VisitorResult>::output()
            }
        }
        impl<__V> crate::mut_visit::MutWalkable<__V> for DelimSpan where
            __V: crate::mut_visit::MutVisitor {
            fn walk_mut(&mut self, __visitor: &mut __V) {
                match *self {
                    DelimSpan {
                        open: ref mut __binding_0, close: ref mut __binding_1 } => {
                        {
                            crate::mut_visit::MutVisitable::visit_mut(__binding_0,
                                __visitor, ())
                        }
                        {
                            crate::mut_visit::MutVisitable::visit_mut(__binding_1,
                                __visitor, ())
                        }
                    }
                }
            }
        }
    };Walkable)]
974pub struct DelimSpan {
975    pub open: Span,
976    pub close: Span,
977}
978
979impl DelimSpan {
980    pub fn from_single(sp: Span) -> Self {
981        DelimSpan { open: sp, close: sp }
982    }
983
984    pub fn from_pair(open: Span, close: Span) -> Self {
985        DelimSpan { open, close }
986    }
987
988    pub fn dummy() -> Self {
989        Self::from_single(DUMMY_SP)
990    }
991
992    pub fn entire(self) -> Span {
993        self.open.with_hi(self.close.hi())
994    }
995}
996
997#[derive(#[automatically_derived]
impl ::core::marker::Copy for DelimSpacing { }Copy, #[automatically_derived]
impl ::core::clone::Clone for DelimSpacing {
    #[inline]
    fn clone(&self) -> DelimSpacing {
        let _: ::core::clone::AssertParamIsClone<Spacing>;
        *self
    }
}Clone, #[automatically_derived]
impl ::core::fmt::Debug for DelimSpacing {
    #[inline]
    fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
        ::core::fmt::Formatter::debug_struct_field2_finish(f, "DelimSpacing",
            "open", &self.open, "close", &&self.close)
    }
}Debug, #[automatically_derived]
impl ::core::cmp::PartialEq for DelimSpacing {
    #[inline]
    fn eq(&self, other: &DelimSpacing) -> bool {
        self.open == other.open && self.close == other.close
    }
}PartialEq, #[automatically_derived]
impl ::core::cmp::Eq for DelimSpacing {
    #[inline]
    #[doc(hidden)]
    #[coverage(off)]
    fn assert_receiver_is_total_eq(&self) {
        let _: ::core::cmp::AssertParamIsEq<Spacing>;
    }
}Eq, #[automatically_derived]
impl ::core::hash::Hash for DelimSpacing {
    #[inline]
    fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) {
        ::core::hash::Hash::hash(&self.open, state);
        ::core::hash::Hash::hash(&self.close, state)
    }
}Hash, const _: () =
    {
        impl<__E: ::rustc_span::SpanEncoder> ::rustc_serialize::Encodable<__E>
            for DelimSpacing {
            fn encode(&self, __encoder: &mut __E) {
                match *self {
                    DelimSpacing { open: ref __binding_0, close: ref __binding_1
                        } => {
                        ::rustc_serialize::Encodable::<__E>::encode(__binding_0,
                            __encoder);
                        ::rustc_serialize::Encodable::<__E>::encode(__binding_1,
                            __encoder);
                    }
                }
            }
        }
    };Encodable, const _: () =
    {
        impl<__D: ::rustc_span::SpanDecoder> ::rustc_serialize::Decodable<__D>
            for DelimSpacing {
            fn decode(__decoder: &mut __D) -> Self {
                DelimSpacing {
                    open: ::rustc_serialize::Decodable::decode(__decoder),
                    close: ::rustc_serialize::Decodable::decode(__decoder),
                }
            }
        }
    };Decodable, const _: () =
    {
        impl<__CTX> ::rustc_data_structures::stable_hasher::HashStable<__CTX>
            for DelimSpacing where __CTX: crate::HashStableContext {
            #[inline]
            fn hash_stable(&self, __hcx: &mut __CTX,
                __hasher:
                    &mut ::rustc_data_structures::stable_hasher::StableHasher) {
                match *self {
                    DelimSpacing { open: ref __binding_0, close: ref __binding_1
                        } => {
                        { __binding_0.hash_stable(__hcx, __hasher); }
                        { __binding_1.hash_stable(__hcx, __hasher); }
                    }
                }
            }
        }
    };HashStable_Generic)]
998pub struct DelimSpacing {
999    pub open: Spacing,
1000    pub close: Spacing,
1001}
1002
1003impl DelimSpacing {
1004    pub fn new(open: Spacing, close: Spacing) -> DelimSpacing {
1005        DelimSpacing { open, close }
1006    }
1007}
1008
1009// Some types are used a lot. Make sure they don't unintentionally get bigger.
1010#[cfg(target_pointer_width = "64")]
1011mod size_asserts {
1012    use rustc_data_structures::static_assert_size;
1013
1014    use super::*;
1015    // tidy-alphabetical-start
1016    const _: [(); 8] = [(); ::std::mem::size_of::<AttrTokenStream>()];static_assert_size!(AttrTokenStream, 8);
1017    const _: [(); 32] = [(); ::std::mem::size_of::<AttrTokenTree>()];static_assert_size!(AttrTokenTree, 32);
1018    const _: [(); 8] = [(); ::std::mem::size_of::<LazyAttrTokenStream>()];static_assert_size!(LazyAttrTokenStream, 8);
1019    const _: [(); 88] = [(); ::std::mem::size_of::<LazyAttrTokenStreamInner>()];static_assert_size!(LazyAttrTokenStreamInner, 88);
1020    const _: [(); 8] = [(); ::std::mem::size_of::<Option<LazyAttrTokenStream>>()];static_assert_size!(Option<LazyAttrTokenStream>, 8); // must be small, used in many AST nodes
1021    const _: [(); 8] = [(); ::std::mem::size_of::<TokenStream>()];static_assert_size!(TokenStream, 8);
1022    const _: [(); 32] = [(); ::std::mem::size_of::<TokenTree>()];static_assert_size!(TokenTree, 32);
1023    // tidy-alphabetical-end
1024}