// rustc_ast/tokenstream.rs

//! # Token Streams
//!
//! `TokenStream`s represent syntactic objects before they are converted into ASTs.
//! A `TokenStream` is, roughly speaking, a sequence of [`TokenTree`]s,
//! which are themselves a single [`Token`] or a `Delimited` subsequence of tokens.

use std::borrow::Cow;
use std::hash::Hash;
use std::ops::Range;
use std::sync::Arc;
use std::{cmp, fmt, iter, mem};

use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::sync;
use rustc_macros::{Decodable, Encodable, HashStable_Generic, Walkable};
use rustc_serialize::{Decodable, Encodable};
use rustc_span::{DUMMY_SP, Span, SpanDecoder, SpanEncoder, Symbol, sym};
use thin_vec::ThinVec;

use crate::ast::AttrStyle;
use crate::ast_traits::{HasAttrs, HasTokens};
use crate::token::{self, Delimiter, Token, TokenKind};
use crate::{AttrVec, Attribute};
24
25/// Part of a `TokenStream`.
26#[derive(#[automatically_derived]
impl ::core::fmt::Debug for TokenTree {
    #[inline]
    fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
        match self {
            TokenTree::Token(__self_0, __self_1) =>
                ::core::fmt::Formatter::debug_tuple_field2_finish(f, "Token",
                    __self_0, &__self_1),
            TokenTree::Delimited(__self_0, __self_1, __self_2, __self_3) =>
                ::core::fmt::Formatter::debug_tuple_field4_finish(f,
                    "Delimited", __self_0, __self_1, __self_2, &__self_3),
        }
    }
}Debug, #[automatically_derived]
impl ::core::clone::Clone for TokenTree {
    #[inline]
    fn clone(&self) -> TokenTree {
        match self {
            TokenTree::Token(__self_0, __self_1) =>
                TokenTree::Token(::core::clone::Clone::clone(__self_0),
                    ::core::clone::Clone::clone(__self_1)),
            TokenTree::Delimited(__self_0, __self_1, __self_2, __self_3) =>
                TokenTree::Delimited(::core::clone::Clone::clone(__self_0),
                    ::core::clone::Clone::clone(__self_1),
                    ::core::clone::Clone::clone(__self_2),
                    ::core::clone::Clone::clone(__self_3)),
        }
    }
}Clone, #[automatically_derived]
impl ::core::cmp::PartialEq for TokenTree {
    #[inline]
    fn eq(&self, other: &TokenTree) -> bool {
        let __self_discr = ::core::intrinsics::discriminant_value(self);
        let __arg1_discr = ::core::intrinsics::discriminant_value(other);
        __self_discr == __arg1_discr &&
            match (self, other) {
                (TokenTree::Token(__self_0, __self_1),
                    TokenTree::Token(__arg1_0, __arg1_1)) =>
                    __self_0 == __arg1_0 && __self_1 == __arg1_1,
                (TokenTree::Delimited(__self_0, __self_1, __self_2, __self_3),
                    TokenTree::Delimited(__arg1_0, __arg1_1, __arg1_2,
                    __arg1_3)) =>
                    __self_0 == __arg1_0 && __self_1 == __arg1_1 &&
                            __self_2 == __arg1_2 && __self_3 == __arg1_3,
                _ => unsafe { ::core::intrinsics::unreachable() }
            }
    }
}PartialEq, #[automatically_derived]
impl ::core::cmp::Eq for TokenTree {
    #[inline]
    #[doc(hidden)]
    #[coverage(off)]
    fn assert_receiver_is_total_eq(&self) -> () {
        let _: ::core::cmp::AssertParamIsEq<Token>;
        let _: ::core::cmp::AssertParamIsEq<Spacing>;
        let _: ::core::cmp::AssertParamIsEq<DelimSpan>;
        let _: ::core::cmp::AssertParamIsEq<DelimSpacing>;
        let _: ::core::cmp::AssertParamIsEq<Delimiter>;
        let _: ::core::cmp::AssertParamIsEq<TokenStream>;
    }
}Eq, #[automatically_derived]
impl ::core::hash::Hash for TokenTree {
    #[inline]
    fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () {
        let __self_discr = ::core::intrinsics::discriminant_value(self);
        ::core::hash::Hash::hash(&__self_discr, state);
        match self {
            TokenTree::Token(__self_0, __self_1) => {
                ::core::hash::Hash::hash(__self_0, state);
                ::core::hash::Hash::hash(__self_1, state)
            }
            TokenTree::Delimited(__self_0, __self_1, __self_2, __self_3) => {
                ::core::hash::Hash::hash(__self_0, state);
                ::core::hash::Hash::hash(__self_1, state);
                ::core::hash::Hash::hash(__self_2, state);
                ::core::hash::Hash::hash(__self_3, state)
            }
        }
    }
}Hash, const _: () =
    {
        impl<__E: ::rustc_span::SpanEncoder> ::rustc_serialize::Encodable<__E>
            for TokenTree {
            fn encode(&self, __encoder: &mut __E) {
                let disc =
                    match *self {
                        TokenTree::Token(ref __binding_0, ref __binding_1) => {
                            0usize
                        }
                        TokenTree::Delimited(ref __binding_0, ref __binding_1,
                            ref __binding_2, ref __binding_3) => {
                            1usize
                        }
                    };
                ::rustc_serialize::Encoder::emit_u8(__encoder, disc as u8);
                match *self {
                    TokenTree::Token(ref __binding_0, ref __binding_1) => {
                        ::rustc_serialize::Encodable::<__E>::encode(__binding_0,
                            __encoder);
                        ::rustc_serialize::Encodable::<__E>::encode(__binding_1,
                            __encoder);
                    }
                    TokenTree::Delimited(ref __binding_0, ref __binding_1,
                        ref __binding_2, ref __binding_3) => {
                        ::rustc_serialize::Encodable::<__E>::encode(__binding_0,
                            __encoder);
                        ::rustc_serialize::Encodable::<__E>::encode(__binding_1,
                            __encoder);
                        ::rustc_serialize::Encodable::<__E>::encode(__binding_2,
                            __encoder);
                        ::rustc_serialize::Encodable::<__E>::encode(__binding_3,
                            __encoder);
                    }
                }
            }
        }
    };Encodable, const _: () =
    {
        impl<__D: ::rustc_span::SpanDecoder> ::rustc_serialize::Decodable<__D>
            for TokenTree {
            fn decode(__decoder: &mut __D) -> Self {
                match ::rustc_serialize::Decoder::read_u8(__decoder) as usize
                    {
                    0usize => {
                        TokenTree::Token(::rustc_serialize::Decodable::decode(__decoder),
                            ::rustc_serialize::Decodable::decode(__decoder))
                    }
                    1usize => {
                        TokenTree::Delimited(::rustc_serialize::Decodable::decode(__decoder),
                            ::rustc_serialize::Decodable::decode(__decoder),
                            ::rustc_serialize::Decodable::decode(__decoder),
                            ::rustc_serialize::Decodable::decode(__decoder))
                    }
                    n => {
                        ::core::panicking::panic_fmt(format_args!("invalid enum variant tag while decoding `TokenTree`, expected 0..2, actual {0}",
                                n));
                    }
                }
            }
        }
    };Decodable, const _: () =
    {
        impl<__CTX> ::rustc_data_structures::stable_hasher::HashStable<__CTX>
            for TokenTree where __CTX: crate::HashStableContext {
            #[inline]
            fn hash_stable(&self, __hcx: &mut __CTX,
                __hasher:
                    &mut ::rustc_data_structures::stable_hasher::StableHasher) {
                ::std::mem::discriminant(self).hash_stable(__hcx, __hasher);
                match *self {
                    TokenTree::Token(ref __binding_0, ref __binding_1) => {
                        { __binding_0.hash_stable(__hcx, __hasher); }
                        { __binding_1.hash_stable(__hcx, __hasher); }
                    }
                    TokenTree::Delimited(ref __binding_0, ref __binding_1,
                        ref __binding_2, ref __binding_3) => {
                        { __binding_0.hash_stable(__hcx, __hasher); }
                        { __binding_1.hash_stable(__hcx, __hasher); }
                        { __binding_2.hash_stable(__hcx, __hasher); }
                        { __binding_3.hash_stable(__hcx, __hasher); }
                    }
                }
            }
        }
    };HashStable_Generic)]
27pub enum TokenTree {
28    /// A single token. Should never be `OpenDelim` or `CloseDelim`, because
29    /// delimiters are implicitly represented by `Delimited`.
30    Token(Token, Spacing),
31    /// A delimited sequence of token trees.
32    Delimited(DelimSpan, DelimSpacing, Delimiter, TokenStream),
33}
34
35// Ensure all fields of `TokenTree` are `DynSend` and `DynSync`.
36fn _dummy()
37where
38    Token: sync::DynSend + sync::DynSync,
39    Spacing: sync::DynSend + sync::DynSync,
40    DelimSpan: sync::DynSend + sync::DynSync,
41    Delimiter: sync::DynSend + sync::DynSync,
42    TokenStream: sync::DynSend + sync::DynSync,
43{
44}
45
46impl TokenTree {
47    /// Checks if this `TokenTree` is equal to the other, regardless of span/spacing information.
48    pub fn eq_unspanned(&self, other: &TokenTree) -> bool {
49        match (self, other) {
50            (TokenTree::Token(token, _), TokenTree::Token(token2, _)) => token.kind == token2.kind,
51            (TokenTree::Delimited(.., delim, tts), TokenTree::Delimited(.., delim2, tts2)) => {
52                delim == delim2 && tts.iter().eq_by(tts2.iter(), |a, b| a.eq_unspanned(b))
53            }
54            _ => false,
55        }
56    }
57
58    /// Retrieves the `TokenTree`'s span.
59    pub fn span(&self) -> Span {
60        match self {
61            TokenTree::Token(token, _) => token.span,
62            TokenTree::Delimited(sp, ..) => sp.entire(),
63        }
64    }
65
66    /// Create a `TokenTree::Token` with alone spacing.
67    pub fn token_alone(kind: TokenKind, span: Span) -> TokenTree {
68        TokenTree::Token(Token::new(kind, span), Spacing::Alone)
69    }
70
71    /// Create a `TokenTree::Token` with joint spacing.
72    pub fn token_joint(kind: TokenKind, span: Span) -> TokenTree {
73        TokenTree::Token(Token::new(kind, span), Spacing::Joint)
74    }
75
76    /// Create a `TokenTree::Token` with joint-hidden spacing.
77    pub fn token_joint_hidden(kind: TokenKind, span: Span) -> TokenTree {
78        TokenTree::Token(Token::new(kind, span), Spacing::JointHidden)
79    }
80
81    pub fn uninterpolate(&self) -> Cow<'_, TokenTree> {
82        match self {
83            TokenTree::Token(token, spacing) => match token.uninterpolate() {
84                Cow::Owned(token) => Cow::Owned(TokenTree::Token(token, *spacing)),
85                Cow::Borrowed(_) => Cow::Borrowed(self),
86            },
87            _ => Cow::Borrowed(self),
88        }
89    }
90}
91
92/// A lazy version of [`AttrTokenStream`], which defers creation of an actual
93/// `AttrTokenStream` until it is needed.
94#[derive(#[automatically_derived]
impl ::core::clone::Clone for LazyAttrTokenStream {
    #[inline]
    fn clone(&self) -> LazyAttrTokenStream {
        LazyAttrTokenStream(::core::clone::Clone::clone(&self.0))
    }
}Clone)]
95pub struct LazyAttrTokenStream(Arc<LazyAttrTokenStreamInner>);
96
97impl LazyAttrTokenStream {
98    pub fn new_direct(stream: AttrTokenStream) -> LazyAttrTokenStream {
99        LazyAttrTokenStream(Arc::new(LazyAttrTokenStreamInner::Direct(stream)))
100    }
101
102    pub fn new_pending(
103        start_token: (Token, Spacing),
104        cursor_snapshot: TokenCursor,
105        num_calls: u32,
106        break_last_token: u32,
107        node_replacements: ThinVec<NodeReplacement>,
108    ) -> LazyAttrTokenStream {
109        LazyAttrTokenStream(Arc::new(LazyAttrTokenStreamInner::Pending {
110            start_token,
111            cursor_snapshot,
112            num_calls,
113            break_last_token,
114            node_replacements,
115        }))
116    }
117
118    pub fn to_attr_token_stream(&self) -> AttrTokenStream {
119        self.0.to_attr_token_stream()
120    }
121}
122
123impl fmt::Debug for LazyAttrTokenStream {
124    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
125        f.write_fmt(format_args!("LazyAttrTokenStream({0:?})",
        self.to_attr_token_stream()))write!(f, "LazyAttrTokenStream({:?})", self.to_attr_token_stream())
126    }
127}
128
129impl<S: SpanEncoder> Encodable<S> for LazyAttrTokenStream {
130    fn encode(&self, _s: &mut S) {
131        {
    ::core::panicking::panic_fmt(format_args!("Attempted to encode LazyAttrTokenStream"));
};panic!("Attempted to encode LazyAttrTokenStream");
132    }
133}
134
135impl<D: SpanDecoder> Decodable<D> for LazyAttrTokenStream {
136    fn decode(_d: &mut D) -> Self {
137        {
    ::core::panicking::panic_fmt(format_args!("Attempted to decode LazyAttrTokenStream"));
};panic!("Attempted to decode LazyAttrTokenStream");
138    }
139}
140
141impl<CTX> HashStable<CTX> for LazyAttrTokenStream {
142    fn hash_stable(&self, _hcx: &mut CTX, _hasher: &mut StableHasher) {
143        {
    ::core::panicking::panic_fmt(format_args!("Attempted to compute stable hash for LazyAttrTokenStream"));
};panic!("Attempted to compute stable hash for LazyAttrTokenStream");
144    }
145}
146
/// A token range within a `Parser`'s full token stream.
#[derive(Clone, Debug)]
pub struct ParserRange(pub Range<u32>);
150
/// A token range within an individual AST node's (lazy) token stream, i.e.
/// relative to that node's first token. Distinct from `ParserRange` so the two
/// kinds of range can't be mixed up.
#[derive(Clone, Debug)]
pub struct NodeRange(pub Range<u32>);
156
157/// Indicates a range of tokens that should be replaced by an `AttrsTarget`
158/// (replacement) or be replaced by nothing (deletion). This is used in two
159/// places during token collection.
160///
161/// 1. Replacement. During the parsing of an AST node that may have a
162///    `#[derive]` attribute, when we parse a nested AST node that has `#[cfg]`
163///    or `#[cfg_attr]`, we replace the entire inner AST node with
164///    `FlatToken::AttrsTarget`. This lets us perform eager cfg-expansion on an
165///    `AttrTokenStream`.
166///
167/// 2. Deletion. We delete inner attributes from all collected token streams,
168///    and instead track them through the `attrs` field on the AST node. This
169///    lets us manipulate them similarly to outer attributes. When we create a
170///    `TokenStream`, the inner attributes are inserted into the proper place
171///    in the token stream.
172///
173/// Each replacement starts off in `ParserReplacement` form but is converted to
174/// `NodeReplacement` form when it is attached to a single AST node, via
175/// `LazyAttrTokenStreamImpl`.
176pub type ParserReplacement = (ParserRange, Option<AttrsTarget>);
177
178/// See the comment on `ParserReplacement`.
179pub type NodeReplacement = (NodeRange, Option<AttrsTarget>);
180
181impl NodeRange {
182    // Converts a range within a parser's tokens to a range within a
183    // node's tokens beginning at `start_pos`.
184    //
185    // For example, imagine a parser with 50 tokens in its token stream, a
186    // function that spans `ParserRange(20..40)` and an inner attribute within
187    // that function that spans `ParserRange(30..35)`. We would find the inner
188    // attribute's range within the function's tokens by subtracting 20, which
189    // is the position of the function's start token. This gives
190    // `NodeRange(10..15)`.
191    pub fn new(ParserRange(parser_range): ParserRange, start_pos: u32) -> NodeRange {
192        if !!parser_range.is_empty() {
    ::core::panicking::panic("assertion failed: !parser_range.is_empty()")
};assert!(!parser_range.is_empty());
193        if !(parser_range.start >= start_pos) {
    ::core::panicking::panic("assertion failed: parser_range.start >= start_pos")
};assert!(parser_range.start >= start_pos);
194        NodeRange((parser_range.start - start_pos)..(parser_range.end - start_pos))
195    }
196}
197
198enum LazyAttrTokenStreamInner {
199    // The token stream has already been produced.
200    Direct(AttrTokenStream),
201
202    // From a value of this type we can reconstruct the `TokenStream` seen by
203    // the `f` callback passed to a call to `Parser::collect_tokens`, by
204    // replaying the getting of the tokens. This saves us producing a
205    // `TokenStream` if it is never needed, e.g. a captured `macro_rules!`
206    // argument that is never passed to a proc macro. In practice, token stream
207    // creation happens rarely compared to calls to `collect_tokens` (see some
208    // statistics in #78736) so we are doing as little up-front work as
209    // possible.
210    //
211    // This also makes `Parser` very cheap to clone, since there is no
212    // intermediate collection buffer to clone.
213    Pending {
214        start_token: (Token, Spacing),
215        cursor_snapshot: TokenCursor,
216        num_calls: u32,
217        break_last_token: u32,
218        node_replacements: ThinVec<NodeReplacement>,
219    },
220}
221
222impl LazyAttrTokenStreamInner {
223    fn to_attr_token_stream(&self) -> AttrTokenStream {
224        match self {
225            LazyAttrTokenStreamInner::Direct(stream) => stream.clone(),
226            LazyAttrTokenStreamInner::Pending {
227                start_token,
228                cursor_snapshot,
229                num_calls,
230                break_last_token,
231                node_replacements,
232            } => {
233                // The token produced by the final call to `{,inlined_}next` was not
234                // actually consumed by the callback. The combination of chaining the
235                // initial token and using `take` produces the desired result - we
236                // produce an empty `TokenStream` if no calls were made, and omit the
237                // final token otherwise.
238                let mut cursor_snapshot = cursor_snapshot.clone();
239                let tokens = iter::once(FlatToken::Token(*start_token))
240                    .chain(iter::repeat_with(|| FlatToken::Token(cursor_snapshot.next())))
241                    .take(*num_calls as usize);
242
243                if node_replacements.is_empty() {
244                    make_attr_token_stream(tokens, *break_last_token)
245                } else {
246                    let mut tokens: Vec<_> = tokens.collect();
247                    let mut node_replacements = node_replacements.to_vec();
248                    node_replacements.sort_by_key(|(range, _)| range.0.start);
249
250                    #[cfg(debug_assertions)]
251                    for [(node_range, tokens), (next_node_range, next_tokens)] in
252                        node_replacements.array_windows()
253                    {
254                        if !(node_range.0.end <= next_node_range.0.start ||
            node_range.0.end >= next_node_range.0.end) {
    {
        ::core::panicking::panic_fmt(format_args!("Node ranges should be disjoint or nested: ({0:?}, {1:?}) ({2:?}, {3:?})",
                node_range, tokens, next_node_range, next_tokens));
    }
};assert!(
255                            node_range.0.end <= next_node_range.0.start
256                                || node_range.0.end >= next_node_range.0.end,
257                            "Node ranges should be disjoint or nested: ({:?}, {:?}) ({:?}, {:?})",
258                            node_range,
259                            tokens,
260                            next_node_range,
261                            next_tokens,
262                        );
263                    }
264
265                    // Process the replace ranges, starting from the highest start
266                    // position and working our way back. If have tokens like:
267                    //
268                    // `#[cfg(FALSE)] struct Foo { #[cfg(FALSE)] field: bool }`
269                    //
270                    // Then we will generate replace ranges for both
271                    // the `#[cfg(FALSE)] field: bool` and the entire
272                    // `#[cfg(FALSE)] struct Foo { #[cfg(FALSE)] field: bool }`
273                    //
274                    // By starting processing from the replace range with the greatest
275                    // start position, we ensure that any (outer) replace range which
276                    // encloses another (inner) replace range will fully overwrite the
277                    // inner range's replacement.
278                    for (node_range, target) in node_replacements.into_iter().rev() {
279                        if !!node_range.0.is_empty() {
    {
        ::core::panicking::panic_fmt(format_args!("Cannot replace an empty node range: {0:?}",
                node_range.0));
    }
};assert!(
280                            !node_range.0.is_empty(),
281                            "Cannot replace an empty node range: {:?}",
282                            node_range.0
283                        );
284
285                        // Replace the tokens in range with zero or one `FlatToken::AttrsTarget`s,
286                        // plus enough `FlatToken::Empty`s to fill up the rest of the range. This
287                        // keeps the total length of `tokens` constant throughout the replacement
288                        // process, allowing us to do all replacements without adjusting indices.
289                        let target_len = target.is_some() as usize;
290                        tokens.splice(
291                            (node_range.0.start as usize)..(node_range.0.end as usize),
292                            target.into_iter().map(|target| FlatToken::AttrsTarget(target)).chain(
293                                iter::repeat(FlatToken::Empty)
294                                    .take(node_range.0.len() - target_len),
295                            ),
296                        );
297                    }
298                    make_attr_token_stream(tokens.into_iter(), *break_last_token)
299                }
300            }
301        }
302    }
303}
304
305/// A helper struct used when building an `AttrTokenStream` from
306/// a `LazyAttrTokenStream`. Both delimiter and non-delimited tokens
307/// are stored as `FlatToken::Token`. A vector of `FlatToken`s
308/// is then 'parsed' to build up an `AttrTokenStream` with nested
309/// `AttrTokenTree::Delimited` tokens.
310#[derive(#[automatically_derived]
impl ::core::fmt::Debug for FlatToken {
    #[inline]
    fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
        match self {
            FlatToken::Token(__self_0) =>
                ::core::fmt::Formatter::debug_tuple_field1_finish(f, "Token",
                    &__self_0),
            FlatToken::AttrsTarget(__self_0) =>
                ::core::fmt::Formatter::debug_tuple_field1_finish(f,
                    "AttrsTarget", &__self_0),
            FlatToken::Empty => ::core::fmt::Formatter::write_str(f, "Empty"),
        }
    }
}Debug, #[automatically_derived]
impl ::core::clone::Clone for FlatToken {
    #[inline]
    fn clone(&self) -> FlatToken {
        match self {
            FlatToken::Token(__self_0) =>
                FlatToken::Token(::core::clone::Clone::clone(__self_0)),
            FlatToken::AttrsTarget(__self_0) =>
                FlatToken::AttrsTarget(::core::clone::Clone::clone(__self_0)),
            FlatToken::Empty => FlatToken::Empty,
        }
    }
}Clone)]
311enum FlatToken {
312    /// A token - this holds both delimiter (e.g. '{' and '}')
313    /// and non-delimiter tokens
314    Token((Token, Spacing)),
315    /// Holds the `AttrsTarget` for an AST node. The `AttrsTarget` is inserted
316    /// directly into the constructed `AttrTokenStream` as an
317    /// `AttrTokenTree::AttrsTarget`.
318    AttrsTarget(AttrsTarget),
319    /// A special 'empty' token that is ignored during the conversion
320    /// to an `AttrTokenStream`. This is used to simplify the
321    /// handling of replace ranges.
322    Empty,
323}
324
325/// An `AttrTokenStream` is similar to a `TokenStream`, but with extra
326/// information about the tokens for attribute targets. This is used
327/// during expansion to perform early cfg-expansion, and to process attributes
328/// during proc-macro invocations.
329#[derive(#[automatically_derived]
impl ::core::clone::Clone for AttrTokenStream {
    #[inline]
    fn clone(&self) -> AttrTokenStream {
        AttrTokenStream(::core::clone::Clone::clone(&self.0))
    }
}Clone, #[automatically_derived]
impl ::core::fmt::Debug for AttrTokenStream {
    #[inline]
    fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
        ::core::fmt::Formatter::debug_tuple_field1_finish(f,
            "AttrTokenStream", &&self.0)
    }
}Debug, #[automatically_derived]
impl ::core::default::Default for AttrTokenStream {
    #[inline]
    fn default() -> AttrTokenStream {
        AttrTokenStream(::core::default::Default::default())
    }
}Default, const _: () =
    {
        impl<__E: ::rustc_span::SpanEncoder> ::rustc_serialize::Encodable<__E>
            for AttrTokenStream {
            fn encode(&self, __encoder: &mut __E) {
                match *self {
                    AttrTokenStream(ref __binding_0) => {
                        ::rustc_serialize::Encodable::<__E>::encode(__binding_0,
                            __encoder);
                    }
                }
            }
        }
    };Encodable, const _: () =
    {
        impl<__D: ::rustc_span::SpanDecoder> ::rustc_serialize::Decodable<__D>
            for AttrTokenStream {
            fn decode(__decoder: &mut __D) -> Self {
                AttrTokenStream(::rustc_serialize::Decodable::decode(__decoder))
            }
        }
    };Decodable)]
330pub struct AttrTokenStream(pub Arc<Vec<AttrTokenTree>>);
331
332/// Converts a flattened iterator of tokens (including open and close delimiter tokens) into an
333/// `AttrTokenStream`, creating an `AttrTokenTree::Delimited` for each matching pair of open and
334/// close delims.
335fn make_attr_token_stream(
336    iter: impl Iterator<Item = FlatToken>,
337    break_last_token: u32,
338) -> AttrTokenStream {
339    #[derive(#[automatically_derived]
impl ::core::fmt::Debug for FrameData {
    #[inline]
    fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
        ::core::fmt::Formatter::debug_struct_field2_finish(f, "FrameData",
            "open_delim_sp", &self.open_delim_sp, "inner", &&self.inner)
    }
}Debug)]
340    struct FrameData {
341        // This is `None` for the first frame, `Some` for all others.
342        open_delim_sp: Option<(Delimiter, Span, Spacing)>,
343        inner: Vec<AttrTokenTree>,
344    }
345    // The stack always has at least one element. Storing it separately makes for shorter code.
346    let mut stack_top = FrameData { open_delim_sp: None, inner: ::alloc::vec::Vec::new()vec![] };
347    let mut stack_rest = ::alloc::vec::Vec::new()vec![];
348    for flat_token in iter {
349        match flat_token {
350            FlatToken::Token((token @ Token { kind, span }, spacing)) => {
351                if let Some(delim) = kind.open_delim() {
352                    stack_rest.push(mem::replace(
353                        &mut stack_top,
354                        FrameData { open_delim_sp: Some((delim, span, spacing)), inner: ::alloc::vec::Vec::new()vec![] },
355                    ));
356                } else if let Some(delim) = kind.close_delim() {
357                    let frame_data = mem::replace(&mut stack_top, stack_rest.pop().unwrap());
358                    let (open_delim, open_sp, open_spacing) = frame_data.open_delim_sp.unwrap();
359                    if !open_delim.eq_ignoring_invisible_origin(&delim) {
    {
        ::core::panicking::panic_fmt(format_args!("Mismatched open/close delims: open={0:?} close={1:?}",
                open_delim, span));
    }
};assert!(
360                        open_delim.eq_ignoring_invisible_origin(&delim),
361                        "Mismatched open/close delims: open={open_delim:?} close={span:?}"
362                    );
363                    let dspan = DelimSpan::from_pair(open_sp, span);
364                    let dspacing = DelimSpacing::new(open_spacing, spacing);
365                    let stream = AttrTokenStream::new(frame_data.inner);
366                    let delimited = AttrTokenTree::Delimited(dspan, dspacing, delim, stream);
367                    stack_top.inner.push(delimited);
368                } else {
369                    stack_top.inner.push(AttrTokenTree::Token(token, spacing))
370                }
371            }
372            FlatToken::AttrsTarget(target) => {
373                stack_top.inner.push(AttrTokenTree::AttrsTarget(target))
374            }
375            FlatToken::Empty => {}
376        }
377    }
378
379    if break_last_token > 0 {
380        let last_token = stack_top.inner.pop().unwrap();
381        if let AttrTokenTree::Token(last_token, spacing) = last_token {
382            let (unglued, _) = last_token.kind.break_two_token_op(break_last_token).unwrap();
383
384            // Tokens are always ASCII chars, so we can use byte arithmetic here.
385            let mut first_span = last_token.span.shrink_to_lo();
386            first_span =
387                first_span.with_hi(first_span.lo() + rustc_span::BytePos(break_last_token));
388
389            stack_top.inner.push(AttrTokenTree::Token(Token::new(unglued, first_span), spacing));
390        } else {
391            {
    ::core::panicking::panic_fmt(format_args!("Unexpected last token {0:?}",
            last_token));
}panic!("Unexpected last token {last_token:?}")
392        }
393    }
394    AttrTokenStream::new(stack_top.inner)
395}
396
397/// Like `TokenTree`, but for `AttrTokenStream`.
398#[derive(#[automatically_derived]
impl ::core::clone::Clone for AttrTokenTree {
    #[inline]
    fn clone(&self) -> AttrTokenTree {
        match self {
            AttrTokenTree::Token(__self_0, __self_1) =>
                AttrTokenTree::Token(::core::clone::Clone::clone(__self_0),
                    ::core::clone::Clone::clone(__self_1)),
            AttrTokenTree::Delimited(__self_0, __self_1, __self_2, __self_3)
                =>
                AttrTokenTree::Delimited(::core::clone::Clone::clone(__self_0),
                    ::core::clone::Clone::clone(__self_1),
                    ::core::clone::Clone::clone(__self_2),
                    ::core::clone::Clone::clone(__self_3)),
            AttrTokenTree::AttrsTarget(__self_0) =>
                AttrTokenTree::AttrsTarget(::core::clone::Clone::clone(__self_0)),
        }
    }
}Clone, #[automatically_derived]
impl ::core::fmt::Debug for AttrTokenTree {
    #[inline]
    fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
        match self {
            AttrTokenTree::Token(__self_0, __self_1) =>
                ::core::fmt::Formatter::debug_tuple_field2_finish(f, "Token",
                    __self_0, &__self_1),
            AttrTokenTree::Delimited(__self_0, __self_1, __self_2, __self_3)
                =>
                ::core::fmt::Formatter::debug_tuple_field4_finish(f,
                    "Delimited", __self_0, __self_1, __self_2, &__self_3),
            AttrTokenTree::AttrsTarget(__self_0) =>
                ::core::fmt::Formatter::debug_tuple_field1_finish(f,
                    "AttrsTarget", &__self_0),
        }
    }
}Debug, const _: () =
    {
        impl<__E: ::rustc_span::SpanEncoder> ::rustc_serialize::Encodable<__E>
            for AttrTokenTree {
            fn encode(&self, __encoder: &mut __E) {
                let disc =
                    match *self {
                        AttrTokenTree::Token(ref __binding_0, ref __binding_1) => {
                            0usize
                        }
                        AttrTokenTree::Delimited(ref __binding_0, ref __binding_1,
                            ref __binding_2, ref __binding_3) => {
                            1usize
                        }
                        AttrTokenTree::AttrsTarget(ref __binding_0) => { 2usize }
                    };
                ::rustc_serialize::Encoder::emit_u8(__encoder, disc as u8);
                match *self {
                    AttrTokenTree::Token(ref __binding_0, ref __binding_1) => {
                        ::rustc_serialize::Encodable::<__E>::encode(__binding_0,
                            __encoder);
                        ::rustc_serialize::Encodable::<__E>::encode(__binding_1,
                            __encoder);
                    }
                    AttrTokenTree::Delimited(ref __binding_0, ref __binding_1,
                        ref __binding_2, ref __binding_3) => {
                        ::rustc_serialize::Encodable::<__E>::encode(__binding_0,
                            __encoder);
                        ::rustc_serialize::Encodable::<__E>::encode(__binding_1,
                            __encoder);
                        ::rustc_serialize::Encodable::<__E>::encode(__binding_2,
                            __encoder);
                        ::rustc_serialize::Encodable::<__E>::encode(__binding_3,
                            __encoder);
                    }
                    AttrTokenTree::AttrsTarget(ref __binding_0) => {
                        ::rustc_serialize::Encodable::<__E>::encode(__binding_0,
                            __encoder);
                    }
                }
            }
        }
    };Encodable, const _: () =
    {
        impl<__D: ::rustc_span::SpanDecoder> ::rustc_serialize::Decodable<__D>
            for AttrTokenTree {
            fn decode(__decoder: &mut __D) -> Self {
                match ::rustc_serialize::Decoder::read_u8(__decoder) as usize
                    {
                    0usize => {
                        AttrTokenTree::Token(::rustc_serialize::Decodable::decode(__decoder),
                            ::rustc_serialize::Decodable::decode(__decoder))
                    }
                    1usize => {
                        AttrTokenTree::Delimited(::rustc_serialize::Decodable::decode(__decoder),
                            ::rustc_serialize::Decodable::decode(__decoder),
                            ::rustc_serialize::Decodable::decode(__decoder),
                            ::rustc_serialize::Decodable::decode(__decoder))
                    }
                    2usize => {
                        AttrTokenTree::AttrsTarget(::rustc_serialize::Decodable::decode(__decoder))
                    }
                    n => {
                        ::core::panicking::panic_fmt(format_args!("invalid enum variant tag while decoding `AttrTokenTree`, expected 0..3, actual {0}",
                                n));
                    }
                }
            }
        }
    };Decodable)]
399pub enum AttrTokenTree {
400    Token(Token, Spacing),
401    Delimited(DelimSpan, DelimSpacing, Delimiter, AttrTokenStream),
402    /// Stores the attributes for an attribute target,
403    /// along with the tokens for that attribute target.
404    /// See `AttrsTarget` for more information
405    AttrsTarget(AttrsTarget),
406}
407
408impl AttrTokenStream {
409    pub fn new(tokens: Vec<AttrTokenTree>) -> AttrTokenStream {
410        AttrTokenStream(Arc::new(tokens))
411    }
412
413    /// Converts this `AttrTokenStream` to a plain `Vec<TokenTree>`. During
414    /// conversion, any `AttrTokenTree::AttrsTarget` gets "flattened" back to a
415    /// `TokenStream`, as described in the comment on
416    /// `attrs_and_tokens_to_token_trees`.
417    pub fn to_token_trees(&self) -> Vec<TokenTree> {
418        let mut res = Vec::with_capacity(self.0.len());
419        for tree in self.0.iter() {
420            match tree {
421                AttrTokenTree::Token(inner, spacing) => {
422                    res.push(TokenTree::Token(inner.clone(), *spacing));
423                }
424                AttrTokenTree::Delimited(span, spacing, delim, stream) => {
425                    res.push(TokenTree::Delimited(
426                        *span,
427                        *spacing,
428                        *delim,
429                        TokenStream::new(stream.to_token_trees()),
430                    ))
431                }
432                AttrTokenTree::AttrsTarget(target) => {
433                    attrs_and_tokens_to_token_trees(&target.attrs, &target.tokens, &mut res);
434                }
435            }
436        }
437        res
438    }
439}
440
441// Converts multiple attributes and the tokens for a target AST node into token trees, and appends
442// them to `res`.
443//
444// Example: if the AST node is "fn f() { blah(); }", then:
445// - Simple if no attributes are present, e.g. "fn f() { blah(); }"
446// - Simple if only outer attribute are present, e.g. "#[outer1] #[outer2] fn f() { blah(); }"
447// - Trickier if inner attributes are present, because they must be moved within the AST node's
448//   tokens, e.g. "#[outer] fn f() { #![inner] blah() }"
449fn attrs_and_tokens_to_token_trees(
450    attrs: &[Attribute],
451    target_tokens: &LazyAttrTokenStream,
452    res: &mut Vec<TokenTree>,
453) {
454    let idx = attrs.partition_point(|attr| #[allow(non_exhaustive_omitted_patterns)] match attr.style {
    crate::AttrStyle::Outer => true,
    _ => false,
}matches!(attr.style, crate::AttrStyle::Outer));
455    let (outer_attrs, inner_attrs) = attrs.split_at(idx);
456
457    // Add outer attribute tokens.
458    for attr in outer_attrs {
459        res.extend(attr.token_trees());
460    }
461
462    // Add target AST node tokens.
463    res.extend(target_tokens.to_attr_token_stream().to_token_trees());
464
465    // Insert inner attribute tokens.
466    if !inner_attrs.is_empty() {
467        let found = insert_inner_attrs(inner_attrs, res);
468        if !found {
    {
        ::core::panicking::panic_fmt(format_args!("Failed to find trailing delimited group in: {0:?}",
                res));
    }
};assert!(found, "Failed to find trailing delimited group in: {res:?}");
469    }
470
471    // Inner attributes are only supported on blocks, functions, impls, and
472    // modules. All of these have their inner attributes placed at the
473    // beginning of the rightmost outermost braced group:
474    // e.g. `fn foo() { #![my_attr] }`. (Note: the braces may be within
475    // invisible delimiters.)
476    //
477    // Therefore, we can insert them back into the right location without
478    // needing to do any extra position tracking.
479    //
480    // Note: Outline modules are an exception - they can have attributes like
481    // `#![my_attr]` at the start of a file. Support for custom attributes in
482    // this position is not properly implemented - we always synthesize fake
483    // tokens, so we never reach this code.
484    fn insert_inner_attrs(inner_attrs: &[Attribute], tts: &mut Vec<TokenTree>) -> bool {
485        for tree in tts.iter_mut().rev() {
486            if let TokenTree::Delimited(span, spacing, Delimiter::Brace, stream) = tree {
487                // Found it: the rightmost, outermost braced group.
488                let mut tts = ::alloc::vec::Vec::new()vec![];
489                for inner_attr in inner_attrs {
490                    tts.extend(inner_attr.token_trees());
491                }
492                tts.extend(stream.0.iter().cloned());
493                let stream = TokenStream::new(tts);
494                *tree = TokenTree::Delimited(*span, *spacing, Delimiter::Brace, stream);
495                return true;
496            } else if let TokenTree::Delimited(span, spacing, Delimiter::Invisible(src), stream) =
497                tree
498            {
499                // Recurse inside invisible delimiters.
500                let mut vec: Vec<_> = stream.iter().cloned().collect();
501                if insert_inner_attrs(inner_attrs, &mut vec) {
502                    *tree = TokenTree::Delimited(
503                        *span,
504                        *spacing,
505                        Delimiter::Invisible(*src),
506                        TokenStream::new(vec),
507                    );
508                    return true;
509                }
510            }
511        }
512        false
513    }
514}
515
516/// Stores the tokens for an attribute target, along
517/// with its attributes.
518///
519/// This is constructed during parsing when we need to capture
520/// tokens, for `cfg` and `cfg_attr` attributes.
521///
522/// For example, `#[cfg(FALSE)] struct Foo {}` would
523/// have an `attrs` field containing the `#[cfg(FALSE)]` attr,
524/// and a `tokens` field storing the (unparsed) tokens `struct Foo {}`
525///
526/// The `cfg`/`cfg_attr` processing occurs in
527/// `StripUnconfigured::configure_tokens`.
528#[derive(#[automatically_derived]
impl ::core::clone::Clone for AttrsTarget {
    #[inline]
    fn clone(&self) -> AttrsTarget {
        AttrsTarget {
            attrs: ::core::clone::Clone::clone(&self.attrs),
            tokens: ::core::clone::Clone::clone(&self.tokens),
        }
    }
}Clone, #[automatically_derived]
impl ::core::fmt::Debug for AttrsTarget {
    #[inline]
    fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
        ::core::fmt::Formatter::debug_struct_field2_finish(f, "AttrsTarget",
            "attrs", &self.attrs, "tokens", &&self.tokens)
    }
}Debug, const _: () =
    {
        impl<__E: ::rustc_span::SpanEncoder> ::rustc_serialize::Encodable<__E>
            for AttrsTarget {
            fn encode(&self, __encoder: &mut __E) {
                match *self {
                    AttrsTarget {
                        attrs: ref __binding_0, tokens: ref __binding_1 } => {
                        ::rustc_serialize::Encodable::<__E>::encode(__binding_0,
                            __encoder);
                        ::rustc_serialize::Encodable::<__E>::encode(__binding_1,
                            __encoder);
                    }
                }
            }
        }
    };Encodable, const _: () =
    {
        impl<__D: ::rustc_span::SpanDecoder> ::rustc_serialize::Decodable<__D>
            for AttrsTarget {
            fn decode(__decoder: &mut __D) -> Self {
                AttrsTarget {
                    attrs: ::rustc_serialize::Decodable::decode(__decoder),
                    tokens: ::rustc_serialize::Decodable::decode(__decoder),
                }
            }
        }
    };Decodable)]
529pub struct AttrsTarget {
530    /// Attributes, both outer and inner.
531    /// These are stored in the original order that they were parsed in.
532    pub attrs: AttrVec,
533    /// The underlying tokens for the attribute target that `attrs`
534    /// are applied to
535    pub tokens: LazyAttrTokenStream,
536}
537
538/// Indicates whether a token can join with the following token to form a
539/// compound token. Used for conversions to `proc_macro::Spacing`. Also used to
540/// guide pretty-printing, which is where the `JointHidden` value (which isn't
541/// part of `proc_macro::Spacing`) comes in useful.
542#[derive(#[automatically_derived]
impl ::core::clone::Clone for Spacing {
    #[inline]
    fn clone(&self) -> Spacing { *self }
}Clone, #[automatically_derived]
impl ::core::marker::Copy for Spacing { }Copy, #[automatically_derived]
impl ::core::fmt::Debug for Spacing {
    #[inline]
    fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
        ::core::fmt::Formatter::write_str(f,
            match self {
                Spacing::Alone => "Alone",
                Spacing::Joint => "Joint",
                Spacing::JointHidden => "JointHidden",
            })
    }
}Debug, #[automatically_derived]
impl ::core::cmp::PartialEq for Spacing {
    #[inline]
    fn eq(&self, other: &Spacing) -> bool {
        let __self_discr = ::core::intrinsics::discriminant_value(self);
        let __arg1_discr = ::core::intrinsics::discriminant_value(other);
        __self_discr == __arg1_discr
    }
}PartialEq, #[automatically_derived]
impl ::core::cmp::Eq for Spacing {
    #[inline]
    #[doc(hidden)]
    #[coverage(off)]
    fn assert_receiver_is_total_eq(&self) -> () {}
}Eq, #[automatically_derived]
impl ::core::hash::Hash for Spacing {
    #[inline]
    fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () {
        let __self_discr = ::core::intrinsics::discriminant_value(self);
        ::core::hash::Hash::hash(&__self_discr, state)
    }
}Hash, const _: () =
    {
        impl<__E: ::rustc_span::SpanEncoder> ::rustc_serialize::Encodable<__E>
            for Spacing {
            fn encode(&self, __encoder: &mut __E) {
                let disc =
                    match *self {
                        Spacing::Alone => { 0usize }
                        Spacing::Joint => { 1usize }
                        Spacing::JointHidden => { 2usize }
                    };
                ::rustc_serialize::Encoder::emit_u8(__encoder, disc as u8);
                match *self {
                    Spacing::Alone => {}
                    Spacing::Joint => {}
                    Spacing::JointHidden => {}
                }
            }
        }
    };Encodable, const _: () =
    {
        impl<__D: ::rustc_span::SpanDecoder> ::rustc_serialize::Decodable<__D>
            for Spacing {
            fn decode(__decoder: &mut __D) -> Self {
                match ::rustc_serialize::Decoder::read_u8(__decoder) as usize
                    {
                    0usize => { Spacing::Alone }
                    1usize => { Spacing::Joint }
                    2usize => { Spacing::JointHidden }
                    n => {
                        ::core::panicking::panic_fmt(format_args!("invalid enum variant tag while decoding `Spacing`, expected 0..3, actual {0}",
                                n));
                    }
                }
            }
        }
    };Decodable, const _: () =
    {
        impl<__CTX> ::rustc_data_structures::stable_hasher::HashStable<__CTX>
            for Spacing where __CTX: crate::HashStableContext {
            #[inline]
            fn hash_stable(&self, __hcx: &mut __CTX,
                __hasher:
                    &mut ::rustc_data_structures::stable_hasher::StableHasher) {
                ::std::mem::discriminant(self).hash_stable(__hcx, __hasher);
                match *self {
                    Spacing::Alone => {}
                    Spacing::Joint => {}
                    Spacing::JointHidden => {}
                }
            }
        }
    };HashStable_Generic)]
543pub enum Spacing {
544    /// The token cannot join with the following token to form a compound
545    /// token.
546    ///
547    /// In token streams parsed from source code, the compiler will use `Alone`
548    /// for any token immediately followed by whitespace, a non-doc comment, or
549    /// EOF.
550    ///
551    /// When constructing token streams within the compiler, use this for each
552    /// token that (a) should be pretty-printed with a space after it, or (b)
553    /// is the last token in the stream. (In the latter case the choice of
554    /// spacing doesn't matter because it is never used for the last token. We
555    /// arbitrarily use `Alone`.)
556    ///
557    /// Converts to `proc_macro::Spacing::Alone`, and
558    /// `proc_macro::Spacing::Alone` converts back to this.
559    Alone,
560
561    /// The token can join with the following token to form a compound token.
562    ///
563    /// In token streams parsed from source code, the compiler will use `Joint`
564    /// for any token immediately followed by punctuation (as determined by
565    /// `Token::is_punct`).
566    ///
567    /// When constructing token streams within the compiler, use this for each
568    /// token that (a) should be pretty-printed without a space after it, and
569    /// (b) is followed by a punctuation token.
570    ///
571    /// Converts to `proc_macro::Spacing::Joint`, and
572    /// `proc_macro::Spacing::Joint` converts back to this.
573    Joint,
574
575    /// The token can join with the following token to form a compound token,
576    /// but this will not be visible at the proc macro level. (This is what the
577    /// `Hidden` means; see below.)
578    ///
579    /// In token streams parsed from source code, the compiler will use
580    /// `JointHidden` for any token immediately followed by anything not
581    /// covered by the `Alone` and `Joint` cases: an identifier, lifetime,
582    /// literal, delimiter, doc comment.
583    ///
584    /// When constructing token streams, use this for each token that (a)
585    /// should be pretty-printed without a space after it, and (b) is followed
586    /// by a non-punctuation token.
587    ///
588    /// Converts to `proc_macro::Spacing::Alone`, but
589    /// `proc_macro::Spacing::Alone` converts back to `token::Spacing::Alone`.
590    /// Because of that, pretty-printing of `TokenStream`s produced by proc
591    /// macros is unavoidably uglier (with more whitespace between tokens) than
592    /// pretty-printing of `TokenStream`'s produced by other means (i.e. parsed
593    /// source code, internally constructed token streams, and token streams
594    /// produced by declarative macros).
595    JointHidden,
596}
597
598/// A `TokenStream` is an abstract sequence of tokens, organized into [`TokenTree`]s.
599#[derive(#[automatically_derived]
impl ::core::clone::Clone for TokenStream {
    #[inline]
    fn clone(&self) -> TokenStream {
        TokenStream(::core::clone::Clone::clone(&self.0))
    }
}Clone, #[automatically_derived]
impl ::core::fmt::Debug for TokenStream {
    #[inline]
    fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
        ::core::fmt::Formatter::debug_tuple_field1_finish(f, "TokenStream",
            &&self.0)
    }
}Debug, #[automatically_derived]
impl ::core::default::Default for TokenStream {
    #[inline]
    fn default() -> TokenStream {
        TokenStream(::core::default::Default::default())
    }
}Default, #[automatically_derived]
impl ::core::cmp::PartialEq for TokenStream {
    #[inline]
    fn eq(&self, other: &TokenStream) -> bool { self.0 == other.0 }
}PartialEq, #[automatically_derived]
impl ::core::cmp::Eq for TokenStream {
    #[inline]
    #[doc(hidden)]
    #[coverage(off)]
    fn assert_receiver_is_total_eq(&self) -> () {
        let _: ::core::cmp::AssertParamIsEq<Arc<Vec<TokenTree>>>;
    }
}Eq, #[automatically_derived]
impl ::core::hash::Hash for TokenStream {
    #[inline]
    fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () {
        ::core::hash::Hash::hash(&self.0, state)
    }
}Hash, const _: () =
    {
        impl<__E: ::rustc_span::SpanEncoder> ::rustc_serialize::Encodable<__E>
            for TokenStream {
            fn encode(&self, __encoder: &mut __E) {
                match *self {
                    TokenStream(ref __binding_0) => {
                        ::rustc_serialize::Encodable::<__E>::encode(__binding_0,
                            __encoder);
                    }
                }
            }
        }
    };Encodable, const _: () =
    {
        impl<__D: ::rustc_span::SpanDecoder> ::rustc_serialize::Decodable<__D>
            for TokenStream {
            fn decode(__decoder: &mut __D) -> Self {
                TokenStream(::rustc_serialize::Decodable::decode(__decoder))
            }
        }
    };Decodable)]
600pub struct TokenStream(pub(crate) Arc<Vec<TokenTree>>);
601
602impl TokenStream {
603    pub fn new(tts: Vec<TokenTree>) -> TokenStream {
604        TokenStream(Arc::new(tts))
605    }
606
607    pub fn is_empty(&self) -> bool {
608        self.0.is_empty()
609    }
610
611    pub fn len(&self) -> usize {
612        self.0.len()
613    }
614
615    pub fn get(&self, index: usize) -> Option<&TokenTree> {
616        self.0.get(index)
617    }
618
619    pub fn iter(&self) -> TokenStreamIter<'_> {
620        TokenStreamIter::new(self)
621    }
622
623    /// Create a token stream containing a single token with alone spacing. The
624    /// spacing used for the final token in a constructed stream doesn't matter
625    /// because it's never used. In practice we arbitrarily use
626    /// `Spacing::Alone`.
627    pub fn token_alone(kind: TokenKind, span: Span) -> TokenStream {
628        TokenStream::new(<[_]>::into_vec(::alloc::boxed::box_new([TokenTree::token_alone(kind, span)]))vec![TokenTree::token_alone(kind, span)])
629    }
630
631    pub fn from_ast(node: &(impl HasAttrs + HasTokens + fmt::Debug)) -> TokenStream {
632        let tokens = node.tokens().unwrap_or_else(|| {
    ::core::panicking::panic_fmt(format_args!("missing tokens for node: {0:?}",
            node));
}panic!("missing tokens for node: {:?}", node));
633        let mut tts = ::alloc::vec::Vec::new()vec![];
634        attrs_and_tokens_to_token_trees(node.attrs(), tokens, &mut tts);
635        TokenStream::new(tts)
636    }
637
638    // If `vec` is not empty, try to glue `tt` onto its last token. The return
639    // value indicates if gluing took place.
640    fn try_glue_to_last(vec: &mut Vec<TokenTree>, tt: &TokenTree) -> bool {
641        if let Some(TokenTree::Token(last_tok, Spacing::Joint | Spacing::JointHidden)) = vec.last()
642            && let TokenTree::Token(tok, spacing) = tt
643            && let Some(glued_tok) = last_tok.glue(tok)
644        {
645            // ...then overwrite the last token tree in `vec` with the
646            // glued token, and skip the first token tree from `stream`.
647            *vec.last_mut().unwrap() = TokenTree::Token(glued_tok, *spacing);
648            true
649        } else {
650            false
651        }
652    }
653
654    /// Push `tt` onto the end of the stream, possibly gluing it to the last
655    /// token. Uses `make_mut` to maximize efficiency.
656    pub fn push_tree(&mut self, tt: TokenTree) {
657        let vec_mut = Arc::make_mut(&mut self.0);
658
659        if Self::try_glue_to_last(vec_mut, &tt) {
660            // nothing else to do
661        } else {
662            vec_mut.push(tt);
663        }
664    }
665
666    /// Push `stream` onto the end of the stream, possibly gluing the first
667    /// token tree to the last token. (No other token trees will be glued.)
668    /// Uses `make_mut` to maximize efficiency.
669    pub fn push_stream(&mut self, stream: TokenStream) {
670        let vec_mut = Arc::make_mut(&mut self.0);
671
672        let stream_iter = stream.0.iter().cloned();
673
674        if let Some(first) = stream.0.first()
675            && Self::try_glue_to_last(vec_mut, first)
676        {
677            // Now skip the first token tree from `stream`.
678            vec_mut.extend(stream_iter.skip(1));
679        } else {
680            // Append all of `stream`.
681            vec_mut.extend(stream_iter);
682        }
683    }
684
685    pub fn chunks(&self, chunk_size: usize) -> core::slice::Chunks<'_, TokenTree> {
686        self.0.chunks(chunk_size)
687    }
688
689    /// Desugar doc comments like `/// foo` in the stream into `#[doc =
690    /// r"foo"]`. Modifies the `TokenStream` via `Arc::make_mut`, but as little
691    /// as possible.
692    pub fn desugar_doc_comments(&mut self) {
693        if let Some(desugared_stream) = desugar_inner(self.clone()) {
694            *self = desugared_stream;
695        }
696
697        // The return value is `None` if nothing in `stream` changed.
698        fn desugar_inner(mut stream: TokenStream) -> Option<TokenStream> {
699            let mut i = 0;
700            let mut modified = false;
701            while let Some(tt) = stream.0.get(i) {
702                match tt {
703                    &TokenTree::Token(
704                        Token { kind: token::DocComment(_, attr_style, data), span },
705                        _spacing,
706                    ) => {
707                        let desugared = desugared_tts(attr_style, data, span);
708                        let desugared_len = desugared.len();
709                        Arc::make_mut(&mut stream.0).splice(i..i + 1, desugared);
710                        modified = true;
711                        i += desugared_len;
712                    }
713
714                    &TokenTree::Token(..) => i += 1,
715
716                    &TokenTree::Delimited(sp, spacing, delim, ref delim_stream) => {
717                        if let Some(desugared_delim_stream) = desugar_inner(delim_stream.clone()) {
718                            let new_tt =
719                                TokenTree::Delimited(sp, spacing, delim, desugared_delim_stream);
720                            Arc::make_mut(&mut stream.0)[i] = new_tt;
721                            modified = true;
722                        }
723                        i += 1;
724                    }
725                }
726            }
727            if modified { Some(stream) } else { None }
728        }
729
730        fn desugared_tts(attr_style: AttrStyle, data: Symbol, span: Span) -> Vec<TokenTree> {
731            // Searches for the occurrences of `"#*` and returns the minimum number of `#`s
732            // required to wrap the text. E.g.
733            // - `abc d` is wrapped as `r"abc d"` (num_of_hashes = 0)
734            // - `abc "d"` is wrapped as `r#"abc "d""#` (num_of_hashes = 1)
735            // - `abc "##d##"` is wrapped as `r###"abc ##"d"##"###` (num_of_hashes = 3)
736            let mut num_of_hashes = 0;
737            let mut count = 0;
738            for ch in data.as_str().chars() {
739                count = match ch {
740                    '"' => 1,
741                    '#' if count > 0 => count + 1,
742                    _ => 0,
743                };
744                num_of_hashes = cmp::max(num_of_hashes, count);
745            }
746
747            // `/// foo` becomes `[doc = r"foo"]`.
748            let delim_span = DelimSpan::from_single(span);
749            let body = TokenTree::Delimited(
750                delim_span,
751                DelimSpacing::new(Spacing::JointHidden, Spacing::Alone),
752                Delimiter::Bracket,
753                [
754                    TokenTree::token_alone(token::Ident(sym::doc, token::IdentIsRaw::No), span),
755                    TokenTree::token_alone(token::Eq, span),
756                    TokenTree::token_alone(
757                        TokenKind::lit(token::StrRaw(num_of_hashes), data, None),
758                        span,
759                    ),
760                ]
761                .into_iter()
762                .collect::<TokenStream>(),
763            );
764
765            if attr_style == AttrStyle::Inner {
766                <[_]>::into_vec(::alloc::boxed::box_new([TokenTree::token_joint(token::Pound,
                    span), TokenTree::token_joint_hidden(token::Bang, span),
                body]))vec![
767                    TokenTree::token_joint(token::Pound, span),
768                    TokenTree::token_joint_hidden(token::Bang, span),
769                    body,
770                ]
771            } else {
772                <[_]>::into_vec(::alloc::boxed::box_new([TokenTree::token_joint_hidden(token::Pound,
                    span), body]))vec![TokenTree::token_joint_hidden(token::Pound, span), body]
773            }
774        }
775    }
776
777    /// Given a `TokenStream` with a `Stream` of only two arguments, return a new `TokenStream`
778    /// separating the two arguments with a comma for diagnostic suggestions.
779    pub fn add_comma(&self) -> Option<(TokenStream, Span)> {
780        // Used to suggest if a user writes `foo!(a b);`
781        let mut suggestion = None;
782        let mut iter = self.0.iter().enumerate().peekable();
783        while let Some((pos, ts)) = iter.next() {
784            if let Some((_, next)) = iter.peek() {
785                let sp = match (&ts, &next) {
786                    (_, TokenTree::Token(Token { kind: token::Comma, .. }, _)) => continue,
787                    (
788                        TokenTree::Token(token_left, Spacing::Alone),
789                        TokenTree::Token(token_right, _),
790                    ) if (token_left.is_non_reserved_ident() || token_left.is_lit())
791                        && (token_right.is_non_reserved_ident() || token_right.is_lit()) =>
792                    {
793                        token_left.span
794                    }
795                    (TokenTree::Delimited(sp, ..), _) => sp.entire(),
796                    _ => continue,
797                };
798                let sp = sp.shrink_to_hi();
799                let comma = TokenTree::token_alone(token::Comma, sp);
800                suggestion = Some((pos, comma, sp));
801            }
802        }
803        if let Some((pos, comma, sp)) = suggestion {
804            let mut new_stream = Vec::with_capacity(self.0.len() + 1);
805            let parts = self.0.split_at(pos + 1);
806            new_stream.extend_from_slice(parts.0);
807            new_stream.push(comma);
808            new_stream.extend_from_slice(parts.1);
809            return Some((TokenStream::new(new_stream), sp));
810        }
811        None
812    }
813}
814
815impl FromIterator<TokenTree> for TokenStream {
816    fn from_iter<I: IntoIterator<Item = TokenTree>>(iter: I) -> Self {
817        TokenStream::new(iter.into_iter().collect::<Vec<TokenTree>>())
818    }
819}
820
821impl<CTX> HashStable<CTX> for TokenStream
822where
823    CTX: crate::HashStableContext,
824{
825    fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) {
826        for sub_tt in self.iter() {
827            sub_tt.hash_stable(hcx, hasher);
828        }
829    }
830}
831
832#[derive(#[automatically_derived]
impl<'t> ::core::clone::Clone for TokenStreamIter<'t> {
    #[inline]
    fn clone(&self) -> TokenStreamIter<'t> {
        TokenStreamIter {
            stream: ::core::clone::Clone::clone(&self.stream),
            index: ::core::clone::Clone::clone(&self.index),
        }
    }
}Clone)]
833pub struct TokenStreamIter<'t> {
834    stream: &'t TokenStream,
835    index: usize,
836}
837
838impl<'t> TokenStreamIter<'t> {
839    fn new(stream: &'t TokenStream) -> Self {
840        TokenStreamIter { stream, index: 0 }
841    }
842
843    // Peeking could be done via `Peekable`, but most iterators need peeking,
844    // and this is simple and avoids the need to use `peekable` and `Peekable`
845    // at all the use sites.
846    pub fn peek(&self) -> Option<&'t TokenTree> {
847        self.stream.0.get(self.index)
848    }
849}
850
851impl<'t> Iterator for TokenStreamIter<'t> {
852    type Item = &'t TokenTree;
853
854    fn next(&mut self) -> Option<&'t TokenTree> {
855        self.stream.0.get(self.index).map(|tree| {
856            self.index += 1;
857            tree
858        })
859    }
860}
861
862#[derive(#[automatically_derived]
impl ::core::clone::Clone for TokenTreeCursor {
    #[inline]
    fn clone(&self) -> TokenTreeCursor {
        TokenTreeCursor {
            stream: ::core::clone::Clone::clone(&self.stream),
            index: ::core::clone::Clone::clone(&self.index),
        }
    }
}Clone, #[automatically_derived]
impl ::core::fmt::Debug for TokenTreeCursor {
    #[inline]
    fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
        ::core::fmt::Formatter::debug_struct_field2_finish(f,
            "TokenTreeCursor", "stream", &self.stream, "index", &&self.index)
    }
}Debug)]
863pub struct TokenTreeCursor {
864    stream: TokenStream,
865    /// Points to the current token tree in the stream. In `TokenCursor::curr`,
866    /// this can be any token tree. In `TokenCursor::stack`, this is always a
867    /// `TokenTree::Delimited`.
868    index: usize,
869}
870
871impl TokenTreeCursor {
872    #[inline]
873    pub fn new(stream: TokenStream) -> Self {
874        TokenTreeCursor { stream, index: 0 }
875    }
876
877    #[inline]
878    pub fn curr(&self) -> Option<&TokenTree> {
879        self.stream.get(self.index)
880    }
881
882    pub fn look_ahead(&self, n: usize) -> Option<&TokenTree> {
883        self.stream.get(self.index + n)
884    }
885
886    #[inline]
887    pub fn bump(&mut self) {
888        self.index += 1;
889    }
890
891    // For skipping ahead in rare circumstances.
892    #[inline]
893    pub fn bump_to_end(&mut self) {
894        self.index = self.stream.len();
895    }
896}
897
898/// A `TokenStream` cursor that produces `Token`s. It's a bit odd that
899/// we (a) lex tokens into a nice tree structure (`TokenStream`), and then (b)
900/// use this type to emit them as a linear sequence. But a linear sequence is
901/// what the parser expects, for the most part.
902#[derive(#[automatically_derived]
impl ::core::clone::Clone for TokenCursor {
    #[inline]
    fn clone(&self) -> TokenCursor {
        TokenCursor {
            curr: ::core::clone::Clone::clone(&self.curr),
            stack: ::core::clone::Clone::clone(&self.stack),
        }
    }
}Clone, #[automatically_derived]
impl ::core::fmt::Debug for TokenCursor {
    #[inline]
    fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
        ::core::fmt::Formatter::debug_struct_field2_finish(f, "TokenCursor",
            "curr", &self.curr, "stack", &&self.stack)
    }
}Debug)]
903pub struct TokenCursor {
904    // Cursor for the current (innermost) token stream. The index within the
905    // cursor can point to any token tree in the stream (or one past the end).
906    // The delimiters for this token stream are found in `self.stack.last()`;
907    // if that is `None` we are in the outermost token stream which never has
908    // delimiters.
909    pub curr: TokenTreeCursor,
910
911    // Token streams surrounding the current one. The index within each cursor
912    // always points to a `TokenTree::Delimited`.
913    pub stack: Vec<TokenTreeCursor>,
914}
915
916impl TokenCursor {
917    pub fn next(&mut self) -> (Token, Spacing) {
918        self.inlined_next()
919    }
920
921    /// This always-inlined version should only be used on hot code paths.
922    #[inline(always)]
923    pub fn inlined_next(&mut self) -> (Token, Spacing) {
924        loop {
925            // FIXME: we currently don't return `Delimiter::Invisible` open/close delims. To fix
926            // #67062 we will need to, whereupon the `delim != Delimiter::Invisible` conditions
927            // below can be removed.
928            if let Some(tree) = self.curr.curr() {
929                match tree {
930                    &TokenTree::Token(token, spacing) => {
931                        if true {
    if !!token.kind.is_delim() {
        ::core::panicking::panic("assertion failed: !token.kind.is_delim()")
    };
};debug_assert!(!token.kind.is_delim());
932                        let res = (token, spacing);
933                        self.curr.bump();
934                        return res;
935                    }
936                    &TokenTree::Delimited(sp, spacing, delim, ref tts) => {
937                        let trees = TokenTreeCursor::new(tts.clone());
938                        self.stack.push(mem::replace(&mut self.curr, trees));
939                        if !delim.skip() {
940                            return (Token::new(delim.as_open_token_kind(), sp.open), spacing.open);
941                        }
942                        // No open delimiter to return; continue on to the next iteration.
943                    }
944                };
945            } else if let Some(parent) = self.stack.pop() {
946                // We have exhausted this token stream. Move back to its parent token stream.
947                let Some(&TokenTree::Delimited(span, spacing, delim, _)) = parent.curr() else {
948                    { ::core::panicking::panic_fmt(format_args!("parent should be Delimited")); }panic!("parent should be Delimited")
949                };
950                self.curr = parent;
951                self.curr.bump(); // move past the `Delimited`
952                if !delim.skip() {
953                    return (Token::new(delim.as_close_token_kind(), span.close), spacing.close);
954                }
955                // No close delimiter to return; continue on to the next iteration.
956            } else {
957                // We have exhausted the outermost token stream. The use of
958                // `Spacing::Alone` is arbitrary and immaterial, because the
959                // `Eof` token's spacing is never used.
960                return (Token::new(token::Eof, DUMMY_SP), Spacing::Alone);
961            }
962        }
963    }
964}
965
966#[derive(#[automatically_derived]
impl ::core::fmt::Debug for DelimSpan {
    #[inline]
    fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
        ::core::fmt::Formatter::debug_struct_field2_finish(f, "DelimSpan",
            "open", &self.open, "close", &&self.close)
    }
}Debug, #[automatically_derived]
impl ::core::marker::Copy for DelimSpan { }Copy, #[automatically_derived]
impl ::core::clone::Clone for DelimSpan {
    #[inline]
    fn clone(&self) -> DelimSpan {
        let _: ::core::clone::AssertParamIsClone<Span>;
        *self
    }
}Clone, #[automatically_derived]
impl ::core::cmp::PartialEq for DelimSpan {
    #[inline]
    fn eq(&self, other: &DelimSpan) -> bool {
        self.open == other.open && self.close == other.close
    }
}PartialEq, #[automatically_derived]
impl ::core::cmp::Eq for DelimSpan {
    #[inline]
    #[doc(hidden)]
    #[coverage(off)]
    fn assert_receiver_is_total_eq(&self) -> () {
        let _: ::core::cmp::AssertParamIsEq<Span>;
    }
}Eq, #[automatically_derived]
impl ::core::hash::Hash for DelimSpan {
    #[inline]
    fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () {
        ::core::hash::Hash::hash(&self.open, state);
        ::core::hash::Hash::hash(&self.close, state)
    }
}Hash)]
967#[derive(const _: () =
    {
        impl<__E: ::rustc_span::SpanEncoder> ::rustc_serialize::Encodable<__E>
            for DelimSpan {
            fn encode(&self, __encoder: &mut __E) {
                match *self {
                    DelimSpan { open: ref __binding_0, close: ref __binding_1 }
                        => {
                        ::rustc_serialize::Encodable::<__E>::encode(__binding_0,
                            __encoder);
                        ::rustc_serialize::Encodable::<__E>::encode(__binding_1,
                            __encoder);
                    }
                }
            }
        }
    };Encodable, const _: () =
    {
        impl<__D: ::rustc_span::SpanDecoder> ::rustc_serialize::Decodable<__D>
            for DelimSpan {
            fn decode(__decoder: &mut __D) -> Self {
                DelimSpan {
                    open: ::rustc_serialize::Decodable::decode(__decoder),
                    close: ::rustc_serialize::Decodable::decode(__decoder),
                }
            }
        }
    };Decodable, const _: () =
    {
        impl<__CTX> ::rustc_data_structures::stable_hasher::HashStable<__CTX>
            for DelimSpan where __CTX: crate::HashStableContext {
            #[inline]
            fn hash_stable(&self, __hcx: &mut __CTX,
                __hasher:
                    &mut ::rustc_data_structures::stable_hasher::StableHasher) {
                match *self {
                    DelimSpan { open: ref __binding_0, close: ref __binding_1 }
                        => {
                        { __binding_0.hash_stable(__hcx, __hasher); }
                        { __binding_1.hash_stable(__hcx, __hasher); }
                    }
                }
            }
        }
    };HashStable_Generic, const _: () =
    {
        impl<'__ast, __V> crate::visit::Walkable<'__ast, __V> for DelimSpan
            where __V: crate::visit::Visitor<'__ast> {
            fn walk_ref(&'__ast self, __visitor: &mut __V) -> __V::Result {
                match *self {
                    DelimSpan { open: ref __binding_0, close: ref __binding_1 }
                        => {
                        {
                            match ::rustc_ast_ir::visit::VisitorResult::branch(crate::visit::Visitable::visit(__binding_0,
                                        __visitor, ())) {
                                core::ops::ControlFlow::Continue(()) =>
                                    (),
                                    #[allow(unreachable_code)]
                                    core::ops::ControlFlow::Break(r) => {
                                    return ::rustc_ast_ir::visit::VisitorResult::from_residual(r);
                                }
                            }
                        }
                        {
                            match ::rustc_ast_ir::visit::VisitorResult::branch(crate::visit::Visitable::visit(__binding_1,
                                        __visitor, ())) {
                                core::ops::ControlFlow::Continue(()) =>
                                    (),
                                    #[allow(unreachable_code)]
                                    core::ops::ControlFlow::Break(r) => {
                                    return ::rustc_ast_ir::visit::VisitorResult::from_residual(r);
                                }
                            }
                        }
                    }
                }
                <__V::Result as rustc_ast_ir::visit::VisitorResult>::output()
            }
        }
        impl<__V> crate::mut_visit::MutWalkable<__V> for DelimSpan where
            __V: crate::mut_visit::MutVisitor {
            fn walk_mut(&mut self, __visitor: &mut __V) {
                match *self {
                    DelimSpan {
                        open: ref mut __binding_0, close: ref mut __binding_1 } => {
                        {
                            crate::mut_visit::MutVisitable::visit_mut(__binding_0,
                                __visitor, ())
                        }
                        {
                            crate::mut_visit::MutVisitable::visit_mut(__binding_1,
                                __visitor, ())
                        }
                    }
                }
            }
        }
    };Walkable)]
968pub struct DelimSpan {
969    pub open: Span,
970    pub close: Span,
971}
972
973impl DelimSpan {
974    pub fn from_single(sp: Span) -> Self {
975        DelimSpan { open: sp, close: sp }
976    }
977
978    pub fn from_pair(open: Span, close: Span) -> Self {
979        DelimSpan { open, close }
980    }
981
982    pub fn dummy() -> Self {
983        Self::from_single(DUMMY_SP)
984    }
985
986    pub fn entire(self) -> Span {
987        self.open.with_hi(self.close.hi())
988    }
989}
990
991#[derive(#[automatically_derived]
impl ::core::marker::Copy for DelimSpacing { }Copy, #[automatically_derived]
impl ::core::clone::Clone for DelimSpacing {
    #[inline]
    fn clone(&self) -> DelimSpacing {
        let _: ::core::clone::AssertParamIsClone<Spacing>;
        *self
    }
}Clone, #[automatically_derived]
impl ::core::fmt::Debug for DelimSpacing {
    #[inline]
    fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
        ::core::fmt::Formatter::debug_struct_field2_finish(f, "DelimSpacing",
            "open", &self.open, "close", &&self.close)
    }
}Debug, #[automatically_derived]
impl ::core::cmp::PartialEq for DelimSpacing {
    #[inline]
    fn eq(&self, other: &DelimSpacing) -> bool {
        self.open == other.open && self.close == other.close
    }
}PartialEq, #[automatically_derived]
impl ::core::cmp::Eq for DelimSpacing {
    #[inline]
    #[doc(hidden)]
    #[coverage(off)]
    fn assert_receiver_is_total_eq(&self) -> () {
        let _: ::core::cmp::AssertParamIsEq<Spacing>;
    }
}Eq, #[automatically_derived]
impl ::core::hash::Hash for DelimSpacing {
    #[inline]
    fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () {
        ::core::hash::Hash::hash(&self.open, state);
        ::core::hash::Hash::hash(&self.close, state)
    }
}Hash, const _: () =
    {
        impl<__E: ::rustc_span::SpanEncoder> ::rustc_serialize::Encodable<__E>
            for DelimSpacing {
            fn encode(&self, __encoder: &mut __E) {
                match *self {
                    DelimSpacing { open: ref __binding_0, close: ref __binding_1
                        } => {
                        ::rustc_serialize::Encodable::<__E>::encode(__binding_0,
                            __encoder);
                        ::rustc_serialize::Encodable::<__E>::encode(__binding_1,
                            __encoder);
                    }
                }
            }
        }
    };Encodable, const _: () =
    {
        impl<__D: ::rustc_span::SpanDecoder> ::rustc_serialize::Decodable<__D>
            for DelimSpacing {
            fn decode(__decoder: &mut __D) -> Self {
                DelimSpacing {
                    open: ::rustc_serialize::Decodable::decode(__decoder),
                    close: ::rustc_serialize::Decodable::decode(__decoder),
                }
            }
        }
    };Decodable, const _: () =
    {
        impl<__CTX> ::rustc_data_structures::stable_hasher::HashStable<__CTX>
            for DelimSpacing where __CTX: crate::HashStableContext {
            #[inline]
            fn hash_stable(&self, __hcx: &mut __CTX,
                __hasher:
                    &mut ::rustc_data_structures::stable_hasher::StableHasher) {
                match *self {
                    DelimSpacing { open: ref __binding_0, close: ref __binding_1
                        } => {
                        { __binding_0.hash_stable(__hcx, __hasher); }
                        { __binding_1.hash_stable(__hcx, __hasher); }
                    }
                }
            }
        }
    };HashStable_Generic)]
992pub struct DelimSpacing {
993    pub open: Spacing,
994    pub close: Spacing,
995}
996
997impl DelimSpacing {
998    pub fn new(open: Spacing, close: Spacing) -> DelimSpacing {
999        DelimSpacing { open, close }
1000    }
1001}
1002
1003// Some types are used a lot. Make sure they don't unintentionally get bigger.
1004#[cfg(target_pointer_width = "64")]
1005mod size_asserts {
1006    use rustc_data_structures::static_assert_size;
1007
1008    use super::*;
1009    // tidy-alphabetical-start
1010    const _: [(); 8] = [(); ::std::mem::size_of::<AttrTokenStream>()];static_assert_size!(AttrTokenStream, 8);
1011    const _: [(); 32] = [(); ::std::mem::size_of::<AttrTokenTree>()];static_assert_size!(AttrTokenTree, 32);
1012    const _: [(); 8] = [(); ::std::mem::size_of::<LazyAttrTokenStream>()];static_assert_size!(LazyAttrTokenStream, 8);
1013    const _: [(); 88] = [(); ::std::mem::size_of::<LazyAttrTokenStreamInner>()];static_assert_size!(LazyAttrTokenStreamInner, 88);
1014    const _: [(); 8] = [(); ::std::mem::size_of::<Option<LazyAttrTokenStream>>()];static_assert_size!(Option<LazyAttrTokenStream>, 8); // must be small, used in many AST nodes
1015    const _: [(); 8] = [(); ::std::mem::size_of::<TokenStream>()];static_assert_size!(TokenStream, 8);
1016    const _: [(); 32] = [(); ::std::mem::size_of::<TokenTree>()];static_assert_size!(TokenTree, 32);
1017    // tidy-alphabetical-end
1018}