use std::borrow::Cow;
use std::hash::Hash;
use std::ops::Range;
use std::sync::Arc;
use std::{cmp, fmt, iter, mem};

use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::sync;
use rustc_macros::{Decodable, Encodable, HashStable_Generic, Walkable};
use rustc_serialize::{Decodable, Encodable};
use rustc_span::{DUMMY_SP, Span, SpanDecoder, SpanEncoder, Symbol, sym};
use thin_vec::ThinVec;

use crate::ast::AttrStyle;
use crate::ast_traits::{HasAttrs, HasTokens};
use crate::token::{self, Delimiter, Token, TokenKind};
use crate::{AttrVec, Attribute};
24
25#[derive(#[automatically_derived]
impl ::core::fmt::Debug for TokenTree {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
match self {
TokenTree::Token(__self_0, __self_1) =>
::core::fmt::Formatter::debug_tuple_field2_finish(f, "Token",
__self_0, &__self_1),
TokenTree::Delimited(__self_0, __self_1, __self_2, __self_3) =>
::core::fmt::Formatter::debug_tuple_field4_finish(f,
"Delimited", __self_0, __self_1, __self_2, &__self_3),
}
}
}Debug, #[automatically_derived]
impl ::core::clone::Clone for TokenTree {
#[inline]
fn clone(&self) -> TokenTree {
match self {
TokenTree::Token(__self_0, __self_1) =>
TokenTree::Token(::core::clone::Clone::clone(__self_0),
::core::clone::Clone::clone(__self_1)),
TokenTree::Delimited(__self_0, __self_1, __self_2, __self_3) =>
TokenTree::Delimited(::core::clone::Clone::clone(__self_0),
::core::clone::Clone::clone(__self_1),
::core::clone::Clone::clone(__self_2),
::core::clone::Clone::clone(__self_3)),
}
}
}Clone, #[automatically_derived]
impl ::core::cmp::PartialEq for TokenTree {
#[inline]
fn eq(&self, other: &TokenTree) -> bool {
let __self_discr = ::core::intrinsics::discriminant_value(self);
let __arg1_discr = ::core::intrinsics::discriminant_value(other);
__self_discr == __arg1_discr &&
match (self, other) {
(TokenTree::Token(__self_0, __self_1),
TokenTree::Token(__arg1_0, __arg1_1)) =>
__self_0 == __arg1_0 && __self_1 == __arg1_1,
(TokenTree::Delimited(__self_0, __self_1, __self_2, __self_3),
TokenTree::Delimited(__arg1_0, __arg1_1, __arg1_2,
__arg1_3)) =>
__self_0 == __arg1_0 && __self_1 == __arg1_1 &&
__self_2 == __arg1_2 && __self_3 == __arg1_3,
_ => unsafe { ::core::intrinsics::unreachable() }
}
}
}PartialEq, #[automatically_derived]
impl ::core::cmp::Eq for TokenTree {
#[inline]
#[doc(hidden)]
#[coverage(off)]
fn assert_receiver_is_total_eq(&self) -> () {
let _: ::core::cmp::AssertParamIsEq<Token>;
let _: ::core::cmp::AssertParamIsEq<Spacing>;
let _: ::core::cmp::AssertParamIsEq<DelimSpan>;
let _: ::core::cmp::AssertParamIsEq<DelimSpacing>;
let _: ::core::cmp::AssertParamIsEq<Delimiter>;
let _: ::core::cmp::AssertParamIsEq<TokenStream>;
}
}Eq, #[automatically_derived]
impl ::core::hash::Hash for TokenTree {
#[inline]
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () {
let __self_discr = ::core::intrinsics::discriminant_value(self);
::core::hash::Hash::hash(&__self_discr, state);
match self {
TokenTree::Token(__self_0, __self_1) => {
::core::hash::Hash::hash(__self_0, state);
::core::hash::Hash::hash(__self_1, state)
}
TokenTree::Delimited(__self_0, __self_1, __self_2, __self_3) => {
::core::hash::Hash::hash(__self_0, state);
::core::hash::Hash::hash(__self_1, state);
::core::hash::Hash::hash(__self_2, state);
::core::hash::Hash::hash(__self_3, state)
}
}
}
}Hash, const _: () =
{
impl<__E: ::rustc_span::SpanEncoder> ::rustc_serialize::Encodable<__E>
for TokenTree {
fn encode(&self, __encoder: &mut __E) {
let disc =
match *self {
TokenTree::Token(ref __binding_0, ref __binding_1) => {
0usize
}
TokenTree::Delimited(ref __binding_0, ref __binding_1,
ref __binding_2, ref __binding_3) => {
1usize
}
};
::rustc_serialize::Encoder::emit_u8(__encoder, disc as u8);
match *self {
TokenTree::Token(ref __binding_0, ref __binding_1) => {
::rustc_serialize::Encodable::<__E>::encode(__binding_0,
__encoder);
::rustc_serialize::Encodable::<__E>::encode(__binding_1,
__encoder);
}
TokenTree::Delimited(ref __binding_0, ref __binding_1,
ref __binding_2, ref __binding_3) => {
::rustc_serialize::Encodable::<__E>::encode(__binding_0,
__encoder);
::rustc_serialize::Encodable::<__E>::encode(__binding_1,
__encoder);
::rustc_serialize::Encodable::<__E>::encode(__binding_2,
__encoder);
::rustc_serialize::Encodable::<__E>::encode(__binding_3,
__encoder);
}
}
}
}
};Encodable, const _: () =
{
impl<__D: ::rustc_span::SpanDecoder> ::rustc_serialize::Decodable<__D>
for TokenTree {
fn decode(__decoder: &mut __D) -> Self {
match ::rustc_serialize::Decoder::read_u8(__decoder) as usize
{
0usize => {
TokenTree::Token(::rustc_serialize::Decodable::decode(__decoder),
::rustc_serialize::Decodable::decode(__decoder))
}
1usize => {
TokenTree::Delimited(::rustc_serialize::Decodable::decode(__decoder),
::rustc_serialize::Decodable::decode(__decoder),
::rustc_serialize::Decodable::decode(__decoder),
::rustc_serialize::Decodable::decode(__decoder))
}
n => {
::core::panicking::panic_fmt(format_args!("invalid enum variant tag while decoding `TokenTree`, expected 0..2, actual {0}",
n));
}
}
}
}
};Decodable, const _: () =
{
impl<__CTX> ::rustc_data_structures::stable_hasher::HashStable<__CTX>
for TokenTree where __CTX: crate::HashStableContext {
#[inline]
fn hash_stable(&self, __hcx: &mut __CTX,
__hasher:
&mut ::rustc_data_structures::stable_hasher::StableHasher) {
::std::mem::discriminant(self).hash_stable(__hcx, __hasher);
match *self {
TokenTree::Token(ref __binding_0, ref __binding_1) => {
{ __binding_0.hash_stable(__hcx, __hasher); }
{ __binding_1.hash_stable(__hcx, __hasher); }
}
TokenTree::Delimited(ref __binding_0, ref __binding_1,
ref __binding_2, ref __binding_3) => {
{ __binding_0.hash_stable(__hcx, __hasher); }
{ __binding_1.hash_stable(__hcx, __hasher); }
{ __binding_2.hash_stable(__hcx, __hasher); }
{ __binding_3.hash_stable(__hcx, __hasher); }
}
}
}
}
};HashStable_Generic)]
27pub enum TokenTree {
28 Token(Token, Spacing),
31 Delimited(DelimSpan, DelimSpacing, Delimiter, TokenStream),
33}
34
35fn _dummy()
37where
38 Token: sync::DynSend + sync::DynSync,
39 Spacing: sync::DynSend + sync::DynSync,
40 DelimSpan: sync::DynSend + sync::DynSync,
41 Delimiter: sync::DynSend + sync::DynSync,
42 TokenStream: sync::DynSend + sync::DynSync,
43{
44}
45
46impl TokenTree {
47 pub fn eq_unspanned(&self, other: &TokenTree) -> bool {
49 match (self, other) {
50 (TokenTree::Token(token, _), TokenTree::Token(token2, _)) => token.kind == token2.kind,
51 (TokenTree::Delimited(.., delim, tts), TokenTree::Delimited(.., delim2, tts2)) => {
52 delim == delim2 && tts.iter().eq_by(tts2.iter(), |a, b| a.eq_unspanned(b))
53 }
54 _ => false,
55 }
56 }
57
58 pub fn span(&self) -> Span {
60 match self {
61 TokenTree::Token(token, _) => token.span,
62 TokenTree::Delimited(sp, ..) => sp.entire(),
63 }
64 }
65
66 pub fn token_alone(kind: TokenKind, span: Span) -> TokenTree {
68 TokenTree::Token(Token::new(kind, span), Spacing::Alone)
69 }
70
71 pub fn token_joint(kind: TokenKind, span: Span) -> TokenTree {
73 TokenTree::Token(Token::new(kind, span), Spacing::Joint)
74 }
75
76 pub fn token_joint_hidden(kind: TokenKind, span: Span) -> TokenTree {
78 TokenTree::Token(Token::new(kind, span), Spacing::JointHidden)
79 }
80
81 pub fn uninterpolate(&self) -> Cow<'_, TokenTree> {
82 match self {
83 TokenTree::Token(token, spacing) => match token.uninterpolate() {
84 Cow::Owned(token) => Cow::Owned(TokenTree::Token(token, *spacing)),
85 Cow::Borrowed(_) => Cow::Borrowed(self),
86 },
87 _ => Cow::Borrowed(self),
88 }
89 }
90}
91
92#[derive(#[automatically_derived]
impl ::core::clone::Clone for LazyAttrTokenStream {
#[inline]
fn clone(&self) -> LazyAttrTokenStream {
LazyAttrTokenStream(::core::clone::Clone::clone(&self.0))
}
}Clone)]
95pub struct LazyAttrTokenStream(Arc<LazyAttrTokenStreamInner>);
96
97impl LazyAttrTokenStream {
98 pub fn new_direct(stream: AttrTokenStream) -> LazyAttrTokenStream {
99 LazyAttrTokenStream(Arc::new(LazyAttrTokenStreamInner::Direct(stream)))
100 }
101
102 pub fn new_pending(
103 start_token: (Token, Spacing),
104 cursor_snapshot: TokenCursor,
105 num_calls: u32,
106 break_last_token: u32,
107 node_replacements: ThinVec<NodeReplacement>,
108 ) -> LazyAttrTokenStream {
109 LazyAttrTokenStream(Arc::new(LazyAttrTokenStreamInner::Pending {
110 start_token,
111 cursor_snapshot,
112 num_calls,
113 break_last_token,
114 node_replacements,
115 }))
116 }
117
118 pub fn to_attr_token_stream(&self) -> AttrTokenStream {
119 self.0.to_attr_token_stream()
120 }
121}
122
123impl fmt::Debug for LazyAttrTokenStream {
124 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
125 f.write_fmt(format_args!("LazyAttrTokenStream({0:?})",
self.to_attr_token_stream()))write!(f, "LazyAttrTokenStream({:?})", self.to_attr_token_stream())
126 }
127}
128
129impl<S: SpanEncoder> Encodable<S> for LazyAttrTokenStream {
130 fn encode(&self, _s: &mut S) {
131 {
::core::panicking::panic_fmt(format_args!("Attempted to encode LazyAttrTokenStream"));
};panic!("Attempted to encode LazyAttrTokenStream");
132 }
133}
134
135impl<D: SpanDecoder> Decodable<D> for LazyAttrTokenStream {
136 fn decode(_d: &mut D) -> Self {
137 {
::core::panicking::panic_fmt(format_args!("Attempted to decode LazyAttrTokenStream"));
};panic!("Attempted to decode LazyAttrTokenStream");
138 }
139}
140
141impl<CTX> HashStable<CTX> for LazyAttrTokenStream {
142 fn hash_stable(&self, _hcx: &mut CTX, _hasher: &mut StableHasher) {
143 {
::core::panicking::panic_fmt(format_args!("Attempted to compute stable hash for LazyAttrTokenStream"));
};panic!("Attempted to compute stable hash for LazyAttrTokenStream");
144 }
145}
146
/// A parser-relative range of token positions. Converted to a node-relative
/// range via `NodeRange::new`.
#[derive(Clone, Debug)]
pub struct ParserRange(pub Range<u32>);
150
/// A node-relative range of token positions, i.e. a `ParserRange` offset by
/// the owning node's start position (see `NodeRange::new`).
#[derive(Clone, Debug)]
pub struct NodeRange(pub Range<u32>);
156
157pub type ParserReplacement = (ParserRange, Option<AttrsTarget>);
177
178pub type NodeReplacement = (NodeRange, Option<AttrsTarget>);
180
181impl NodeRange {
182 pub fn new(ParserRange(parser_range): ParserRange, start_pos: u32) -> NodeRange {
192 if !!parser_range.is_empty() {
::core::panicking::panic("assertion failed: !parser_range.is_empty()")
};assert!(!parser_range.is_empty());
193 if !(parser_range.start >= start_pos) {
::core::panicking::panic("assertion failed: parser_range.start >= start_pos")
};assert!(parser_range.start >= start_pos);
194 NodeRange((parser_range.start - start_pos)..(parser_range.end - start_pos))
195 }
196}
197
198enum LazyAttrTokenStreamInner {
199 Direct(AttrTokenStream),
201
202 Pending {
214 start_token: (Token, Spacing),
215 cursor_snapshot: TokenCursor,
216 num_calls: u32,
217 break_last_token: u32,
218 node_replacements: ThinVec<NodeReplacement>,
219 },
220}
221
222impl LazyAttrTokenStreamInner {
223 fn to_attr_token_stream(&self) -> AttrTokenStream {
224 match self {
225 LazyAttrTokenStreamInner::Direct(stream) => stream.clone(),
226 LazyAttrTokenStreamInner::Pending {
227 start_token,
228 cursor_snapshot,
229 num_calls,
230 break_last_token,
231 node_replacements,
232 } => {
233 let mut cursor_snapshot = cursor_snapshot.clone();
239 let tokens = iter::once(FlatToken::Token(*start_token))
240 .chain(iter::repeat_with(|| FlatToken::Token(cursor_snapshot.next())))
241 .take(*num_calls as usize);
242
243 if node_replacements.is_empty() {
244 make_attr_token_stream(tokens, *break_last_token)
245 } else {
246 let mut tokens: Vec<_> = tokens.collect();
247 let mut node_replacements = node_replacements.to_vec();
248 node_replacements.sort_by_key(|(range, _)| range.0.start);
249
250 #[cfg(debug_assertions)]
251 for [(node_range, tokens), (next_node_range, next_tokens)] in
252 node_replacements.array_windows()
253 {
254 if !(node_range.0.end <= next_node_range.0.start ||
node_range.0.end >= next_node_range.0.end) {
{
::core::panicking::panic_fmt(format_args!("Node ranges should be disjoint or nested: ({0:?}, {1:?}) ({2:?}, {3:?})",
node_range, tokens, next_node_range, next_tokens));
}
};assert!(
255 node_range.0.end <= next_node_range.0.start
256 || node_range.0.end >= next_node_range.0.end,
257 "Node ranges should be disjoint or nested: ({:?}, {:?}) ({:?}, {:?})",
258 node_range,
259 tokens,
260 next_node_range,
261 next_tokens,
262 );
263 }
264
265 for (node_range, target) in node_replacements.into_iter().rev() {
279 if !!node_range.0.is_empty() {
{
::core::panicking::panic_fmt(format_args!("Cannot replace an empty node range: {0:?}",
node_range.0));
}
};assert!(
280 !node_range.0.is_empty(),
281 "Cannot replace an empty node range: {:?}",
282 node_range.0
283 );
284
285 let target_len = target.is_some() as usize;
290 tokens.splice(
291 (node_range.0.start as usize)..(node_range.0.end as usize),
292 target.into_iter().map(|target| FlatToken::AttrsTarget(target)).chain(
293 iter::repeat(FlatToken::Empty)
294 .take(node_range.0.len() - target_len),
295 ),
296 );
297 }
298 make_attr_token_stream(tokens.into_iter(), *break_last_token)
299 }
300 }
301 }
302 }
303}
304
305#[derive(#[automatically_derived]
impl ::core::fmt::Debug for FlatToken {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
match self {
FlatToken::Token(__self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f, "Token",
&__self_0),
FlatToken::AttrsTarget(__self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f,
"AttrsTarget", &__self_0),
FlatToken::Empty => ::core::fmt::Formatter::write_str(f, "Empty"),
}
}
}Debug, #[automatically_derived]
impl ::core::clone::Clone for FlatToken {
#[inline]
fn clone(&self) -> FlatToken {
match self {
FlatToken::Token(__self_0) =>
FlatToken::Token(::core::clone::Clone::clone(__self_0)),
FlatToken::AttrsTarget(__self_0) =>
FlatToken::AttrsTarget(::core::clone::Clone::clone(__self_0)),
FlatToken::Empty => FlatToken::Empty,
}
}
}Clone)]
311enum FlatToken {
312 Token((Token, Spacing)),
315 AttrsTarget(AttrsTarget),
319 Empty,
323}
324
325#[derive(#[automatically_derived]
impl ::core::clone::Clone for AttrTokenStream {
#[inline]
fn clone(&self) -> AttrTokenStream {
AttrTokenStream(::core::clone::Clone::clone(&self.0))
}
}Clone, #[automatically_derived]
impl ::core::fmt::Debug for AttrTokenStream {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
::core::fmt::Formatter::debug_tuple_field1_finish(f,
"AttrTokenStream", &&self.0)
}
}Debug, #[automatically_derived]
impl ::core::default::Default for AttrTokenStream {
#[inline]
fn default() -> AttrTokenStream {
AttrTokenStream(::core::default::Default::default())
}
}Default, const _: () =
{
impl<__E: ::rustc_span::SpanEncoder> ::rustc_serialize::Encodable<__E>
for AttrTokenStream {
fn encode(&self, __encoder: &mut __E) {
match *self {
AttrTokenStream(ref __binding_0) => {
::rustc_serialize::Encodable::<__E>::encode(__binding_0,
__encoder);
}
}
}
}
};Encodable, const _: () =
{
impl<__D: ::rustc_span::SpanDecoder> ::rustc_serialize::Decodable<__D>
for AttrTokenStream {
fn decode(__decoder: &mut __D) -> Self {
AttrTokenStream(::rustc_serialize::Decodable::decode(__decoder))
}
}
};Decodable)]
330pub struct AttrTokenStream(pub Arc<Vec<AttrTokenTree>>);
331
332fn make_attr_token_stream(
336 iter: impl Iterator<Item = FlatToken>,
337 break_last_token: u32,
338) -> AttrTokenStream {
339 #[derive(#[automatically_derived]
impl ::core::fmt::Debug for FrameData {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
::core::fmt::Formatter::debug_struct_field2_finish(f, "FrameData",
"open_delim_sp", &self.open_delim_sp, "inner", &&self.inner)
}
}Debug)]
340 struct FrameData {
341 open_delim_sp: Option<(Delimiter, Span, Spacing)>,
343 inner: Vec<AttrTokenTree>,
344 }
345 let mut stack_top = FrameData { open_delim_sp: None, inner: ::alloc::vec::Vec::new()vec![] };
347 let mut stack_rest = ::alloc::vec::Vec::new()vec![];
348 for flat_token in iter {
349 match flat_token {
350 FlatToken::Token((token @ Token { kind, span }, spacing)) => {
351 if let Some(delim) = kind.open_delim() {
352 stack_rest.push(mem::replace(
353 &mut stack_top,
354 FrameData { open_delim_sp: Some((delim, span, spacing)), inner: ::alloc::vec::Vec::new()vec![] },
355 ));
356 } else if let Some(delim) = kind.close_delim() {
357 let frame_data = mem::replace(&mut stack_top, stack_rest.pop().unwrap());
358 let (open_delim, open_sp, open_spacing) = frame_data.open_delim_sp.unwrap();
359 if !open_delim.eq_ignoring_invisible_origin(&delim) {
{
::core::panicking::panic_fmt(format_args!("Mismatched open/close delims: open={0:?} close={1:?}",
open_delim, span));
}
};assert!(
360 open_delim.eq_ignoring_invisible_origin(&delim),
361 "Mismatched open/close delims: open={open_delim:?} close={span:?}"
362 );
363 let dspan = DelimSpan::from_pair(open_sp, span);
364 let dspacing = DelimSpacing::new(open_spacing, spacing);
365 let stream = AttrTokenStream::new(frame_data.inner);
366 let delimited = AttrTokenTree::Delimited(dspan, dspacing, delim, stream);
367 stack_top.inner.push(delimited);
368 } else {
369 stack_top.inner.push(AttrTokenTree::Token(token, spacing))
370 }
371 }
372 FlatToken::AttrsTarget(target) => {
373 stack_top.inner.push(AttrTokenTree::AttrsTarget(target))
374 }
375 FlatToken::Empty => {}
376 }
377 }
378
379 if break_last_token > 0 {
380 let last_token = stack_top.inner.pop().unwrap();
381 if let AttrTokenTree::Token(last_token, spacing) = last_token {
382 let (unglued, _) = last_token.kind.break_two_token_op(break_last_token).unwrap();
383
384 let mut first_span = last_token.span.shrink_to_lo();
386 first_span =
387 first_span.with_hi(first_span.lo() + rustc_span::BytePos(break_last_token));
388
389 stack_top.inner.push(AttrTokenTree::Token(Token::new(unglued, first_span), spacing));
390 } else {
391 {
::core::panicking::panic_fmt(format_args!("Unexpected last token {0:?}",
last_token));
}panic!("Unexpected last token {last_token:?}")
392 }
393 }
394 AttrTokenStream::new(stack_top.inner)
395}
396
397#[derive(#[automatically_derived]
impl ::core::clone::Clone for AttrTokenTree {
#[inline]
fn clone(&self) -> AttrTokenTree {
match self {
AttrTokenTree::Token(__self_0, __self_1) =>
AttrTokenTree::Token(::core::clone::Clone::clone(__self_0),
::core::clone::Clone::clone(__self_1)),
AttrTokenTree::Delimited(__self_0, __self_1, __self_2, __self_3)
=>
AttrTokenTree::Delimited(::core::clone::Clone::clone(__self_0),
::core::clone::Clone::clone(__self_1),
::core::clone::Clone::clone(__self_2),
::core::clone::Clone::clone(__self_3)),
AttrTokenTree::AttrsTarget(__self_0) =>
AttrTokenTree::AttrsTarget(::core::clone::Clone::clone(__self_0)),
}
}
}Clone, #[automatically_derived]
impl ::core::fmt::Debug for AttrTokenTree {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
match self {
AttrTokenTree::Token(__self_0, __self_1) =>
::core::fmt::Formatter::debug_tuple_field2_finish(f, "Token",
__self_0, &__self_1),
AttrTokenTree::Delimited(__self_0, __self_1, __self_2, __self_3)
=>
::core::fmt::Formatter::debug_tuple_field4_finish(f,
"Delimited", __self_0, __self_1, __self_2, &__self_3),
AttrTokenTree::AttrsTarget(__self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f,
"AttrsTarget", &__self_0),
}
}
}Debug, const _: () =
{
impl<__E: ::rustc_span::SpanEncoder> ::rustc_serialize::Encodable<__E>
for AttrTokenTree {
fn encode(&self, __encoder: &mut __E) {
let disc =
match *self {
AttrTokenTree::Token(ref __binding_0, ref __binding_1) => {
0usize
}
AttrTokenTree::Delimited(ref __binding_0, ref __binding_1,
ref __binding_2, ref __binding_3) => {
1usize
}
AttrTokenTree::AttrsTarget(ref __binding_0) => { 2usize }
};
::rustc_serialize::Encoder::emit_u8(__encoder, disc as u8);
match *self {
AttrTokenTree::Token(ref __binding_0, ref __binding_1) => {
::rustc_serialize::Encodable::<__E>::encode(__binding_0,
__encoder);
::rustc_serialize::Encodable::<__E>::encode(__binding_1,
__encoder);
}
AttrTokenTree::Delimited(ref __binding_0, ref __binding_1,
ref __binding_2, ref __binding_3) => {
::rustc_serialize::Encodable::<__E>::encode(__binding_0,
__encoder);
::rustc_serialize::Encodable::<__E>::encode(__binding_1,
__encoder);
::rustc_serialize::Encodable::<__E>::encode(__binding_2,
__encoder);
::rustc_serialize::Encodable::<__E>::encode(__binding_3,
__encoder);
}
AttrTokenTree::AttrsTarget(ref __binding_0) => {
::rustc_serialize::Encodable::<__E>::encode(__binding_0,
__encoder);
}
}
}
}
};Encodable, const _: () =
{
impl<__D: ::rustc_span::SpanDecoder> ::rustc_serialize::Decodable<__D>
for AttrTokenTree {
fn decode(__decoder: &mut __D) -> Self {
match ::rustc_serialize::Decoder::read_u8(__decoder) as usize
{
0usize => {
AttrTokenTree::Token(::rustc_serialize::Decodable::decode(__decoder),
::rustc_serialize::Decodable::decode(__decoder))
}
1usize => {
AttrTokenTree::Delimited(::rustc_serialize::Decodable::decode(__decoder),
::rustc_serialize::Decodable::decode(__decoder),
::rustc_serialize::Decodable::decode(__decoder),
::rustc_serialize::Decodable::decode(__decoder))
}
2usize => {
AttrTokenTree::AttrsTarget(::rustc_serialize::Decodable::decode(__decoder))
}
n => {
::core::panicking::panic_fmt(format_args!("invalid enum variant tag while decoding `AttrTokenTree`, expected 0..3, actual {0}",
n));
}
}
}
}
};Decodable)]
399pub enum AttrTokenTree {
400 Token(Token, Spacing),
401 Delimited(DelimSpan, DelimSpacing, Delimiter, AttrTokenStream),
402 AttrsTarget(AttrsTarget),
406}
407
408impl AttrTokenStream {
409 pub fn new(tokens: Vec<AttrTokenTree>) -> AttrTokenStream {
410 AttrTokenStream(Arc::new(tokens))
411 }
412
413 pub fn to_token_trees(&self) -> Vec<TokenTree> {
418 let mut res = Vec::with_capacity(self.0.len());
419 for tree in self.0.iter() {
420 match tree {
421 AttrTokenTree::Token(inner, spacing) => {
422 res.push(TokenTree::Token(inner.clone(), *spacing));
423 }
424 AttrTokenTree::Delimited(span, spacing, delim, stream) => {
425 res.push(TokenTree::Delimited(
426 *span,
427 *spacing,
428 *delim,
429 TokenStream::new(stream.to_token_trees()),
430 ))
431 }
432 AttrTokenTree::AttrsTarget(target) => {
433 attrs_and_tokens_to_token_trees(&target.attrs, &target.tokens, &mut res);
434 }
435 }
436 }
437 res
438 }
439}
440
441fn attrs_and_tokens_to_token_trees(
450 attrs: &[Attribute],
451 target_tokens: &LazyAttrTokenStream,
452 res: &mut Vec<TokenTree>,
453) {
454 let idx = attrs.partition_point(|attr| #[allow(non_exhaustive_omitted_patterns)] match attr.style {
crate::AttrStyle::Outer => true,
_ => false,
}matches!(attr.style, crate::AttrStyle::Outer));
455 let (outer_attrs, inner_attrs) = attrs.split_at(idx);
456
457 for attr in outer_attrs {
459 res.extend(attr.token_trees());
460 }
461
462 res.extend(target_tokens.to_attr_token_stream().to_token_trees());
464
465 if !inner_attrs.is_empty() {
467 let found = insert_inner_attrs(inner_attrs, res);
468 if !found {
{
::core::panicking::panic_fmt(format_args!("Failed to find trailing delimited group in: {0:?}",
res));
}
};assert!(found, "Failed to find trailing delimited group in: {res:?}");
469 }
470
471 fn insert_inner_attrs(inner_attrs: &[Attribute], tts: &mut Vec<TokenTree>) -> bool {
485 for tree in tts.iter_mut().rev() {
486 if let TokenTree::Delimited(span, spacing, Delimiter::Brace, stream) = tree {
487 let mut tts = ::alloc::vec::Vec::new()vec![];
489 for inner_attr in inner_attrs {
490 tts.extend(inner_attr.token_trees());
491 }
492 tts.extend(stream.0.iter().cloned());
493 let stream = TokenStream::new(tts);
494 *tree = TokenTree::Delimited(*span, *spacing, Delimiter::Brace, stream);
495 return true;
496 } else if let TokenTree::Delimited(span, spacing, Delimiter::Invisible(src), stream) =
497 tree
498 {
499 let mut vec: Vec<_> = stream.iter().cloned().collect();
501 if insert_inner_attrs(inner_attrs, &mut vec) {
502 *tree = TokenTree::Delimited(
503 *span,
504 *spacing,
505 Delimiter::Invisible(*src),
506 TokenStream::new(vec),
507 );
508 return true;
509 }
510 }
511 }
512 false
513 }
514}
515
516#[derive(#[automatically_derived]
impl ::core::clone::Clone for AttrsTarget {
#[inline]
fn clone(&self) -> AttrsTarget {
AttrsTarget {
attrs: ::core::clone::Clone::clone(&self.attrs),
tokens: ::core::clone::Clone::clone(&self.tokens),
}
}
}Clone, #[automatically_derived]
impl ::core::fmt::Debug for AttrsTarget {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
::core::fmt::Formatter::debug_struct_field2_finish(f, "AttrsTarget",
"attrs", &self.attrs, "tokens", &&self.tokens)
}
}Debug, const _: () =
{
impl<__E: ::rustc_span::SpanEncoder> ::rustc_serialize::Encodable<__E>
for AttrsTarget {
fn encode(&self, __encoder: &mut __E) {
match *self {
AttrsTarget {
attrs: ref __binding_0, tokens: ref __binding_1 } => {
::rustc_serialize::Encodable::<__E>::encode(__binding_0,
__encoder);
::rustc_serialize::Encodable::<__E>::encode(__binding_1,
__encoder);
}
}
}
}
};Encodable, const _: () =
{
impl<__D: ::rustc_span::SpanDecoder> ::rustc_serialize::Decodable<__D>
for AttrsTarget {
fn decode(__decoder: &mut __D) -> Self {
AttrsTarget {
attrs: ::rustc_serialize::Decodable::decode(__decoder),
tokens: ::rustc_serialize::Decodable::decode(__decoder),
}
}
}
};Decodable)]
529pub struct AttrsTarget {
530 pub attrs: AttrVec,
533 pub tokens: LazyAttrTokenStream,
536}
537
538#[derive(#[automatically_derived]
impl ::core::clone::Clone for Spacing {
#[inline]
fn clone(&self) -> Spacing { *self }
}Clone, #[automatically_derived]
impl ::core::marker::Copy for Spacing { }Copy, #[automatically_derived]
impl ::core::fmt::Debug for Spacing {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
::core::fmt::Formatter::write_str(f,
match self {
Spacing::Alone => "Alone",
Spacing::Joint => "Joint",
Spacing::JointHidden => "JointHidden",
})
}
}Debug, #[automatically_derived]
impl ::core::cmp::PartialEq for Spacing {
#[inline]
fn eq(&self, other: &Spacing) -> bool {
let __self_discr = ::core::intrinsics::discriminant_value(self);
let __arg1_discr = ::core::intrinsics::discriminant_value(other);
__self_discr == __arg1_discr
}
}PartialEq, #[automatically_derived]
impl ::core::cmp::Eq for Spacing {
#[inline]
#[doc(hidden)]
#[coverage(off)]
fn assert_receiver_is_total_eq(&self) -> () {}
}Eq, #[automatically_derived]
impl ::core::hash::Hash for Spacing {
#[inline]
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () {
let __self_discr = ::core::intrinsics::discriminant_value(self);
::core::hash::Hash::hash(&__self_discr, state)
}
}Hash, const _: () =
{
impl<__E: ::rustc_span::SpanEncoder> ::rustc_serialize::Encodable<__E>
for Spacing {
fn encode(&self, __encoder: &mut __E) {
let disc =
match *self {
Spacing::Alone => { 0usize }
Spacing::Joint => { 1usize }
Spacing::JointHidden => { 2usize }
};
::rustc_serialize::Encoder::emit_u8(__encoder, disc as u8);
match *self {
Spacing::Alone => {}
Spacing::Joint => {}
Spacing::JointHidden => {}
}
}
}
};Encodable, const _: () =
{
impl<__D: ::rustc_span::SpanDecoder> ::rustc_serialize::Decodable<__D>
for Spacing {
fn decode(__decoder: &mut __D) -> Self {
match ::rustc_serialize::Decoder::read_u8(__decoder) as usize
{
0usize => { Spacing::Alone }
1usize => { Spacing::Joint }
2usize => { Spacing::JointHidden }
n => {
::core::panicking::panic_fmt(format_args!("invalid enum variant tag while decoding `Spacing`, expected 0..3, actual {0}",
n));
}
}
}
}
};Decodable, const _: () =
{
impl<__CTX> ::rustc_data_structures::stable_hasher::HashStable<__CTX>
for Spacing where __CTX: crate::HashStableContext {
#[inline]
fn hash_stable(&self, __hcx: &mut __CTX,
__hasher:
&mut ::rustc_data_structures::stable_hasher::StableHasher) {
::std::mem::discriminant(self).hash_stable(__hcx, __hasher);
match *self {
Spacing::Alone => {}
Spacing::Joint => {}
Spacing::JointHidden => {}
}
}
}
};HashStable_Generic)]
543pub enum Spacing {
544 Alone,
560
561 Joint,
574
575 JointHidden,
596}
597
598#[derive(#[automatically_derived]
impl ::core::clone::Clone for TokenStream {
#[inline]
fn clone(&self) -> TokenStream {
TokenStream(::core::clone::Clone::clone(&self.0))
}
}Clone, #[automatically_derived]
impl ::core::fmt::Debug for TokenStream {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
::core::fmt::Formatter::debug_tuple_field1_finish(f, "TokenStream",
&&self.0)
}
}Debug, #[automatically_derived]
impl ::core::default::Default for TokenStream {
#[inline]
fn default() -> TokenStream {
TokenStream(::core::default::Default::default())
}
}Default, #[automatically_derived]
impl ::core::cmp::PartialEq for TokenStream {
#[inline]
fn eq(&self, other: &TokenStream) -> bool { self.0 == other.0 }
}PartialEq, #[automatically_derived]
impl ::core::cmp::Eq for TokenStream {
#[inline]
#[doc(hidden)]
#[coverage(off)]
fn assert_receiver_is_total_eq(&self) -> () {
let _: ::core::cmp::AssertParamIsEq<Arc<Vec<TokenTree>>>;
}
}Eq, #[automatically_derived]
impl ::core::hash::Hash for TokenStream {
#[inline]
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () {
::core::hash::Hash::hash(&self.0, state)
}
}Hash, const _: () =
{
impl<__E: ::rustc_span::SpanEncoder> ::rustc_serialize::Encodable<__E>
for TokenStream {
fn encode(&self, __encoder: &mut __E) {
match *self {
TokenStream(ref __binding_0) => {
::rustc_serialize::Encodable::<__E>::encode(__binding_0,
__encoder);
}
}
}
}
};Encodable, const _: () =
{
impl<__D: ::rustc_span::SpanDecoder> ::rustc_serialize::Decodable<__D>
for TokenStream {
fn decode(__decoder: &mut __D) -> Self {
TokenStream(::rustc_serialize::Decodable::decode(__decoder))
}
}
};Decodable)]
600pub struct TokenStream(pub(crate) Arc<Vec<TokenTree>>);
601
602impl TokenStream {
603 pub fn new(tts: Vec<TokenTree>) -> TokenStream {
604 TokenStream(Arc::new(tts))
605 }
606
607 pub fn is_empty(&self) -> bool {
608 self.0.is_empty()
609 }
610
611 pub fn len(&self) -> usize {
612 self.0.len()
613 }
614
615 pub fn get(&self, index: usize) -> Option<&TokenTree> {
616 self.0.get(index)
617 }
618
619 pub fn iter(&self) -> TokenStreamIter<'_> {
620 TokenStreamIter::new(self)
621 }
622
623 pub fn token_alone(kind: TokenKind, span: Span) -> TokenStream {
628 TokenStream::new(<[_]>::into_vec(::alloc::boxed::box_new([TokenTree::token_alone(kind, span)]))vec![TokenTree::token_alone(kind, span)])
629 }
630
631 pub fn from_ast(node: &(impl HasAttrs + HasTokens + fmt::Debug)) -> TokenStream {
632 let tokens = node.tokens().unwrap_or_else(|| {
::core::panicking::panic_fmt(format_args!("missing tokens for node: {0:?}",
node));
}panic!("missing tokens for node: {:?}", node));
633 let mut tts = ::alloc::vec::Vec::new()vec![];
634 attrs_and_tokens_to_token_trees(node.attrs(), tokens, &mut tts);
635 TokenStream::new(tts)
636 }
637
638 fn try_glue_to_last(vec: &mut Vec<TokenTree>, tt: &TokenTree) -> bool {
641 if let Some(TokenTree::Token(last_tok, Spacing::Joint | Spacing::JointHidden)) = vec.last()
642 && let TokenTree::Token(tok, spacing) = tt
643 && let Some(glued_tok) = last_tok.glue(tok)
644 {
645 *vec.last_mut().unwrap() = TokenTree::Token(glued_tok, *spacing);
648 true
649 } else {
650 false
651 }
652 }
653
654 pub fn push_tree(&mut self, tt: TokenTree) {
657 let vec_mut = Arc::make_mut(&mut self.0);
658
659 if Self::try_glue_to_last(vec_mut, &tt) {
660 } else {
662 vec_mut.push(tt);
663 }
664 }
665
666 pub fn push_stream(&mut self, stream: TokenStream) {
670 let vec_mut = Arc::make_mut(&mut self.0);
671
672 let stream_iter = stream.0.iter().cloned();
673
674 if let Some(first) = stream.0.first()
675 && Self::try_glue_to_last(vec_mut, first)
676 {
677 vec_mut.extend(stream_iter.skip(1));
679 } else {
680 vec_mut.extend(stream_iter);
682 }
683 }
684
685 pub fn chunks(&self, chunk_size: usize) -> core::slice::Chunks<'_, TokenTree> {
686 self.0.chunks(chunk_size)
687 }
688
689 pub fn desugar_doc_comments(&mut self) {
693 if let Some(desugared_stream) = desugar_inner(self.clone()) {
694 *self = desugared_stream;
695 }
696
697 fn desugar_inner(mut stream: TokenStream) -> Option<TokenStream> {
699 let mut i = 0;
700 let mut modified = false;
701 while let Some(tt) = stream.0.get(i) {
702 match tt {
703 &TokenTree::Token(
704 Token { kind: token::DocComment(_, attr_style, data), span },
705 _spacing,
706 ) => {
707 let desugared = desugared_tts(attr_style, data, span);
708 let desugared_len = desugared.len();
709 Arc::make_mut(&mut stream.0).splice(i..i + 1, desugared);
710 modified = true;
711 i += desugared_len;
712 }
713
714 &TokenTree::Token(..) => i += 1,
715
716 &TokenTree::Delimited(sp, spacing, delim, ref delim_stream) => {
717 if let Some(desugared_delim_stream) = desugar_inner(delim_stream.clone()) {
718 let new_tt =
719 TokenTree::Delimited(sp, spacing, delim, desugared_delim_stream);
720 Arc::make_mut(&mut stream.0)[i] = new_tt;
721 modified = true;
722 }
723 i += 1;
724 }
725 }
726 }
727 if modified { Some(stream) } else { None }
728 }
729
730 fn desugared_tts(attr_style: AttrStyle, data: Symbol, span: Span) -> Vec<TokenTree> {
731 let mut num_of_hashes = 0;
737 let mut count = 0;
738 for ch in data.as_str().chars() {
739 count = match ch {
740 '"' => 1,
741 '#' if count > 0 => count + 1,
742 _ => 0,
743 };
744 num_of_hashes = cmp::max(num_of_hashes, count);
745 }
746
747 let delim_span = DelimSpan::from_single(span);
749 let body = TokenTree::Delimited(
750 delim_span,
751 DelimSpacing::new(Spacing::JointHidden, Spacing::Alone),
752 Delimiter::Bracket,
753 [
754 TokenTree::token_alone(token::Ident(sym::doc, token::IdentIsRaw::No), span),
755 TokenTree::token_alone(token::Eq, span),
756 TokenTree::token_alone(
757 TokenKind::lit(token::StrRaw(num_of_hashes), data, None),
758 span,
759 ),
760 ]
761 .into_iter()
762 .collect::<TokenStream>(),
763 );
764
765 if attr_style == AttrStyle::Inner {
766 <[_]>::into_vec(::alloc::boxed::box_new([TokenTree::token_joint(token::Pound,
span), TokenTree::token_joint_hidden(token::Bang, span),
body]))vec![
767 TokenTree::token_joint(token::Pound, span),
768 TokenTree::token_joint_hidden(token::Bang, span),
769 body,
770 ]
771 } else {
772 <[_]>::into_vec(::alloc::boxed::box_new([TokenTree::token_joint_hidden(token::Pound,
span), body]))vec![TokenTree::token_joint_hidden(token::Pound, span), body]
773 }
774 }
775 }
776
777 pub fn add_comma(&self) -> Option<(TokenStream, Span)> {
780 let mut suggestion = None;
782 let mut iter = self.0.iter().enumerate().peekable();
783 while let Some((pos, ts)) = iter.next() {
784 if let Some((_, next)) = iter.peek() {
785 let sp = match (&ts, &next) {
786 (_, TokenTree::Token(Token { kind: token::Comma, .. }, _)) => continue,
787 (
788 TokenTree::Token(token_left, Spacing::Alone),
789 TokenTree::Token(token_right, _),
790 ) if (token_left.is_non_reserved_ident() || token_left.is_lit())
791 && (token_right.is_non_reserved_ident() || token_right.is_lit()) =>
792 {
793 token_left.span
794 }
795 (TokenTree::Delimited(sp, ..), _) => sp.entire(),
796 _ => continue,
797 };
798 let sp = sp.shrink_to_hi();
799 let comma = TokenTree::token_alone(token::Comma, sp);
800 suggestion = Some((pos, comma, sp));
801 }
802 }
803 if let Some((pos, comma, sp)) = suggestion {
804 let mut new_stream = Vec::with_capacity(self.0.len() + 1);
805 let parts = self.0.split_at(pos + 1);
806 new_stream.extend_from_slice(parts.0);
807 new_stream.push(comma);
808 new_stream.extend_from_slice(parts.1);
809 return Some((TokenStream::new(new_stream), sp));
810 }
811 None
812 }
813}
814
815impl FromIterator<TokenTree> for TokenStream {
816 fn from_iter<I: IntoIterator<Item = TokenTree>>(iter: I) -> Self {
817 TokenStream::new(iter.into_iter().collect::<Vec<TokenTree>>())
818 }
819}
820
821impl<CTX> HashStable<CTX> for TokenStream
822where
823 CTX: crate::HashStableContext,
824{
825 fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) {
826 for sub_tt in self.iter() {
827 sub_tt.hash_stable(hcx, hasher);
828 }
829 }
830}
831
832#[derive(#[automatically_derived]
impl<'t> ::core::clone::Clone for TokenStreamIter<'t> {
#[inline]
fn clone(&self) -> TokenStreamIter<'t> {
TokenStreamIter {
stream: ::core::clone::Clone::clone(&self.stream),
index: ::core::clone::Clone::clone(&self.index),
}
}
}Clone)]
833pub struct TokenStreamIter<'t> {
834 stream: &'t TokenStream,
835 index: usize,
836}
837
838impl<'t> TokenStreamIter<'t> {
839 fn new(stream: &'t TokenStream) -> Self {
840 TokenStreamIter { stream, index: 0 }
841 }
842
843 pub fn peek(&self) -> Option<&'t TokenTree> {
847 self.stream.0.get(self.index)
848 }
849}
850
851impl<'t> Iterator for TokenStreamIter<'t> {
852 type Item = &'t TokenTree;
853
854 fn next(&mut self) -> Option<&'t TokenTree> {
855 self.stream.0.get(self.index).map(|tree| {
856 self.index += 1;
857 tree
858 })
859 }
860}
861
862#[derive(#[automatically_derived]
impl ::core::clone::Clone for TokenTreeCursor {
#[inline]
fn clone(&self) -> TokenTreeCursor {
TokenTreeCursor {
stream: ::core::clone::Clone::clone(&self.stream),
index: ::core::clone::Clone::clone(&self.index),
}
}
}Clone, #[automatically_derived]
impl ::core::fmt::Debug for TokenTreeCursor {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
::core::fmt::Formatter::debug_struct_field2_finish(f,
"TokenTreeCursor", "stream", &self.stream, "index", &&self.index)
}
}Debug)]
863pub struct TokenTreeCursor {
864 stream: TokenStream,
865 index: usize,
869}
870
871impl TokenTreeCursor {
872 #[inline]
873 pub fn new(stream: TokenStream) -> Self {
874 TokenTreeCursor { stream, index: 0 }
875 }
876
877 #[inline]
878 pub fn curr(&self) -> Option<&TokenTree> {
879 self.stream.get(self.index)
880 }
881
882 pub fn look_ahead(&self, n: usize) -> Option<&TokenTree> {
883 self.stream.get(self.index + n)
884 }
885
886 #[inline]
887 pub fn bump(&mut self) {
888 self.index += 1;
889 }
890
891 #[inline]
893 pub fn bump_to_end(&mut self) {
894 self.index = self.stream.len();
895 }
896}
897
898#[derive(#[automatically_derived]
impl ::core::clone::Clone for TokenCursor {
#[inline]
fn clone(&self) -> TokenCursor {
TokenCursor {
curr: ::core::clone::Clone::clone(&self.curr),
stack: ::core::clone::Clone::clone(&self.stack),
}
}
}Clone, #[automatically_derived]
impl ::core::fmt::Debug for TokenCursor {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
::core::fmt::Formatter::debug_struct_field2_finish(f, "TokenCursor",
"curr", &self.curr, "stack", &&self.stack)
}
}Debug)]
903pub struct TokenCursor {
904 pub curr: TokenTreeCursor,
910
911 pub stack: Vec<TokenTreeCursor>,
914}
915
916impl TokenCursor {
917 pub fn next(&mut self) -> (Token, Spacing) {
918 self.inlined_next()
919 }
920
921 #[inline(always)]
923 pub fn inlined_next(&mut self) -> (Token, Spacing) {
924 loop {
925 if let Some(tree) = self.curr.curr() {
929 match tree {
930 &TokenTree::Token(token, spacing) => {
931 if true {
if !!token.kind.is_delim() {
::core::panicking::panic("assertion failed: !token.kind.is_delim()")
};
};debug_assert!(!token.kind.is_delim());
932 let res = (token, spacing);
933 self.curr.bump();
934 return res;
935 }
936 &TokenTree::Delimited(sp, spacing, delim, ref tts) => {
937 let trees = TokenTreeCursor::new(tts.clone());
938 self.stack.push(mem::replace(&mut self.curr, trees));
939 if !delim.skip() {
940 return (Token::new(delim.as_open_token_kind(), sp.open), spacing.open);
941 }
942 }
944 };
945 } else if let Some(parent) = self.stack.pop() {
946 let Some(&TokenTree::Delimited(span, spacing, delim, _)) = parent.curr() else {
948 { ::core::panicking::panic_fmt(format_args!("parent should be Delimited")); }panic!("parent should be Delimited")
949 };
950 self.curr = parent;
951 self.curr.bump(); if !delim.skip() {
953 return (Token::new(delim.as_close_token_kind(), span.close), spacing.close);
954 }
955 } else {
957 return (Token::new(token::Eof, DUMMY_SP), Spacing::Alone);
961 }
962 }
963 }
964}
965
966#[derive(#[automatically_derived]
impl ::core::fmt::Debug for DelimSpan {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
::core::fmt::Formatter::debug_struct_field2_finish(f, "DelimSpan",
"open", &self.open, "close", &&self.close)
}
}Debug, #[automatically_derived]
impl ::core::marker::Copy for DelimSpan { }Copy, #[automatically_derived]
impl ::core::clone::Clone for DelimSpan {
#[inline]
fn clone(&self) -> DelimSpan {
let _: ::core::clone::AssertParamIsClone<Span>;
*self
}
}Clone, #[automatically_derived]
impl ::core::cmp::PartialEq for DelimSpan {
#[inline]
fn eq(&self, other: &DelimSpan) -> bool {
self.open == other.open && self.close == other.close
}
}PartialEq, #[automatically_derived]
impl ::core::cmp::Eq for DelimSpan {
#[inline]
#[doc(hidden)]
#[coverage(off)]
fn assert_receiver_is_total_eq(&self) -> () {
let _: ::core::cmp::AssertParamIsEq<Span>;
}
}Eq, #[automatically_derived]
impl ::core::hash::Hash for DelimSpan {
#[inline]
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () {
::core::hash::Hash::hash(&self.open, state);
::core::hash::Hash::hash(&self.close, state)
}
}Hash)]
967#[derive(const _: () =
{
impl<__E: ::rustc_span::SpanEncoder> ::rustc_serialize::Encodable<__E>
for DelimSpan {
fn encode(&self, __encoder: &mut __E) {
match *self {
DelimSpan { open: ref __binding_0, close: ref __binding_1 }
=> {
::rustc_serialize::Encodable::<__E>::encode(__binding_0,
__encoder);
::rustc_serialize::Encodable::<__E>::encode(__binding_1,
__encoder);
}
}
}
}
};Encodable, const _: () =
{
impl<__D: ::rustc_span::SpanDecoder> ::rustc_serialize::Decodable<__D>
for DelimSpan {
fn decode(__decoder: &mut __D) -> Self {
DelimSpan {
open: ::rustc_serialize::Decodable::decode(__decoder),
close: ::rustc_serialize::Decodable::decode(__decoder),
}
}
}
};Decodable, const _: () =
{
impl<__CTX> ::rustc_data_structures::stable_hasher::HashStable<__CTX>
for DelimSpan where __CTX: crate::HashStableContext {
#[inline]
fn hash_stable(&self, __hcx: &mut __CTX,
__hasher:
&mut ::rustc_data_structures::stable_hasher::StableHasher) {
match *self {
DelimSpan { open: ref __binding_0, close: ref __binding_1 }
=> {
{ __binding_0.hash_stable(__hcx, __hasher); }
{ __binding_1.hash_stable(__hcx, __hasher); }
}
}
}
}
};HashStable_Generic, const _: () =
{
impl<'__ast, __V> crate::visit::Walkable<'__ast, __V> for DelimSpan
where __V: crate::visit::Visitor<'__ast> {
fn walk_ref(&'__ast self, __visitor: &mut __V) -> __V::Result {
match *self {
DelimSpan { open: ref __binding_0, close: ref __binding_1 }
=> {
{
match ::rustc_ast_ir::visit::VisitorResult::branch(crate::visit::Visitable::visit(__binding_0,
__visitor, ())) {
core::ops::ControlFlow::Continue(()) =>
(),
#[allow(unreachable_code)]
core::ops::ControlFlow::Break(r) => {
return ::rustc_ast_ir::visit::VisitorResult::from_residual(r);
}
}
}
{
match ::rustc_ast_ir::visit::VisitorResult::branch(crate::visit::Visitable::visit(__binding_1,
__visitor, ())) {
core::ops::ControlFlow::Continue(()) =>
(),
#[allow(unreachable_code)]
core::ops::ControlFlow::Break(r) => {
return ::rustc_ast_ir::visit::VisitorResult::from_residual(r);
}
}
}
}
}
<__V::Result as rustc_ast_ir::visit::VisitorResult>::output()
}
}
impl<__V> crate::mut_visit::MutWalkable<__V> for DelimSpan where
__V: crate::mut_visit::MutVisitor {
fn walk_mut(&mut self, __visitor: &mut __V) {
match *self {
DelimSpan {
open: ref mut __binding_0, close: ref mut __binding_1 } => {
{
crate::mut_visit::MutVisitable::visit_mut(__binding_0,
__visitor, ())
}
{
crate::mut_visit::MutVisitable::visit_mut(__binding_1,
__visitor, ())
}
}
}
}
}
};Walkable)]
968pub struct DelimSpan {
969 pub open: Span,
970 pub close: Span,
971}
972
973impl DelimSpan {
974 pub fn from_single(sp: Span) -> Self {
975 DelimSpan { open: sp, close: sp }
976 }
977
978 pub fn from_pair(open: Span, close: Span) -> Self {
979 DelimSpan { open, close }
980 }
981
982 pub fn dummy() -> Self {
983 Self::from_single(DUMMY_SP)
984 }
985
986 pub fn entire(self) -> Span {
987 self.open.with_hi(self.close.hi())
988 }
989}
990
991#[derive(#[automatically_derived]
impl ::core::marker::Copy for DelimSpacing { }Copy, #[automatically_derived]
impl ::core::clone::Clone for DelimSpacing {
#[inline]
fn clone(&self) -> DelimSpacing {
let _: ::core::clone::AssertParamIsClone<Spacing>;
*self
}
}Clone, #[automatically_derived]
impl ::core::fmt::Debug for DelimSpacing {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
::core::fmt::Formatter::debug_struct_field2_finish(f, "DelimSpacing",
"open", &self.open, "close", &&self.close)
}
}Debug, #[automatically_derived]
impl ::core::cmp::PartialEq for DelimSpacing {
#[inline]
fn eq(&self, other: &DelimSpacing) -> bool {
self.open == other.open && self.close == other.close
}
}PartialEq, #[automatically_derived]
impl ::core::cmp::Eq for DelimSpacing {
#[inline]
#[doc(hidden)]
#[coverage(off)]
fn assert_receiver_is_total_eq(&self) -> () {
let _: ::core::cmp::AssertParamIsEq<Spacing>;
}
}Eq, #[automatically_derived]
impl ::core::hash::Hash for DelimSpacing {
#[inline]
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () {
::core::hash::Hash::hash(&self.open, state);
::core::hash::Hash::hash(&self.close, state)
}
}Hash, const _: () =
{
impl<__E: ::rustc_span::SpanEncoder> ::rustc_serialize::Encodable<__E>
for DelimSpacing {
fn encode(&self, __encoder: &mut __E) {
match *self {
DelimSpacing { open: ref __binding_0, close: ref __binding_1
} => {
::rustc_serialize::Encodable::<__E>::encode(__binding_0,
__encoder);
::rustc_serialize::Encodable::<__E>::encode(__binding_1,
__encoder);
}
}
}
}
};Encodable, const _: () =
{
impl<__D: ::rustc_span::SpanDecoder> ::rustc_serialize::Decodable<__D>
for DelimSpacing {
fn decode(__decoder: &mut __D) -> Self {
DelimSpacing {
open: ::rustc_serialize::Decodable::decode(__decoder),
close: ::rustc_serialize::Decodable::decode(__decoder),
}
}
}
};Decodable, const _: () =
{
impl<__CTX> ::rustc_data_structures::stable_hasher::HashStable<__CTX>
for DelimSpacing where __CTX: crate::HashStableContext {
#[inline]
fn hash_stable(&self, __hcx: &mut __CTX,
__hasher:
&mut ::rustc_data_structures::stable_hasher::StableHasher) {
match *self {
DelimSpacing { open: ref __binding_0, close: ref __binding_1
} => {
{ __binding_0.hash_stable(__hcx, __hasher); }
{ __binding_1.hash_stable(__hcx, __hasher); }
}
}
}
}
};HashStable_Generic)]
992pub struct DelimSpacing {
993 pub open: Spacing,
994 pub close: Spacing,
995}
996
997impl DelimSpacing {
998 pub fn new(open: Spacing, close: Spacing) -> DelimSpacing {
999 DelimSpacing { open, close }
1000 }
1001}
1002
1003#[cfg(target_pointer_width = "64")]
1005mod size_asserts {
1006 use rustc_data_structures::static_assert_size;
1007
1008 use super::*;
1009 const _: [(); 8] = [(); ::std::mem::size_of::<AttrTokenStream>()];static_assert_size!(AttrTokenStream, 8);
1011 const _: [(); 32] = [(); ::std::mem::size_of::<AttrTokenTree>()];static_assert_size!(AttrTokenTree, 32);
1012 const _: [(); 8] = [(); ::std::mem::size_of::<LazyAttrTokenStream>()];static_assert_size!(LazyAttrTokenStream, 8);
1013 const _: [(); 88] = [(); ::std::mem::size_of::<LazyAttrTokenStreamInner>()];static_assert_size!(LazyAttrTokenStreamInner, 88);
1014 const _: [(); 8] = [(); ::std::mem::size_of::<Option<LazyAttrTokenStream>>()];static_assert_size!(Option<LazyAttrTokenStream>, 8); const _: [(); 8] = [(); ::std::mem::size_of::<TokenStream>()];static_assert_size!(TokenStream, 8);
1016 const _: [(); 32] = [(); ::std::mem::size_of::<TokenTree>()];static_assert_size!(TokenTree, 32);
1017 }