use std::borrow::Cow;
use std::hash::Hash;
use std::ops::Range;
use std::sync::Arc;
use std::{cmp, fmt, iter, mem};

use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::sync;
use rustc_macros::{Decodable, Encodable, HashStable_Generic, Walkable};
use rustc_serialize::{Decodable, Encodable};
use rustc_span::{DUMMY_SP, Span, SpanDecoder, SpanEncoder, Symbol, sym};
use thin_vec::ThinVec;

use crate::ast::AttrStyle;
use crate::ast_traits::{HasAttrs, HasTokens};
use crate::token::{self, Delimiter, Token, TokenKind};
use crate::{AttrVec, Attribute};

#[derive(Debug, Clone, PartialEq, Eq, Hash, Encodable, Decodable, HashStable_Generic)]
pub enum TokenTree {
    /// A single token. Delimiter tokens never appear here; delimited groups
    /// are represented by the `Delimited` variant instead.
    Token(Token, Spacing),
    /// A delimited sequence of token trees.
    Delimited(DelimSpan, DelimSpacing, Delimiter, TokenStream),
}

// Compile-time check that all fields of `TokenTree` are `DynSend` and `DynSync`.
fn _dummy()
where
    Token: sync::DynSend + sync::DynSync,
    Spacing: sync::DynSend + sync::DynSync,
    DelimSpan: sync::DynSend + sync::DynSync,
    Delimiter: sync::DynSend + sync::DynSync,
    TokenStream: sync::DynSend + sync::DynSync,
{
}

impl TokenTree {
    /// Checks if this `TokenTree` is equal to the other, regardless of span
    /// and spacing information.
    pub fn eq_unspanned(&self, other: &TokenTree) -> bool {
        match (self, other) {
            (TokenTree::Token(token, _), TokenTree::Token(token2, _)) => token.kind == token2.kind,
            (TokenTree::Delimited(.., delim, tts), TokenTree::Delimited(.., delim2, tts2)) => {
                delim == delim2 && tts.iter().eq_by(tts2.iter(), |a, b| a.eq_unspanned(b))
            }
            _ => false,
        }
    }

    /// Retrieves the `TokenTree`'s span.
    pub fn span(&self) -> Span {
        match self {
            TokenTree::Token(token, _) => token.span,
            TokenTree::Delimited(sp, ..) => sp.entire(),
        }
    }

    /// Create a `TokenTree::Token` with `Spacing::Alone`.
    pub fn token_alone(kind: TokenKind, span: Span) -> TokenTree {
        TokenTree::Token(Token::new(kind, span), Spacing::Alone)
    }

    /// Create a `TokenTree::Token` with `Spacing::Joint`.
    pub fn token_joint(kind: TokenKind, span: Span) -> TokenTree {
        TokenTree::Token(Token::new(kind, span), Spacing::Joint)
    }

    /// Create a `TokenTree::Token` with `Spacing::JointHidden`.
    pub fn token_joint_hidden(kind: TokenKind, span: Span) -> TokenTree {
        TokenTree::Token(Token::new(kind, span), Spacing::JointHidden)
    }

    pub fn uninterpolate(&self) -> Cow<'_, TokenTree> {
        match self {
            TokenTree::Token(token, spacing) => match token.uninterpolate() {
                Cow::Owned(token) => Cow::Owned(TokenTree::Token(token, *spacing)),
                Cow::Borrowed(_) => Cow::Borrowed(self),
            },
            _ => Cow::Borrowed(self),
        }
    }
}

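/// A lazy version of [`AttrTokenStream`]. The underlying stream is only built
/// on demand by `to_attr_token_stream`: either it is already available
/// (`LazyAttrTokenStreamInner::Direct`), or it is reconstructed by replaying a
/// captured parser snapshot (`LazyAttrTokenStreamInner::Pending`).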
#[derive(Clone)]
pub struct LazyAttrTokenStream(Arc<LazyAttrTokenStreamInner>);

impl LazyAttrTokenStream {
    pub fn new_direct(stream: AttrTokenStream) -> LazyAttrTokenStream {
        LazyAttrTokenStream(Arc::new(LazyAttrTokenStreamInner::Direct(stream)))
    }

    pub fn new_pending(
        start_token: (Token, Spacing),
        cursor_snapshot: TokenCursor,
        num_calls: u32,
        break_last_token: u32,
        node_replacements: ThinVec<NodeReplacement>,
    ) -> LazyAttrTokenStream {
        LazyAttrTokenStream(Arc::new(LazyAttrTokenStreamInner::Pending {
            start_token,
            cursor_snapshot,
            num_calls,
            break_last_token,
            node_replacements,
        }))
    }

    pub fn to_attr_token_stream(&self) -> AttrTokenStream {
        self.0.to_attr_token_stream()
    }
}

impl fmt::Debug for LazyAttrTokenStream {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "LazyAttrTokenStream({:?})", self.to_attr_token_stream())
    }
}

impl<S: SpanEncoder> Encodable<S> for LazyAttrTokenStream {
    fn encode(&self, _s: &mut S) {
        panic!("Attempted to encode LazyAttrTokenStream");
    }
}

impl<D: SpanDecoder> Decodable<D> for LazyAttrTokenStream {
    fn decode(_d: &mut D) -> Self {
        panic!("Attempted to decode LazyAttrTokenStream");
    }
}

impl<CTX> HashStable<CTX> for LazyAttrTokenStream {
    fn hash_stable(&self, _hcx: &mut CTX, _hasher: &mut StableHasher) {
        panic!("Attempted to compute stable hash for LazyAttrTokenStream");
    }
}

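/// A range of *parser* token positions, i.e. positions counted from the start
/// of the whole parse. Compare with `NodeRange` below, which counts from the
/// first token of the AST node currently being captured; `NodeRange::new`
/// converts the former into the latter by subtracting the node's start
/// position.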
#[derive(Clone, Debug)]
pub struct ParserRange(pub Range<u32>);

#[derive(Clone, Debug)]
pub struct NodeRange(pub Range<u32>);

pub type ParserReplacement = (ParserRange, Option<AttrsTarget>);

pub type NodeReplacement = (NodeRange, Option<AttrsTarget>);

impl NodeRange {
    pub fn new(ParserRange(parser_range): ParserRange, start_pos: u32) -> NodeRange {
        assert!(!parser_range.is_empty());
        assert!(parser_range.start >= start_pos);
        NodeRange((parser_range.start - start_pos)..(parser_range.end - start_pos))
    }
}

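/// How a `LazyAttrTokenStream` produces its `AttrTokenStream`: either the
/// stream was already built (`Direct`), or a parser snapshot was captured and
/// `to_attr_token_stream` replays `num_calls` tokens from `cursor_snapshot`
/// (starting with `start_token`), applying `node_replacements` and possibly
/// breaking the last token (`break_last_token`).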
enum LazyAttrTokenStreamInner {
    Direct(AttrTokenStream),

    Pending {
        start_token: (Token, Spacing),
        cursor_snapshot: TokenCursor,
        num_calls: u32,
        break_last_token: u32,
        node_replacements: ThinVec<NodeReplacement>,
    },
}

impl LazyAttrTokenStreamInner {
    fn to_attr_token_stream(&self) -> AttrTokenStream {
        match self {
            LazyAttrTokenStreamInner::Direct(stream) => stream.clone(),
            LazyAttrTokenStreamInner::Pending {
                start_token,
                cursor_snapshot,
                num_calls,
                break_last_token,
                node_replacements,
            } => {
                let mut cursor_snapshot = cursor_snapshot.clone();
                let tokens = iter::once(FlatToken::Token(*start_token))
                    .chain(iter::repeat_with(|| FlatToken::Token(cursor_snapshot.next())))
                    .take(*num_calls as usize);

                if node_replacements.is_empty() {
                    make_attr_token_stream(tokens, *break_last_token)
                } else {
                    let mut tokens: Vec<_> = tokens.collect();
                    let mut node_replacements = node_replacements.to_vec();
                    node_replacements.sort_by_key(|(range, _)| range.0.start);

                    #[cfg(debug_assertions)]
                    for [(node_range, tokens), (next_node_range, next_tokens)] in
                        node_replacements.array_windows()
                    {
                        assert!(
                            node_range.0.end <= next_node_range.0.start
                                || node_range.0.end >= next_node_range.0.end,
                            "Node ranges should be disjoint or nested: ({:?}, {:?}) ({:?}, {:?})",
                            node_range,
                            tokens,
                            next_node_range,
                            next_tokens,
                        );
                    }

                    for (node_range, target) in node_replacements.into_iter().rev() {
                        assert!(
                            !node_range.0.is_empty(),
                            "Cannot replace an empty node range: {:?}",
                            node_range.0
                        );

                        let target_len = target.is_some() as usize;
                        tokens.splice(
                            (node_range.0.start as usize)..(node_range.0.end as usize),
                            target.into_iter().map(|target| FlatToken::AttrsTarget(target)).chain(
                                iter::repeat(FlatToken::Empty)
                                    .take(node_range.0.len() - target_len),
                            ),
                        );
                    }
                    make_attr_token_stream(tokens.into_iter(), *break_last_token)
                }
            }
        }
    }
}

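/// A helper for building an `AttrTokenStream` from a flat sequence of tokens:
/// either an ordinary token (including open/close delimiter tokens, which
/// `make_attr_token_stream` re-nests), a captured `AttrsTarget`, or an `Empty`
/// placeholder left where tokens were removed by a replacement.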
#[derive(Debug, Clone)]
enum FlatToken {
    Token((Token, Spacing)),
    AttrsTarget(AttrsTarget),
    Empty,
}

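/// A token stream that still contains `AttrsTarget` nodes for captured
/// attribute targets. `to_token_trees` flattens it back into an ordinary
/// sequence of `TokenTree`s.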
#[derive(Clone, Debug, Default, Encodable, Decodable)]
pub struct AttrTokenStream(pub Arc<Vec<AttrTokenTree>>);

/// Converts a flat iterator of tokens (including open and close delimiter
/// tokens) into an `AttrTokenStream`, creating an `AttrTokenTree::Delimited`
/// for each matching pair of open and close delims.
fn make_attr_token_stream(
    iter: impl Iterator<Item = FlatToken>,
    break_last_token: u32,
) -> AttrTokenStream {
    #[derive(Debug)]
    struct FrameData {
        open_delim_sp: Option<(Delimiter, Span, Spacing)>,
        inner: Vec<AttrTokenTree>,
    }
    let mut stack_top = FrameData { open_delim_sp: None, inner: vec![] };
    let mut stack_rest = vec![];
    for flat_token in iter {
        match flat_token {
            FlatToken::Token((token @ Token { kind, span }, spacing)) => {
                if let Some(delim) = kind.open_delim() {
                    stack_rest.push(mem::replace(
                        &mut stack_top,
                        FrameData { open_delim_sp: Some((delim, span, spacing)), inner: vec![] },
                    ));
                } else if let Some(delim) = kind.close_delim() {
                    let Some(frame) = stack_rest.pop() else {
                        return AttrTokenStream::new(stack_top.inner);
                    };
                    let frame_data = mem::replace(&mut stack_top, frame);
                    let (open_delim, open_sp, open_spacing) = frame_data.open_delim_sp.unwrap();
                    assert!(
                        open_delim.eq_ignoring_invisible_origin(&delim),
                        "Mismatched open/close delims: open={open_delim:?} close={span:?}"
                    );
                    let dspan = DelimSpan::from_pair(open_sp, span);
                    let dspacing = DelimSpacing::new(open_spacing, spacing);
                    let stream = AttrTokenStream::new(frame_data.inner);
                    let delimited = AttrTokenTree::Delimited(dspan, dspacing, delim, stream);
                    stack_top.inner.push(delimited);
                } else {
                    stack_top.inner.push(AttrTokenTree::Token(token, spacing))
                }
            }
            FlatToken::AttrsTarget(target) => {
                stack_top.inner.push(AttrTokenTree::AttrsTarget(target))
            }
            FlatToken::Empty => {}
        }
    }

    if break_last_token > 0 {
        let last_token = stack_top.inner.pop().unwrap();
        if let AttrTokenTree::Token(last_token, spacing) = last_token {
            let (unglued, _) = last_token.kind.break_two_token_op(break_last_token).unwrap();

            let mut first_span = last_token.span.shrink_to_lo();
            first_span =
                first_span.with_hi(first_span.lo() + rustc_span::BytePos(break_last_token));

            stack_top.inner.push(AttrTokenTree::Token(Token::new(unglued, first_span), spacing));
        } else {
            panic!("Unexpected last token {last_token:?}")
        }
    }
    AttrTokenStream::new(stack_top.inner)
}

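/// Like `TokenTree`, but with an extra `AttrsTarget` variant standing in for
/// an attribute target whose attributes and tokens were captured separately.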
#[derive(Clone, Debug, Encodable, Decodable)]
pub enum AttrTokenTree {
    Token(Token, Spacing),
    Delimited(DelimSpan, DelimSpacing, Delimiter, AttrTokenStream),
    AttrsTarget(AttrsTarget),
}

impl AttrTokenStream {
    pub fn new(tokens: Vec<AttrTokenTree>) -> AttrTokenStream {
        AttrTokenStream(Arc::new(tokens))
    }

    /// Converts this `AttrTokenStream` back into plain `TokenTree`s, expanding
    /// every `AttrsTarget` into its attributes and target tokens.
    pub fn to_token_trees(&self) -> Vec<TokenTree> {
        let mut res = Vec::with_capacity(self.0.len());
        for tree in self.0.iter() {
            match tree {
                AttrTokenTree::Token(inner, spacing) => {
                    res.push(TokenTree::Token(inner.clone(), *spacing));
                }
                AttrTokenTree::Delimited(span, spacing, delim, stream) => {
                    res.push(TokenTree::Delimited(
                        *span,
                        *spacing,
                        *delim,
                        TokenStream::new(stream.to_token_trees()),
                    ))
                }
                AttrTokenTree::AttrsTarget(target) => {
                    attrs_and_tokens_to_token_trees(&target.attrs, &target.tokens, &mut res);
                }
            }
        }
        res
    }
}

fn attrs_and_tokens_to_token_trees(
    attrs: &[Attribute],
    target_tokens: &LazyAttrTokenStream,
    res: &mut Vec<TokenTree>,
) {
    let idx = attrs.partition_point(|attr| matches!(attr.style, crate::AttrStyle::Outer));
    let (outer_attrs, inner_attrs) = attrs.split_at(idx);

    // Add outer attribute tokens.
    for attr in outer_attrs {
        res.extend(attr.token_trees());
    }

    // Add the target's own tokens.
    res.extend(target_tokens.to_attr_token_stream().to_token_trees());

    // Insert inner attribute tokens, if any.
    if !inner_attrs.is_empty() {
        let found = insert_inner_attrs(inner_attrs, res);
        assert!(found, "Failed to find trailing delimited group in: {res:?}");
    }

    // Inner attributes go at the start of the target's rightmost braced group,
    // e.g. `fn foo() { #![my_attr] }`. Invisible delimiters may wrap that group
    // and are searched through as well.
    fn insert_inner_attrs(inner_attrs: &[Attribute], tts: &mut Vec<TokenTree>) -> bool {
        for tree in tts.iter_mut().rev() {
            if let TokenTree::Delimited(span, spacing, Delimiter::Brace, stream) = tree {
                // Found it: prepend the inner attribute tokens to the braced stream.
                let mut tts = vec![];
                for inner_attr in inner_attrs {
                    tts.extend(inner_attr.token_trees());
                }
                tts.extend(stream.0.iter().cloned());
                let stream = TokenStream::new(tts);
                *tree = TokenTree::Delimited(*span, *spacing, Delimiter::Brace, stream);
                return true;
            } else if let TokenTree::Delimited(span, spacing, Delimiter::Invisible(src), stream) =
                tree
            {
                // Recurse inside invisible delimiters.
                let mut vec: Vec<_> = stream.iter().cloned().collect();
                if insert_inner_attrs(inner_attrs, &mut vec) {
                    *tree = TokenTree::Delimited(
                        *span,
                        *spacing,
                        Delimiter::Invisible(*src),
                        TokenStream::new(vec),
                    );
                    return true;
                }
            }
        }
        false
    }
}

#[derive(Clone, Debug, Encodable, Decodable)]
pub struct AttrsTarget {
    /// Attributes on the target, with outer attributes preceding inner ones.
    pub attrs: AttrVec,
    /// The underlying tokens of the target that `attrs` are applied to.
    pub tokens: LazyAttrTokenStream,
}

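/// Indicates the spacing between a token and the token that follows it. This
/// affects pretty-printing and the spacing observed by proc macros, not
/// parsing. Roughly: `Alone` means the next token is separated by whitespace;
/// `Joint` means the next token follows immediately and the two could be
/// glued into a compound punctuation token (e.g. the first `=` in `==`);
/// `JointHidden` means the next token follows immediately but no such gluing
/// applies (e.g. the `#` directly before `[` in an attribute, as produced by
/// `token_joint_hidden` above).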
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Encodable, Decodable, HashStable_Generic)]
pub enum Spacing {
    Alone,
    Joint,
    JointHidden,
}

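/// An immutable, reference-counted sequence of `TokenTree`s. Cloning is cheap
/// because the underlying `Vec` is shared behind an `Arc` and only copied on
/// mutation (see the `Arc::make_mut` calls in `push_tree` and `push_stream`).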
#[derive(Clone, Debug, Default, PartialEq, Eq, Hash, Encodable, Decodable)]
pub struct TokenStream(pub(crate) Arc<Vec<TokenTree>>);

impl TokenStream {
    pub fn new(tts: Vec<TokenTree>) -> TokenStream {
        TokenStream(Arc::new(tts))
    }

    pub fn is_empty(&self) -> bool {
        self.0.is_empty()
    }

    pub fn len(&self) -> usize {
        self.0.len()
    }

    pub fn get(&self, index: usize) -> Option<&TokenTree> {
        self.0.get(index)
    }

    pub fn iter(&self) -> TokenStreamIter<'_> {
        TokenStreamIter::new(self)
    }

    pub fn token_alone(kind: TokenKind, span: Span) -> TokenStream {
        TokenStream::new(vec![TokenTree::token_alone(kind, span)])
    }

    pub fn from_ast(node: &(impl HasAttrs + HasTokens + fmt::Debug)) -> TokenStream {
        let tokens =
            node.tokens().unwrap_or_else(|| panic!("missing tokens for node: {:?}", node));
        let mut tts = vec![];
        attrs_and_tokens_to_token_trees(node.attrs(), tokens, &mut tts);
        TokenStream::new(tts)
    }

    /// If the last token tree in `vec` is a token that can be glued to `tt`
    /// (e.g. `=` followed by `=` becomes `==`), glue them and return `true`.
    fn try_glue_to_last(vec: &mut Vec<TokenTree>, tt: &TokenTree) -> bool {
        if let Some(TokenTree::Token(last_tok, Spacing::Joint | Spacing::JointHidden)) = vec.last()
            && let TokenTree::Token(tok, spacing) = tt
            && let Some(glued_tok) = last_tok.glue(tok)
        {
            *vec.last_mut().unwrap() = TokenTree::Token(glued_tok, *spacing);
            true
        } else {
            false
        }
    }

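    /// Push `tt` onto the end of the stream, gluing it to the final token in
    /// the stream when possible (see `try_glue_to_last` above).
    ///
    /// Illustrative sketch only (not a doctest); it assumes that two adjacent
    /// `token::Eq` tokens glue into a single `token::EqEq`, which is what
    /// `Token::glue` does for `=` followed by `=`:
    ///
    /// ```ignore (illustrative)
    /// // A stream holding a single joint `=` token.
    /// let mut ts = TokenStream::new(vec![TokenTree::token_joint(token::Eq, DUMMY_SP)]);
    /// // Pushing another `=` glues the two into one `==` token...
    /// ts.push_tree(TokenTree::token_alone(token::Eq, DUMMY_SP));
    /// assert_eq!(ts.len(), 1);
    /// // ...whereas pushing after an `Alone` token would simply append.
    /// ```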
    pub fn push_tree(&mut self, tt: TokenTree) {
        let vec_mut = Arc::make_mut(&mut self.0);

        if Self::try_glue_to_last(vec_mut, &tt) {
            // `tt` was glued onto the last token in the stream; nothing to push.
        } else {
            vec_mut.push(tt);
        }
    }

    /// Push `stream` onto the end of this stream, possibly gluing the first
    /// token tree of `stream` to the last token in this stream.
    pub fn push_stream(&mut self, stream: TokenStream) {
        let vec_mut = Arc::make_mut(&mut self.0);

        let stream_iter = stream.0.iter().cloned();

        if let Some(first) = stream.0.first()
            && Self::try_glue_to_last(vec_mut, first)
        {
            // The first token tree was glued; append the rest.
            vec_mut.extend(stream_iter.skip(1));
        } else {
            vec_mut.extend(stream_iter);
        }
    }

    pub fn chunks(&self, chunk_size: usize) -> core::slice::Chunks<'_, TokenTree> {
        self.0.chunks(chunk_size)
    }

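    /// Desugars doc comments in this stream (and in any nested delimited
    /// groups) into equivalent `#[doc = ...]` attribute tokens, using a raw
    /// string literal for the comment body.
    ///
    /// A rough sketch of the effect, shown as surface syntax:
    ///
    /// ```ignore (illustrative)
    /// /// Some docs      ==>   #[doc = r" Some docs"]
    /// //! Inner docs     ==>   #![doc = r" Inner docs"]
    /// ```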
    pub fn desugar_doc_comments(&mut self) {
        if let Some(desugared_stream) = desugar_inner(self.clone()) {
            *self = desugared_stream;
        }

        // Returns `None` if nothing was desugared.
        fn desugar_inner(mut stream: TokenStream) -> Option<TokenStream> {
            let mut i = 0;
            let mut modified = false;
            while let Some(tt) = stream.0.get(i) {
                match tt {
                    &TokenTree::Token(
                        Token { kind: token::DocComment(_, attr_style, data), span },
                        _spacing,
                    ) => {
                        let desugared = desugared_tts(attr_style, data, span);
                        let desugared_len = desugared.len();
                        Arc::make_mut(&mut stream.0).splice(i..i + 1, desugared);
                        modified = true;
                        i += desugared_len;
                    }

                    &TokenTree::Token(..) => i += 1,

                    &TokenTree::Delimited(sp, spacing, delim, ref delim_stream) => {
                        if let Some(desugared_delim_stream) = desugar_inner(delim_stream.clone()) {
                            let new_tt =
                                TokenTree::Delimited(sp, spacing, delim, desugared_delim_stream);
                            Arc::make_mut(&mut stream.0)[i] = new_tt;
                            modified = true;
                        }
                        i += 1;
                    }
                }
            }
            if modified { Some(stream) } else { None }
        }

        fn desugared_tts(attr_style: AttrStyle, data: Symbol, span: Span) -> Vec<TokenTree> {
            // Compute how many `#`s are needed to wrap `data` in a raw string
            // literal: one more than the longest `#` run following a `"`.
            let mut num_of_hashes = 0;
            let mut count = 0;
            for ch in data.as_str().chars() {
                count = match ch {
                    '"' => 1,
                    '#' if count > 0 => count + 1,
                    _ => 0,
                };
                num_of_hashes = cmp::max(num_of_hashes, count);
            }

            // The bracketed `[doc = r"..."]` part of the desugared attribute.
            let delim_span = DelimSpan::from_single(span);
            let body = TokenTree::Delimited(
                delim_span,
                DelimSpacing::new(Spacing::JointHidden, Spacing::Alone),
                Delimiter::Bracket,
                [
                    TokenTree::token_alone(token::Ident(sym::doc, token::IdentIsRaw::No), span),
                    TokenTree::token_alone(token::Eq, span),
                    TokenTree::token_alone(
                        TokenKind::lit(token::StrRaw(num_of_hashes), data, None),
                        span,
                    ),
                ]
                .into_iter()
                .collect::<TokenStream>(),
            );

            if attr_style == AttrStyle::Inner {
                vec![
                    TokenTree::token_joint(token::Pound, span),
                    TokenTree::token_joint_hidden(token::Bang, span),
                    body,
                ]
            } else {
                vec![TokenTree::token_joint_hidden(token::Pound, span), body]
            }
        }
    }

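    /// Produces a copy of this stream with a comma inserted after the last
    /// plausible argument position, along with the span of the suggested
    /// comma. Used for diagnostics suggesting a missing comma, e.g. in
    /// `foo!(a b)`; returns `None` if no suitable position is found.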
    pub fn add_comma(&self) -> Option<(TokenStream, Span)> {
        let mut suggestion = None;
        let mut iter = self.0.iter().enumerate().peekable();
        while let Some((pos, ts)) = iter.next() {
            if let Some((_, next)) = iter.peek() {
                let sp = match (&ts, &next) {
                    (_, TokenTree::Token(Token { kind: token::Comma, .. }, _)) => continue,
                    (
                        TokenTree::Token(token_left, Spacing::Alone),
                        TokenTree::Token(token_right, _),
                    ) if (token_left.is_non_reserved_ident() || token_left.is_lit())
                        && (token_right.is_non_reserved_ident() || token_right.is_lit()) =>
                    {
                        token_left.span
                    }
                    (TokenTree::Delimited(sp, ..), _) => sp.entire(),
                    _ => continue,
                };
                let sp = sp.shrink_to_hi();
                let comma = TokenTree::token_alone(token::Comma, sp);
                suggestion = Some((pos, comma, sp));
            }
        }
        if let Some((pos, comma, sp)) = suggestion {
            let mut new_stream = Vec::with_capacity(self.0.len() + 1);
            let parts = self.0.split_at(pos + 1);
            new_stream.extend_from_slice(parts.0);
            new_stream.push(comma);
            new_stream.extend_from_slice(parts.1);
            return Some((TokenStream::new(new_stream), sp));
        }
        None
    }
}

impl FromIterator<TokenTree> for TokenStream {
    fn from_iter<I: IntoIterator<Item = TokenTree>>(iter: I) -> Self {
        TokenStream::new(iter.into_iter().collect::<Vec<TokenTree>>())
    }
}

impl<CTX> HashStable<CTX> for TokenStream
where
    CTX: crate::HashStableContext,
{
    fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) {
        for sub_tt in self.iter() {
            sub_tt.hash_stable(hcx, hasher);
        }
    }
}

#[derive(Clone)]
pub struct TokenStreamIter<'t> {
    stream: &'t TokenStream,
    index: usize,
}

impl<'t> TokenStreamIter<'t> {
    fn new(stream: &'t TokenStream) -> Self {
        TokenStreamIter { stream, index: 0 }
    }

    pub fn peek(&self) -> Option<&'t TokenTree> {
        self.stream.0.get(self.index)
    }
}

impl<'t> Iterator for TokenStreamIter<'t> {
    type Item = &'t TokenTree;

    fn next(&mut self) -> Option<&'t TokenTree> {
        self.stream.0.get(self.index).map(|tree| {
            self.index += 1;
            tree
        })
    }
}

#[derive(Clone, Debug)]
pub struct TokenTreeCursor {
    stream: TokenStream,
    index: usize,
}

impl TokenTreeCursor {
    #[inline]
    pub fn new(stream: TokenStream) -> Self {
        TokenTreeCursor { stream, index: 0 }
    }

    #[inline]
    pub fn curr(&self) -> Option<&TokenTree> {
        self.stream.get(self.index)
    }

    pub fn look_ahead(&self, n: usize) -> Option<&TokenTree> {
        self.stream.get(self.index + n)
    }

    #[inline]
    pub fn bump(&mut self) {
        self.index += 1;
    }

    #[inline]
    pub fn bump_to_end(&mut self) {
        self.index = self.stream.len();
    }
}

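/// A cursor that yields a flat sequence of `(Token, Spacing)` pairs from a
/// nested `TokenStream`: `curr` walks the innermost stream, `stack` remembers
/// the enclosing streams, and explicit open/close delimiter tokens are emitted
/// when entering and leaving a `Delimited` group (unless `Delimiter::skip`
/// says to omit them).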
#[derive(Clone, Debug)]
pub struct TokenCursor {
    /// Cursor for the current (innermost) token stream.
    pub curr: TokenTreeCursor,

    /// Enclosing token streams; the cursor returns to these after exhausting
    /// the current stream.
    pub stack: Vec<TokenTreeCursor>,
}

impl TokenCursor {
    pub fn next(&mut self) -> (Token, Spacing) {
        self.inlined_next()
    }

    /// This always-inlined version should only be used on hot code paths.
    #[inline(always)]
    pub fn inlined_next(&mut self) -> (Token, Spacing) {
        loop {
            if let Some(tree) = self.curr.curr() {
                match tree {
                    &TokenTree::Token(token, spacing) => {
                        debug_assert!(!token.kind.is_delim());
                        let res = (token, spacing);
                        self.curr.bump();
                        return res;
                    }
                    &TokenTree::Delimited(sp, spacing, delim, ref tts) => {
                        let trees = TokenTreeCursor::new(tts.clone());
                        self.stack.push(mem::replace(&mut self.curr, trees));
                        if !delim.skip() {
                            return (Token::new(delim.as_open_token_kind(), sp.open), spacing.open);
                        }
                        // No open delimiter to return; continue on to the next iteration.
                    }
                };
            } else if let Some(parent) = self.stack.pop() {
                // We have exhausted this token stream. Move back to its parent token stream.
                let Some(&TokenTree::Delimited(span, spacing, delim, _)) = parent.curr() else {
                    panic!("parent should be Delimited")
                };
                self.curr = parent;
                self.curr.bump(); // move past the `Delimited`
                if !delim.skip() {
                    return (Token::new(delim.as_close_token_kind(), span.close), spacing.close);
                }
                // No close delimiter to return; continue on to the next iteration.
            } else {
                // We have exhausted the outermost token stream.
                return (Token::new(token::Eof, DUMMY_SP), Spacing::Alone);
            }
        }
    }
}

#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Encodable, Decodable, HashStable_Generic, Walkable)]
pub struct DelimSpan {
    pub open: Span,
    pub close: Span,
}

impl DelimSpan {
    pub fn from_single(sp: Span) -> Self {
        DelimSpan { open: sp, close: sp }
    }

    pub fn from_pair(open: Span, close: Span) -> Self {
        DelimSpan { open, close }
    }

    pub fn dummy() -> Self {
        Self::from_single(DUMMY_SP)
    }

    pub fn entire(self) -> Span {
        self.open.with_hi(self.close.hi())
    }
}

#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, Encodable, Decodable, HashStable_Generic)]
pub struct DelimSpacing {
    pub open: Spacing,
    pub close: Spacing,
}

impl DelimSpacing {
    pub fn new(open: Spacing, close: Spacing) -> DelimSpacing {
        DelimSpacing { open, close }
    }
}

#[cfg(target_pointer_width = "64")]
mod size_asserts {
    use rustc_data_structures::static_assert_size;

    use super::*;
    static_assert_size!(AttrTokenStream, 8);
    static_assert_size!(AttrTokenTree, 32);
    static_assert_size!(LazyAttrTokenStream, 8);
    static_assert_size!(LazyAttrTokenStreamInner, 88);
    static_assert_size!(Option<LazyAttrTokenStream>, 8);
    static_assert_size!(TokenStream, 8);
    static_assert_size!(TokenTree, 32);
}
1023 }