#![allow(internal_features)]
#![cfg_attr(bootstrap, feature(array_windows))]
#![cfg_attr(target_arch = "loongarch64", feature(stdarch_loongarch))]
#![feature(cfg_select)]
#![feature(core_io_borrowed_buf)]
#![feature(if_let_guard)]
#![feature(map_try_insert)]
#![feature(negative_impls)]
#![feature(read_buf)]
#![feature(rustc_attrs)]
extern crate self as rustc_span;

use derive_where::derive_where;
use rustc_data_structures::{AtomicRef, outline};
use rustc_macros::{Decodable, Encodable, HashStable_Generic};
use rustc_serialize::opaque::{FileEncoder, MemDecoder};
use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
use tracing::debug;
pub use unicode_width::UNICODE_VERSION;

mod caching_source_map_view;
pub mod source_map;
use source_map::{SourceMap, SourceMapInputs};

pub use self::caching_source_map_view::CachingSourceMapView;
use crate::fatal_error::FatalError;

pub mod edition;
use edition::Edition;
pub mod hygiene;
use hygiene::Transparency;
pub use hygiene::{
    DesugaringKind, ExpnData, ExpnHash, ExpnId, ExpnKind, LocalExpnId, MacroKind, SyntaxContext,
};
use rustc_data_structures::stable_hasher::HashingControls;
pub mod def_id;
use def_id::{CrateNum, DefId, DefIndex, DefPathHash, LOCAL_CRATE, LocalDefId, StableCrateId};
pub mod edit_distance;
mod span_encoding;
pub use span_encoding::{DUMMY_SP, Span};

pub mod symbol;
pub use symbol::{
    ByteSymbol, Ident, MacroRulesNormalizedIdent, Macros20NormalizedIdent, STDLIB_STABLE_CRATES,
    Symbol, kw, sym,
};

mod analyze_source_file;
pub mod fatal_error;

pub mod profiling;

use std::borrow::Cow;
use std::cmp::{self, Ordering};
use std::fmt::Display;
use std::hash::Hash;
use std::io::{self, Read};
use std::ops::{Add, Range, Sub};
use std::path::{Path, PathBuf};
use std::str::FromStr;
use std::sync::Arc;
use std::{fmt, iter};

use md5::{Digest, Md5};
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::sync::{FreezeLock, FreezeWriteGuard, Lock};
use rustc_data_structures::unord::UnordMap;
use rustc_hashes::{Hash64, Hash128};
use sha1::Sha1;
use sha2::Sha256;

#[cfg(test)]
mod tests;

pub struct SessionGlobals {
    symbol_interner: symbol::Interner,
    span_interner: Lock<span_encoding::SpanInterner>,
    metavar_spans: MetavarSpansMap,
    hygiene_data: Lock<hygiene::HygieneData>,

    source_map: Option<Arc<SourceMap>>,
}

impl SessionGlobals {
    pub fn new(
        edition: Edition,
        extra_symbols: &[&'static str],
        sm_inputs: Option<SourceMapInputs>,
    ) -> SessionGlobals {
        SessionGlobals {
            symbol_interner: symbol::Interner::with_extra_symbols(extra_symbols),
            span_interner: Lock::new(span_encoding::SpanInterner::default()),
            metavar_spans: Default::default(),
            hygiene_data: Lock::new(hygiene::HygieneData::new(edition)),
            source_map: sm_inputs.map(|inputs| Arc::new(SourceMap::with_inputs(inputs))),
        }
    }
}

pub fn create_session_globals_then<R>(
    edition: Edition,
    extra_symbols: &[&'static str],
    sm_inputs: Option<SourceMapInputs>,
    f: impl FnOnce() -> R,
) -> R {
    assert!(
        !SESSION_GLOBALS.is_set(),
        "SESSION_GLOBALS should never be overwritten! \
        Use another thread if you need another SessionGlobals"
    );
    let session_globals = SessionGlobals::new(edition, extra_symbols, sm_inputs);
    SESSION_GLOBALS.set(&session_globals, f)
}

pub fn set_session_globals_then<R>(session_globals: &SessionGlobals, f: impl FnOnce() -> R) -> R {
    assert!(
        !SESSION_GLOBALS.is_set(),
        "SESSION_GLOBALS should never be overwritten! \
        Use another thread if you need another SessionGlobals"
    );
    SESSION_GLOBALS.set(session_globals, f)
}

pub fn create_session_if_not_set_then<R, F>(edition: Edition, f: F) -> R
where
    F: FnOnce(&SessionGlobals) -> R,
{
    if !SESSION_GLOBALS.is_set() {
        let session_globals = SessionGlobals::new(edition, &[], None);
        SESSION_GLOBALS.set(&session_globals, || SESSION_GLOBALS.with(f))
    } else {
        SESSION_GLOBALS.with(f)
    }
}

#[inline]
pub fn with_session_globals<R, F>(f: F) -> R
where
    F: FnOnce(&SessionGlobals) -> R,
{
    SESSION_GLOBALS.with(f)
}

pub fn create_default_session_globals_then<R>(f: impl FnOnce() -> R) -> R {
    create_session_globals_then(edition::DEFAULT_EDITION, &[], None, f)
}
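
// Illustrative sketch (not part of this crate's API surface): anything that interns
// symbols or allocates spans must run inside a session-globals scope, e.g. in a test:
//
//     create_default_session_globals_then(|| {
//         let sym = Symbol::intern("example");
//         assert_eq!(sym.as_str(), "example");
//     });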

scoped_tls::scoped_thread_local!(static SESSION_GLOBALS: SessionGlobals);

#[derive(Default)]
pub struct MetavarSpansMap(FreezeLock<UnordMap<Span, (Span, bool)>>);

impl MetavarSpansMap {
    pub fn insert(&self, span: Span, var_span: Span) -> bool {
        match self.0.write().try_insert(span, (var_span, false)) {
            Ok(_) => true,
            Err(entry) => entry.entry.get().0 == var_span,
        }
    }

    pub fn get(&self, span: Span) -> Option<Span> {
        if let Some(mut mspans) = self.0.try_write() {
            if let Some((var_span, read)) = mspans.get_mut(&span) {
                *read = true;
                Some(*var_span)
            } else {
                None
            }
        } else {
            if let Some((span, true)) = self.0.read().get(&span) { Some(*span) } else { None }
        }
    }

    pub fn freeze_and_get_read_spans(&self) -> UnordMap<Span, Span> {
        self.0.freeze().items().filter(|(_, (_, b))| *b).map(|(s1, (s2, _))| (*s1, *s2)).collect()
    }
}

#[inline]
pub fn with_metavar_spans<R>(f: impl FnOnce(&MetavarSpansMap) -> R) -> R {
    with_session_globals(|session_globals| f(&session_globals.metavar_spans))
}
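
// Illustrative sketch: recording and reading back a metavariable span via the
// session-global map, where `sp` and `var_sp` are hypothetical `Span`s and a
// session-globals scope is assumed to be active.
//
//     with_metavar_spans(|map| {
//         map.insert(sp, var_sp);
//         assert_eq!(map.get(sp), Some(var_sp));
//     });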

bitflags::bitflags! {
    /// Scopes used to determine whether `--remap-path-prefix` should be applied.
    #[derive(Debug, Eq, PartialEq, Clone, Copy, Ord, PartialOrd, Hash)]
    pub struct RemapPathScopeComponents: u8 {
        /// Apply remappings to the expansion of `std::file!()` macro
        const MACRO = 1 << 0;
        /// Apply remappings to printed compiler diagnostics
        const DIAGNOSTICS = 1 << 1;
        /// Apply remappings to debug information
        const DEBUGINFO = 1 << 3;
        /// Apply remappings to coverage information
        const COVERAGE = 1 << 4;

        /// An alias for `macro`, `debuginfo` and `coverage`. This ensures all paths in compiled
        /// executables, libraries and objects are remapped but not elsewhere.
        const OBJECT = Self::MACRO.bits() | Self::DEBUGINFO.bits() | Self::COVERAGE.bits();
    }
}
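
// Illustrative sketch of the flag algebra defined above: `OBJECT` is the union of
// `MACRO`, `DEBUGINFO` and `COVERAGE`, so it contains each of them but not
// `DIAGNOSTICS`.
//
//     use RemapPathScopeComponents as S;
//     assert!(S::OBJECT.contains(S::MACRO | S::DEBUGINFO | S::COVERAGE));
//     assert!(!S::OBJECT.contains(S::DIAGNOSTICS));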

impl<E: Encoder> Encodable<E> for RemapPathScopeComponents {
    #[inline]
    fn encode(&self, s: &mut E) {
        s.emit_u8(self.bits());
    }
}

impl<D: Decoder> Decodable<D> for RemapPathScopeComponents {
    #[inline]
    fn decode(s: &mut D) -> RemapPathScopeComponents {
        RemapPathScopeComponents::from_bits(s.read_u8())
            .expect("invalid bits for RemapPathScopeComponents")
    }
}

#[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Decodable, Encodable)]
pub struct RealFileName {
    local: Option<InnerRealFileName>,
    maybe_remapped: InnerRealFileName,
    scopes: RemapPathScopeComponents,
}

#[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Decodable, Encodable, Hash)]
struct InnerRealFileName {
    name: PathBuf,
    working_directory: PathBuf,
    embeddable_name: PathBuf,
}

impl Hash for RealFileName {
    #[inline]
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        if !self.was_fully_remapped() {
            self.local.hash(state);
        }
        self.maybe_remapped.hash(state);
        self.scopes.bits().hash(state);
    }
}
328impl RealFileName {
329 #[inline]
335 pub fn path(&self, scope: RemapPathScopeComponents) -> &Path {
336 if !(scope.bits().count_ones() == 1) {
{
::core::panicking::panic_fmt(format_args!("one and only one scope should be passed to `RealFileName::path`: {0:?}",
scope));
}
};assert!(
337 scope.bits().count_ones() == 1,
338 "one and only one scope should be passed to `RealFileName::path`: {scope:?}"
339 );
340 if !self.scopes.contains(scope)
341 && let Some(local_name) = &self.local
342 {
343 local_name.name.as_path()
344 } else {
345 self.maybe_remapped.name.as_path()
346 }
347 }
348
349 #[inline]
360 pub fn embeddable_name(&self, scope: RemapPathScopeComponents) -> (&Path, &Path) {
361 if !(scope.bits().count_ones() == 1) {
{
::core::panicking::panic_fmt(format_args!("one and only one scope should be passed to `RealFileName::embeddable_path`: {0:?}",
scope));
}
};assert!(
362 scope.bits().count_ones() == 1,
363 "one and only one scope should be passed to `RealFileName::embeddable_path`: {scope:?}"
364 );
365 if !self.scopes.contains(scope)
366 && let Some(local_name) = &self.local
367 {
368 (&local_name.working_directory, &local_name.embeddable_name)
369 } else {
370 (&self.maybe_remapped.working_directory, &self.maybe_remapped.embeddable_name)
371 }
372 }
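
    // Illustrative sketch: callers pass exactly one scope bit, e.g.
    //
    //     let shown: &Path = real_name.path(RemapPathScopeComponents::DIAGNOSTICS);
    //
    // where `real_name` is a hypothetical `RealFileName`; a multi-bit value such as
    // `RemapPathScopeComponents::OBJECT` would trip the assertion above.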

    #[inline]
    pub fn local_path(&self) -> Option<&Path> {
        if self.was_not_remapped() {
            Some(&self.maybe_remapped.name)
        } else if let Some(local) = &self.local {
            Some(&local.name)
        } else {
            None
        }
    }

    #[inline]
    pub fn into_local_path(self) -> Option<PathBuf> {
        if self.was_not_remapped() {
            Some(self.maybe_remapped.name)
        } else if let Some(local) = self.local {
            Some(local.name)
        } else {
            None
        }
    }

    #[inline]
    pub(crate) fn was_remapped(&self) -> bool {
        !self.scopes.is_empty()
    }

    #[inline]
    fn was_fully_remapped(&self) -> bool {
        self.scopes.is_all()
    }

    #[inline]
    fn was_not_remapped(&self) -> bool {
        self.scopes.is_empty()
    }

    #[inline]
    pub fn empty() -> RealFileName {
        RealFileName {
            local: Some(InnerRealFileName {
                name: PathBuf::new(),
                working_directory: PathBuf::new(),
                embeddable_name: PathBuf::new(),
            }),
            maybe_remapped: InnerRealFileName {
                name: PathBuf::new(),
                working_directory: PathBuf::new(),
                embeddable_name: PathBuf::new(),
            },
            scopes: RemapPathScopeComponents::empty(),
        }
    }

    pub fn from_virtual_path(path: &Path) -> RealFileName {
        let name = InnerRealFileName {
            name: path.to_owned(),
            embeddable_name: path.to_owned(),
            working_directory: PathBuf::new(),
        };
        RealFileName { local: None, maybe_remapped: name, scopes: RemapPathScopeComponents::all() }
    }

    #[inline]
    pub fn update_for_crate_metadata(&mut self) {
        if self.was_fully_remapped() || self.was_not_remapped() {
            self.local = None;
        }
    }

    fn to_string_lossy<'a>(&'a self, display_pref: FileNameDisplayPreference) -> Cow<'a, str> {
        match display_pref {
            FileNameDisplayPreference::Remapped => self.maybe_remapped.name.to_string_lossy(),
            FileNameDisplayPreference::Local => {
                self.local.as_ref().unwrap_or(&self.maybe_remapped).name.to_string_lossy()
            }
            FileNameDisplayPreference::Short => self
                .maybe_remapped
                .name
                .file_name()
                .map_or_else(|| "".into(), |f| f.to_string_lossy()),
            FileNameDisplayPreference::Scope(scope) => self.path(scope).to_string_lossy(),
        }
    }
}
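
// Illustrative sketch: a virtual path is treated as remapped in every scope and has
// no local counterpart, so it never yields a local path (the path literal below is
// hypothetical).
//
//     let virt = RealFileName::from_virtual_path(Path::new("/rustc/xyz/library/std/src/lib.rs"));
//     assert_eq!(virt.local_path(), None);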

#[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Hash, Decodable, Encodable)]
pub enum FileName {
    Real(RealFileName),
    CfgSpec(Hash64),
    Anon(Hash64),
    MacroExpansion(Hash64),
    ProcMacroSourceCode(Hash64),
    CliCrateAttr(Hash64),
    Custom(String),
    DocTest(PathBuf, isize),
    InlineAsm(Hash64),
}

pub struct FileNameDisplay<'a> {
    inner: &'a FileName,
    display_pref: FileNameDisplayPreference,
}

#[derive(Clone, Copy)]
enum FileNameDisplayPreference {
    Remapped,
    Local,
    Short,
    Scope(RemapPathScopeComponents),
}

impl fmt::Display for FileNameDisplay<'_> {
    fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        use FileName::*;
        match *self.inner {
            Real(ref name) => {
                write!(fmt, "{}", name.to_string_lossy(self.display_pref))
            }
            CfgSpec(_) => write!(fmt, "<cfgspec>"),
            MacroExpansion(_) => write!(fmt, "<macro expansion>"),
            Anon(_) => write!(fmt, "<anon>"),
            ProcMacroSourceCode(_) => write!(fmt, "<proc-macro source code>"),
            CliCrateAttr(_) => write!(fmt, "<crate attribute>"),
            Custom(ref s) => write!(fmt, "<{s}>"),
            DocTest(ref path, _) => write!(fmt, "{}", path.display()),
            InlineAsm(_) => write!(fmt, "<inline asm>"),
        }
    }
}
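
// Illustrative sketch: the synthetic variants render as angle-bracketed labels
// regardless of the display preference, e.g. an anonymous file name prints as
// "<anon>".
//
//     let name = FileName::anon_source_code("fn main() {}");
//     assert_eq!(name.prefer_local_unconditionally().to_string(), "<anon>");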

impl<'a> FileNameDisplay<'a> {
    pub fn to_string_lossy(&self) -> Cow<'a, str> {
        match self.inner {
            FileName::Real(inner) => inner.to_string_lossy(self.display_pref),
            _ => Cow::from(self.to_string()),
        }
    }
}

impl FileName {
    pub fn is_real(&self) -> bool {
        use FileName::*;
        match *self {
            Real(_) => true,
            Anon(_)
            | MacroExpansion(_)
            | ProcMacroSourceCode(_)
            | CliCrateAttr(_)
            | Custom(_)
            | CfgSpec(_)
            | DocTest(_, _)
            | InlineAsm(_) => false,
        }
    }

    #[inline]
    pub fn prefer_remapped_unconditionally(&self) -> FileNameDisplay<'_> {
        FileNameDisplay { inner: self, display_pref: FileNameDisplayPreference::Remapped }
    }

    #[inline]
    pub fn prefer_local_unconditionally(&self) -> FileNameDisplay<'_> {
        FileNameDisplay { inner: self, display_pref: FileNameDisplayPreference::Local }
    }

    #[inline]
    pub fn short(&self) -> FileNameDisplay<'_> {
        FileNameDisplay { inner: self, display_pref: FileNameDisplayPreference::Short }
    }

    #[inline]
    pub fn display(&self, scope: RemapPathScopeComponents) -> FileNameDisplay<'_> {
        FileNameDisplay { inner: self, display_pref: FileNameDisplayPreference::Scope(scope) }
    }

    pub fn macro_expansion_source_code(src: &str) -> FileName {
        let mut hasher = StableHasher::new();
        src.hash(&mut hasher);
        FileName::MacroExpansion(hasher.finish())
    }

    pub fn anon_source_code(src: &str) -> FileName {
        let mut hasher = StableHasher::new();
        src.hash(&mut hasher);
        FileName::Anon(hasher.finish())
    }

    pub fn proc_macro_source_code(src: &str) -> FileName {
        let mut hasher = StableHasher::new();
        src.hash(&mut hasher);
        FileName::ProcMacroSourceCode(hasher.finish())
    }

    pub fn cfg_spec_source_code(src: &str) -> FileName {
        let mut hasher = StableHasher::new();
        src.hash(&mut hasher);
        FileName::CfgSpec(hasher.finish())
    }

    pub fn cli_crate_attr_source_code(src: &str) -> FileName {
        let mut hasher = StableHasher::new();
        src.hash(&mut hasher);
        FileName::CliCrateAttr(hasher.finish())
    }

    pub fn doc_test_source_code(path: PathBuf, line: isize) -> FileName {
        FileName::DocTest(path, line)
    }

    pub fn inline_asm_source_code(src: &str) -> FileName {
        let mut hasher = StableHasher::new();
        src.hash(&mut hasher);
        FileName::InlineAsm(hasher.finish())
    }

    pub fn into_local_path(self) -> Option<PathBuf> {
        match self {
            FileName::Real(path) => path.into_local_path(),
            FileName::DocTest(path, _) => Some(path),
            _ => None,
        }
    }
}
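
// Illustrative sketch: only `Real` and `DocTest` file names carry a recoverable
// on-disk path; the hash-based variants do not.
//
//     assert_eq!(FileName::anon_source_code("fn main() {}").into_local_path(), None);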

#[derive(Clone, Copy, Hash, PartialEq, Eq)]
#[derive_where(PartialOrd, Ord)]
pub struct SpanData {
    pub lo: BytePos,
    pub hi: BytePos,
    #[derive_where(skip)]
    pub ctxt: SyntaxContext,
    #[derive_where(skip)]
    pub parent: Option<LocalDefId>,
}
685
686impl SpanData {
687 #[inline]
688 pub fn span(&self) -> Span {
689 Span::new(self.lo, self.hi, self.ctxt, self.parent)
690 }
691 #[inline]
692 pub fn with_lo(&self, lo: BytePos) -> Span {
693 Span::new(lo, self.hi, self.ctxt, self.parent)
694 }
695 #[inline]
696 pub fn with_hi(&self, hi: BytePos) -> Span {
697 Span::new(self.lo, hi, self.ctxt, self.parent)
698 }
699 #[inline]
701 fn with_ctxt(&self, ctxt: SyntaxContext) -> Span {
702 Span::new(self.lo, self.hi, ctxt, self.parent)
703 }
704 #[inline]
706 fn with_parent(&self, parent: Option<LocalDefId>) -> Span {
707 Span::new(self.lo, self.hi, self.ctxt, parent)
708 }
709 #[inline]
711 pub fn is_dummy(self) -> bool {
712 self.lo.0 == 0 && self.hi.0 == 0
713 }
714 pub fn contains(self, other: Self) -> bool {
716 self.lo <= other.lo && other.hi <= self.hi
717 }
718}
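// Sketch of how `SpanData` relates to `Span` (illustrative values, root
// context): `data()` decodes a `Span` into its fields, and the `with_*`
// helpers re-encode a `Span` with one field replaced.
//
//     let sp = Span::with_root_ctxt(BytePos(4), BytePos(10));
//     let data = sp.data();
//     assert!(data.contains(Span::with_root_ctxt(BytePos(5), BytePos(9)).data()));
//     let widened: Span = data.with_lo(BytePos(0)); // now covers bytes 0..10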
719
720impl Default for SpanData {
721 fn default() -> Self {
722 Self { lo: BytePos(0), hi: BytePos(0), ctxt: SyntaxContext::root(), parent: None }
723 }
724}
725
726impl PartialOrd for Span {
727 fn partial_cmp(&self, rhs: &Self) -> Option<Ordering> {
728 PartialOrd::partial_cmp(&self.data(), &rhs.data())
729 }
730}
731impl Ord for Span {
732 fn cmp(&self, rhs: &Self) -> Ordering {
733 Ord::cmp(&self.data(), &rhs.data())
734 }
735}
736
737impl Span {
738 #[inline]
739 pub fn lo(self) -> BytePos {
740 self.data().lo
741 }
742 #[inline]
743 pub fn with_lo(self, lo: BytePos) -> Span {
744 self.data().with_lo(lo)
745 }
746 #[inline]
747 pub fn hi(self) -> BytePos {
748 self.data().hi
749 }
750 #[inline]
751 pub fn with_hi(self, hi: BytePos) -> Span {
752 self.data().with_hi(hi)
753 }
754 #[inline]
755 pub fn with_ctxt(self, ctxt: SyntaxContext) -> Span {
756 self.map_ctxt(|_| ctxt)
757 }
758
759 #[inline]
760 pub fn is_visible(self, sm: &SourceMap) -> bool {
761 !self.is_dummy() && sm.is_span_accessible(self)
762 }
763
764 #[inline]
769 pub fn in_external_macro(self, sm: &SourceMap) -> bool {
770 self.ctxt().in_external_macro(sm)
771 }
772
773 pub fn in_derive_expansion(self) -> bool {
775         matches!(self.ctxt().outer_expn_data().kind, ExpnKind::Macro(MacroKind::Derive, _))
776 }
777
778 pub fn is_from_async_await(self) -> bool {
780         matches!(
781 self.ctxt().outer_expn_data().kind,
782 ExpnKind::Desugaring(DesugaringKind::Async | DesugaringKind::Await),
783 )
784 }
785
786 pub fn can_be_used_for_suggestions(self) -> bool {
788 !self.from_expansion()
789 || (self.in_derive_expansion()
793 && self.parent_callsite().map(|p| (p.lo(), p.hi())) != Some((self.lo(), self.hi())))
794 }
795
796 #[inline]
797 pub fn with_root_ctxt(lo: BytePos, hi: BytePos) -> Span {
798 Span::new(lo, hi, SyntaxContext::root(), None)
799 }
800
801 #[inline]
803 pub fn shrink_to_lo(self) -> Span {
804 let span = self.data_untracked();
805 span.with_hi(span.lo)
806 }
807 #[inline]
809 pub fn shrink_to_hi(self) -> Span {
810 let span = self.data_untracked();
811 span.with_lo(span.hi)
812 }
813
814 #[inline]
815 pub fn is_empty(self) -> bool {
817 let span = self.data_untracked();
818 span.hi == span.lo
819 }
820
821 pub fn substitute_dummy(self, other: Span) -> Span {
823 if self.is_dummy() { other } else { self }
824 }
825
826 pub fn contains(self, other: Span) -> bool {
828 let span = self.data();
829 let other = other.data();
830 span.contains(other)
831 }
832
833 pub fn overlaps(self, other: Span) -> bool {
835 let span = self.data();
836 let other = other.data();
837 span.lo < other.hi && other.lo < span.hi
838 }
839
840 pub fn overlaps_or_adjacent(self, other: Span) -> bool {
842 let span = self.data();
843 let other = other.data();
844 span.lo <= other.hi && other.lo <= span.hi
845 }
846
847 pub fn source_equal(self, other: Span) -> bool {
852 let span = self.data();
853 let other = other.data();
854 span.lo == other.lo && span.hi == other.hi
855 }
856
857 pub fn trim_start(self, other: Span) -> Option<Span> {
859 let span = self.data();
860 let other = other.data();
861 if span.hi > other.hi { Some(span.with_lo(cmp::max(span.lo, other.hi))) } else { None }
862 }
863
864 pub fn trim_end(self, other: Span) -> Option<Span> {
866 let span = self.data();
867 let other = other.data();
868 if span.lo < other.lo { Some(span.with_hi(cmp::min(span.hi, other.lo))) } else { None }
869 }
870
871 pub fn source_callsite(self) -> Span {
874 let ctxt = self.ctxt();
875 if !ctxt.is_root() { ctxt.outer_expn_data().call_site.source_callsite() } else { self }
876 }
877
878 pub fn parent_callsite(self) -> Option<Span> {
881 let ctxt = self.ctxt();
882 (!ctxt.is_root()).then(|| ctxt.outer_expn_data().call_site)
883 }
884
885 pub fn find_ancestor_inside(mut self, outer: Span) -> Option<Span> {
898 while !outer.contains(self) {
899 self = self.parent_callsite()?;
900 }
901 Some(self)
902 }
903
904 pub fn find_ancestor_in_same_ctxt(mut self, other: Span) -> Option<Span> {
917 while !self.eq_ctxt(other) {
918 self = self.parent_callsite()?;
919 }
920 Some(self)
921 }
922
923 pub fn find_ancestor_inside_same_ctxt(mut self, outer: Span) -> Option<Span> {
936 while !outer.contains(self) || !self.eq_ctxt(outer) {
937 self = self.parent_callsite()?;
938 }
939 Some(self)
940 }
941
942 pub fn find_ancestor_not_from_extern_macro(mut self, sm: &SourceMap) -> Option<Span> {
956 while self.in_external_macro(sm) {
957 self = self.parent_callsite()?;
958 }
959 Some(self)
960 }
961
962 pub fn find_ancestor_not_from_macro(mut self) -> Option<Span> {
975 while self.from_expansion() {
976 self = self.parent_callsite()?;
977 }
978 Some(self)
979 }
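// The `find_ancestor_*` helpers above all walk outwards via `parent_callsite`
// until their predicate holds, e.g. (illustrative; `item_span` and `span` are
// hypothetical spans from a caller):
//
//     // Restrict a macro-produced span to something inside the enclosing item
//     // before emitting a suggestion.
//     let sugg_span = span.find_ancestor_inside(item_span).unwrap_or(span);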
980
981 pub fn edition(self) -> edition::Edition {
983 self.ctxt().edition()
984 }
985
986 #[inline]
988 pub fn is_rust_2015(self) -> bool {
989 self.edition().is_rust_2015()
990 }
991
992 #[inline]
994 pub fn at_least_rust_2018(self) -> bool {
995 self.edition().at_least_rust_2018()
996 }
997
998 #[inline]
1000 pub fn at_least_rust_2021(self) -> bool {
1001 self.edition().at_least_rust_2021()
1002 }
1003
1004 #[inline]
1006 pub fn at_least_rust_2024(self) -> bool {
1007 self.edition().at_least_rust_2024()
1008 }
1009
1010 pub fn source_callee(self) -> Option<ExpnData> {
1016 let mut ctxt = self.ctxt();
1017 let mut opt_expn_data = None;
1018 while !ctxt.is_root() {
1019 let expn_data = ctxt.outer_expn_data();
1020 ctxt = expn_data.call_site.ctxt();
1021 opt_expn_data = Some(expn_data);
1022 }
1023 opt_expn_data
1024 }
1025
1026 pub fn allows_unstable(self, feature: Symbol) -> bool {
1030 self.ctxt()
1031 .outer_expn_data()
1032 .allow_internal_unstable
1033 .is_some_and(|features| features.contains(&feature))
1034 }
1035
1036 pub fn is_desugaring(self, kind: DesugaringKind) -> bool {
1038 match self.ctxt().outer_expn_data().kind {
1039 ExpnKind::Desugaring(k) => k == kind,
1040 _ => false,
1041 }
1042 }
1043
1044 pub fn desugaring_kind(self) -> Option<DesugaringKind> {
1047 match self.ctxt().outer_expn_data().kind {
1048 ExpnKind::Desugaring(k) => Some(k),
1049 _ => None,
1050 }
1051 }
1052
1053 pub fn allows_unsafe(self) -> bool {
1057 self.ctxt().outer_expn_data().allow_internal_unsafe
1058 }
1059
1060 pub fn macro_backtrace(mut self) -> impl Iterator<Item = ExpnData> {
1061 let mut prev_span = DUMMY_SP;
1062 iter::from_fn(move || {
1063 loop {
1064 let ctxt = self.ctxt();
1065 if ctxt.is_root() {
1066 return None;
1067 }
1068
1069 let expn_data = ctxt.outer_expn_data();
1070 let is_recursive = expn_data.call_site.source_equal(prev_span);
1071
1072 prev_span = self;
1073 self = expn_data.call_site;
1074
1075 if !is_recursive {
1077 return Some(expn_data);
1078 }
1079 }
1080 })
1081 }
1082
1083 pub fn split_at(self, pos: u32) -> (Span, Span) {
1085 let len = self.hi().0 - self.lo().0;
1086         debug_assert!(pos <= len);
1087
1088 let split_pos = BytePos(self.lo().0 + pos);
1089 (
1090 Span::new(self.lo(), split_pos, self.ctxt(), self.parent()),
1091 Span::new(split_pos, self.hi(), self.ctxt(), self.parent()),
1092 )
1093 }
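// Worked example for `split_at` (illustrative): splitting a span covering
// bytes 10..20 at relative position 3 yields halves meeting at byte 13, both
// keeping the original context and parent.
//
//     let sp = Span::with_root_ctxt(BytePos(10), BytePos(20));
//     let (head, tail) = sp.split_at(3);
//     assert_eq!((head.lo(), head.hi()), (BytePos(10), BytePos(13)));
//     assert_eq!((tail.lo(), tail.hi()), (BytePos(13), BytePos(20)));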
1094
1095 fn try_metavars(a: SpanData, b: SpanData, a_orig: Span, b_orig: Span) -> (SpanData, SpanData) {
1097 match with_metavar_spans(|mspans| (mspans.get(a_orig), mspans.get(b_orig))) {
1098 (None, None) => {}
1099 (Some(meta_a), None) => {
1100 let meta_a = meta_a.data();
1101 if meta_a.ctxt == b.ctxt {
1102 return (meta_a, b);
1103 }
1104 }
1105 (None, Some(meta_b)) => {
1106 let meta_b = meta_b.data();
1107 if a.ctxt == meta_b.ctxt {
1108 return (a, meta_b);
1109 }
1110 }
1111 (Some(meta_a), Some(meta_b)) => {
1112 let meta_b = meta_b.data();
1113 if a.ctxt == meta_b.ctxt {
1114 return (a, meta_b);
1115 }
1116 let meta_a = meta_a.data();
1117 if meta_a.ctxt == b.ctxt {
1118 return (meta_a, b);
1119 } else if meta_a.ctxt == meta_b.ctxt {
1120 return (meta_a, meta_b);
1121 }
1122 }
1123 }
1124
1125 (a, b)
1126 }
1127
1128 fn prepare_to_combine(
1130 a_orig: Span,
1131 b_orig: Span,
1132 ) -> Result<(SpanData, SpanData, Option<LocalDefId>), Span> {
1133 let (a, b) = (a_orig.data(), b_orig.data());
1134 if a.ctxt == b.ctxt {
1135 return Ok((a, b, if a.parent == b.parent { a.parent } else { None }));
1136 }
1137
1138 let (a, b) = Span::try_metavars(a, b, a_orig, b_orig);
1139 if a.ctxt == b.ctxt {
1140 return Ok((a, b, if a.parent == b.parent { a.parent } else { None }));
1141 }
1142
1143 let a_is_callsite = a.ctxt.is_root() || a.ctxt == b.span().source_callsite().ctxt();
1151 Err(if a_is_callsite { b_orig } else { a_orig })
1152 }
1153
1154 pub fn with_neighbor(self, neighbor: Span) -> Span {
1156 match Span::prepare_to_combine(self, neighbor) {
1157 Ok((this, ..)) => this.span(),
1158 Err(_) => self,
1159 }
1160 }
1161
1162 pub fn to(self, end: Span) -> Span {
1173 match Span::prepare_to_combine(self, end) {
1174 Ok((from, to, parent)) => {
1175 Span::new(cmp::min(from.lo, to.lo), cmp::max(from.hi, to.hi), from.ctxt, parent)
1176 }
1177 Err(fallback) => fallback,
1178 }
1179 }
1180
1181 pub fn between(self, end: Span) -> Span {
1189 match Span::prepare_to_combine(self, end) {
1190 Ok((from, to, parent)) => {
1191 Span::new(cmp::min(from.hi, to.hi), cmp::max(from.lo, to.lo), from.ctxt, parent)
1192 }
1193 Err(fallback) => fallback,
1194 }
1195 }
1196
1197 pub fn until(self, end: Span) -> Span {
1205 match Span::prepare_to_combine(self, end) {
1206 Ok((from, to, parent)) => {
1207 Span::new(cmp::min(from.lo, to.lo), cmp::max(from.lo, to.lo), from.ctxt, parent)
1208 }
1209 Err(fallback) => fallback,
1210 }
1211 }
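// Comparison sketch for the three combinators above, on same-context spans
// `a = 2..5` and `b = 8..12` (illustrative byte positions):
//
//     let a = Span::with_root_ctxt(BytePos(2), BytePos(5));
//     let b = Span::with_root_ctxt(BytePos(8), BytePos(12));
//     // a.to(b)      covers both endpoints:   2..12
//     // a.between(b) covers only the gap:     5..8
//     // a.until(b)   runs up to `b`'s start:  2..8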
1212
1213 pub fn within_macro(self, within: Span, sm: &SourceMap) -> Option<Span> {
1228 match Span::prepare_to_combine(self, within) {
1229 Ok((self_, _, parent))
1236 if self_.hi < self.lo() || self.hi() < self_.lo && !sm.is_imported(within) =>
1237 {
1238 Some(Span::new(self_.lo, self_.hi, self_.ctxt, parent))
1239 }
1240 _ => None,
1241 }
1242 }
1243
1244 pub fn from_inner(self, inner: InnerSpan) -> Span {
1245 let span = self.data();
1246 Span::new(
1247 span.lo + BytePos::from_usize(inner.start),
1248 span.lo + BytePos::from_usize(inner.end),
1249 span.ctxt,
1250 span.parent,
1251 )
1252 }
1253
1254 pub fn with_def_site_ctxt(self, expn_id: ExpnId) -> Span {
1257 self.with_ctxt_from_mark(expn_id, Transparency::Opaque)
1258 }
1259
1260 pub fn with_call_site_ctxt(self, expn_id: ExpnId) -> Span {
1263 self.with_ctxt_from_mark(expn_id, Transparency::Transparent)
1264 }
1265
1266 pub fn with_mixed_site_ctxt(self, expn_id: ExpnId) -> Span {
1269 self.with_ctxt_from_mark(expn_id, Transparency::SemiOpaque)
1270 }
1271
1272 fn with_ctxt_from_mark(self, expn_id: ExpnId, transparency: Transparency) -> Span {
1276 self.with_ctxt(SyntaxContext::root().apply_mark(expn_id, transparency))
1277 }
1278
1279 #[inline]
1280 pub fn apply_mark(self, expn_id: ExpnId, transparency: Transparency) -> Span {
1281 self.map_ctxt(|ctxt| ctxt.apply_mark(expn_id, transparency))
1282 }
1283
1284 #[inline]
1285 pub fn remove_mark(&mut self) -> ExpnId {
1286 let mut mark = ExpnId::root();
1287 *self = self.map_ctxt(|mut ctxt| {
1288 mark = ctxt.remove_mark();
1289 ctxt
1290 });
1291 mark
1292 }
1293
1294 #[inline]
1295 pub fn adjust(&mut self, expn_id: ExpnId) -> Option<ExpnId> {
1296 let mut mark = None;
1297 *self = self.map_ctxt(|mut ctxt| {
1298 mark = ctxt.adjust(expn_id);
1299 ctxt
1300 });
1301 mark
1302 }
1303
1304 #[inline]
1305 pub fn normalize_to_macros_2_0_and_adjust(&mut self, expn_id: ExpnId) -> Option<ExpnId> {
1306 let mut mark = None;
1307 *self = self.map_ctxt(|mut ctxt| {
1308 mark = ctxt.normalize_to_macros_2_0_and_adjust(expn_id);
1309 ctxt
1310 });
1311 mark
1312 }
1313
1314 #[inline]
1315 pub fn glob_adjust(&mut self, expn_id: ExpnId, glob_span: Span) -> Option<Option<ExpnId>> {
1316 let mut mark = None;
1317 *self = self.map_ctxt(|mut ctxt| {
1318 mark = ctxt.glob_adjust(expn_id, glob_span);
1319 ctxt
1320 });
1321 mark
1322 }
1323
1324 #[inline]
1325 pub fn reverse_glob_adjust(
1326 &mut self,
1327 expn_id: ExpnId,
1328 glob_span: Span,
1329 ) -> Option<Option<ExpnId>> {
1330 let mut mark = None;
1331 *self = self.map_ctxt(|mut ctxt| {
1332 mark = ctxt.reverse_glob_adjust(expn_id, glob_span);
1333 ctxt
1334 });
1335 mark
1336 }
1337
1338 #[inline]
1339 pub fn normalize_to_macros_2_0(self) -> Span {
1340 self.map_ctxt(|ctxt| ctxt.normalize_to_macros_2_0())
1341 }
1342
1343 #[inline]
1344 pub fn normalize_to_macro_rules(self) -> Span {
1345 self.map_ctxt(|ctxt| ctxt.normalize_to_macro_rules())
1346 }
1347}
1348
1349impl Default for Span {
1350 fn default() -> Self {
1351 DUMMY_SP
1352 }
1353}
1354
1355rustc_index::newtype_index! {
1356 #[orderable]
1357 #[debug_format = "AttrId({})"]
1358 pub struct AttrId {}
1359}
1360
1361pub trait SpanEncoder: Encoder {
1364 fn encode_span(&mut self, span: Span);
1365 fn encode_symbol(&mut self, sym: Symbol);
1366 fn encode_byte_symbol(&mut self, byte_sym: ByteSymbol);
1367 fn encode_expn_id(&mut self, expn_id: ExpnId);
1368 fn encode_syntax_context(&mut self, syntax_context: SyntaxContext);
1369 fn encode_crate_num(&mut self, crate_num: CrateNum);
1372 fn encode_def_index(&mut self, def_index: DefIndex);
1373 fn encode_def_id(&mut self, def_id: DefId);
1374}
1375
1376impl SpanEncoder for FileEncoder {
1377 fn encode_span(&mut self, span: Span) {
1378 let span = span.data();
1379 span.lo.encode(self);
1380 span.hi.encode(self);
1381 }
1382
1383 fn encode_symbol(&mut self, sym: Symbol) {
1384 self.emit_str(sym.as_str());
1385 }
1386
1387 fn encode_byte_symbol(&mut self, byte_sym: ByteSymbol) {
1388 self.emit_byte_str(byte_sym.as_byte_str());
1389 }
1390
1391 fn encode_expn_id(&mut self, _expn_id: ExpnId) {
1392         panic!("cannot encode `ExpnId` with `FileEncoder`");
1393 }
1394
1395 fn encode_syntax_context(&mut self, _syntax_context: SyntaxContext) {
1396         panic!("cannot encode `SyntaxContext` with `FileEncoder`");
1397 }
1398
1399 fn encode_crate_num(&mut self, crate_num: CrateNum) {
1400 self.emit_u32(crate_num.as_u32());
1401 }
1402
1403 fn encode_def_index(&mut self, _def_index: DefIndex) {
1404         panic!("cannot encode `DefIndex` with `FileEncoder`");
1405 }
1406
1407 fn encode_def_id(&mut self, def_id: DefId) {
1408 def_id.krate.encode(self);
1409 def_id.index.encode(self);
1410 }
1411}
1412
1413impl<E: SpanEncoder> Encodable<E> for Span {
1414 fn encode(&self, s: &mut E) {
1415 s.encode_span(*self);
1416 }
1417}
1418
1419impl<E: SpanEncoder> Encodable<E> for Symbol {
1420 fn encode(&self, s: &mut E) {
1421 s.encode_symbol(*self);
1422 }
1423}
1424
1425impl<E: SpanEncoder> Encodable<E> for ByteSymbol {
1426 fn encode(&self, s: &mut E) {
1427 s.encode_byte_symbol(*self);
1428 }
1429}
1430
1431impl<E: SpanEncoder> Encodable<E> for ExpnId {
1432 fn encode(&self, s: &mut E) {
1433 s.encode_expn_id(*self)
1434 }
1435}
1436
1437impl<E: SpanEncoder> Encodable<E> for SyntaxContext {
1438 fn encode(&self, s: &mut E) {
1439 s.encode_syntax_context(*self)
1440 }
1441}
1442
1443impl<E: SpanEncoder> Encodable<E> for CrateNum {
1444 fn encode(&self, s: &mut E) {
1445 s.encode_crate_num(*self)
1446 }
1447}
1448
1449impl<E: SpanEncoder> Encodable<E> for DefIndex {
1450 fn encode(&self, s: &mut E) {
1451 s.encode_def_index(*self)
1452 }
1453}
1454
1455impl<E: SpanEncoder> Encodable<E> for DefId {
1456 fn encode(&self, s: &mut E) {
1457 s.encode_def_id(*self)
1458 }
1459}
1460
1461impl<E: SpanEncoder> Encodable<E> for AttrId {
1462 fn encode(&self, _s: &mut E) {
1463 }
1465}
1466
1467pub trait BlobDecoder: Decoder {
1468 fn decode_symbol(&mut self) -> Symbol;
1469 fn decode_byte_symbol(&mut self) -> ByteSymbol;
1470 fn decode_def_index(&mut self) -> DefIndex;
1471}
1472
1473pub trait SpanDecoder: BlobDecoder {
1490 fn decode_span(&mut self) -> Span;
1491 fn decode_expn_id(&mut self) -> ExpnId;
1492 fn decode_syntax_context(&mut self) -> SyntaxContext;
1493 fn decode_crate_num(&mut self) -> CrateNum;
1494 fn decode_def_id(&mut self) -> DefId;
1495 fn decode_attr_id(&mut self) -> AttrId;
1496}
1497
1498impl BlobDecoder for MemDecoder<'_> {
1499 fn decode_symbol(&mut self) -> Symbol {
1500 Symbol::intern(self.read_str())
1501 }
1502
1503 fn decode_byte_symbol(&mut self) -> ByteSymbol {
1504 ByteSymbol::intern(self.read_byte_str())
1505 }
1506
1507 fn decode_def_index(&mut self) -> DefIndex {
1508         panic!("cannot decode `DefIndex` with `MemDecoder`");
1509 }
1510}
1511
1512impl SpanDecoder for MemDecoder<'_> {
1513 fn decode_span(&mut self) -> Span {
1514 let lo = Decodable::decode(self);
1515 let hi = Decodable::decode(self);
1516
1517 Span::new(lo, hi, SyntaxContext::root(), None)
1518 }
1519
1520 fn decode_expn_id(&mut self) -> ExpnId {
1521         panic!("cannot decode `ExpnId` with `MemDecoder`");
1522 }
1523
1524 fn decode_syntax_context(&mut self) -> SyntaxContext {
1525         panic!("cannot decode `SyntaxContext` with `MemDecoder`");
1526 }
1527
1528 fn decode_crate_num(&mut self) -> CrateNum {
1529 CrateNum::from_u32(self.read_u32())
1530 }
1531
1532 fn decode_def_id(&mut self) -> DefId {
1533 DefId { krate: Decodable::decode(self), index: Decodable::decode(self) }
1534 }
1535
1536 fn decode_attr_id(&mut self) -> AttrId {
1537         panic!("cannot decode `AttrId` with `MemDecoder`");
1538 }
1539}
1540
1541impl<D: SpanDecoder> Decodable<D> for Span {
1542 fn decode(s: &mut D) -> Span {
1543 s.decode_span()
1544 }
1545}
1546
1547impl<D: BlobDecoder> Decodable<D> for Symbol {
1548 fn decode(s: &mut D) -> Symbol {
1549 s.decode_symbol()
1550 }
1551}
1552
1553impl<D: BlobDecoder> Decodable<D> for ByteSymbol {
1554 fn decode(s: &mut D) -> ByteSymbol {
1555 s.decode_byte_symbol()
1556 }
1557}
1558
1559impl<D: SpanDecoder> Decodable<D> for ExpnId {
1560 fn decode(s: &mut D) -> ExpnId {
1561 s.decode_expn_id()
1562 }
1563}
1564
1565impl<D: SpanDecoder> Decodable<D> for SyntaxContext {
1566 fn decode(s: &mut D) -> SyntaxContext {
1567 s.decode_syntax_context()
1568 }
1569}
1570
1571impl<D: SpanDecoder> Decodable<D> for CrateNum {
1572 fn decode(s: &mut D) -> CrateNum {
1573 s.decode_crate_num()
1574 }
1575}
1576
1577impl<D: BlobDecoder> Decodable<D> for DefIndex {
1578 fn decode(s: &mut D) -> DefIndex {
1579 s.decode_def_index()
1580 }
1581}
1582
1583impl<D: SpanDecoder> Decodable<D> for DefId {
1584 fn decode(s: &mut D) -> DefId {
1585 s.decode_def_id()
1586 }
1587}
1588
1589impl<D: SpanDecoder> Decodable<D> for AttrId {
1590 fn decode(s: &mut D) -> AttrId {
1591 s.decode_attr_id()
1592 }
1593}
1594
1595impl fmt::Debug for Span {
1596 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1597 fn fallback(span: Span, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1601 f.debug_struct("Span")
1602 .field("lo", &span.lo())
1603 .field("hi", &span.hi())
1604 .field("ctxt", &span.ctxt())
1605 .finish()
1606 }
1607
1608 if SESSION_GLOBALS.is_set() {
1609 with_session_globals(|session_globals| {
1610 if let Some(source_map) = &session_globals.source_map {
1611                     write!(f, "{} ({:?})", source_map.span_to_diagnostic_string(*self), self.ctxt())
1612 } else {
1613 fallback(*self, f)
1614 }
1615 })
1616 } else {
1617 fallback(*self, f)
1618 }
1619 }
1620}
1621
1622impl fmt::Debug for SpanData {
1623 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1624 fmt::Debug::fmt(&self.span(), f)
1625 }
1626}
1627
1628#[derive(Copy, Clone, Encodable, Decodable, Eq, PartialEq, Debug, HashStable_Generic)]
1630pub struct MultiByteChar {
1631 pub pos: RelativeBytePos,
1633 pub bytes: u8,
1635}
1636
1637#[derive(Copy, Clone, Encodable, Decodable, Eq, PartialEq, Debug, HashStable_Generic)]
1639pub struct NormalizedPos {
1640 pub pos: RelativeBytePos,
1642 pub diff: u32,
1644}
1645
1646#[derive(PartialEq, Eq, Clone, Debug)]
1647pub enum ExternalSource {
1648 Unneeded,
1650 Foreign {
1651 kind: ExternalSourceKind,
1652 metadata_index: u32,
1654 },
1655}
1656
1657#[derive(PartialEq, Eq, Clone, Debug)]
1659pub enum ExternalSourceKind {
1660 Present(Arc<String>),
1662 AbsentOk,
1664 AbsentErr,
1666}
1667
1668impl ExternalSource {
1669 pub fn get_source(&self) -> Option<&str> {
1670 match self {
1671 ExternalSource::Foreign { kind: ExternalSourceKind::Present(src), .. } => Some(src),
1672 _ => None,
1673 }
1674 }
1675}
1676
1677#[derive(Debug)]
1678pub struct OffsetOverflowError;
1679
1680#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Encodable, Decodable)]
1681#[derive(HashStable_Generic)]
1682pub enum SourceFileHashAlgorithm {
1683 Md5,
1684 Sha1,
1685 Sha256,
1686 Blake3,
1687}
1688
1689impl Display for SourceFileHashAlgorithm {
1690 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1691 f.write_str(match self {
1692 Self::Md5 => "md5",
1693 Self::Sha1 => "sha1",
1694 Self::Sha256 => "sha256",
1695 Self::Blake3 => "blake3",
1696 })
1697 }
1698}
1699
1700impl FromStr for SourceFileHashAlgorithm {
1701 type Err = ();
1702
1703 fn from_str(s: &str) -> Result<SourceFileHashAlgorithm, ()> {
1704 match s {
1705 "md5" => Ok(SourceFileHashAlgorithm::Md5),
1706 "sha1" => Ok(SourceFileHashAlgorithm::Sha1),
1707 "sha256" => Ok(SourceFileHashAlgorithm::Sha256),
1708 "blake3" => Ok(SourceFileHashAlgorithm::Blake3),
1709 _ => Err(()),
1710 }
1711 }
1712}
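// Round-trip sketch: the strings accepted here mirror the `Display` impl
// above, so parsing and printing are inverses (illustrative):
//
//     let algo: SourceFileHashAlgorithm = "sha256".parse().unwrap();
//     assert_eq!(algo, SourceFileHashAlgorithm::Sha256);
//     assert_eq!(algo.to_string(), "sha256");
//     assert!("sha512".parse::<SourceFileHashAlgorithm>().is_err());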
1713
1714#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)]
1716#[derive(HashStable_Generic, Encodable, Decodable)]
1717pub struct SourceFileHash {
1718 pub kind: SourceFileHashAlgorithm,
1719 value: [u8; 32],
1720}
1721
1722impl Display for SourceFileHash {
1723 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1724         write!(f, "{}=", self.kind)?;
1725 for byte in self.value[0..self.hash_len()].into_iter() {
1726             write!(f, "{byte:02x}")?;
1727 }
1728 Ok(())
1729 }
1730}
1731
1732impl SourceFileHash {
1733 pub fn new_in_memory(kind: SourceFileHashAlgorithm, src: impl AsRef<[u8]>) -> SourceFileHash {
1734 let mut hash = SourceFileHash { kind, value: Default::default() };
1735 let len = hash.hash_len();
1736 let value = &mut hash.value[..len];
1737 let data = src.as_ref();
1738 match kind {
1739 SourceFileHashAlgorithm::Md5 => {
1740 value.copy_from_slice(&Md5::digest(data));
1741 }
1742 SourceFileHashAlgorithm::Sha1 => {
1743 value.copy_from_slice(&Sha1::digest(data));
1744 }
1745 SourceFileHashAlgorithm::Sha256 => {
1746 value.copy_from_slice(&Sha256::digest(data));
1747 }
1748 SourceFileHashAlgorithm::Blake3 => value.copy_from_slice(blake3::hash(data).as_bytes()),
1749 };
1750 hash
1751 }
1752
1753 pub fn new(kind: SourceFileHashAlgorithm, src: impl Read) -> Result<SourceFileHash, io::Error> {
1754 let mut hash = SourceFileHash { kind, value: Default::default() };
1755 let len = hash.hash_len();
1756 let value = &mut hash.value[..len];
1757         let mut buf = vec![0; 16 * 1024];
1760
1761 fn digest<T>(
1762 mut hasher: T,
1763 mut update: impl FnMut(&mut T, &[u8]),
1764 finish: impl FnOnce(T, &mut [u8]),
1765 mut src: impl Read,
1766 buf: &mut [u8],
1767 value: &mut [u8],
1768 ) -> Result<(), io::Error> {
1769 loop {
1770 let bytes_read = src.read(buf)?;
1771 if bytes_read == 0 {
1772 break;
1773 }
1774 update(&mut hasher, &buf[0..bytes_read]);
1775 }
1776 finish(hasher, value);
1777 Ok(())
1778 }
1779
1780 match kind {
1781 SourceFileHashAlgorithm::Sha256 => {
1782 digest(
1783 Sha256::new(),
1784 |h, b| {
1785 h.update(b);
1786 },
1787 |h, out| out.copy_from_slice(&h.finalize()),
1788 src,
1789 &mut buf,
1790 value,
1791 )?;
1792 }
1793 SourceFileHashAlgorithm::Sha1 => {
1794 digest(
1795 Sha1::new(),
1796 |h, b| {
1797 h.update(b);
1798 },
1799 |h, out| out.copy_from_slice(&h.finalize()),
1800 src,
1801 &mut buf,
1802 value,
1803 )?;
1804 }
1805 SourceFileHashAlgorithm::Md5 => {
1806 digest(
1807 Md5::new(),
1808 |h, b| {
1809 h.update(b);
1810 },
1811 |h, out| out.copy_from_slice(&h.finalize()),
1812 src,
1813 &mut buf,
1814 value,
1815 )?;
1816 }
1817 SourceFileHashAlgorithm::Blake3 => {
1818 digest(
1819 blake3::Hasher::new(),
1820 |h, b| {
1821 h.update(b);
1822 },
1823 |h, out| out.copy_from_slice(h.finalize().as_bytes()),
1824 src,
1825 &mut buf,
1826 value,
1827 )?;
1828 }
1829 }
1830 Ok(hash)
1831 }
1832
1833 pub fn matches(&self, src: &str) -> bool {
1835 Self::new_in_memory(self.kind, src.as_bytes()) == *self
1836 }
1837
1838 pub fn hash_bytes(&self) -> &[u8] {
1840 let len = self.hash_len();
1841 &self.value[..len]
1842 }
1843
1844 fn hash_len(&self) -> usize {
1845 match self.kind {
1846 SourceFileHashAlgorithm::Md5 => 16,
1847 SourceFileHashAlgorithm::Sha1 => 20,
1848 SourceFileHashAlgorithm::Sha256 | SourceFileHashAlgorithm::Blake3 => 32,
1849 }
1850 }
1851}
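// Usage sketch (illustrative): `hash_bytes` is truncated to the digest width
// of the chosen algorithm, and `matches` recomputes the digest over the given
// source text.
//
//     let h = SourceFileHash::new_in_memory(SourceFileHashAlgorithm::Sha1, "fn main() {}");
//     assert_eq!(h.hash_bytes().len(), 20); // SHA-1 digests are 20 bytes
//     assert!(h.matches("fn main() {}"));
//     assert!(!h.matches("fn main() { }"));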
1852
1853#[derive(Clone)]
1854pub enum SourceFileLines {
1855 Lines(Vec<RelativeBytePos>),
1857
1858 Diffs(SourceFileDiffs),
1860}
1861
1862impl SourceFileLines {
1863 pub fn is_lines(&self) -> bool {
1864         matches!(self, SourceFileLines::Lines(_))
1865 }
1866}
1867
1868#[derive(Clone)]
1876pub struct SourceFileDiffs {
1877 bytes_per_diff: usize,
1881
1882 num_diffs: usize,
1885
1886 raw_diffs: Vec<u8>,
1892}
1893
1894pub struct SourceFile {
1896 pub name: FileName,
1900 pub src: Option<Arc<String>>,
1902 pub src_hash: SourceFileHash,
1904 pub checksum_hash: Option<SourceFileHash>,
1908 pub external_src: FreezeLock<ExternalSource>,
1911 pub start_pos: BytePos,
1913 pub normalized_source_len: RelativeBytePos,
1915 pub unnormalized_source_len: u32,
1917 pub lines: FreezeLock<SourceFileLines>,
1919 pub multibyte_chars: Vec<MultiByteChar>,
1921 pub normalized_pos: Vec<NormalizedPos>,
1923 pub stable_id: StableSourceFileId,
1927 pub cnum: CrateNum,
1929}
1930
1931impl Clone for SourceFile {
1932 fn clone(&self) -> Self {
1933 Self {
1934 name: self.name.clone(),
1935 src: self.src.clone(),
1936 src_hash: self.src_hash,
1937 checksum_hash: self.checksum_hash,
1938 external_src: self.external_src.clone(),
1939 start_pos: self.start_pos,
1940 normalized_source_len: self.normalized_source_len,
1941 unnormalized_source_len: self.unnormalized_source_len,
1942 lines: self.lines.clone(),
1943 multibyte_chars: self.multibyte_chars.clone(),
1944 normalized_pos: self.normalized_pos.clone(),
1945 stable_id: self.stable_id,
1946 cnum: self.cnum,
1947 }
1948 }
1949}
1950
1951impl<S: SpanEncoder> Encodable<S> for SourceFile {
1952 fn encode(&self, s: &mut S) {
1953 self.name.encode(s);
1954 self.src_hash.encode(s);
1955 self.checksum_hash.encode(s);
1956 self.normalized_source_len.encode(s);
1958 self.unnormalized_source_len.encode(s);
1959
1960         assert!(self.lines.read().is_lines());
1962 let lines = self.lines();
1963 s.emit_u32(lines.len() as u32);
1965
1966 if lines.len() != 0 {
1968 let max_line_length = if lines.len() == 1 {
1969 0
1970 } else {
1971 lines
1972 .array_windows()
1973 .map(|&[fst, snd]| snd - fst)
1974 .map(|bp| bp.to_usize())
1975 .max()
1976 .unwrap()
1977 };
1978
1979 let bytes_per_diff: usize = match max_line_length {
1980 0..=0xFF => 1,
1981 0x100..=0xFFFF => 2,
1982 _ => 4,
1983 };
1984
1985 s.emit_u8(bytes_per_diff as u8);
1987
1988             assert_eq!(lines[0], RelativeBytePos(0));
1990
1991 let diff_iter = lines.array_windows().map(|&[fst, snd]| snd - fst);
1993 let num_diffs = lines.len() - 1;
1994 let mut raw_diffs;
1995 match bytes_per_diff {
1996 1 => {
1997 raw_diffs = Vec::with_capacity(num_diffs);
1998 for diff in diff_iter {
1999 raw_diffs.push(diff.0 as u8);
2000 }
2001 }
2002 2 => {
2003 raw_diffs = Vec::with_capacity(bytes_per_diff * num_diffs);
2004 for diff in diff_iter {
2005 raw_diffs.extend_from_slice(&(diff.0 as u16).to_le_bytes());
2006 }
2007 }
2008 4 => {
2009 raw_diffs = Vec::with_capacity(bytes_per_diff * num_diffs);
2010 for diff in diff_iter {
2011 raw_diffs.extend_from_slice(&(diff.0).to_le_bytes());
2012 }
2013 }
2014                 _ => unreachable!(),
2015 }
2016 s.emit_raw_bytes(&raw_diffs);
2017 }
2018
2019 self.multibyte_chars.encode(s);
2020 self.stable_id.encode(s);
2021 self.normalized_pos.encode(s);
2022 self.cnum.encode(s);
2023 }
2024}
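// Size sketch for the line-table encoding above (illustrative): line starts
// are stored as diffs between consecutive entries, using the smallest fixed
// width that holds the longest line. For starts [0, 7, 19, 300] the diffs are
// [7, 12, 281]; the largest diff (281) needs two bytes, so `bytes_per_diff`
// is 2 and the payload is 3 * 2 = 6 bytes of little-endian `u16` values.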
2025
2026impl<D: SpanDecoder> Decodable<D> for SourceFile {
2027 fn decode(d: &mut D) -> SourceFile {
2028 let name: FileName = Decodable::decode(d);
2029 let src_hash: SourceFileHash = Decodable::decode(d);
2030 let checksum_hash: Option<SourceFileHash> = Decodable::decode(d);
2031 let normalized_source_len: RelativeBytePos = Decodable::decode(d);
2032 let unnormalized_source_len = Decodable::decode(d);
2033 let lines = {
2034 let num_lines: u32 = Decodable::decode(d);
2035 if num_lines > 0 {
2036 let bytes_per_diff = d.read_u8() as usize;
2038
2039 let num_diffs = num_lines as usize - 1;
2041 let raw_diffs = d.read_raw_bytes(bytes_per_diff * num_diffs).to_vec();
2042 SourceFileLines::Diffs(SourceFileDiffs { bytes_per_diff, num_diffs, raw_diffs })
2043 } else {
2044                 SourceFileLines::Lines(vec![])
2045 }
2046 };
2047 let multibyte_chars: Vec<MultiByteChar> = Decodable::decode(d);
2048 let stable_id = Decodable::decode(d);
2049 let normalized_pos: Vec<NormalizedPos> = Decodable::decode(d);
2050 let cnum: CrateNum = Decodable::decode(d);
2051 SourceFile {
2052 name,
2053 start_pos: BytePos::from_u32(0),
2054 normalized_source_len,
2055 unnormalized_source_len,
2056 src: None,
2057 src_hash,
2058 checksum_hash,
2059 external_src: FreezeLock::frozen(ExternalSource::Unneeded),
2062 lines: FreezeLock::new(lines),
2063 multibyte_chars,
2064 normalized_pos,
2065 stable_id,
2066 cnum,
2067 }
2068 }
2069}
2070
2071impl fmt::Debug for SourceFile {
2072 fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
2073         write!(fmt, "SourceFile({:?})", self.name)
2074 }
2075}
2076
2077#[derive(
2099    Debug,
2100    Clone,
2101    Copy,
2102    Hash,
2103    PartialEq,
2104    Eq,
2105    HashStable_Generic,
2106    Encodable,
2107    Decodable,
2108    Default,
2109    PartialOrd,
2110    Ord
2111)]
2112pub struct StableSourceFileId(Hash128);
2113
2114impl StableSourceFileId {
2115 fn from_filename_in_current_crate(filename: &FileName) -> Self {
2116 Self::from_filename_and_stable_crate_id(filename, None)
2117 }
2118
2119 pub fn from_filename_for_export(
2120 filename: &FileName,
2121 local_crate_stable_crate_id: StableCrateId,
2122 ) -> Self {
2123 Self::from_filename_and_stable_crate_id(filename, Some(local_crate_stable_crate_id))
2124 }
2125
2126 fn from_filename_and_stable_crate_id(
2127 filename: &FileName,
2128 stable_crate_id: Option<StableCrateId>,
2129 ) -> Self {
2130 let mut hasher = StableHasher::new();
2131 filename.hash(&mut hasher);
2132 stable_crate_id.hash(&mut hasher);
2133 StableSourceFileId(hasher.finish())
2134 }
2135}
2136
2137impl SourceFile {
2138 const MAX_FILE_SIZE: u32 = u32::MAX - 1;
2139
2140 pub fn new(
2141 name: FileName,
2142 mut src: String,
2143 hash_kind: SourceFileHashAlgorithm,
2144 checksum_hash_kind: Option<SourceFileHashAlgorithm>,
2145 ) -> Result<Self, OffsetOverflowError> {
2146 let src_hash = SourceFileHash::new_in_memory(hash_kind, src.as_bytes());
2148 let checksum_hash = checksum_hash_kind.map(|checksum_hash_kind| {
2149 if checksum_hash_kind == hash_kind {
2150 src_hash
2151 } else {
2152 SourceFileHash::new_in_memory(checksum_hash_kind, src.as_bytes())
2153 }
2154 });
2155 let unnormalized_source_len = u32::try_from(src.len()).map_err(|_| OffsetOverflowError)?;
2157 if unnormalized_source_len > Self::MAX_FILE_SIZE {
2158 return Err(OffsetOverflowError);
2159 }
2160
2161 let normalized_pos = normalize_src(&mut src);
2162
2163 let stable_id = StableSourceFileId::from_filename_in_current_crate(&name);
2164 let normalized_source_len = u32::try_from(src.len()).map_err(|_| OffsetOverflowError)?;
2165 if normalized_source_len > Self::MAX_FILE_SIZE {
2166 return Err(OffsetOverflowError);
2167 }
2168
2169 let (lines, multibyte_chars) = analyze_source_file::analyze_source_file(&src);
2170
2171 Ok(SourceFile {
2172 name,
2173 src: Some(Arc::new(src)),
2174 src_hash,
2175 checksum_hash,
2176 external_src: FreezeLock::frozen(ExternalSource::Unneeded),
2177 start_pos: BytePos::from_u32(0),
2178 normalized_source_len: RelativeBytePos::from_u32(normalized_source_len),
2179 unnormalized_source_len,
2180 lines: FreezeLock::frozen(SourceFileLines::Lines(lines)),
2181 multibyte_chars,
2182 normalized_pos,
2183 stable_id,
2184 cnum: LOCAL_CRATE,
2185 })
2186 }
2187
2188 fn convert_diffs_to_lines_frozen(&self) {
2191 let mut guard = if let Some(guard) = self.lines.try_write() { guard } else { return };
2192
2193 let SourceFileDiffs { bytes_per_diff, num_diffs, raw_diffs } = match &*guard {
2194 SourceFileLines::Diffs(diffs) => diffs,
2195 SourceFileLines::Lines(..) => {
2196 FreezeWriteGuard::freeze(guard);
2197 return;
2198 }
2199 };
2200
2201 let num_lines = num_diffs + 1;
2203 let mut lines = Vec::with_capacity(num_lines);
2204 let mut line_start = RelativeBytePos(0);
2205 lines.push(line_start);
2206
2207         assert_eq!(*num_diffs, raw_diffs.len() / bytes_per_diff);
2208 match bytes_per_diff {
2209 1 => {
2210 lines.extend(raw_diffs.into_iter().map(|&diff| {
2211 line_start = line_start + RelativeBytePos(diff as u32);
2212 line_start
2213 }));
2214 }
2215 2 => {
2216 lines.extend((0..*num_diffs).map(|i| {
2217 let pos = bytes_per_diff * i;
2218 let bytes = [raw_diffs[pos], raw_diffs[pos + 1]];
2219 let diff = u16::from_le_bytes(bytes);
2220 line_start = line_start + RelativeBytePos(diff as u32);
2221 line_start
2222 }));
2223 }
2224 4 => {
2225 lines.extend((0..*num_diffs).map(|i| {
2226 let pos = bytes_per_diff * i;
2227 let bytes = [
2228 raw_diffs[pos],
2229 raw_diffs[pos + 1],
2230 raw_diffs[pos + 2],
2231 raw_diffs[pos + 3],
2232 ];
2233 let diff = u32::from_le_bytes(bytes);
2234 line_start = line_start + RelativeBytePos(diff);
2235 line_start
2236 }));
2237 }
2238             _ => unreachable!(),
2239 }
2240
2241 *guard = SourceFileLines::Lines(lines);
2242
2243 FreezeWriteGuard::freeze(guard);
2244 }
2245
2246 pub fn lines(&self) -> &[RelativeBytePos] {
2247 if let Some(SourceFileLines::Lines(lines)) = self.lines.get() {
2248 return &lines[..];
2249 }
2250
2251 outline(|| {
2252 self.convert_diffs_to_lines_frozen();
2253 if let Some(SourceFileLines::Lines(lines)) = self.lines.get() {
2254 return &lines[..];
2255 }
2256             unreachable!()
2257 })
2258 }
2259
2260 pub fn line_begin_pos(&self, pos: BytePos) -> BytePos {
2262 let pos = self.relative_position(pos);
2263 let line_index = self.lookup_line(pos).unwrap();
2264 let line_start_pos = self.lines()[line_index];
2265 self.absolute_position(line_start_pos)
2266 }
2267
2268 pub fn add_external_src<F>(&self, get_src: F) -> bool
2273 where
2274 F: FnOnce() -> Option<String>,
2275 {
2276 if !self.external_src.is_frozen() {
2277 let src = get_src();
2278 let src = src.and_then(|mut src| {
2279 self.src_hash.matches(&src).then(|| {
2281 normalize_src(&mut src);
2282 src
2283 })
2284 });
2285
2286 self.external_src.try_write().map(|mut external_src| {
2287 if let ExternalSource::Foreign {
2288 kind: src_kind @ ExternalSourceKind::AbsentOk,
2289 ..
2290 } = &mut *external_src
2291 {
2292 *src_kind = if let Some(src) = src {
2293 ExternalSourceKind::Present(Arc::new(src))
2294 } else {
2295 ExternalSourceKind::AbsentErr
2296 };
2297 } else {
2298                     panic!("unexpected state {:?}", *external_src)
2299 }
2300
2301 FreezeWriteGuard::freeze(external_src)
2303 });
2304 }
2305
2306 self.src.is_some() || self.external_src.read().get_source().is_some()
2307 }
2308
2309 pub fn get_line(&self, line_number: usize) -> Option<Cow<'_, str>> {
2312 fn get_until_newline(src: &str, begin: usize) -> &str {
2313 let slice = &src[begin..];
2317 match slice.find('\n') {
2318 Some(e) => &slice[..e],
2319 None => slice,
2320 }
2321 }
2322
2323 let begin = {
2324 let line = self.lines().get(line_number).copied()?;
2325 line.to_usize()
2326 };
2327
2328 if let Some(ref src) = self.src {
2329 Some(Cow::from(get_until_newline(src, begin)))
2330 } else {
2331 self.external_src
2332 .borrow()
2333 .get_source()
2334 .map(|src| Cow::Owned(String::from(get_until_newline(src, begin))))
2335 }
2336 }
2337
2338 pub fn is_real_file(&self) -> bool {
2339 self.name.is_real()
2340 }
2341
2342 #[inline]
2343 pub fn is_imported(&self) -> bool {
2344 self.src.is_none()
2345 }
2346
2347 pub fn count_lines(&self) -> usize {
2348 self.lines().len()
2349 }
2350
2351 #[inline]
2352 pub fn absolute_position(&self, pos: RelativeBytePos) -> BytePos {
2353 BytePos::from_u32(pos.to_u32() + self.start_pos.to_u32())
2354 }
2355
2356 #[inline]
2357 pub fn relative_position(&self, pos: BytePos) -> RelativeBytePos {
2358 RelativeBytePos::from_u32(pos.to_u32() - self.start_pos.to_u32())
2359 }
2360
2361 #[inline]
2362 pub fn end_position(&self) -> BytePos {
2363 self.absolute_position(self.normalized_source_len)
2364 }
2365
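    /// Returns the 0-based index of the line containing `pos`: the index of the
    /// last recorded line start that is `<= pos`, or `None` if `pos` lies before
    /// the first line start.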
2366 pub fn lookup_line(&self, pos: RelativeBytePos) -> Option<usize> {
2371 self.lines().partition_point(|x| x <= &pos).checked_sub(1)
2372 }
2373
2374 pub fn line_bounds(&self, line_index: usize) -> Range<BytePos> {
2375 if self.is_empty() {
2376 return self.start_pos..self.start_pos;
2377 }
2378
2379 let lines = self.lines();
2380 assert!(line_index < lines.len());
2381 if line_index == (lines.len() - 1) {
2382 self.absolute_position(lines[line_index])..self.end_position()
2383 } else {
2384 self.absolute_position(lines[line_index])..self.absolute_position(lines[line_index + 1])
2385 }
2386 }
2387
2388 #[inline]
2393 pub fn contains(&self, byte_pos: BytePos) -> bool {
2394 byte_pos >= self.start_pos && byte_pos <= self.end_position()
2395 }
2396
2397 #[inline]
2398 pub fn is_empty(&self) -> bool {
2399 self.normalized_source_len.to_u32() == 0
2400 }
2401
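    /// Maps an absolute position in the normalized source back to the file-relative
    /// byte position it had in the original, un-normalized source, by re-adding the
    /// recorded normalization diffs.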
2402 pub fn original_relative_byte_pos(&self, pos: BytePos) -> RelativeBytePos {
2405 let pos = self.relative_position(pos);
2406
2407 let diff = match self.normalized_pos.binary_search_by(|np| np.pos.cmp(&pos)) {
2411 Ok(i) => self.normalized_pos[i].diff,
2412 Err(0) => 0,
2413 Err(i) => self.normalized_pos[i - 1].diff,
2414 };
2415
2416 RelativeBytePos::from_u32(pos.0 + diff)
2417 }
2418
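    /// Converts `offset`, a byte offset into the original (un-normalized) source of
    /// this file, into the corresponding absolute `BytePos` in the normalized
    /// source, subtracting the normalization diffs accumulated before that point.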
2419 pub fn normalized_byte_pos(&self, offset: u32) -> BytePos {
2429 let diff = match self
2430 .normalized_pos
2431 .binary_search_by(|np| (np.pos.0 + np.diff).cmp(&(self.start_pos.0 + offset)))
2432 {
2433 Ok(i) => self.normalized_pos[i].diff,
2434 Err(0) => 0,
2435 Err(i) => self.normalized_pos[i - 1].diff,
2436 };
2437
2438 BytePos::from_u32(self.start_pos.0 + offset - diff)
2439 }
2440
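    /// Converts a file-relative byte position into a character position by
    /// subtracting the extra bytes contributed by multibyte characters that
    /// precede it.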
2441 fn bytepos_to_file_charpos(&self, bpos: RelativeBytePos) -> CharPos {
2443 let mut total_extra_bytes = 0;
2445
2446 for mbc in self.multibyte_chars.iter() {
2447 debug!("{}-byte char at {:?}", mbc.bytes, mbc.pos);
2448 if mbc.pos < bpos {
2449 total_extra_bytes += mbc.bytes as u32 - 1;
2452 assert!(bpos.to_u32() >= mbc.pos.to_u32() + mbc.bytes as u32);
2455 } else {
2456 break;
2457 }
2458 }
2459
2460 assert!(total_extra_bytes <= bpos.to_u32());
2461 CharPos(bpos.to_usize() - total_extra_bytes as usize)
2462 }
2463
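    /// Looks up the 1-based line number and 0-based `CharPos` column for `pos`;
    /// returns `(0, chpos)` if the position lies before the first line.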
2464 fn lookup_file_pos(&self, pos: RelativeBytePos) -> (usize, CharPos) {
2467 let chpos = self.bytepos_to_file_charpos(pos);
2468 match self.lookup_line(pos) {
2469 Some(a) => {
2470 let line = a + 1; // Line numbers start at 1
                let linebpos = self.lines()[a];
2472 let linechpos = self.bytepos_to_file_charpos(linebpos);
2473 let col = chpos - linechpos;
2474 debug!("byte pos {:?} is on the line at byte pos {:?}", pos, linebpos);
2475 debug!("char pos {:?} is on the line at char pos {:?}", chpos, linechpos);
2476 debug!("byte is on line: {}", line);
2477 assert!(chpos >= linechpos);
2478 (line, col)
2479 }
2480 None => (0, chpos),
2481 }
2482 }
2483
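    /// Like `lookup_file_pos`, but additionally returns the display column: the
    /// on-screen width (per `char_width`) of the text preceding the position on
    /// its line.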
2484 pub fn lookup_file_pos_with_col_display(&self, pos: BytePos) -> (usize, CharPos, usize) {
2487 let pos = self.relative_position(pos);
2488 let (line, col_or_chpos) = self.lookup_file_pos(pos);
2489 if line > 0 {
2490 let Some(code) = self.get_line(line - 1) else {
2498 tracing::info!("couldn't find line {line} {:?}", self.name);
2499 return (line, col_or_chpos, col_or_chpos.0);
2500 };
2501 let display_col = code.chars().take(col_or_chpos.0).map(|ch| char_width(ch)).sum();
2502 (line, col_or_chpos, display_col)
2503 } else {
2504 (0, col_or_chpos, col_or_chpos.0)
2506 }
2507 }
2508}
2509
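/// Display width of a single character as used in diagnostics: a tab counts as
/// 4 columns, ASCII control characters and Unicode directional-formatting
/// characters count as 1, and everything else uses its Unicode width
/// (defaulting to 1 when unknown).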
2510pub fn char_width(ch: char) -> usize {
2511 match ch {
2514 '\t' => 4,
2515 '\u{0000}' | '\u{0001}' | '\u{0002}' | '\u{0003}' | '\u{0004}' | '\u{0005}'
2519 | '\u{0006}' | '\u{0007}' | '\u{0008}' | '\u{000B}' | '\u{000C}' | '\u{000D}'
2520 | '\u{000E}' | '\u{000F}' | '\u{0010}' | '\u{0011}' | '\u{0012}' | '\u{0013}'
2521 | '\u{0014}' | '\u{0015}' | '\u{0016}' | '\u{0017}' | '\u{0018}' | '\u{0019}'
2522 | '\u{001A}' | '\u{001B}' | '\u{001C}' | '\u{001D}' | '\u{001E}' | '\u{001F}'
2523 | '\u{007F}' | '\u{202A}' | '\u{202B}' | '\u{202D}' | '\u{202E}' | '\u{2066}'
2524 | '\u{2067}' | '\u{2068}' | '\u{202C}' | '\u{2069}' => 1,
2525 _ => unicode_width::UnicodeWidthChar::width(ch).unwrap_or(1),
2526 }
2527}
2528
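/// Sum of `char_width` over all characters of `s`; for example,
/// `str_width("a\tb")` is 6 with the widths chosen above.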
2529pub fn str_width(s: &str) -> usize {
2530 s.chars().map(char_width).sum()
2531}
2532
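/// Normalizes the source in place (BOM removal and CRLF -> LF conversion) and
/// returns the positions at which bytes were removed, so that spans can later
/// be mapped back to the on-disk source.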
2533fn normalize_src(src: &mut String) -> Vec<NormalizedPos> {
2535 let mut normalized_pos = vec![];
2536 remove_bom(src, &mut normalized_pos);
2537 normalize_newlines(src, &mut normalized_pos);
2538 normalized_pos
2539}
2540
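/// Removes a leading UTF-8 byte-order mark, if present. The BOM is U+FEFF,
/// which is three bytes in UTF-8, hence the `drain(..3)`.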
2541fn remove_bom(src: &mut String, normalized_pos: &mut Vec<NormalizedPos>) {
2543 if src.starts_with('\u{feff}') {
2544 src.drain(..3);
2545 normalized_pos.push(NormalizedPos { pos: RelativeBytePos(0), diff: 3 });
2546 }
2547}
2548
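/// Replaces `\r\n` with `\n` in place, leaving lone `\r` bytes untouched, and
/// records one `NormalizedPos` per removed byte: for `"a\r\nb"` the text becomes
/// `"a\nb"` and a single entry at normalized position 2 with `diff: 1` is pushed.
/// The retained bytes are compacted leftwards over the removed `\r`s, so the
/// whole pass is a single in-place scan.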
2549fn normalize_newlines(src: &mut String, normalized_pos: &mut Vec<NormalizedPos>) {
2553 if !src.as_bytes().contains(&b'\r') {
2554 return;
2555 }
2556
2557 let mut buf = std::mem::replace(src, String::new()).into_bytes();
2563 let mut gap_len = 0;
2564 let mut tail = buf.as_mut_slice();
2565 let mut cursor = 0;
2566 let original_gap = normalized_pos.last().map_or(0, |l| l.diff);
2567 loop {
2568 let idx = match find_crlf(&tail[gap_len..]) {
2569 None => tail.len(),
2570 Some(idx) => idx + gap_len,
2571 };
2572 tail.copy_within(gap_len..idx, 0);
2573 tail = &mut tail[idx - gap_len..];
2574 if tail.len() == gap_len {
2575 break;
2576 }
2577 cursor += idx - gap_len;
2578 gap_len += 1;
2579 normalized_pos.push(NormalizedPos {
2580 pos: RelativeBytePos::from_usize(cursor + 1),
2581 diff: original_gap + gap_len as u32,
2582 });
2583 }
2584
2585 let new_len = buf.len() - gap_len;
2588 unsafe {
2589 buf.set_len(new_len);
2590 *src = String::from_utf8_unchecked(buf);
2591 }
2592
2593 fn find_crlf(src: &[u8]) -> Option<usize> {
2594 let mut search_idx = 0;
2595 while let Some(idx) = find_cr(&src[search_idx..]) {
2596 if src[search_idx..].get(idx + 1) != Some(&b'\n') {
2597 search_idx += idx + 1;
2598 continue;
2599 }
2600 return Some(search_idx + idx);
2601 }
2602 None
2603 }
2604
2605 fn find_cr(src: &[u8]) -> Option<usize> {
2606 src.iter().position(|&b| b == b'\r')
2607 }
2608}
2609
2610pub trait Pos {
2615 fn from_usize(n: usize) -> Self;
2616 fn to_usize(&self) -> usize;
2617 fn from_u32(n: u32) -> Self;
2618 fn to_u32(&self) -> u32;
2619}
2620
2621macro_rules! impl_pos {
2622 (
2623 $(
2624 $(#[$attr:meta])*
2625 $vis:vis struct $ident:ident($inner_vis:vis $inner_ty:ty);
2626 )*
2627 ) => {
2628 $(
2629 $(#[$attr])*
2630 $vis struct $ident($inner_vis $inner_ty);
2631
2632 impl Pos for $ident {
2633 #[inline(always)]
2634 fn from_usize(n: usize) -> $ident {
2635 $ident(n as $inner_ty)
2636 }
2637
2638 #[inline(always)]
2639 fn to_usize(&self) -> usize {
2640 self.0 as usize
2641 }
2642
2643 #[inline(always)]
2644 fn from_u32(n: u32) -> $ident {
2645 $ident(n as $inner_ty)
2646 }
2647
2648 #[inline(always)]
2649 fn to_u32(&self) -> u32 {
2650 self.0 as u32
2651 }
2652 }
2653
2654 impl Add for $ident {
2655 type Output = $ident;
2656
2657 #[inline(always)]
2658 fn add(self, rhs: $ident) -> $ident {
2659 $ident(self.0 + rhs.0)
2660 }
2661 }
2662
2663 impl Sub for $ident {
2664 type Output = $ident;
2665
2666 #[inline(always)]
2667 fn sub(self, rhs: $ident) -> $ident {
2668 $ident(self.0 - rhs.0)
2669 }
2670 }
2671 )*
2672 };
2673}
2674
2675impl_pos! {
2676 #[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord, Debug)]
2680 pub struct BytePos(pub u32);
2681
2682 #[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord, Debug)]
2684 pub struct RelativeBytePos(pub u32);
2685
2686 #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
    /// A character offset.
    ///
    /// Because of multibyte UTF-8 characters, a byte offset
    /// is not equivalent to a character offset. The [`SourceMap`] will convert [`BytePos`]
    /// values to `CharPos` values as necessary.
2692 pub struct CharPos(pub usize);
2693}
2694
2695impl<S: Encoder> Encodable<S> for BytePos {
2696 fn encode(&self, s: &mut S) {
2697 s.emit_u32(self.0);
2698 }
2699}
2700
2701impl<D: Decoder> Decodable<D> for BytePos {
2702 fn decode(d: &mut D) -> BytePos {
2703 BytePos(d.read_u32())
2704 }
2705}
2706
2707impl<H: HashStableContext> HashStable<H> for RelativeBytePos {
2708 fn hash_stable(&self, hcx: &mut H, hasher: &mut StableHasher) {
2709 self.0.hash_stable(hcx, hasher);
2710 }
2711}
2712
2713impl<S: Encoder> Encodable<S> for RelativeBytePos {
2714 fn encode(&self, s: &mut S) {
2715 s.emit_u32(self.0);
2716 }
2717}
2718
2719impl<D: Decoder> Decodable<D> for RelativeBytePos {
2720 fn decode(d: &mut D) -> RelativeBytePos {
2721 RelativeBytePos(d.read_u32())
2722 }
2723}
2724
2725#[derive(Debug, Clone)]
2731pub struct Loc {
2732 pub file: Arc<SourceFile>,
2734 pub line: usize,
2736 pub col: CharPos,
2738 pub col_display: usize,
2740}
2741
2742#[derive(Debug)]
2744pub struct SourceFileAndLine {
2745 pub sf: Arc<SourceFile>,
2746 pub line: usize,
2748}
2749#[derive(Debug)]
2750pub struct SourceFileAndBytePos {
2751 pub sf: Arc<SourceFile>,
2752 pub pos: BytePos,
2753}
2754
2755#[derive(Copy, Clone, Debug, PartialEq, Eq)]
2756pub struct LineInfo {
2757 pub line_index: usize,
2759
2760 pub start_col: CharPos,
2762
2763 pub end_col: CharPos,
2765}
2766
2767pub struct FileLines {
2768 pub file: Arc<SourceFile>,
2769 pub lines: Vec<LineInfo>,
2770}
2771
2772pub static SPAN_TRACK: AtomicRef<fn(LocalDefId)> = AtomicRef::new(&((|_| {}) as fn(_)));
2773
2774pub type FileLinesResult = Result<FileLines, SpanLinesError>;
2779
2780#[derive(Clone, PartialEq, Eq, Debug)]
2781pub enum SpanLinesError {
2782 DistinctSources(Box<DistinctSources>),
2783}
2784
2785#[derive(Clone, PartialEq, Eq, Debug)]
2786pub enum SpanSnippetError {
2787 IllFormedSpan(Span),
2788 DistinctSources(Box<DistinctSources>),
2789 MalformedForSourcemap(MalformedSourceMapPositions),
2790 SourceNotAvailable { filename: FileName },
2791}
2792
2793#[derive(Clone, PartialEq, Eq, Debug)]
2794pub struct DistinctSources {
2795 pub begin: (FileName, BytePos),
2796 pub end: (FileName, BytePos),
2797}
2798
2799#[derive(Clone, PartialEq, Eq, Debug)]
2800pub struct MalformedSourceMapPositions {
2801 pub name: FileName,
2802 pub source_len: usize,
2803 pub begin_pos: BytePos,
2804 pub end_pos: BytePos,
2805}
2806
2807#[derive(Copy, Clone, PartialEq, Eq, Debug)]
2809pub struct InnerSpan {
2810 pub start: usize,
2811 pub end: usize,
2812}
2813
2814impl InnerSpan {
2815 pub fn new(start: usize, end: usize) -> InnerSpan {
2816 InnerSpan { start, end }
2817 }
2818}
2819
2820pub trait HashStableContext {
2825 fn def_path_hash(&self, def_id: DefId) -> DefPathHash;
2826 fn hash_spans(&self) -> bool;
2827 fn unstable_opts_incremental_ignore_spans(&self) -> bool;
2830 fn def_span(&self, def_id: LocalDefId) -> Span;
2831 fn span_data_to_lines_and_cols(
2832 &mut self,
2833 span: &SpanData,
2834 ) -> Option<(&SourceFile, usize, BytePos, usize, BytePos)>;
2835 fn hashing_controls(&self) -> HashingControls;
2836}
2837
2838impl<CTX> HashStable<CTX> for Span
2839where
2840 CTX: HashStableContext,
2841{
2842 fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) {
2855 const TAG_VALID_SPAN: u8 = 0;
2856 const TAG_INVALID_SPAN: u8 = 1;
2857 const TAG_RELATIVE_SPAN: u8 = 2;
2858
2859 if !ctx.hash_spans() {
2860 return;
2861 }
2862
2863 let span = self.data_untracked();
2864 span.ctxt.hash_stable(ctx, hasher);
2865 span.parent.hash_stable(ctx, hasher);
2866
2867 if span.is_dummy() {
2868 Hash::hash(&TAG_INVALID_SPAN, hasher);
2869 return;
2870 }
2871
2872 let parent = span.parent.map(|parent| ctx.def_span(parent).data_untracked());
2873 if let Some(parent) = parent
2874 && parent.contains(span)
2875 {
2876 Hash::hash(&TAG_RELATIVE_SPAN, hasher);
2880 (span.lo - parent.lo).to_u32().hash_stable(ctx, hasher);
2881 (span.hi - parent.lo).to_u32().hash_stable(ctx, hasher);
2882 return;
2883 }
2884
2885 let Some((file, line_lo, col_lo, line_hi, col_hi)) = ctx.span_data_to_lines_and_cols(&span)
2889 else {
2890 Hash::hash(&TAG_INVALID_SPAN, hasher);
2891 return;
2892 };
2893
2894 if let Some(parent) = parent
2895 && file.contains(parent.lo)
2896 {
2897 Hash::hash(&TAG_RELATIVE_SPAN, hasher);
2900 Hash::hash(&(span.lo.0.wrapping_sub(parent.lo.0)), hasher);
2901 Hash::hash(&(span.hi.0.wrapping_sub(parent.lo.0)), hasher);
2902 return;
2903 }
2904
2905 Hash::hash(&TAG_VALID_SPAN, hasher);
2906 Hash::hash(&file.stable_id, hasher);
2907
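        // Pack the (truncated) line/column information into a single u64 so that
        // hashing a span only costs a few u64 hashes: `col_lo` keeps 8 bits and
        // the line numbers keep 24 bits each. Note that in `col_hi_trunc` the
        // shift binds tighter than `&`, so the mask is `0xFF << 32` rather than
        // an 8-bit truncation of `col_hi`.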
2908 let col_lo_trunc = (col_lo.0 as u64) & 0xFF;
2918 let line_lo_trunc = ((line_lo as u64) & 0xFF_FF_FF) << 8;
2919 let col_hi_trunc = (col_hi.0 as u64) & 0xFF << 32;
2920 let line_hi_trunc = ((line_hi as u64) & 0xFF_FF_FF) << 40;
2921 let col_line = col_lo_trunc | line_lo_trunc | col_hi_trunc | line_hi_trunc;
2922 let len = (span.hi - span.lo).0;
2923 Hash::hash(&col_line, hasher);
2924 Hash::hash(&len, hasher);
2925 }
2926}
2927
2928#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)]
2934#[derive(HashStable_Generic)]
2935pub struct ErrorGuaranteed(());
2936
2937impl ErrorGuaranteed {
2938 #[deprecated = "should only be used in `DiagCtxtInner::emit_diagnostic`"]
2940 pub fn unchecked_error_guaranteed() -> Self {
2941 ErrorGuaranteed(())
2942 }
2943
2944 pub fn raise_fatal(self) -> ! {
2945 FatalError.raise()
2946 }
2947}
2948
2949impl<E: rustc_serialize::Encoder> Encodable<E> for ErrorGuaranteed {
2950 #[inline]
2951 fn encode(&self, _e: &mut E) {
2952 panic!(
2953 "should never serialize an `ErrorGuaranteed`, as we do not write metadata or \
2954 incremental caches in case errors occurred"
2955 )
2956 }
2957}
2958impl<D: rustc_serialize::Decoder> Decodable<D> for ErrorGuaranteed {
2959 #[inline]
2960 fn decode(_d: &mut D) -> ErrorGuaranteed {
2961 panic!(
2962 "`ErrorGuaranteed` should never have been serialized to metadata or incremental caches"
2963 )
2964 }
2965}