#![allow(internal_features)]
#![cfg_attr(target_arch = "loongarch64", feature(stdarch_loongarch))]
#![feature(cfg_select)]
#![feature(core_io_borrowed_buf)]
#![feature(if_let_guard)]
#![feature(map_try_insert)]
#![feature(negative_impls)]
#![feature(read_buf)]
#![feature(rustc_attrs)]
extern crate self as rustc_span;

use derive_where::derive_where;
use rustc_data_structures::{AtomicRef, outline};
use rustc_macros::{Decodable, Encodable, HashStable_Generic};
use rustc_serialize::opaque::{FileEncoder, MemDecoder};
use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
use tracing::debug;
pub use unicode_width::UNICODE_VERSION;

mod caching_source_map_view;
pub mod source_map;
use source_map::{SourceMap, SourceMapInputs};

pub use self::caching_source_map_view::CachingSourceMapView;
use crate::fatal_error::FatalError;

pub mod edition;
use edition::Edition;
pub mod hygiene;
use hygiene::Transparency;
pub use hygiene::{
    DesugaringKind, ExpnData, ExpnHash, ExpnId, ExpnKind, LocalExpnId, MacroKind, SyntaxContext,
};
pub mod def_id;
use def_id::{CrateNum, DefId, DefIndex, DefPathHash, LOCAL_CRATE, LocalDefId, StableCrateId};
pub mod edit_distance;
mod span_encoding;
pub use span_encoding::{DUMMY_SP, Span};

pub mod symbol;
pub use symbol::{
    ByteSymbol, Ident, MacroRulesNormalizedIdent, STDLIB_STABLE_CRATES, Symbol, kw, sym,
};

mod analyze_source_file;
pub mod fatal_error;

pub mod profiling;

use std::borrow::Cow;
use std::cmp::{self, Ordering};
use std::fmt::Display;
use std::hash::Hash;
use std::io::{self, Read};
use std::ops::{Add, Range, Sub};
use std::path::{Path, PathBuf};
use std::str::FromStr;
use std::sync::Arc;
use std::{fmt, iter};

use md5::{Digest, Md5};
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::sync::{FreezeLock, FreezeWriteGuard, Lock};
use rustc_data_structures::unord::UnordMap;
use rustc_hashes::{Hash64, Hash128};
use sha1::Sha1;
use sha2::Sha256;

#[cfg(test)]
mod tests;

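/// Per-session global state used throughout span handling: the symbol and
/// span interners, hygiene data, metavariable spans, and (optionally) the
/// session's `SourceMap`. Installed in scoped thread-local storage via
/// `SESSION_GLOBALS` below and accessed through `with_session_globals`.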
pub struct SessionGlobals {
    symbol_interner: symbol::Interner,
    span_interner: Lock<span_encoding::SpanInterner>,
    metavar_spans: MetavarSpansMap,
    hygiene_data: Lock<hygiene::HygieneData>,

    source_map: Option<Arc<SourceMap>>,
}

impl SessionGlobals {
    pub fn new(
        edition: Edition,
        extra_symbols: &[&'static str],
        sm_inputs: Option<SourceMapInputs>,
    ) -> SessionGlobals {
        SessionGlobals {
            symbol_interner: symbol::Interner::with_extra_symbols(extra_symbols),
            span_interner: Lock::new(span_encoding::SpanInterner::default()),
            metavar_spans: Default::default(),
            hygiene_data: Lock::new(hygiene::HygieneData::new(edition)),
            source_map: sm_inputs.map(|inputs| Arc::new(SourceMap::with_inputs(inputs))),
        }
    }
}

pub fn create_session_globals_then<R>(
    edition: Edition,
    extra_symbols: &[&'static str],
    sm_inputs: Option<SourceMapInputs>,
    f: impl FnOnce() -> R,
) -> R {
    assert!(
        !SESSION_GLOBALS.is_set(),
        "SESSION_GLOBALS should never be overwritten! \
        Use another thread if you need another SessionGlobals"
    );
    let session_globals = SessionGlobals::new(edition, extra_symbols, sm_inputs);
    SESSION_GLOBALS.set(&session_globals, f)
}

pub fn set_session_globals_then<R>(session_globals: &SessionGlobals, f: impl FnOnce() -> R) -> R {
    assert!(
        !SESSION_GLOBALS.is_set(),
        "SESSION_GLOBALS should never be overwritten! \
        Use another thread if you need another SessionGlobals"
    );
    SESSION_GLOBALS.set(session_globals, f)
}

pub fn create_session_if_not_set_then<R, F>(edition: Edition, f: F) -> R
where
    F: FnOnce(&SessionGlobals) -> R,
{
    if !SESSION_GLOBALS.is_set() {
        let session_globals = SessionGlobals::new(edition, &[], None);
        SESSION_GLOBALS.set(&session_globals, || SESSION_GLOBALS.with(f))
    } else {
        SESSION_GLOBALS.with(f)
    }
}

#[inline]
pub fn with_session_globals<R, F>(f: F) -> R
where
    F: FnOnce(&SessionGlobals) -> R,
{
    SESSION_GLOBALS.with(f)
}

pub fn create_default_session_globals_then<R>(f: impl FnOnce() -> R) -> R {
    create_session_globals_then(edition::DEFAULT_EDITION, &[], None, f)
}
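
// Typical usage (hypothetical test snippet): code that needs interned symbols
// or spans outside a full compiler session can wrap its body in
// `create_default_session_globals_then`, e.g.
//
//     create_default_session_globals_then(|| {
//         let sym = Symbol::intern("example");
//         assert_eq!(sym.as_str(), "example");
//     });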

scoped_tls::scoped_thread_local!(static SESSION_GLOBALS: SessionGlobals);

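/// Map used for macro metavariable spans: each key span is associated with
/// another span plus a flag tracking whether the entry has been read
/// (see `insert`, `get`, and `freeze_and_get_read_spans` below).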
#[derive(Default)]
pub struct MetavarSpansMap(FreezeLock<UnordMap<Span, (Span, bool)>>);

impl MetavarSpansMap {
    /// Records `span -> var_span`; returns `true` if the entry was newly
    /// inserted or was already mapped to the same `var_span`.
    pub fn insert(&self, span: Span, var_span: Span) -> bool {
        match self.0.write().try_insert(span, (var_span, false)) {
            Ok(_) => true,
            Err(entry) => entry.entry.get().0 == var_span,
        }
    }

    /// Looks up the span associated with `span`, marking the entry as read
    /// when the map is still writable.
    pub fn get(&self, span: Span) -> Option<Span> {
        if let Some(mut mspans) = self.0.try_write() {
            if let Some((var_span, read)) = mspans.get_mut(&span) {
                *read = true;
                Some(*var_span)
            } else {
                None
            }
        } else {
            if let Some((span, true)) = self.0.read().get(&span) { Some(*span) } else { None }
        }
    }

    /// Freezes the map and returns the entries whose read flag was set.
    pub fn freeze_and_get_read_spans(&self) -> UnordMap<Span, Span> {
        self.0.freeze().items().filter(|(_, (_, b))| *b).map(|(s1, (s2, _))| (*s1, *s2)).collect()
    }
}

#[inline]
pub fn with_metavar_spans<R>(f: impl FnOnce(&MetavarSpansMap) -> R) -> R {
    with_session_globals(|session_globals| f(&session_globals.metavar_spans))
}

bitflags::bitflags! {
    /// Scopes used to determine whether `--remap-path-prefix` needs to be applied.
    #[derive(Debug, Eq, PartialEq, Clone, Copy, Ord, PartialOrd, Hash)]
    pub struct RemapPathScopeComponents: u8 {
        /// Apply remappings to the expansion of the `std::file!()` macro
        const MACRO = 1 << 0;
        /// Apply remappings to printed compiler diagnostics
        const DIAGNOSTICS = 1 << 1;
        /// Apply remappings to debug information
        const DEBUGINFO = 1 << 3;
        /// Apply remappings to coverage information
        const COVERAGE = 1 << 4;
        /// Apply remappings to documentation information
        const DOCUMENTATION = 1 << 5;

        /// An alias for `macro`, `debuginfo` and `coverage`. This ensures all paths in compiled
        /// executables, libraries and objects are remapped but not elsewhere.
        const OBJECT = Self::MACRO.bits() | Self::DEBUGINFO.bits() | Self::COVERAGE.bits();
    }
}

impl<E: Encoder> Encodable<E> for RemapPathScopeComponents {
    #[inline]
    fn encode(&self, s: &mut E) {
        s.emit_u8(self.bits());
    }
}

impl<D: Decoder> Decodable<D> for RemapPathScopeComponents {
    #[inline]
    fn decode(s: &mut D) -> RemapPathScopeComponents {
        RemapPathScopeComponents::from_bits(s.read_u8())
            .expect("invalid bits for RemapPathScopeComponents")
    }
}
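
// Usage sketch (hypothetical): scopes compose with the usual bitflags
// operators, so a caller that wants object-file paths and diagnostics
// remapped would pass
//
//     RemapPathScopeComponents::OBJECT | RemapPathScopeComponents::DIAGNOSTICS
//
// while `RealFileName::path` below expects exactly one scope bit to be set.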

/// A real file path, together with its possibly remapped form and the set of
/// remap scopes that the remapping applies to.
#[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Decodable, Encodable)]
pub struct RealFileName {
    /// The original (unremapped) path, if it is still available.
    local: Option<InnerRealFileName>,
    /// The path with any `--remap-path-prefix` remapping applied; used for the
    /// scopes recorded in `scopes`.
    maybe_remapped: InnerRealFileName,
    /// The scopes for which `maybe_remapped` (rather than `local`) must be used.
    scopes: RemapPathScopeComponents,
}

/// The path components tracked for each form (local and possibly-remapped) of
/// a `RealFileName`: the path itself, the associated working directory, and
/// the variant of the path handed out by `embeddable_name`.
#[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Decodable, Encodable, Hash)]
struct InnerRealFileName {
    name: PathBuf,
    working_directory: PathBuf,
    embeddable_name: PathBuf,
}

impl Hash for RealFileName {
    #[inline]
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        if !self.was_fully_remapped() {
            self.local.hash(state);
        }
        self.maybe_remapped.hash(state);
        self.scopes.bits().hash(state);
    }
}

impl RealFileName {
    #[inline]
    pub fn path(&self, scope: RemapPathScopeComponents) -> &Path {
        assert!(
            scope.bits().count_ones() == 1,
            "one and only one scope should be passed to `RealFileName::path`: {scope:?}"
        );
        if !self.scopes.contains(scope)
            && let Some(local_name) = &self.local
        {
            local_name.name.as_path()
        } else {
            self.maybe_remapped.name.as_path()
        }
    }

    #[inline]
    pub fn embeddable_name(&self, scope: RemapPathScopeComponents) -> (&Path, &Path) {
        assert!(
            scope.bits().count_ones() == 1,
            "one and only one scope should be passed to `RealFileName::embeddable_path`: {scope:?}"
        );
        if !self.scopes.contains(scope)
            && let Some(local_name) = &self.local
        {
            (&local_name.working_directory, &local_name.embeddable_name)
        } else {
            (&self.maybe_remapped.working_directory, &self.maybe_remapped.embeddable_name)
        }
    }

    #[inline]
    pub fn local_path(&self) -> Option<&Path> {
        if self.was_not_remapped() {
            Some(&self.maybe_remapped.name)
        } else if let Some(local) = &self.local {
            Some(&local.name)
        } else {
            None
        }
    }

    #[inline]
    pub fn into_local_path(self) -> Option<PathBuf> {
        if self.was_not_remapped() {
            Some(self.maybe_remapped.name)
        } else if let Some(local) = self.local {
            Some(local.name)
        } else {
            None
        }
    }

    #[inline]
    pub(crate) fn was_remapped(&self) -> bool {
        !self.scopes.is_empty()
    }

    #[inline]
    fn was_fully_remapped(&self) -> bool {
        self.scopes.is_all()
    }

    #[inline]
    fn was_not_remapped(&self) -> bool {
        self.scopes.is_empty()
    }

    #[inline]
    pub fn empty() -> RealFileName {
        RealFileName {
            local: Some(InnerRealFileName {
                name: PathBuf::new(),
                working_directory: PathBuf::new(),
                embeddable_name: PathBuf::new(),
            }),
            maybe_remapped: InnerRealFileName {
                name: PathBuf::new(),
                working_directory: PathBuf::new(),
                embeddable_name: PathBuf::new(),
            },
            scopes: RemapPathScopeComponents::empty(),
        }
    }

    pub fn from_virtual_path(path: &Path) -> RealFileName {
        let name = InnerRealFileName {
            name: path.to_owned(),
            embeddable_name: path.to_owned(),
            working_directory: PathBuf::new(),
        };
        RealFileName { local: None, maybe_remapped: name, scopes: RemapPathScopeComponents::all() }
    }

    #[inline]
    pub fn update_for_crate_metadata(&mut self) {
        if self.was_fully_remapped() || self.was_not_remapped() {
            self.local = None;
        }
    }

    fn to_string_lossy<'a>(&'a self, display_pref: FileNameDisplayPreference) -> Cow<'a, str> {
        match display_pref {
            FileNameDisplayPreference::Remapped => self.maybe_remapped.name.to_string_lossy(),
            FileNameDisplayPreference::Local => {
                self.local.as_ref().unwrap_or(&self.maybe_remapped).name.to_string_lossy()
            }
            FileNameDisplayPreference::Short => self
                .maybe_remapped
                .name
                .file_name()
                .map_or_else(|| "".into(), |f| f.to_string_lossy()),
            FileNameDisplayPreference::Scope(scope) => self.path(scope).to_string_lossy(),
        }
    }
}

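/// Differentiates between real files and the various "virtual" sources the
/// compiler tracks (macro expansions, `--cfg` specs, proc-macro and inline-asm
/// sources, CLI crate attributes, doctests, custom names). Most virtual
/// variants carry a hash of the source text; `Custom` carries a name and
/// `DocTest` a path plus a line offset.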
#[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Hash, Decodable, Encodable)]
pub enum FileName {
    Real(RealFileName),
    /// Source text supplied via `--cfg` on the command line.
    CfgSpec(Hash64),
    /// Anonymous source text, identified only by a hash of its contents.
    Anon(Hash64),
    /// Source text produced by a macro expansion.
    MacroExpansion(Hash64),
    /// Source text of a procedural macro.
    ProcMacroSourceCode(Hash64),
    /// Crate attributes injected from the command line.
    CliCrateAttr(Hash64),
    /// A custom, caller-provided name.
    Custom(String),
    /// A doctest, identified by its path and a line offset.
    DocTest(PathBuf, isize),
    /// Source text of an inline assembly block.
    InlineAsm(Hash64),
}

/// Helper returned by `FileName::display`, `prefer_*`, and `short`; implements
/// `Display` according to the chosen preference.
pub struct FileNameDisplay<'a> {
    inner: &'a FileName,
    display_pref: FileNameDisplayPreference,
}

#[derive(Clone, Copy)]
enum FileNameDisplayPreference {
    /// Always show the remapped form of the path.
    Remapped,
    /// Prefer the local (unremapped) path when one is available.
    Local,
    /// Show only the file name component.
    Short,
    /// Show the path appropriate for the given remap scope.
    Scope(RemapPathScopeComponents),
}

impl fmt::Display for FileNameDisplay<'_> {
    fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        use FileName::*;
        match *self.inner {
            Real(ref name) => {
                write!(fmt, "{}", name.to_string_lossy(self.display_pref))
            }
            CfgSpec(_) => write!(fmt, "<cfgspec>"),
            MacroExpansion(_) => write!(fmt, "<macro expansion>"),
            Anon(_) => write!(fmt, "<anon>"),
            ProcMacroSourceCode(_) => write!(fmt, "<proc-macro source code>"),
            CliCrateAttr(_) => write!(fmt, "<crate attribute>"),
            Custom(ref s) => write!(fmt, "<{s}>"),
            DocTest(ref path, _) => write!(fmt, "{}", path.display()),
            InlineAsm(_) => write!(fmt, "<inline asm>"),
        }
    }
}

impl<'a> FileNameDisplay<'a> {
    pub fn to_string_lossy(&self) -> Cow<'a, str> {
        match self.inner {
            FileName::Real(inner) => inner.to_string_lossy(self.display_pref),
            _ => Cow::from(self.to_string()),
        }
    }
}

impl FileName {
    pub fn is_real(&self) -> bool {
        use FileName::*;
        match *self {
            Real(_) => true,
            Anon(_)
            | MacroExpansion(_)
            | ProcMacroSourceCode(_)
            | CliCrateAttr(_)
            | Custom(_)
            | CfgSpec(_)
            | DocTest(_, _)
            | InlineAsm(_) => false,
        }
    }

    #[inline]
    pub fn prefer_remapped_unconditionally(&self) -> FileNameDisplay<'_> {
        FileNameDisplay { inner: self, display_pref: FileNameDisplayPreference::Remapped }
    }

    #[inline]
    pub fn prefer_local_unconditionally(&self) -> FileNameDisplay<'_> {
        FileNameDisplay { inner: self, display_pref: FileNameDisplayPreference::Local }
    }

    #[inline]
    pub fn short(&self) -> FileNameDisplay<'_> {
        FileNameDisplay { inner: self, display_pref: FileNameDisplayPreference::Short }
    }

    #[inline]
    pub fn display(&self, scope: RemapPathScopeComponents) -> FileNameDisplay<'_> {
        FileNameDisplay { inner: self, display_pref: FileNameDisplayPreference::Scope(scope) }
    }

    pub fn macro_expansion_source_code(src: &str) -> FileName {
        let mut hasher = StableHasher::new();
        src.hash(&mut hasher);
        FileName::MacroExpansion(hasher.finish())
    }

    pub fn anon_source_code(src: &str) -> FileName {
        let mut hasher = StableHasher::new();
        src.hash(&mut hasher);
        FileName::Anon(hasher.finish())
    }

    pub fn proc_macro_source_code(src: &str) -> FileName {
        let mut hasher = StableHasher::new();
        src.hash(&mut hasher);
        FileName::ProcMacroSourceCode(hasher.finish())
    }

    pub fn cfg_spec_source_code(src: &str) -> FileName {
        let mut hasher = StableHasher::new();
        src.hash(&mut hasher);
        FileName::CfgSpec(hasher.finish())
    }

    pub fn cli_crate_attr_source_code(src: &str) -> FileName {
        let mut hasher = StableHasher::new();
        src.hash(&mut hasher);
        FileName::CliCrateAttr(hasher.finish())
    }

    pub fn doc_test_source_code(path: PathBuf, line: isize) -> FileName {
        FileName::DocTest(path, line)
    }

    pub fn inline_asm_source_code(src: &str) -> FileName {
        let mut hasher = StableHasher::new();
        src.hash(&mut hasher);
        FileName::InlineAsm(hasher.finish())
    }

    pub fn into_local_path(self) -> Option<PathBuf> {
        match self {
            FileName::Real(path) => path.into_local_path(),
            FileName::DocTest(path, _) => Some(path),
            _ => None,
        }
    }
}

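/// The fully-expanded representation of a `Span`: its start and end byte
/// positions, its `SyntaxContext`, and an optional parent item. Ordering
/// (`PartialOrd`/`Ord` via `derive_where`) deliberately considers only `lo`
/// and `hi`; `ctxt` and `parent` are skipped.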
#[derive(Clone, Copy, Hash, PartialEq, Eq)]
#[derive_where(PartialOrd, Ord)]
pub struct SpanData {
    pub lo: BytePos,
    pub hi: BytePos,
    #[derive_where(skip)]
    pub ctxt: SyntaxContext,
    #[derive_where(skip)]
    pub parent: Option<LocalDefId>,
}
684
685impl SpanData {
686 #[inline]
687 pub fn span(&self) -> Span {
688 Span::new(self.lo, self.hi, self.ctxt, self.parent)
689 }
690 #[inline]
691 pub fn with_lo(&self, lo: BytePos) -> Span {
692 Span::new(lo, self.hi, self.ctxt, self.parent)
693 }
694 #[inline]
695 pub fn with_hi(&self, hi: BytePos) -> Span {
696 Span::new(self.lo, hi, self.ctxt, self.parent)
697 }
698 #[inline]
700 fn with_ctxt(&self, ctxt: SyntaxContext) -> Span {
701 Span::new(self.lo, self.hi, ctxt, self.parent)
702 }
703 #[inline]
705 fn with_parent(&self, parent: Option<LocalDefId>) -> Span {
706 Span::new(self.lo, self.hi, self.ctxt, parent)
707 }
708 #[inline]
710 pub fn is_dummy(self) -> bool {
711 self.lo.0 == 0 && self.hi.0 == 0
712 }
713 pub fn contains(self, other: Self) -> bool {
715 self.lo <= other.lo && other.hi <= self.hi
716 }
717}
718
719impl Default for SpanData {
720 fn default() -> Self {
721 Self { lo: BytePos(0), hi: BytePos(0), ctxt: SyntaxContext::root(), parent: None }
722 }
723}
724
725impl PartialOrd for Span {
726 fn partial_cmp(&self, rhs: &Self) -> Option<Ordering> {
727 PartialOrd::partial_cmp(&self.data(), &rhs.data())
728 }
729}
730impl Ord for Span {
731 fn cmp(&self, rhs: &Self) -> Ordering {
732 Ord::cmp(&self.data(), &rhs.data())
733 }
734}
735
736impl Span {
737 #[inline]
738 pub fn lo(self) -> BytePos {
739 self.data().lo
740 }
741 #[inline]
742 pub fn with_lo(self, lo: BytePos) -> Span {
743 self.data().with_lo(lo)
744 }
745 #[inline]
746 pub fn hi(self) -> BytePos {
747 self.data().hi
748 }
749 #[inline]
750 pub fn with_hi(self, hi: BytePos) -> Span {
751 self.data().with_hi(hi)
752 }
753 #[inline]
754 pub fn with_ctxt(self, ctxt: SyntaxContext) -> Span {
755 self.map_ctxt(|_| ctxt)
756 }
757
758 #[inline]
759 pub fn is_visible(self, sm: &SourceMap) -> bool {
760 !self.is_dummy() && sm.is_span_accessible(self)
761 }
762
763 #[inline]
768 pub fn in_external_macro(self, sm: &SourceMap) -> bool {
769 self.ctxt().in_external_macro(sm)
770 }
771
772 pub fn in_derive_expansion(self) -> bool {
774 matches!(self.ctxt().outer_expn_data().kind, ExpnKind::Macro(MacroKind::Derive, _))
775 }
776
777 pub fn is_from_async_await(self) -> bool {
779 matches!(
780 self.ctxt().outer_expn_data().kind,
781 ExpnKind::Desugaring(DesugaringKind::Async | DesugaringKind::Await),
782 )
783 }
784
785 pub fn can_be_used_for_suggestions(self) -> bool {
787 !self.from_expansion()
788 || (self.in_derive_expansion()
792 && self.parent_callsite().map(|p| (p.lo(), p.hi())) != Some((self.lo(), self.hi())))
793 }
794
795 #[inline]
796 pub fn with_root_ctxt(lo: BytePos, hi: BytePos) -> Span {
797 Span::new(lo, hi, SyntaxContext::root(), None)
798 }
799
800 #[inline]
802 pub fn shrink_to_lo(self) -> Span {
803 let span = self.data_untracked();
804 span.with_hi(span.lo)
805 }
806 #[inline]
808 pub fn shrink_to_hi(self) -> Span {
809 let span = self.data_untracked();
810 span.with_lo(span.hi)
811 }
812
813 #[inline]
814 pub fn is_empty(self) -> bool {
816 let span = self.data_untracked();
817 span.hi == span.lo
818 }
819
820 pub fn substitute_dummy(self, other: Span) -> Span {
822 if self.is_dummy() { other } else { self }
823 }
824
825 pub fn contains(self, other: Span) -> bool {
827 let span = self.data();
828 let other = other.data();
829 span.contains(other)
830 }
831
832 pub fn overlaps(self, other: Span) -> bool {
834 let span = self.data();
835 let other = other.data();
836 span.lo < other.hi && other.lo < span.hi
837 }
838
839 pub fn overlaps_or_adjacent(self, other: Span) -> bool {
841 let span = self.data();
842 let other = other.data();
843 span.lo <= other.hi && other.lo <= span.hi
844 }
845
846 pub fn source_equal(self, other: Span) -> bool {
851 let span = self.data();
852 let other = other.data();
853 span.lo == other.lo && span.hi == other.hi
854 }
855
856 pub fn trim_start(self, other: Span) -> Option<Span> {
858 let span = self.data();
859 let other = other.data();
860 if span.hi > other.hi { Some(span.with_lo(cmp::max(span.lo, other.hi))) } else { None }
861 }
862
863 pub fn trim_end(self, other: Span) -> Option<Span> {
865 let span = self.data();
866 let other = other.data();
867 if span.lo < other.lo { Some(span.with_hi(cmp::min(span.hi, other.lo))) } else { None }
868 }
869
870 pub fn source_callsite(self) -> Span {
873 let ctxt = self.ctxt();
874 if !ctxt.is_root() { ctxt.outer_expn_data().call_site.source_callsite() } else { self }
875 }
876
877 pub fn parent_callsite(self) -> Option<Span> {
880 let ctxt = self.ctxt();
881 (!ctxt.is_root()).then(|| ctxt.outer_expn_data().call_site)
882 }
883
884 pub fn find_ancestor_inside(mut self, outer: Span) -> Option<Span> {
897 while !outer.contains(self) {
898 self = self.parent_callsite()?;
899 }
900 Some(self)
901 }
902
903 pub fn find_ancestor_in_same_ctxt(mut self, other: Span) -> Option<Span> {
916 while !self.eq_ctxt(other) {
917 self = self.parent_callsite()?;
918 }
919 Some(self)
920 }
921
922 pub fn find_ancestor_inside_same_ctxt(mut self, outer: Span) -> Option<Span> {
935 while !outer.contains(self) || !self.eq_ctxt(outer) {
936 self = self.parent_callsite()?;
937 }
938 Some(self)
939 }
940
941 pub fn find_ancestor_not_from_extern_macro(mut self, sm: &SourceMap) -> Option<Span> {
955 while self.in_external_macro(sm) {
956 self = self.parent_callsite()?;
957 }
958 Some(self)
959 }
960
961 pub fn find_ancestor_not_from_macro(mut self) -> Option<Span> {
974 while self.from_expansion() {
975 self = self.parent_callsite()?;
976 }
977 Some(self)
978 }
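// Illustrative sketch (not part of the original source): for code expanded from a
// macro such as `println!("{}", x)`, a span pointing inside the macro body is not
// suitable for suggestions; repeatedly taking `parent_callsite()` via
// `find_ancestor_not_from_macro()` recovers the span of the `println!(...)` call the
// user actually wrote, or `None` if every ancestor is itself macro-generated.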
979
980 pub fn edition(self) -> edition::Edition {
982 self.ctxt().edition()
983 }
984
985 #[inline]
987 pub fn is_rust_2015(self) -> bool {
988 self.edition().is_rust_2015()
989 }
990
991 #[inline]
993 pub fn at_least_rust_2018(self) -> bool {
994 self.edition().at_least_rust_2018()
995 }
996
997 #[inline]
999 pub fn at_least_rust_2021(self) -> bool {
1000 self.edition().at_least_rust_2021()
1001 }
1002
1003 #[inline]
1005 pub fn at_least_rust_2024(self) -> bool {
1006 self.edition().at_least_rust_2024()
1007 }
1008
1009 pub fn source_callee(self) -> Option<ExpnData> {
1015 let mut ctxt = self.ctxt();
1016 let mut opt_expn_data = None;
1017 while !ctxt.is_root() {
1018 let expn_data = ctxt.outer_expn_data();
1019 ctxt = expn_data.call_site.ctxt();
1020 opt_expn_data = Some(expn_data);
1021 }
1022 opt_expn_data
1023 }
1024
1025 pub fn allows_unstable(self, feature: Symbol) -> bool {
1029 self.ctxt()
1030 .outer_expn_data()
1031 .allow_internal_unstable
1032 .is_some_and(|features| features.contains(&feature))
1033 }
1034
1035 pub fn is_desugaring(self, kind: DesugaringKind) -> bool {
1037 match self.ctxt().outer_expn_data().kind {
1038 ExpnKind::Desugaring(k) => k == kind,
1039 _ => false,
1040 }
1041 }
1042
1043 pub fn desugaring_kind(self) -> Option<DesugaringKind> {
1046 match self.ctxt().outer_expn_data().kind {
1047 ExpnKind::Desugaring(k) => Some(k),
1048 _ => None,
1049 }
1050 }
1051
1052 pub fn allows_unsafe(self) -> bool {
1056 self.ctxt().outer_expn_data().allow_internal_unsafe
1057 }
1058
1059 pub fn macro_backtrace(mut self) -> impl Iterator<Item = ExpnData> {
1060 let mut prev_span = DUMMY_SP;
1061 iter::from_fn(move || {
1062 loop {
1063 let ctxt = self.ctxt();
1064 if ctxt.is_root() {
1065 return None;
1066 }
1067
1068 let expn_data = ctxt.outer_expn_data();
1069 let is_recursive = expn_data.call_site.source_equal(prev_span);
1070
1071 prev_span = self;
1072 self = expn_data.call_site;
1073
1074 if !is_recursive {
1076 return Some(expn_data);
1077 }
1078 }
1079 })
1080 }
1081
1082 pub fn split_at(self, pos: u32) -> (Span, Span) {
1084 let len = self.hi().0 - self.lo().0;
1085 debug_assert!(pos <= len);
1086
1087 let split_pos = BytePos(self.lo().0 + pos);
1088 (
1089 Span::new(self.lo(), split_pos, self.ctxt(), self.parent()),
1090 Span::new(split_pos, self.hi(), self.ctxt(), self.parent()),
1091 )
1092 }
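// Illustrative sketch (not part of the original source): assuming a span covering the
// 7-byte snippet `foo=bar` starting at BytePos(10),
//
//     let (lhs, rhs) = span.split_at(3);
//     // lhs covers `foo`  (BytePos(10)..BytePos(13))
//     // rhs covers `=bar` (BytePos(13)..BytePos(17))
//
// both halves keep the same `ctxt` and `parent`.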
1093
1094 fn try_metavars(a: SpanData, b: SpanData, a_orig: Span, b_orig: Span) -> (SpanData, SpanData) {
1096 match with_metavar_spans(|mspans| (mspans.get(a_orig), mspans.get(b_orig))) {
1097 (None, None) => {}
1098 (Some(meta_a), None) => {
1099 let meta_a = meta_a.data();
1100 if meta_a.ctxt == b.ctxt {
1101 return (meta_a, b);
1102 }
1103 }
1104 (None, Some(meta_b)) => {
1105 let meta_b = meta_b.data();
1106 if a.ctxt == meta_b.ctxt {
1107 return (a, meta_b);
1108 }
1109 }
1110 (Some(meta_a), Some(meta_b)) => {
1111 let meta_b = meta_b.data();
1112 if a.ctxt == meta_b.ctxt {
1113 return (a, meta_b);
1114 }
1115 let meta_a = meta_a.data();
1116 if meta_a.ctxt == b.ctxt {
1117 return (meta_a, b);
1118 } else if meta_a.ctxt == meta_b.ctxt {
1119 return (meta_a, meta_b);
1120 }
1121 }
1122 }
1123
1124 (a, b)
1125 }
1126
1127 fn prepare_to_combine(
1129 a_orig: Span,
1130 b_orig: Span,
1131 ) -> Result<(SpanData, SpanData, Option<LocalDefId>), Span> {
1132 let (a, b) = (a_orig.data(), b_orig.data());
1133 if a.ctxt == b.ctxt {
1134 return Ok((a, b, if a.parent == b.parent { a.parent } else { None }));
1135 }
1136
1137 let (a, b) = Span::try_metavars(a, b, a_orig, b_orig);
1138 if a.ctxt == b.ctxt {
1139 return Ok((a, b, if a.parent == b.parent { a.parent } else { None }));
1140 }
1141
1142 let a_is_callsite = a.ctxt.is_root() || a.ctxt == b.span().source_callsite().ctxt();
1150 Err(if a_is_callsite { b_orig } else { a_orig })
1151 }
1152
1153 pub fn with_neighbor(self, neighbor: Span) -> Span {
1155 match Span::prepare_to_combine(self, neighbor) {
1156 Ok((this, ..)) => this.span(),
1157 Err(_) => self,
1158 }
1159 }
1160
1161 pub fn to(self, end: Span) -> Span {
1172 match Span::prepare_to_combine(self, end) {
1173 Ok((from, to, parent)) => {
1174 Span::new(cmp::min(from.lo, to.lo), cmp::max(from.hi, to.hi), from.ctxt, parent)
1175 }
1176 Err(fallback) => fallback,
1177 }
1178 }
1179
1180 pub fn between(self, end: Span) -> Span {
1188 match Span::prepare_to_combine(self, end) {
1189 Ok((from, to, parent)) => {
1190 Span::new(cmp::min(from.hi, to.hi), cmp::max(from.lo, to.lo), from.ctxt, parent)
1191 }
1192 Err(fallback) => fallback,
1193 }
1194 }
1195
1196 pub fn until(self, end: Span) -> Span {
1204 match Span::prepare_to_combine(self, end) {
1205 Ok((from, to, parent)) => {
1206 Span::new(cmp::min(from.lo, to.lo), cmp::max(from.lo, to.lo), from.ctxt, parent)
1207 }
1208 Err(fallback) => fallback,
1209 }
1210 }
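// Illustrative sketch (not part of the original source): given two spans over
// `let x = 42;` where `self` covers `let` and `end` covers `42`, and assuming both
// share the same syntax context,
//
//     self.to(end)       // covers `let x = 42`  (union of both spans)
//     self.between(end)  // covers ` x = `       (the gap between them)
//     self.until(end)    // covers `let x = `    (from self's start to end's start)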
1211
1212 pub fn within_macro(self, within: Span, sm: &SourceMap) -> Option<Span> {
1227 match Span::prepare_to_combine(self, within) {
1228 Ok((self_, _, parent))
1235 if self_.hi < self.lo() || self.hi() < self_.lo && !sm.is_imported(within) =>
1236 {
1237 Some(Span::new(self_.lo, self_.hi, self_.ctxt, parent))
1238 }
1239 _ => None,
1240 }
1241 }
1242
1243 pub fn from_inner(self, inner: InnerSpan) -> Span {
1244 let span = self.data();
1245 Span::new(
1246 span.lo + BytePos::from_usize(inner.start),
1247 span.lo + BytePos::from_usize(inner.end),
1248 span.ctxt,
1249 span.parent,
1250 )
1251 }
1252
1253 pub fn with_def_site_ctxt(self, expn_id: ExpnId) -> Span {
1256 self.with_ctxt_from_mark(expn_id, Transparency::Opaque)
1257 }
1258
1259 pub fn with_call_site_ctxt(self, expn_id: ExpnId) -> Span {
1262 self.with_ctxt_from_mark(expn_id, Transparency::Transparent)
1263 }
1264
1265 pub fn with_mixed_site_ctxt(self, expn_id: ExpnId) -> Span {
1268 self.with_ctxt_from_mark(expn_id, Transparency::SemiOpaque)
1269 }
1270
1271 fn with_ctxt_from_mark(self, expn_id: ExpnId, transparency: Transparency) -> Span {
1275 self.with_ctxt(SyntaxContext::root().apply_mark(expn_id, transparency))
1276 }
1277
1278 #[inline]
1279 pub fn apply_mark(self, expn_id: ExpnId, transparency: Transparency) -> Span {
1280 self.map_ctxt(|ctxt| ctxt.apply_mark(expn_id, transparency))
1281 }
1282
1283 #[inline]
1284 pub fn remove_mark(&mut self) -> ExpnId {
1285 let mut mark = ExpnId::root();
1286 *self = self.map_ctxt(|mut ctxt| {
1287 mark = ctxt.remove_mark();
1288 ctxt
1289 });
1290 mark
1291 }
1292
1293 #[inline]
1294 pub fn adjust(&mut self, expn_id: ExpnId) -> Option<ExpnId> {
1295 let mut mark = None;
1296 *self = self.map_ctxt(|mut ctxt| {
1297 mark = ctxt.adjust(expn_id);
1298 ctxt
1299 });
1300 mark
1301 }
1302
1303 #[inline]
1304 pub fn normalize_to_macros_2_0_and_adjust(&mut self, expn_id: ExpnId) -> Option<ExpnId> {
1305 let mut mark = None;
1306 *self = self.map_ctxt(|mut ctxt| {
1307 mark = ctxt.normalize_to_macros_2_0_and_adjust(expn_id);
1308 ctxt
1309 });
1310 mark
1311 }
1312
1313 #[inline]
1314 pub fn normalize_to_macros_2_0(self) -> Span {
1315 self.map_ctxt(|ctxt| ctxt.normalize_to_macros_2_0())
1316 }
1317
1318 #[inline]
1319 pub fn normalize_to_macro_rules(self) -> Span {
1320 self.map_ctxt(|ctxt| ctxt.normalize_to_macro_rules())
1321 }
1322}
1323
1324impl Default for Span {
1325 fn default() -> Self {
1326 DUMMY_SP
1327 }
1328}
1329
1330rustc_index::newtype_index! {
1331 #[orderable]
1332 #[debug_format = "AttrId({})"]
1333 pub struct AttrId {}
1334}
1335
1336pub trait SpanEncoder: Encoder {
1339 fn encode_span(&mut self, span: Span);
1340 fn encode_symbol(&mut self, sym: Symbol);
1341 fn encode_byte_symbol(&mut self, byte_sym: ByteSymbol);
1342 fn encode_expn_id(&mut self, expn_id: ExpnId);
1343 fn encode_syntax_context(&mut self, syntax_context: SyntaxContext);
1344 fn encode_crate_num(&mut self, crate_num: CrateNum);
1347 fn encode_def_index(&mut self, def_index: DefIndex);
1348 fn encode_def_id(&mut self, def_id: DefId);
1349}
1350
1351impl SpanEncoder for FileEncoder {
1352 fn encode_span(&mut self, span: Span) {
1353 let span = span.data();
1354 span.lo.encode(self);
1355 span.hi.encode(self);
1356 }
1357
1358 fn encode_symbol(&mut self, sym: Symbol) {
1359 self.emit_str(sym.as_str());
1360 }
1361
1362 fn encode_byte_symbol(&mut self, byte_sym: ByteSymbol) {
1363 self.emit_byte_str(byte_sym.as_byte_str());
1364 }
1365
1366 fn encode_expn_id(&mut self, _expn_id: ExpnId) {
1367 panic!("cannot encode `ExpnId` with `FileEncoder`");
1368 }
1369
1370 fn encode_syntax_context(&mut self, _syntax_context: SyntaxContext) {
1371 panic!("cannot encode `SyntaxContext` with `FileEncoder`");
1372 }
1373
1374 fn encode_crate_num(&mut self, crate_num: CrateNum) {
1375 self.emit_u32(crate_num.as_u32());
1376 }
1377
1378 fn encode_def_index(&mut self, _def_index: DefIndex) {
1379 panic!("cannot encode `DefIndex` with `FileEncoder`");
1380 }
1381
1382 fn encode_def_id(&mut self, def_id: DefId) {
1383 def_id.krate.encode(self);
1384 def_id.index.encode(self);
1385 }
1386}
1387
1388impl<E: SpanEncoder> Encodable<E> for Span {
1389 fn encode(&self, s: &mut E) {
1390 s.encode_span(*self);
1391 }
1392}
1393
1394impl<E: SpanEncoder> Encodable<E> for Symbol {
1395 fn encode(&self, s: &mut E) {
1396 s.encode_symbol(*self);
1397 }
1398}
1399
1400impl<E: SpanEncoder> Encodable<E> for ByteSymbol {
1401 fn encode(&self, s: &mut E) {
1402 s.encode_byte_symbol(*self);
1403 }
1404}
1405
1406impl<E: SpanEncoder> Encodable<E> for ExpnId {
1407 fn encode(&self, s: &mut E) {
1408 s.encode_expn_id(*self)
1409 }
1410}
1411
1412impl<E: SpanEncoder> Encodable<E> for SyntaxContext {
1413 fn encode(&self, s: &mut E) {
1414 s.encode_syntax_context(*self)
1415 }
1416}
1417
1418impl<E: SpanEncoder> Encodable<E> for CrateNum {
1419 fn encode(&self, s: &mut E) {
1420 s.encode_crate_num(*self)
1421 }
1422}
1423
1424impl<E: SpanEncoder> Encodable<E> for DefIndex {
1425 fn encode(&self, s: &mut E) {
1426 s.encode_def_index(*self)
1427 }
1428}
1429
1430impl<E: SpanEncoder> Encodable<E> for DefId {
1431 fn encode(&self, s: &mut E) {
1432 s.encode_def_id(*self)
1433 }
1434}
1435
1436impl<E: SpanEncoder> Encodable<E> for AttrId {
1437 fn encode(&self, _s: &mut E) {
// Intentionally a no-op: `AttrId`s are not serialized; a fresh id is produced
// on the decoding side via `SpanDecoder::decode_attr_id`.
1438 }
1440}
1441
1442pub trait BlobDecoder: Decoder {
1443 fn decode_symbol(&mut self) -> Symbol;
1444 fn decode_byte_symbol(&mut self) -> ByteSymbol;
1445 fn decode_def_index(&mut self) -> DefIndex;
1446}
1447
1448pub trait SpanDecoder: BlobDecoder {
1465 fn decode_span(&mut self) -> Span;
1466 fn decode_expn_id(&mut self) -> ExpnId;
1467 fn decode_syntax_context(&mut self) -> SyntaxContext;
1468 fn decode_crate_num(&mut self) -> CrateNum;
1469 fn decode_def_id(&mut self) -> DefId;
1470 fn decode_attr_id(&mut self) -> AttrId;
1471}
1472
1473impl BlobDecoder for MemDecoder<'_> {
1474 fn decode_symbol(&mut self) -> Symbol {
1475 Symbol::intern(self.read_str())
1476 }
1477
1478 fn decode_byte_symbol(&mut self) -> ByteSymbol {
1479 ByteSymbol::intern(self.read_byte_str())
1480 }
1481
1482 fn decode_def_index(&mut self) -> DefIndex {
1483 panic!("cannot decode `DefIndex` with `MemDecoder`");
1484 }
1485}
1486
1487impl SpanDecoder for MemDecoder<'_> {
1488 fn decode_span(&mut self) -> Span {
1489 let lo = Decodable::decode(self);
1490 let hi = Decodable::decode(self);
1491
1492 Span::new(lo, hi, SyntaxContext::root(), None)
1493 }
1494
1495 fn decode_expn_id(&mut self) -> ExpnId {
1496 panic!("cannot decode `ExpnId` with `MemDecoder`");
1497 }
1498
1499 fn decode_syntax_context(&mut self) -> SyntaxContext {
1500 panic!("cannot decode `SyntaxContext` with `MemDecoder`");
1501 }
1502
1503 fn decode_crate_num(&mut self) -> CrateNum {
1504 CrateNum::from_u32(self.read_u32())
1505 }
1506
1507 fn decode_def_id(&mut self) -> DefId {
1508 DefId { krate: Decodable::decode(self), index: Decodable::decode(self) }
1509 }
1510
1511 fn decode_attr_id(&mut self) -> AttrId {
1512 panic!("cannot decode `AttrId` with `MemDecoder`");
1513 }
1514}
1515
1516impl<D: SpanDecoder> Decodable<D> for Span {
1517 fn decode(s: &mut D) -> Span {
1518 s.decode_span()
1519 }
1520}
1521
1522impl<D: BlobDecoder> Decodable<D> for Symbol {
1523 fn decode(s: &mut D) -> Symbol {
1524 s.decode_symbol()
1525 }
1526}
1527
1528impl<D: BlobDecoder> Decodable<D> for ByteSymbol {
1529 fn decode(s: &mut D) -> ByteSymbol {
1530 s.decode_byte_symbol()
1531 }
1532}
1533
1534impl<D: SpanDecoder> Decodable<D> for ExpnId {
1535 fn decode(s: &mut D) -> ExpnId {
1536 s.decode_expn_id()
1537 }
1538}
1539
1540impl<D: SpanDecoder> Decodable<D> for SyntaxContext {
1541 fn decode(s: &mut D) -> SyntaxContext {
1542 s.decode_syntax_context()
1543 }
1544}
1545
1546impl<D: SpanDecoder> Decodable<D> for CrateNum {
1547 fn decode(s: &mut D) -> CrateNum {
1548 s.decode_crate_num()
1549 }
1550}
1551
1552impl<D: BlobDecoder> Decodable<D> for DefIndex {
1553 fn decode(s: &mut D) -> DefIndex {
1554 s.decode_def_index()
1555 }
1556}
1557
1558impl<D: SpanDecoder> Decodable<D> for DefId {
1559 fn decode(s: &mut D) -> DefId {
1560 s.decode_def_id()
1561 }
1562}
1563
1564impl<D: SpanDecoder> Decodable<D> for AttrId {
1565 fn decode(s: &mut D) -> AttrId {
1566 s.decode_attr_id()
1567 }
1568}
1569
1570impl fmt::Debug for Span {
1571 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1572 fn fallback(span: Span, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1576 f.debug_struct("Span")
1577 .field("lo", &span.lo())
1578 .field("hi", &span.hi())
1579 .field("ctxt", &span.ctxt())
1580 .finish()
1581 }
1582
1583 if SESSION_GLOBALS.is_set() {
1584 with_session_globals(|session_globals| {
1585 if let Some(source_map) = &session_globals.source_map {
1586 write!(f, "{} ({:?})", source_map.span_to_diagnostic_string(*self), self.ctxt())
1587 } else {
1588 fallback(*self, f)
1589 }
1590 })
1591 } else {
1592 fallback(*self, f)
1593 }
1594 }
1595}
1596
1597impl fmt::Debug for SpanData {
1598 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1599 fmt::Debug::fmt(&self.span(), f)
1600 }
1601}
1602
1603#[derive(Copy, Clone, Encodable, Decodable, Eq, PartialEq, Debug, HashStable_Generic)]
1605pub struct MultiByteChar {
1606 pub pos: RelativeBytePos,
1608 pub bytes: u8,
1610}
1611
1612#[derive(Copy, Clone, Encodable, Decodable, Eq, PartialEq, Debug, HashStable_Generic)]
1614pub struct NormalizedPos {
1615 pub pos: RelativeBytePos,
1617 pub diff: u32,
1619}
1620
1621#[derive(PartialEq, Eq, Clone, Debug)]
1622pub enum ExternalSource {
1623 Unneeded,
1625 Foreign {
1626 kind: ExternalSourceKind,
1627 metadata_index: u32,
1629 },
1630}
1631
1632#[derive(PartialEq, Eq, Clone, Debug)]
1634pub enum ExternalSourceKind {
1635 Present(Arc<String>),
1637 AbsentOk,
1639 AbsentErr,
1641}
1642
1643impl ExternalSource {
1644 pub fn get_source(&self) -> Option<&str> {
1645 match self {
1646 ExternalSource::Foreign { kind: ExternalSourceKind::Present(src), .. } => Some(src),
1647 _ => None,
1648 }
1649 }
1650}
1651
1652#[derive(Debug)]
1653pub struct OffsetOverflowError;
1654
1655#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Encodable, Decodable)]
1656#[derive(HashStable_Generic)]
1657pub enum SourceFileHashAlgorithm {
1658 Md5,
1659 Sha1,
1660 Sha256,
1661 Blake3,
1662}
1663
1664impl Display for SourceFileHashAlgorithm {
1665 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1666 f.write_str(match self {
1667 Self::Md5 => "md5",
1668 Self::Sha1 => "sha1",
1669 Self::Sha256 => "sha256",
1670 Self::Blake3 => "blake3",
1671 })
1672 }
1673}
1674
1675impl FromStr for SourceFileHashAlgorithm {
1676 type Err = ();
1677
1678 fn from_str(s: &str) -> Result<SourceFileHashAlgorithm, ()> {
1679 match s {
1680 "md5" => Ok(SourceFileHashAlgorithm::Md5),
1681 "sha1" => Ok(SourceFileHashAlgorithm::Sha1),
1682 "sha256" => Ok(SourceFileHashAlgorithm::Sha256),
1683 "blake3" => Ok(SourceFileHashAlgorithm::Blake3),
1684 _ => Err(()),
1685 }
1686 }
1687}
1688
1689#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)]
1691#[derive(HashStable_Generic, Encodable, Decodable)]
1692pub struct SourceFileHash {
1693 pub kind: SourceFileHashAlgorithm,
1694 value: [u8; 32],
1695}
1696
1697impl Display for SourceFileHash {
1698 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1699 write!(f, "{}=", self.kind)?;
1700 for byte in self.value[0..self.hash_len()].into_iter() {
1701 write!(f, "{byte:02x}")?;
1702 }
1703 Ok(())
1704 }
1705}
1706
1707impl SourceFileHash {
1708 pub fn new_in_memory(kind: SourceFileHashAlgorithm, src: impl AsRef<[u8]>) -> SourceFileHash {
1709 let mut hash = SourceFileHash { kind, value: Default::default() };
1710 let len = hash.hash_len();
1711 let value = &mut hash.value[..len];
1712 let data = src.as_ref();
1713 match kind {
1714 SourceFileHashAlgorithm::Md5 => {
1715 value.copy_from_slice(&Md5::digest(data));
1716 }
1717 SourceFileHashAlgorithm::Sha1 => {
1718 value.copy_from_slice(&Sha1::digest(data));
1719 }
1720 SourceFileHashAlgorithm::Sha256 => {
1721 value.copy_from_slice(&Sha256::digest(data));
1722 }
1723 SourceFileHashAlgorithm::Blake3 => value.copy_from_slice(blake3::hash(data).as_bytes()),
1724 };
1725 hash
1726 }
1727
1728 pub fn new(kind: SourceFileHashAlgorithm, src: impl Read) -> Result<SourceFileHash, io::Error> {
1729 let mut hash = SourceFileHash { kind, value: Default::default() };
1730 let len = hash.hash_len();
1731 let value = &mut hash.value[..len];
1732 let mut buf = vec![0; 16 * 1024];
1735
1736 fn digest<T>(
1737 mut hasher: T,
1738 mut update: impl FnMut(&mut T, &[u8]),
1739 finish: impl FnOnce(T, &mut [u8]),
1740 mut src: impl Read,
1741 buf: &mut [u8],
1742 value: &mut [u8],
1743 ) -> Result<(), io::Error> {
1744 loop {
1745 let bytes_read = src.read(buf)?;
1746 if bytes_read == 0 {
1747 break;
1748 }
1749 update(&mut hasher, &buf[0..bytes_read]);
1750 }
1751 finish(hasher, value);
1752 Ok(())
1753 }
1754
1755 match kind {
1756 SourceFileHashAlgorithm::Sha256 => {
1757 digest(
1758 Sha256::new(),
1759 |h, b| {
1760 h.update(b);
1761 },
1762 |h, out| out.copy_from_slice(&h.finalize()),
1763 src,
1764 &mut buf,
1765 value,
1766 )?;
1767 }
1768 SourceFileHashAlgorithm::Sha1 => {
1769 digest(
1770 Sha1::new(),
1771 |h, b| {
1772 h.update(b);
1773 },
1774 |h, out| out.copy_from_slice(&h.finalize()),
1775 src,
1776 &mut buf,
1777 value,
1778 )?;
1779 }
1780 SourceFileHashAlgorithm::Md5 => {
1781 digest(
1782 Md5::new(),
1783 |h, b| {
1784 h.update(b);
1785 },
1786 |h, out| out.copy_from_slice(&h.finalize()),
1787 src,
1788 &mut buf,
1789 value,
1790 )?;
1791 }
1792 SourceFileHashAlgorithm::Blake3 => {
1793 digest(
1794 blake3::Hasher::new(),
1795 |h, b| {
1796 h.update(b);
1797 },
1798 |h, out| out.copy_from_slice(h.finalize().as_bytes()),
1799 src,
1800 &mut buf,
1801 value,
1802 )?;
1803 }
1804 }
1805 Ok(hash)
1806 }
1807
1808 pub fn matches(&self, src: &str) -> bool {
1810 Self::new_in_memory(self.kind, src.as_bytes()) == *self
1811 }
1812
1813 pub fn hash_bytes(&self) -> &[u8] {
1815 let len = self.hash_len();
1816 &self.value[..len]
1817 }
1818
1819 fn hash_len(&self) -> usize {
1820 match self.kind {
1821 SourceFileHashAlgorithm::Md5 => 16,
1822 SourceFileHashAlgorithm::Sha1 => 20,
1823 SourceFileHashAlgorithm::Sha256 | SourceFileHashAlgorithm::Blake3 => 32,
1824 }
1825 }
1826}
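// Illustrative sketch (not part of the original source): hashes render as
// `<algorithm>=<lowercase hex>`, truncated to the algorithm's digest length.
//
//     let h = SourceFileHash::new_in_memory(SourceFileHashAlgorithm::Md5, "fn main() {}");
//     assert_eq!(h.hash_bytes().len(), 16);
//     println!("{h}"); // e.g. `md5=<32 hex digits>`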
1827
1828#[derive(Clone)]
1829pub enum SourceFileLines {
1830 Lines(Vec<RelativeBytePos>),
1832
1833 Diffs(SourceFileDiffs),
1835}
1836
1837impl SourceFileLines {
1838 pub fn is_lines(&self) -> bool {
1839 matches!(self, SourceFileLines::Lines(_))
1840 }
1841}
1842
1843#[derive(Clone)]
1851pub struct SourceFileDiffs {
1852 bytes_per_diff: usize,
1856
1857 num_diffs: usize,
1860
1861 raw_diffs: Vec<u8>,
1867}
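// Illustrative sketch (not part of the original source): the diff encoding stores only
// the gaps between consecutive line-start positions. For example, line starts at
// relative positions [0, 7, 7, 30] would round-trip as
//
//     bytes_per_diff = 1            // the largest gap (23) fits in one byte
//     num_diffs      = 3
//     raw_diffs      = [7, 0, 23]   // 7-0, 7-7, 30-7
//
// and the full line table is rebuilt lazily by `convert_diffs_to_lines_frozen`.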
1868
1869pub struct SourceFile {
1871 pub name: FileName,
1875 pub src: Option<Arc<String>>,
1877 pub src_hash: SourceFileHash,
1879 pub checksum_hash: Option<SourceFileHash>,
1883 pub external_src: FreezeLock<ExternalSource>,
1886 pub start_pos: BytePos,
1888 pub normalized_source_len: RelativeBytePos,
1890 pub unnormalized_source_len: u32,
1892 pub lines: FreezeLock<SourceFileLines>,
1894 pub multibyte_chars: Vec<MultiByteChar>,
1896 pub normalized_pos: Vec<NormalizedPos>,
1898 pub stable_id: StableSourceFileId,
1902 pub cnum: CrateNum,
1904}
1905
1906impl Clone for SourceFile {
1907 fn clone(&self) -> Self {
1908 Self {
1909 name: self.name.clone(),
1910 src: self.src.clone(),
1911 src_hash: self.src_hash,
1912 checksum_hash: self.checksum_hash,
1913 external_src: self.external_src.clone(),
1914 start_pos: self.start_pos,
1915 normalized_source_len: self.normalized_source_len,
1916 unnormalized_source_len: self.unnormalized_source_len,
1917 lines: self.lines.clone(),
1918 multibyte_chars: self.multibyte_chars.clone(),
1919 normalized_pos: self.normalized_pos.clone(),
1920 stable_id: self.stable_id,
1921 cnum: self.cnum,
1922 }
1923 }
1924}
1925
1926impl<S: SpanEncoder> Encodable<S> for SourceFile {
1927 fn encode(&self, s: &mut S) {
1928 self.name.encode(s);
1929 self.src_hash.encode(s);
1930 self.checksum_hash.encode(s);
1931 self.normalized_source_len.encode(s);
1933 self.unnormalized_source_len.encode(s);
1934
1935 assert!(self.lines.read().is_lines());
1937 let lines = self.lines();
1938 s.emit_u32(lines.len() as u32);
1940
1941 if lines.len() != 0 {
1943 let max_line_length = if lines.len() == 1 {
1944 0
1945 } else {
1946 lines
1947 .array_windows()
1948 .map(|&[fst, snd]| snd - fst)
1949 .map(|bp| bp.to_usize())
1950 .max()
1951 .unwrap()
1952 };
1953
1954 let bytes_per_diff: usize = match max_line_length {
1955 0..=0xFF => 1,
1956 0x100..=0xFFFF => 2,
1957 _ => 4,
1958 };
1959
1960 s.emit_u8(bytes_per_diff as u8);
1962
1963 assert_eq!(lines[0], RelativeBytePos(0));
1965
1966 let diff_iter = lines.array_windows().map(|&[fst, snd]| snd - fst);
1968 let num_diffs = lines.len() - 1;
1969 let mut raw_diffs;
1970 match bytes_per_diff {
1971 1 => {
1972 raw_diffs = Vec::with_capacity(num_diffs);
1973 for diff in diff_iter {
1974 raw_diffs.push(diff.0 as u8);
1975 }
1976 }
1977 2 => {
1978 raw_diffs = Vec::with_capacity(bytes_per_diff * num_diffs);
1979 for diff in diff_iter {
1980 raw_diffs.extend_from_slice(&(diff.0 as u16).to_le_bytes());
1981 }
1982 }
1983 4 => {
1984 raw_diffs = Vec::with_capacity(bytes_per_diff * num_diffs);
1985 for diff in diff_iter {
1986 raw_diffs.extend_from_slice(&(diff.0).to_le_bytes());
1987 }
1988 }
1989 _ => unreachable!(),
1990 }
1991 s.emit_raw_bytes(&raw_diffs);
1992 }
1993
1994 self.multibyte_chars.encode(s);
1995 self.stable_id.encode(s);
1996 self.normalized_pos.encode(s);
1997 self.cnum.encode(s);
1998 }
1999}
2000
2001impl<D: SpanDecoder> Decodable<D> for SourceFile {
2002 fn decode(d: &mut D) -> SourceFile {
2003 let name: FileName = Decodable::decode(d);
2004 let src_hash: SourceFileHash = Decodable::decode(d);
2005 let checksum_hash: Option<SourceFileHash> = Decodable::decode(d);
2006 let normalized_source_len: RelativeBytePos = Decodable::decode(d);
2007 let unnormalized_source_len = Decodable::decode(d);
2008 let lines = {
2009 let num_lines: u32 = Decodable::decode(d);
2010 if num_lines > 0 {
2011 let bytes_per_diff = d.read_u8() as usize;
2013
2014 let num_diffs = num_lines as usize - 1;
2016 let raw_diffs = d.read_raw_bytes(bytes_per_diff * num_diffs).to_vec();
2017 SourceFileLines::Diffs(SourceFileDiffs { bytes_per_diff, num_diffs, raw_diffs })
2018 } else {
2019 SourceFileLines::Lines(vec![])
2020 }
2021 };
2022 let multibyte_chars: Vec<MultiByteChar> = Decodable::decode(d);
2023 let stable_id = Decodable::decode(d);
2024 let normalized_pos: Vec<NormalizedPos> = Decodable::decode(d);
2025 let cnum: CrateNum = Decodable::decode(d);
2026 SourceFile {
2027 name,
2028 start_pos: BytePos::from_u32(0),
2029 normalized_source_len,
2030 unnormalized_source_len,
2031 src: None,
2032 src_hash,
2033 checksum_hash,
2034 external_src: FreezeLock::frozen(ExternalSource::Unneeded),
2037 lines: FreezeLock::new(lines),
2038 multibyte_chars,
2039 normalized_pos,
2040 stable_id,
2041 cnum,
2042 }
2043 }
2044}
2045
2046impl fmt::Debug for SourceFile {
2047 fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
2048 write!(fmt, "SourceFile({:?})", self.name)
2049 }
2050}
2051
2052#[derive(
2074 Debug,
2075 Clone,
2076 Copy,
2077 Hash,
2078 PartialEq,
2079 Eq,
2080 HashStable_Generic,
2081 Encodable,
2082 Decodable,
2083 Default,
2084 PartialOrd,
2085 Ord
2086)]
2087pub struct StableSourceFileId(Hash128);
2088
2089impl StableSourceFileId {
2090 fn from_filename_in_current_crate(filename: &FileName) -> Self {
2091 Self::from_filename_and_stable_crate_id(filename, None)
2092 }
2093
2094 pub fn from_filename_for_export(
2095 filename: &FileName,
2096 local_crate_stable_crate_id: StableCrateId,
2097 ) -> Self {
2098 Self::from_filename_and_stable_crate_id(filename, Some(local_crate_stable_crate_id))
2099 }
2100
2101 fn from_filename_and_stable_crate_id(
2102 filename: &FileName,
2103 stable_crate_id: Option<StableCrateId>,
2104 ) -> Self {
2105 let mut hasher = StableHasher::new();
2106 filename.hash(&mut hasher);
2107 stable_crate_id.hash(&mut hasher);
2108 StableSourceFileId(hasher.finish())
2109 }
2110}
2111
2112impl SourceFile {
2113 const MAX_FILE_SIZE: u32 = u32::MAX - 1;
2114
2115 pub fn new(
2116 name: FileName,
2117 mut src: String,
2118 hash_kind: SourceFileHashAlgorithm,
2119 checksum_hash_kind: Option<SourceFileHashAlgorithm>,
2120 ) -> Result<Self, OffsetOverflowError> {
2121 let src_hash = SourceFileHash::new_in_memory(hash_kind, src.as_bytes());
2123 let checksum_hash = checksum_hash_kind.map(|checksum_hash_kind| {
2124 if checksum_hash_kind == hash_kind {
2125 src_hash
2126 } else {
2127 SourceFileHash::new_in_memory(checksum_hash_kind, src.as_bytes())
2128 }
2129 });
2130 let unnormalized_source_len = u32::try_from(src.len()).map_err(|_| OffsetOverflowError)?;
2132 if unnormalized_source_len > Self::MAX_FILE_SIZE {
2133 return Err(OffsetOverflowError);
2134 }
2135
2136 let normalized_pos = normalize_src(&mut src);
2137
2138 let stable_id = StableSourceFileId::from_filename_in_current_crate(&name);
2139 let normalized_source_len = u32::try_from(src.len()).map_err(|_| OffsetOverflowError)?;
2140 if normalized_source_len > Self::MAX_FILE_SIZE {
2141 return Err(OffsetOverflowError);
2142 }
2143
2144 let (lines, multibyte_chars) = analyze_source_file::analyze_source_file(&src);
2145
2146 Ok(SourceFile {
2147 name,
2148 src: Some(Arc::new(src)),
2149 src_hash,
2150 checksum_hash,
2151 external_src: FreezeLock::frozen(ExternalSource::Unneeded),
2152 start_pos: BytePos::from_u32(0),
2153 normalized_source_len: RelativeBytePos::from_u32(normalized_source_len),
2154 unnormalized_source_len,
2155 lines: FreezeLock::frozen(SourceFileLines::Lines(lines)),
2156 multibyte_chars,
2157 normalized_pos,
2158 stable_id,
2159 cnum: LOCAL_CRATE,
2160 })
2161 }
2162
2163 fn convert_diffs_to_lines_frozen(&self) {
2166 let mut guard = if let Some(guard) = self.lines.try_write() { guard } else { return };
2167
2168 let SourceFileDiffs { bytes_per_diff, num_diffs, raw_diffs } = match &*guard {
2169 SourceFileLines::Diffs(diffs) => diffs,
2170 SourceFileLines::Lines(..) => {
2171 FreezeWriteGuard::freeze(guard);
2172 return;
2173 }
2174 };
2175
2176 let num_lines = num_diffs + 1;
2178 let mut lines = Vec::with_capacity(num_lines);
2179 let mut line_start = RelativeBytePos(0);
2180 lines.push(line_start);
2181
2182 assert_eq!(*num_diffs, raw_diffs.len() / bytes_per_diff);
2183 match bytes_per_diff {
2184 1 => {
2185 lines.extend(raw_diffs.into_iter().map(|&diff| {
2186 line_start = line_start + RelativeBytePos(diff as u32);
2187 line_start
2188 }));
2189 }
2190 2 => {
2191 lines.extend((0..*num_diffs).map(|i| {
2192 let pos = bytes_per_diff * i;
2193 let bytes = [raw_diffs[pos], raw_diffs[pos + 1]];
2194 let diff = u16::from_le_bytes(bytes);
2195 line_start = line_start + RelativeBytePos(diff as u32);
2196 line_start
2197 }));
2198 }
2199 4 => {
2200 lines.extend((0..*num_diffs).map(|i| {
2201 let pos = bytes_per_diff * i;
2202 let bytes = [
2203 raw_diffs[pos],
2204 raw_diffs[pos + 1],
2205 raw_diffs[pos + 2],
2206 raw_diffs[pos + 3],
2207 ];
2208 let diff = u32::from_le_bytes(bytes);
2209 line_start = line_start + RelativeBytePos(diff);
2210 line_start
2211 }));
2212 }
2213 _ => unreachable!(),
2214 }
2215
2216 *guard = SourceFileLines::Lines(lines);
2217
2218 FreezeWriteGuard::freeze(guard);
2219 }
2220
2221 pub fn lines(&self) -> &[RelativeBytePos] {
2222 if let Some(SourceFileLines::Lines(lines)) = self.lines.get() {
2223 return &lines[..];
2224 }
2225
2226 outline(|| {
2227 self.convert_diffs_to_lines_frozen();
2228 if let Some(SourceFileLines::Lines(lines)) = self.lines.get() {
2229 return &lines[..];
2230 }
2231 unreachable!()
2232 })
2233 }
2234
2235 pub fn line_begin_pos(&self, pos: BytePos) -> BytePos {
2237 let pos = self.relative_position(pos);
2238 let line_index = self.lookup_line(pos).unwrap();
2239 let line_start_pos = self.lines()[line_index];
2240 self.absolute_position(line_start_pos)
2241 }
2242
2243 pub fn add_external_src<F>(&self, get_src: F) -> bool
2248 where
2249 F: FnOnce() -> Option<String>,
2250 {
2251 if !self.external_src.is_frozen() {
2252 let src = get_src();
2253 let src = src.and_then(|mut src| {
2254 self.src_hash.matches(&src).then(|| {
2256 normalize_src(&mut src);
2257 src
2258 })
2259 });
2260
2261 self.external_src.try_write().map(|mut external_src| {
2262 if let ExternalSource::Foreign {
2263 kind: src_kind @ ExternalSourceKind::AbsentOk,
2264 ..
2265 } = &mut *external_src
2266 {
2267 *src_kind = if let Some(src) = src {
2268 ExternalSourceKind::Present(Arc::new(src))
2269 } else {
2270 ExternalSourceKind::AbsentErr
2271 };
2272 } else {
2273 panic!("unexpected state {:?}", *external_src)
2274 }
2275
2276 FreezeWriteGuard::freeze(external_src)
2278 });
2279 }
2280
2281 self.src.is_some() || self.external_src.read().get_source().is_some()
2282 }
2283
2284 pub fn get_line(&self, line_number: usize) -> Option<Cow<'_, str>> {
2287 fn get_until_newline(src: &str, begin: usize) -> &str {
2288 let slice = &src[begin..];
2292 match slice.find('\n') {
2293 Some(e) => &slice[..e],
2294 None => slice,
2295 }
2296 }
2297
2298 let begin = {
2299 let line = self.lines().get(line_number).copied()?;
2300 line.to_usize()
2301 };
2302
2303 if let Some(ref src) = self.src {
2304 Some(Cow::from(get_until_newline(src, begin)))
2305 } else {
2306 self.external_src
2307 .borrow()
2308 .get_source()
2309 .map(|src| Cow::Owned(String::from(get_until_newline(src, begin))))
2310 }
2311 }
2312
2313 pub fn is_real_file(&self) -> bool {
2314 self.name.is_real()
2315 }
2316
2317 #[inline]
2318 pub fn is_imported(&self) -> bool {
2319 self.src.is_none()
2320 }
2321
2322 pub fn count_lines(&self) -> usize {
2323 self.lines().len()
2324 }
2325
2326 #[inline]
2327 pub fn absolute_position(&self, pos: RelativeBytePos) -> BytePos {
2328 BytePos::from_u32(pos.to_u32() + self.start_pos.to_u32())
2329 }
2330
2331 #[inline]
2332 pub fn relative_position(&self, pos: BytePos) -> RelativeBytePos {
2333 RelativeBytePos::from_u32(pos.to_u32() - self.start_pos.to_u32())
2334 }
2335
2336 #[inline]
2337 pub fn end_position(&self) -> BytePos {
2338 self.absolute_position(self.normalized_source_len)
2339 }
2340
2341 pub fn lookup_line(&self, pos: RelativeBytePos) -> Option<usize> {
2346 self.lines().partition_point(|x| x <= &pos).checked_sub(1)
2347 }
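    // Illustrative example with hypothetical line starts `[0, 10, 25]`:
    // `partition_point(|x| x <= &pos)` counts how many line starts are <= `pos`, so
    //     lookup_line(RelativeBytePos(0))  == Some(0)
    //     lookup_line(RelativeBytePos(15)) == Some(1)
    //     lookup_line(RelativeBytePos(25)) == Some(2)
    // and `checked_sub(1)` turns that count into an index, returning `None` only when
    // `pos` precedes the first recorded line start.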
2348
2349 pub fn line_bounds(&self, line_index: usize) -> Range<BytePos> {
2350 if self.is_empty() {
2351 return self.start_pos..self.start_pos;
2352 }
2353
2354 let lines = self.lines();
2355        assert!(line_index < lines.len());
2356 if line_index == (lines.len() - 1) {
2357 self.absolute_position(lines[line_index])..self.end_position()
2358 } else {
2359 self.absolute_position(lines[line_index])..self.absolute_position(lines[line_index + 1])
2360 }
2361 }
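    // Illustrative example with hypothetical data: for a non-empty file with relative
    // line starts `[0, 10, 25]`,
    //     line_bounds(1) == absolute(10)..absolute(25)    // bounded by the next line start
    //     line_bounds(2) == absolute(25)..end_position()  // the last line runs to end of file
    // where `absolute(n)` stands for `self.absolute_position(RelativeBytePos(n))`.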
2362
2363 #[inline]
2368 pub fn contains(&self, byte_pos: BytePos) -> bool {
2369 byte_pos >= self.start_pos && byte_pos <= self.end_position()
2370 }
2371
2372 #[inline]
2373 pub fn is_empty(&self) -> bool {
2374 self.normalized_source_len.to_u32() == 0
2375 }
2376
2377 pub fn original_relative_byte_pos(&self, pos: BytePos) -> RelativeBytePos {
2380 let pos = self.relative_position(pos);
2381
2382 let diff = match self.normalized_pos.binary_search_by(|np| np.pos.cmp(&pos)) {
2386 Ok(i) => self.normalized_pos[i].diff,
2387 Err(0) => 0,
2388 Err(i) => self.normalized_pos[i - 1].diff,
2389 };
2390
2391 RelativeBytePos::from_u32(pos.0 + diff)
2392 }
2393
2394 pub fn normalized_byte_pos(&self, offset: u32) -> BytePos {
2404 let diff =
2405 match self.normalized_pos.binary_search_by(|np| (np.pos.0 + np.diff).cmp(&offset)) {
2406 Ok(i) => self.normalized_pos[i].diff,
2407 Err(0) => 0,
2408 Err(i) => self.normalized_pos[i - 1].diff,
2409 };
2410
2411 BytePos::from_u32(self.start_pos.0 + offset - diff)
2412 }
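    // Illustrative example with hypothetical data: `normalized_pos` records how many
    // bytes normalization removed up to a given position. If the file began with a
    // UTF-8 BOM (3 bytes removed at offset 0), `normalized_pos` is
    //     [NormalizedPos { pos: RelativeBytePos(0), diff: 3 }]
    // and the two functions above translate between normalized and on-disk offsets:
    //     original_relative_byte_pos(start_pos + BytePos(5)) == RelativeBytePos(8)
    //     normalized_byte_pos(8) == start_pos + BytePos(5)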
2413
2414 fn bytepos_to_file_charpos(&self, bpos: RelativeBytePos) -> CharPos {
2416 let mut total_extra_bytes = 0;
2418
2419 for mbc in self.multibyte_chars.iter() {
2420            debug!("{}-byte char at {:?}", mbc.bytes, mbc.pos);
2421 if mbc.pos < bpos {
2422 total_extra_bytes += mbc.bytes as u32 - 1;
2425                assert!(bpos.to_u32() >= mbc.pos.to_u32() + mbc.bytes as u32);
2428 } else {
2429 break;
2430 }
2431 }
2432
2433        assert!(total_extra_bytes <= bpos.to_u32());
2434 CharPos(bpos.to_usize() - total_extra_bytes as usize)
2435 }
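    // Illustrative example with hypothetical data: if `multibyte_chars` records a single
    // 3-byte character at `RelativeBytePos(4)`, then for `bpos = RelativeBytePos(10)` the
    // loop accumulates `total_extra_bytes = 3 - 1 = 2` and the result is `CharPos(8)`:
    // byte offsets past a multibyte character exceed the corresponding character offsets
    // by `bytes - 1` for each such character.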
2436
2437 fn lookup_file_pos(&self, pos: RelativeBytePos) -> (usize, CharPos) {
2440 let chpos = self.bytepos_to_file_charpos(pos);
2441 match self.lookup_line(pos) {
2442 Some(a) => {
2443                let line = a + 1; // Line numbers start at 1.
2444                let linebpos = self.lines()[a];
2445 let linechpos = self.bytepos_to_file_charpos(linebpos);
2446 let col = chpos - linechpos;
2447                debug!("byte pos {:?} is on the line at byte pos {:?}", pos, linebpos);
2448                debug!("char pos {:?} is on the line at char pos {:?}", chpos, linechpos);
2449                debug!("byte is on line: {}", line);
2450                assert!(chpos >= linechpos);
2451 (line, col)
2452 }
2453 None => (0, chpos),
2454 }
2455 }
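    // Illustrative note: the returned line number is 1-based while the column is a
    // 0-based `CharPos`. With hypothetical line starts `[0, 10]` and no multibyte
    // characters, a position at relative byte 14 maps to `(2, CharPos(4))`; a position
    // before every recorded line start maps to `(0, chpos)`.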
2456
2457 pub fn lookup_file_pos_with_col_display(&self, pos: BytePos) -> (usize, CharPos, usize) {
2460 let pos = self.relative_position(pos);
2461 let (line, col_or_chpos) = self.lookup_file_pos(pos);
2462 if line > 0 {
2463 let Some(code) = self.get_line(line - 1) else {
2471                tracing::info!("couldn't find line {line} {:?}", self.name);
2472 return (line, col_or_chpos, col_or_chpos.0);
2473 };
2474 let display_col = code.chars().take(col_or_chpos.0).map(|ch| char_width(ch)).sum();
2475 (line, col_or_chpos, display_col)
2476 } else {
2477 (0, col_or_chpos, col_or_chpos.0)
2479 }
2480 }
2481}
2482
2483pub fn char_width(ch: char) -> usize {
2484 match ch {
2487 '\t' => 4,
2488 '\u{0000}' | '\u{0001}' | '\u{0002}' | '\u{0003}' | '\u{0004}' | '\u{0005}'
2492 | '\u{0006}' | '\u{0007}' | '\u{0008}' | '\u{000B}' | '\u{000C}' | '\u{000D}'
2493 | '\u{000E}' | '\u{000F}' | '\u{0010}' | '\u{0011}' | '\u{0012}' | '\u{0013}'
2494 | '\u{0014}' | '\u{0015}' | '\u{0016}' | '\u{0017}' | '\u{0018}' | '\u{0019}'
2495 | '\u{001A}' | '\u{001B}' | '\u{001C}' | '\u{001D}' | '\u{001E}' | '\u{001F}'
2496 | '\u{007F}' | '\u{202A}' | '\u{202B}' | '\u{202D}' | '\u{202E}' | '\u{2066}'
2497 | '\u{2067}' | '\u{2068}' | '\u{202C}' | '\u{2069}' => 1,
2498 _ => unicode_width::UnicodeWidthChar::width(ch).unwrap_or(1),
2499 }
2500}
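// Illustrative examples of the width rules above: a tab is treated as 4 columns, the
// listed control and bidirectional-override characters as 1, and everything else defers
// to `unicode_width`, e.g.
//     char_width('\t') == 4
//     char_width('a')  == 1
//     char_width('宽') == 2   // East Asian wide character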
2501
2502pub fn str_width(s: &str) -> usize {
2503 s.chars().map(char_width).sum()
2504}
2505
2506fn normalize_src(src: &mut String) -> Vec<NormalizedPos> {
2508    let mut normalized_pos = vec![];
2509 remove_bom(src, &mut normalized_pos);
2510 normalize_newlines(src, &mut normalized_pos);
2511 normalized_pos
2512}
2513
2514fn remove_bom(src: &mut String, normalized_pos: &mut Vec<NormalizedPos>) {
2516 if src.starts_with('\u{feff}') {
2517 src.drain(..3);
2518 normalized_pos.push(NormalizedPos { pos: RelativeBytePos(0), diff: 3 });
2519 }
2520}
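// Illustrative note: the UTF-8 byte order mark U+FEFF encodes as the three bytes
// EF BB BF, which is why `drain(..3)` removes exactly three bytes and the recorded
// `NormalizedPos` carries `diff: 3`. A hypothetical input "\u{feff}fn main() {}" thus
// normalizes to "fn main() {}" with the single entry `{ pos: RelativeBytePos(0), diff: 3 }`.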
2521
2522fn normalize_newlines(src: &mut String, normalized_pos: &mut Vec<NormalizedPos>) {
2526 if !src.as_bytes().contains(&b'\r') {
2527 return;
2528 }
2529
2530 let mut buf = std::mem::replace(src, String::new()).into_bytes();
2536 let mut gap_len = 0;
2537 let mut tail = buf.as_mut_slice();
2538 let mut cursor = 0;
2539 let original_gap = normalized_pos.last().map_or(0, |l| l.diff);
2540 loop {
2541 let idx = match find_crlf(&tail[gap_len..]) {
2542 None => tail.len(),
2543 Some(idx) => idx + gap_len,
2544 };
2545 tail.copy_within(gap_len..idx, 0);
2546 tail = &mut tail[idx - gap_len..];
2547 if tail.len() == gap_len {
2548 break;
2549 }
2550 cursor += idx - gap_len;
2551 gap_len += 1;
2552 normalized_pos.push(NormalizedPos {
2553 pos: RelativeBytePos::from_usize(cursor + 1),
2554 diff: original_gap + gap_len as u32,
2555 });
2556 }
2557
2558 let new_len = buf.len() - gap_len;
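    // SAFETY: only ASCII '\r' bytes were removed above, so truncating to `new_len` and
    // reinterpreting the buffer as UTF-8 cannot split a multi-byte character.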
2561 unsafe {
2562 buf.set_len(new_len);
2563 *src = String::from_utf8_unchecked(buf);
2564 }
2565
2566 fn find_crlf(src: &[u8]) -> Option<usize> {
2567 let mut search_idx = 0;
2568 while let Some(idx) = find_cr(&src[search_idx..]) {
2569 if src[search_idx..].get(idx + 1) != Some(&b'\n') {
2570 search_idx += idx + 1;
2571 continue;
2572 }
2573 return Some(search_idx + idx);
2574 }
2575 None
2576 }
2577
2578 fn find_cr(src: &[u8]) -> Option<usize> {
2579 src.iter().position(|&b| b == b'\r')
2580 }
2581}
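// Illustrative example with hypothetical input: normalizing "a\r\nb\r\nc" yields
// "a\nb\nc" and pushes two entries,
//     NormalizedPos { pos: RelativeBytePos(2), diff: 1 }
//     NormalizedPos { pos: RelativeBytePos(4), diff: 2 }
// recording that positions at or after normalized offset 2 are shifted by 1 byte relative
// to the original text, and positions at or after offset 4 by 2 bytes. Lone '\r' bytes not
// followed by '\n' are left untouched by `find_crlf`.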
2582
2583pub trait Pos {
2588 fn from_usize(n: usize) -> Self;
2589 fn to_usize(&self) -> usize;
2590 fn from_u32(n: u32) -> Self;
2591 fn to_u32(&self) -> u32;
2592}
2593
2594macro_rules! impl_pos {
2595 (
2596 $(
2597 $(#[$attr:meta])*
2598 $vis:vis struct $ident:ident($inner_vis:vis $inner_ty:ty);
2599 )*
2600 ) => {
2601 $(
2602 $(#[$attr])*
2603 $vis struct $ident($inner_vis $inner_ty);
2604
2605 impl Pos for $ident {
2606 #[inline(always)]
2607 fn from_usize(n: usize) -> $ident {
2608 $ident(n as $inner_ty)
2609 }
2610
2611 #[inline(always)]
2612 fn to_usize(&self) -> usize {
2613 self.0 as usize
2614 }
2615
2616 #[inline(always)]
2617 fn from_u32(n: u32) -> $ident {
2618 $ident(n as $inner_ty)
2619 }
2620
2621 #[inline(always)]
2622 fn to_u32(&self) -> u32 {
2623 self.0 as u32
2624 }
2625 }
2626
2627 impl Add for $ident {
2628 type Output = $ident;
2629
2630 #[inline(always)]
2631 fn add(self, rhs: $ident) -> $ident {
2632 $ident(self.0 + rhs.0)
2633 }
2634 }
2635
2636 impl Sub for $ident {
2637 type Output = $ident;
2638
2639 #[inline(always)]
2640 fn sub(self, rhs: $ident) -> $ident {
2641 $ident(self.0 - rhs.0)
2642 }
2643 }
2644 )*
2645 };
2646}
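// Illustrative note: for a hypothetical invocation
//     impl_pos! { pub struct Example(pub u32); }
// the macro emits the struct itself plus `Pos` conversions and element-wise `Add`/`Sub`
// impls, so `(Example(3) + Example(4)).to_u32() == 7` and
// `Example::from_usize(9).to_u32() == 9`.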
2647
2648impl_pos! {
2649 #[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord, Debug)]
2653 pub struct BytePos(pub u32);
2654
2655 #[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord, Debug)]
2657 pub struct RelativeBytePos(pub u32);
2658
2659    #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
2660    /// A character offset.
2661    ///
2662    /// Because of multibyte UTF-8 characters, a byte offset
2663    /// is not equivalent to a character offset. The [`SourceMap`] will convert [`BytePos`]
2664    /// values to `CharPos` values as necessary.
2665    pub struct CharPos(pub usize);
2666}
2667
2668impl<S: Encoder> Encodable<S> for BytePos {
2669 fn encode(&self, s: &mut S) {
2670 s.emit_u32(self.0);
2671 }
2672}
2673
2674impl<D: Decoder> Decodable<D> for BytePos {
2675 fn decode(d: &mut D) -> BytePos {
2676 BytePos(d.read_u32())
2677 }
2678}
2679
2680impl<H: HashStableContext> HashStable<H> for RelativeBytePos {
2681 fn hash_stable(&self, hcx: &mut H, hasher: &mut StableHasher) {
2682 self.0.hash_stable(hcx, hasher);
2683 }
2684}
2685
2686impl<S: Encoder> Encodable<S> for RelativeBytePos {
2687 fn encode(&self, s: &mut S) {
2688 s.emit_u32(self.0);
2689 }
2690}
2691
2692impl<D: Decoder> Decodable<D> for RelativeBytePos {
2693 fn decode(d: &mut D) -> RelativeBytePos {
2694 RelativeBytePos(d.read_u32())
2695 }
2696}
2697
2698#[derive(Debug, Clone)]
2704pub struct Loc {
2705 pub file: Arc<SourceFile>,
2707 pub line: usize,
2709 pub col: CharPos,
2711 pub col_display: usize,
2713}
2714
2715#[derive(Debug)]
2717pub struct SourceFileAndLine {
2718 pub sf: Arc<SourceFile>,
2719 pub line: usize,
2721}
2722#[derive(Debug)]
2723pub struct SourceFileAndBytePos {
2724 pub sf: Arc<SourceFile>,
2725 pub pos: BytePos,
2726}
2727
2728#[derive(Copy, Clone, Debug, PartialEq, Eq)]
2729pub struct LineInfo {
2730 pub line_index: usize,
2732
2733 pub start_col: CharPos,
2735
2736 pub end_col: CharPos,
2738}
2739
2740pub struct FileLines {
2741 pub file: Arc<SourceFile>,
2742 pub lines: Vec<LineInfo>,
2743}
2744
2745pub static SPAN_TRACK: AtomicRef<fn(LocalDefId)> = AtomicRef::new(&((|_| {}) as fn(_)));
2746
2747pub type FileLinesResult = Result<FileLines, SpanLinesError>;
2752
2753#[derive(Clone, PartialEq, Eq, Debug)]
2754pub enum SpanLinesError {
2755 DistinctSources(Box<DistinctSources>),
2756}
2757
2758#[derive(Clone, PartialEq, Eq, Debug)]
2759pub enum SpanSnippetError {
2760 IllFormedSpan(Span),
2761 DistinctSources(Box<DistinctSources>),
2762 MalformedForSourcemap(MalformedSourceMapPositions),
2763 SourceNotAvailable { filename: FileName },
2764}
2765
2766#[derive(Clone, PartialEq, Eq, Debug)]
2767pub struct DistinctSources {
2768 pub begin: (FileName, BytePos),
2769 pub end: (FileName, BytePos),
2770}
2771
2772#[derive(Clone, PartialEq, Eq, Debug)]
2773pub struct MalformedSourceMapPositions {
2774 pub name: FileName,
2775 pub source_len: usize,
2776 pub begin_pos: BytePos,
2777 pub end_pos: BytePos,
2778}
2779
2780#[derive(Copy, Clone, PartialEq, Eq, Debug)]
2782pub struct InnerSpan {
2783 pub start: usize,
2784 pub end: usize,
2785}
2786
2787impl InnerSpan {
2788 pub fn new(start: usize, end: usize) -> InnerSpan {
2789 InnerSpan { start, end }
2790 }
2791}
2792
2793pub trait HashStableContext {
2798 fn span_hash_stable(&mut self, span: Span, hasher: &mut StableHasher);
2800
2801 fn def_path_hash(&self, def_id: DefId) -> DefPathHash;
2803
2804 fn assert_default_hashing_controls(&self, msg: &str);
2807}
2808
2809impl<CTX> HashStable<CTX> for Span
2810where
2811 CTX: HashStableContext,
2812{
2813 fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) {
2814 ctx.span_hash_stable(*self, hasher)
2816 }
2817}
2818
2819#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)]
2825#[derive(HashStable_Generic)]
2826pub struct ErrorGuaranteed(());
2827
2828impl ErrorGuaranteed {
2829 #[deprecated = "should only be used in `DiagCtxtInner::emit_diagnostic`"]
2831 pub fn unchecked_error_guaranteed() -> Self {
2832 ErrorGuaranteed(())
2833 }
2834
2835 pub fn raise_fatal(self) -> ! {
2836 FatalError.raise()
2837 }
2838}
2839
2840impl<E: rustc_serialize::Encoder> Encodable<E> for ErrorGuaranteed {
2841 #[inline]
2842 fn encode(&self, _e: &mut E) {
2843        panic!(
2844 "should never serialize an `ErrorGuaranteed`, as we do not write metadata or \
2845 incremental caches in case errors occurred"
2846 )
2847 }
2848}
2849impl<D: rustc_serialize::Decoder> Decodable<D> for ErrorGuaranteed {
2850 #[inline]
2851 fn decode(_d: &mut D) -> ErrorGuaranteed {
2852        panic!(
2853 "`ErrorGuaranteed` should never have been serialized to metadata or incremental caches"
2854 )
2855 }
2856}