1#![allow(internal_features)]
20#![cfg_attr(target_arch = "loongarch64", feature(stdarch_loongarch))]
21#![feature(cfg_select)]
22#![feature(core_io_borrowed_buf)]
23#![feature(if_let_guard)]
24#![feature(map_try_insert)]
25#![feature(negative_impls)]
26#![feature(read_buf)]
27#![feature(rustc_attrs)]
28extern crate self as rustc_span;
34
35use derive_where::derive_where;
36use rustc_data_structures::{AtomicRef, outline};
37use rustc_macros::{Decodable, Encodable, HashStable_Generic};
38use rustc_serialize::opaque::{FileEncoder, MemDecoder};
39use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
40use tracing::debug;
41pub use unicode_width::UNICODE_VERSION;
42
43mod caching_source_map_view;
44pub mod source_map;
45use source_map::{SourceMap, SourceMapInputs};
46
47pub use self::caching_source_map_view::CachingSourceMapView;
48use crate::fatal_error::FatalError;
49
50pub mod edition;
51use edition::Edition;
52pub mod hygiene;
53use hygiene::Transparency;
54pub use hygiene::{
55 DesugaringKind, ExpnData, ExpnHash, ExpnId, ExpnKind, LocalExpnId, MacroKind, SyntaxContext,
56};
57use rustc_data_structures::stable_hasher::HashingControls;
58pub mod def_id;
59use def_id::{CrateNum, DefId, DefIndex, DefPathHash, LOCAL_CRATE, LocalDefId, StableCrateId};
60pub mod edit_distance;
61mod span_encoding;
62pub use span_encoding::{DUMMY_SP, Span};
63
64pub mod symbol;
65pub use symbol::{
66 ByteSymbol, Ident, MacroRulesNormalizedIdent, STDLIB_STABLE_CRATES, Symbol, kw, sym,
67};
68
69mod analyze_source_file;
70pub mod fatal_error;
71
72pub mod profiling;
73
74use std::borrow::Cow;
75use std::cmp::{self, Ordering};
76use std::fmt::Display;
77use std::hash::Hash;
78use std::io::{self, Read};
79use std::ops::{Add, Range, Sub};
80use std::path::{Path, PathBuf};
81use std::str::FromStr;
82use std::sync::Arc;
83use std::{fmt, iter};
84
85use md5::{Digest, Md5};
86use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
87use rustc_data_structures::sync::{FreezeLock, FreezeWriteGuard, Lock};
88use rustc_data_structures::unord::UnordMap;
89use rustc_hashes::{Hash64, Hash128};
90use sha1::Sha1;
91use sha2::Sha256;
92
93#[cfg(test)]
94mod tests;
95
/// Per-session global state, installed into a scoped thread-local
/// (`SESSION_GLOBALS`) for the duration of a compilation session.
pub struct SessionGlobals {
    // String interner backing `Symbol`; pre-seeded with any driver-supplied
    // extra symbols (see `SessionGlobals::new`).
    symbol_interner: symbol::Interner,
    // Interner backing the compact `Span` encoding.
    span_interner: Lock<span_encoding::SpanInterner>,
    // Maps a metavariable's span to the span of the fragment it matched.
    metavar_spans: MetavarSpansMap,
    // Macro-hygiene tables, initialized for the session's edition.
    hygiene_data: Lock<hygiene::HygieneData>,

    // `None` when the session was created without `SourceMapInputs`
    // (e.g. `create_session_if_not_set_then` passes `None`).
    source_map: Option<Arc<SourceMap>>,
}
113
114impl SessionGlobals {
115 pub fn new(
116 edition: Edition,
117 extra_symbols: &[&'static str],
118 sm_inputs: Option<SourceMapInputs>,
119 ) -> SessionGlobals {
120 SessionGlobals {
121 symbol_interner: symbol::Interner::with_extra_symbols(extra_symbols),
122 span_interner: Lock::new(span_encoding::SpanInterner::default()),
123 metavar_spans: Default::default(),
124 hygiene_data: Lock::new(hygiene::HygieneData::new(edition)),
125 source_map: sm_inputs.map(|inputs| Arc::new(SourceMap::with_inputs(inputs))),
126 }
127 }
128}
129
130pub fn create_session_globals_then<R>(
131 edition: Edition,
132 extra_symbols: &[&'static str],
133 sm_inputs: Option<SourceMapInputs>,
134 f: impl FnOnce() -> R,
135) -> R {
136 if !!SESSION_GLOBALS.is_set() {
{
::core::panicking::panic_fmt(format_args!("SESSION_GLOBALS should never be overwritten! Use another thread if you need another SessionGlobals"));
}
};assert!(
137 !SESSION_GLOBALS.is_set(),
138 "SESSION_GLOBALS should never be overwritten! \
139 Use another thread if you need another SessionGlobals"
140 );
141 let session_globals = SessionGlobals::new(edition, extra_symbols, sm_inputs);
142 SESSION_GLOBALS.set(&session_globals, f)
143}
144
145pub fn set_session_globals_then<R>(session_globals: &SessionGlobals, f: impl FnOnce() -> R) -> R {
146 if !!SESSION_GLOBALS.is_set() {
{
::core::panicking::panic_fmt(format_args!("SESSION_GLOBALS should never be overwritten! Use another thread if you need another SessionGlobals"));
}
};assert!(
147 !SESSION_GLOBALS.is_set(),
148 "SESSION_GLOBALS should never be overwritten! \
149 Use another thread if you need another SessionGlobals"
150 );
151 SESSION_GLOBALS.set(session_globals, f)
152}
153
154pub fn create_session_if_not_set_then<R, F>(edition: Edition, f: F) -> R
156where
157 F: FnOnce(&SessionGlobals) -> R,
158{
159 if !SESSION_GLOBALS.is_set() {
160 let session_globals = SessionGlobals::new(edition, &[], None);
161 SESSION_GLOBALS.set(&session_globals, || SESSION_GLOBALS.with(f))
162 } else {
163 SESSION_GLOBALS.with(f)
164 }
165}
166
167#[inline]
168pub fn with_session_globals<R, F>(f: F) -> R
169where
170 F: FnOnce(&SessionGlobals) -> R,
171{
172 SESSION_GLOBALS.with(f)
173}
174
/// Convenience wrapper around `create_session_globals_then` using the default
/// edition, no extra symbols, and no source map.
pub fn create_default_session_globals_then<R>(f: impl FnOnce() -> R) -> R {
    create_session_globals_then(edition::DEFAULT_EDITION, &[], None, f)
}
179
180static SESSION_GLOBALS: ::scoped_tls::ScopedKey<SessionGlobals> =
::scoped_tls::ScopedKey {
inner: {
const FOO: ::std::thread::LocalKey<::std::cell::Cell<*const ()>> =
{
const __RUST_STD_INTERNAL_INIT: ::std::cell::Cell<*const ()>
=
{ ::std::cell::Cell::new(::std::ptr::null()) };
unsafe {
::std::thread::LocalKey::new(const {
if ::std::mem::needs_drop::<::std::cell::Cell<*const ()>>()
{
|_|
{
#[thread_local]
static __RUST_STD_INTERNAL_VAL:
::std::thread::local_impl::EagerStorage<::std::cell::Cell<*const ()>>
=
::std::thread::local_impl::EagerStorage::new(__RUST_STD_INTERNAL_INIT);
__RUST_STD_INTERNAL_VAL.get()
}
} else {
|_|
{
#[thread_local]
static __RUST_STD_INTERNAL_VAL: ::std::cell::Cell<*const ()>
=
__RUST_STD_INTERNAL_INIT;
&__RUST_STD_INTERNAL_VAL
}
}
})
}
};
&FOO
},
_marker: ::std::marker::PhantomData,
};scoped_tls::scoped_thread_local!(static SESSION_GLOBALS: SessionGlobals);
184
185#[derive(#[automatically_derived]
impl ::core::default::Default for MetavarSpansMap {
#[inline]
fn default() -> MetavarSpansMap {
MetavarSpansMap(::core::default::Default::default())
}
}Default)]
186pub struct MetavarSpansMap(FreezeLock<UnordMap<Span, (Span, bool)>>);
187
impl MetavarSpansMap {
    /// Records that metavariable `span` expanded from `var_span`.
    ///
    /// Returns `true` on a fresh insertion; if an entry for `span` already
    /// exists, returns whether it maps to the same `var_span` (so a
    /// conflicting mapping yields `false`).
    pub fn insert(&self, span: Span, var_span: Span) -> bool {
        match self.0.write().try_insert(span, (var_span, false)) {
            Ok(_) => true,
            Err(entry) => entry.entry.get().0 == var_span,
        }
    }

    /// Looks up the matched span for `span`, marking the entry as read while
    /// the map is still writable.
    pub fn get(&self, span: Span) -> Option<Span> {
        if let Some(mut mspans) = self.0.try_write() {
            if let Some((var_span, read)) = mspans.get_mut(&span) {
                // Record the read so `freeze_and_get_read_spans` reports it.
                *read = true;
                Some(*var_span)
            } else {
                None
            }
        } else {
            // The map is frozen: only entries whose read flag was set before
            // the freeze are still returned.
            if let Some((span, true)) = self.0.read().get(&span) { Some(*span) } else { None }
        }
    }

    /// Freezes the map and returns only the entries whose read flag was set.
    pub fn freeze_and_get_read_spans(&self) -> UnordMap<Span, Span> {
        self.0.freeze().items().filter(|(_, (_, b))| *b).map(|(s1, (s2, _))| (*s1, *s2)).collect()
    }
}
217
218#[inline]
219pub fn with_metavar_spans<R>(f: impl FnOnce(&MetavarSpansMap) -> R) -> R {
220 with_session_globals(|session_globals| f(&session_globals.metavar_spans))
221}
222
223#[doc =
r" Scopes used to determined if it need to apply to `--remap-path-prefix`"]
pub struct RemapPathScopeComponents(<RemapPathScopeComponents as
::bitflags::__private::PublicFlags>::Internal);
#[automatically_derived]
impl ::core::fmt::Debug for RemapPathScopeComponents {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
::core::fmt::Formatter::debug_tuple_field1_finish(f,
"RemapPathScopeComponents", &&self.0)
}
}
#[automatically_derived]
impl ::core::cmp::Eq for RemapPathScopeComponents {
#[inline]
#[doc(hidden)]
#[coverage(off)]
fn assert_receiver_is_total_eq(&self) {
let _:
::core::cmp::AssertParamIsEq<<RemapPathScopeComponents as
::bitflags::__private::PublicFlags>::Internal>;
}
}
#[automatically_derived]
impl ::core::marker::StructuralPartialEq for RemapPathScopeComponents { }
#[automatically_derived]
impl ::core::cmp::PartialEq for RemapPathScopeComponents {
#[inline]
fn eq(&self, other: &RemapPathScopeComponents) -> bool {
self.0 == other.0
}
}
#[automatically_derived]
#[doc(hidden)]
unsafe impl ::core::clone::TrivialClone for RemapPathScopeComponents { }
#[automatically_derived]
impl ::core::clone::Clone for RemapPathScopeComponents {
#[inline]
fn clone(&self) -> RemapPathScopeComponents {
let _:
::core::clone::AssertParamIsClone<<RemapPathScopeComponents as
::bitflags::__private::PublicFlags>::Internal>;
*self
}
}
#[automatically_derived]
impl ::core::marker::Copy for RemapPathScopeComponents { }
#[automatically_derived]
impl ::core::cmp::Ord for RemapPathScopeComponents {
#[inline]
fn cmp(&self, other: &RemapPathScopeComponents) -> ::core::cmp::Ordering {
::core::cmp::Ord::cmp(&self.0, &other.0)
}
}
#[automatically_derived]
impl ::core::cmp::PartialOrd for RemapPathScopeComponents {
#[inline]
fn partial_cmp(&self, other: &RemapPathScopeComponents)
-> ::core::option::Option<::core::cmp::Ordering> {
::core::cmp::PartialOrd::partial_cmp(&self.0, &other.0)
}
}
#[automatically_derived]
impl ::core::hash::Hash for RemapPathScopeComponents {
#[inline]
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) {
::core::hash::Hash::hash(&self.0, state)
}
}
impl RemapPathScopeComponents {
#[doc = r" Apply remappings to the expansion of `std::file!()` macro"]
#[allow(deprecated, non_upper_case_globals,)]
pub const MACRO: Self = Self::from_bits_retain(1 << 0);
#[doc = r" Apply remappings to printed compiler diagnostics"]
#[allow(deprecated, non_upper_case_globals,)]
pub const DIAGNOSTICS: Self = Self::from_bits_retain(1 << 1);
#[doc = r" Apply remappings to debug information"]
#[allow(deprecated, non_upper_case_globals,)]
pub const DEBUGINFO: Self = Self::from_bits_retain(1 << 3);
#[doc = r" Apply remappings to coverage information"]
#[allow(deprecated, non_upper_case_globals,)]
pub const COVERAGE: Self = Self::from_bits_retain(1 << 4);
#[doc = r" Apply remappings to documentation information"]
#[allow(deprecated, non_upper_case_globals,)]
pub const DOCUMENTATION: Self = Self::from_bits_retain(1 << 5);
#[doc =
r" An alias for `macro`, `debuginfo` and `coverage`. This ensures all paths in compiled"]
#[doc =
r" executables, libraries and objects are remapped but not elsewhere."]
#[allow(deprecated, non_upper_case_globals,)]
pub const OBJECT: Self =
Self::from_bits_retain(Self::MACRO.bits() | Self::DEBUGINFO.bits() |
Self::COVERAGE.bits());
}
impl ::bitflags::Flags for RemapPathScopeComponents {
const FLAGS: &'static [::bitflags::Flag<RemapPathScopeComponents>] =
&[{
#[allow(deprecated, non_upper_case_globals,)]
::bitflags::Flag::new("MACRO",
RemapPathScopeComponents::MACRO)
},
{
#[allow(deprecated, non_upper_case_globals,)]
::bitflags::Flag::new("DIAGNOSTICS",
RemapPathScopeComponents::DIAGNOSTICS)
},
{
#[allow(deprecated, non_upper_case_globals,)]
::bitflags::Flag::new("DEBUGINFO",
RemapPathScopeComponents::DEBUGINFO)
},
{
#[allow(deprecated, non_upper_case_globals,)]
::bitflags::Flag::new("COVERAGE",
RemapPathScopeComponents::COVERAGE)
},
{
#[allow(deprecated, non_upper_case_globals,)]
::bitflags::Flag::new("DOCUMENTATION",
RemapPathScopeComponents::DOCUMENTATION)
},
{
#[allow(deprecated, non_upper_case_globals,)]
::bitflags::Flag::new("OBJECT",
RemapPathScopeComponents::OBJECT)
}];
type Bits = u8;
fn bits(&self) -> u8 { RemapPathScopeComponents::bits(self) }
fn from_bits_retain(bits: u8) -> RemapPathScopeComponents {
RemapPathScopeComponents::from_bits_retain(bits)
}
}
#[allow(dead_code, deprecated, unused_doc_comments, unused_attributes,
unused_mut, unused_imports, non_upper_case_globals, clippy ::
assign_op_pattern, clippy :: indexing_slicing, clippy :: same_name_method,
clippy :: iter_without_into_iter,)]
const _: () =
{
#[repr(transparent)]
pub struct InternalBitFlags(u8);
#[automatically_derived]
#[doc(hidden)]
unsafe impl ::core::clone::TrivialClone for InternalBitFlags { }
#[automatically_derived]
impl ::core::clone::Clone for InternalBitFlags {
#[inline]
fn clone(&self) -> InternalBitFlags {
let _: ::core::clone::AssertParamIsClone<u8>;
*self
}
}
#[automatically_derived]
impl ::core::marker::Copy for InternalBitFlags { }
#[automatically_derived]
impl ::core::marker::StructuralPartialEq for InternalBitFlags { }
#[automatically_derived]
impl ::core::cmp::PartialEq for InternalBitFlags {
#[inline]
fn eq(&self, other: &InternalBitFlags) -> bool {
self.0 == other.0
}
}
#[automatically_derived]
impl ::core::cmp::Eq for InternalBitFlags {
#[inline]
#[doc(hidden)]
#[coverage(off)]
fn assert_receiver_is_total_eq(&self) {
let _: ::core::cmp::AssertParamIsEq<u8>;
}
}
#[automatically_derived]
impl ::core::cmp::PartialOrd for InternalBitFlags {
#[inline]
fn partial_cmp(&self, other: &InternalBitFlags)
-> ::core::option::Option<::core::cmp::Ordering> {
::core::cmp::PartialOrd::partial_cmp(&self.0, &other.0)
}
}
#[automatically_derived]
impl ::core::cmp::Ord for InternalBitFlags {
#[inline]
fn cmp(&self, other: &InternalBitFlags) -> ::core::cmp::Ordering {
::core::cmp::Ord::cmp(&self.0, &other.0)
}
}
#[automatically_derived]
impl ::core::hash::Hash for InternalBitFlags {
#[inline]
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) {
::core::hash::Hash::hash(&self.0, state)
}
}
impl ::bitflags::__private::PublicFlags for RemapPathScopeComponents {
type Primitive = u8;
type Internal = InternalBitFlags;
}
impl ::bitflags::__private::core::default::Default for
InternalBitFlags {
#[inline]
fn default() -> Self { InternalBitFlags::empty() }
}
impl ::bitflags::__private::core::fmt::Debug for InternalBitFlags {
fn fmt(&self,
f: &mut ::bitflags::__private::core::fmt::Formatter<'_>)
-> ::bitflags::__private::core::fmt::Result {
if self.is_empty() {
f.write_fmt(format_args!("{0:#x}",
<u8 as ::bitflags::Bits>::EMPTY))
} else {
::bitflags::__private::core::fmt::Display::fmt(self, f)
}
}
}
impl ::bitflags::__private::core::fmt::Display for InternalBitFlags {
fn fmt(&self,
f: &mut ::bitflags::__private::core::fmt::Formatter<'_>)
-> ::bitflags::__private::core::fmt::Result {
::bitflags::parser::to_writer(&RemapPathScopeComponents(*self),
f)
}
}
impl ::bitflags::__private::core::str::FromStr for InternalBitFlags {
type Err = ::bitflags::parser::ParseError;
fn from_str(s: &str)
->
::bitflags::__private::core::result::Result<Self,
Self::Err> {
::bitflags::parser::from_str::<RemapPathScopeComponents>(s).map(|flags|
flags.0)
}
}
impl ::bitflags::__private::core::convert::AsRef<u8> for
InternalBitFlags {
fn as_ref(&self) -> &u8 { &self.0 }
}
impl ::bitflags::__private::core::convert::From<u8> for
InternalBitFlags {
fn from(bits: u8) -> Self { Self::from_bits_retain(bits) }
}
#[allow(dead_code, deprecated, unused_attributes)]
impl InternalBitFlags {
#[inline]
pub const fn empty() -> Self {
Self(<u8 as ::bitflags::Bits>::EMPTY)
}
#[inline]
pub const fn all() -> Self {
let mut truncated = <u8 as ::bitflags::Bits>::EMPTY;
let mut i = 0;
{
{
let flag =
<RemapPathScopeComponents as
::bitflags::Flags>::FLAGS[i].value().bits();
truncated = truncated | flag;
i += 1;
}
};
{
{
let flag =
<RemapPathScopeComponents as
::bitflags::Flags>::FLAGS[i].value().bits();
truncated = truncated | flag;
i += 1;
}
};
{
{
let flag =
<RemapPathScopeComponents as
::bitflags::Flags>::FLAGS[i].value().bits();
truncated = truncated | flag;
i += 1;
}
};
{
{
let flag =
<RemapPathScopeComponents as
::bitflags::Flags>::FLAGS[i].value().bits();
truncated = truncated | flag;
i += 1;
}
};
{
{
let flag =
<RemapPathScopeComponents as
::bitflags::Flags>::FLAGS[i].value().bits();
truncated = truncated | flag;
i += 1;
}
};
{
{
let flag =
<RemapPathScopeComponents as
::bitflags::Flags>::FLAGS[i].value().bits();
truncated = truncated | flag;
i += 1;
}
};
let _ = i;
Self(truncated)
}
#[inline]
pub const fn bits(&self) -> u8 { self.0 }
#[inline]
pub const fn from_bits(bits: u8)
-> ::bitflags::__private::core::option::Option<Self> {
let truncated = Self::from_bits_truncate(bits).0;
if truncated == bits {
::bitflags::__private::core::option::Option::Some(Self(bits))
} else { ::bitflags::__private::core::option::Option::None }
}
#[inline]
pub const fn from_bits_truncate(bits: u8) -> Self {
Self(bits & Self::all().0)
}
#[inline]
pub const fn from_bits_retain(bits: u8) -> Self { Self(bits) }
#[inline]
pub fn from_name(name: &str)
-> ::bitflags::__private::core::option::Option<Self> {
{
if name == "MACRO" {
return ::bitflags::__private::core::option::Option::Some(Self(RemapPathScopeComponents::MACRO.bits()));
}
};
;
{
if name == "DIAGNOSTICS" {
return ::bitflags::__private::core::option::Option::Some(Self(RemapPathScopeComponents::DIAGNOSTICS.bits()));
}
};
;
{
if name == "DEBUGINFO" {
return ::bitflags::__private::core::option::Option::Some(Self(RemapPathScopeComponents::DEBUGINFO.bits()));
}
};
;
{
if name == "COVERAGE" {
return ::bitflags::__private::core::option::Option::Some(Self(RemapPathScopeComponents::COVERAGE.bits()));
}
};
;
{
if name == "DOCUMENTATION" {
return ::bitflags::__private::core::option::Option::Some(Self(RemapPathScopeComponents::DOCUMENTATION.bits()));
}
};
;
{
if name == "OBJECT" {
return ::bitflags::__private::core::option::Option::Some(Self(RemapPathScopeComponents::OBJECT.bits()));
}
};
;
let _ = name;
::bitflags::__private::core::option::Option::None
}
#[inline]
pub const fn is_empty(&self) -> bool {
self.0 == <u8 as ::bitflags::Bits>::EMPTY
}
#[inline]
pub const fn is_all(&self) -> bool {
Self::all().0 | self.0 == self.0
}
#[inline]
pub const fn intersects(&self, other: Self) -> bool {
self.0 & other.0 != <u8 as ::bitflags::Bits>::EMPTY
}
#[inline]
pub const fn contains(&self, other: Self) -> bool {
self.0 & other.0 == other.0
}
#[inline]
pub fn insert(&mut self, other: Self) {
*self = Self(self.0).union(other);
}
#[inline]
pub fn remove(&mut self, other: Self) {
*self = Self(self.0).difference(other);
}
#[inline]
pub fn toggle(&mut self, other: Self) {
*self = Self(self.0).symmetric_difference(other);
}
#[inline]
pub fn set(&mut self, other: Self, value: bool) {
if value { self.insert(other); } else { self.remove(other); }
}
#[inline]
#[must_use]
pub const fn intersection(self, other: Self) -> Self {
Self(self.0 & other.0)
}
#[inline]
#[must_use]
pub const fn union(self, other: Self) -> Self {
Self(self.0 | other.0)
}
#[inline]
#[must_use]
pub const fn difference(self, other: Self) -> Self {
Self(self.0 & !other.0)
}
#[inline]
#[must_use]
pub const fn symmetric_difference(self, other: Self) -> Self {
Self(self.0 ^ other.0)
}
#[inline]
#[must_use]
pub const fn complement(self) -> Self {
Self::from_bits_truncate(!self.0)
}
}
impl ::bitflags::__private::core::fmt::Binary for InternalBitFlags {
fn fmt(&self, f: &mut ::bitflags::__private::core::fmt::Formatter)
-> ::bitflags::__private::core::fmt::Result {
let inner = self.0;
::bitflags::__private::core::fmt::Binary::fmt(&inner, f)
}
}
impl ::bitflags::__private::core::fmt::Octal for InternalBitFlags {
fn fmt(&self, f: &mut ::bitflags::__private::core::fmt::Formatter)
-> ::bitflags::__private::core::fmt::Result {
let inner = self.0;
::bitflags::__private::core::fmt::Octal::fmt(&inner, f)
}
}
impl ::bitflags::__private::core::fmt::LowerHex for InternalBitFlags {
fn fmt(&self, f: &mut ::bitflags::__private::core::fmt::Formatter)
-> ::bitflags::__private::core::fmt::Result {
let inner = self.0;
::bitflags::__private::core::fmt::LowerHex::fmt(&inner, f)
}
}
impl ::bitflags::__private::core::fmt::UpperHex for InternalBitFlags {
fn fmt(&self, f: &mut ::bitflags::__private::core::fmt::Formatter)
-> ::bitflags::__private::core::fmt::Result {
let inner = self.0;
::bitflags::__private::core::fmt::UpperHex::fmt(&inner, f)
}
}
impl ::bitflags::__private::core::ops::BitOr for InternalBitFlags {
type Output = Self;
#[inline]
fn bitor(self, other: InternalBitFlags) -> Self {
self.union(other)
}
}
impl ::bitflags::__private::core::ops::BitOrAssign for
InternalBitFlags {
#[inline]
fn bitor_assign(&mut self, other: Self) { self.insert(other); }
}
impl ::bitflags::__private::core::ops::BitXor for InternalBitFlags {
type Output = Self;
#[inline]
fn bitxor(self, other: Self) -> Self {
self.symmetric_difference(other)
}
}
impl ::bitflags::__private::core::ops::BitXorAssign for
InternalBitFlags {
#[inline]
fn bitxor_assign(&mut self, other: Self) { self.toggle(other); }
}
impl ::bitflags::__private::core::ops::BitAnd for InternalBitFlags {
type Output = Self;
#[inline]
fn bitand(self, other: Self) -> Self { self.intersection(other) }
}
impl ::bitflags::__private::core::ops::BitAndAssign for
InternalBitFlags {
#[inline]
fn bitand_assign(&mut self, other: Self) {
*self =
Self::from_bits_retain(self.bits()).intersection(other);
}
}
impl ::bitflags::__private::core::ops::Sub for InternalBitFlags {
type Output = Self;
#[inline]
fn sub(self, other: Self) -> Self { self.difference(other) }
}
impl ::bitflags::__private::core::ops::SubAssign for InternalBitFlags
{
#[inline]
fn sub_assign(&mut self, other: Self) { self.remove(other); }
}
impl ::bitflags::__private::core::ops::Not for InternalBitFlags {
type Output = Self;
#[inline]
fn not(self) -> Self { self.complement() }
}
impl ::bitflags::__private::core::iter::Extend<InternalBitFlags> for
InternalBitFlags {
fn extend<T: ::bitflags::__private::core::iter::IntoIterator<Item
= Self>>(&mut self, iterator: T) {
for item in iterator { self.insert(item) }
}
}
impl ::bitflags::__private::core::iter::FromIterator<InternalBitFlags>
for InternalBitFlags {
fn from_iter<T: ::bitflags::__private::core::iter::IntoIterator<Item
= Self>>(iterator: T) -> Self {
use ::bitflags::__private::core::iter::Extend;
let mut result = Self::empty();
result.extend(iterator);
result
}
}
impl InternalBitFlags {
#[inline]
pub const fn iter(&self)
-> ::bitflags::iter::Iter<RemapPathScopeComponents> {
::bitflags::iter::Iter::__private_const_new(<RemapPathScopeComponents
as ::bitflags::Flags>::FLAGS,
RemapPathScopeComponents::from_bits_retain(self.bits()),
RemapPathScopeComponents::from_bits_retain(self.bits()))
}
#[inline]
pub const fn iter_names(&self)
-> ::bitflags::iter::IterNames<RemapPathScopeComponents> {
::bitflags::iter::IterNames::__private_const_new(<RemapPathScopeComponents
as ::bitflags::Flags>::FLAGS,
RemapPathScopeComponents::from_bits_retain(self.bits()),
RemapPathScopeComponents::from_bits_retain(self.bits()))
}
}
impl ::bitflags::__private::core::iter::IntoIterator for
InternalBitFlags {
type Item = RemapPathScopeComponents;
type IntoIter = ::bitflags::iter::Iter<RemapPathScopeComponents>;
fn into_iter(self) -> Self::IntoIter { self.iter() }
}
impl InternalBitFlags {
#[inline]
pub fn bits_mut(&mut self) -> &mut u8 { &mut self.0 }
}
#[allow(dead_code, deprecated, unused_attributes)]
impl RemapPathScopeComponents {
#[inline]
pub const fn empty() -> Self { Self(InternalBitFlags::empty()) }
#[inline]
pub const fn all() -> Self { Self(InternalBitFlags::all()) }
#[inline]
pub const fn bits(&self) -> u8 { self.0.bits() }
#[inline]
pub const fn from_bits(bits: u8)
-> ::bitflags::__private::core::option::Option<Self> {
match InternalBitFlags::from_bits(bits) {
::bitflags::__private::core::option::Option::Some(bits) =>
::bitflags::__private::core::option::Option::Some(Self(bits)),
::bitflags::__private::core::option::Option::None =>
::bitflags::__private::core::option::Option::None,
}
}
#[inline]
pub const fn from_bits_truncate(bits: u8) -> Self {
Self(InternalBitFlags::from_bits_truncate(bits))
}
#[inline]
pub const fn from_bits_retain(bits: u8) -> Self {
Self(InternalBitFlags::from_bits_retain(bits))
}
#[inline]
pub fn from_name(name: &str)
-> ::bitflags::__private::core::option::Option<Self> {
match InternalBitFlags::from_name(name) {
::bitflags::__private::core::option::Option::Some(bits) =>
::bitflags::__private::core::option::Option::Some(Self(bits)),
::bitflags::__private::core::option::Option::None =>
::bitflags::__private::core::option::Option::None,
}
}
#[inline]
pub const fn is_empty(&self) -> bool { self.0.is_empty() }
#[inline]
pub const fn is_all(&self) -> bool { self.0.is_all() }
#[inline]
pub const fn intersects(&self, other: Self) -> bool {
self.0.intersects(other.0)
}
#[inline]
pub const fn contains(&self, other: Self) -> bool {
self.0.contains(other.0)
}
#[inline]
pub fn insert(&mut self, other: Self) { self.0.insert(other.0) }
#[inline]
pub fn remove(&mut self, other: Self) { self.0.remove(other.0) }
#[inline]
pub fn toggle(&mut self, other: Self) { self.0.toggle(other.0) }
#[inline]
pub fn set(&mut self, other: Self, value: bool) {
self.0.set(other.0, value)
}
#[inline]
#[must_use]
pub const fn intersection(self, other: Self) -> Self {
Self(self.0.intersection(other.0))
}
#[inline]
#[must_use]
pub const fn union(self, other: Self) -> Self {
Self(self.0.union(other.0))
}
#[inline]
#[must_use]
pub const fn difference(self, other: Self) -> Self {
Self(self.0.difference(other.0))
}
#[inline]
#[must_use]
pub const fn symmetric_difference(self, other: Self) -> Self {
Self(self.0.symmetric_difference(other.0))
}
#[inline]
#[must_use]
pub const fn complement(self) -> Self {
Self(self.0.complement())
}
}
impl ::bitflags::__private::core::fmt::Binary for
RemapPathScopeComponents {
fn fmt(&self, f: &mut ::bitflags::__private::core::fmt::Formatter)
-> ::bitflags::__private::core::fmt::Result {
let inner = self.0;
::bitflags::__private::core::fmt::Binary::fmt(&inner, f)
}
}
impl ::bitflags::__private::core::fmt::Octal for
RemapPathScopeComponents {
fn fmt(&self, f: &mut ::bitflags::__private::core::fmt::Formatter)
-> ::bitflags::__private::core::fmt::Result {
let inner = self.0;
::bitflags::__private::core::fmt::Octal::fmt(&inner, f)
}
}
impl ::bitflags::__private::core::fmt::LowerHex for
RemapPathScopeComponents {
fn fmt(&self, f: &mut ::bitflags::__private::core::fmt::Formatter)
-> ::bitflags::__private::core::fmt::Result {
let inner = self.0;
::bitflags::__private::core::fmt::LowerHex::fmt(&inner, f)
}
}
impl ::bitflags::__private::core::fmt::UpperHex for
RemapPathScopeComponents {
fn fmt(&self, f: &mut ::bitflags::__private::core::fmt::Formatter)
-> ::bitflags::__private::core::fmt::Result {
let inner = self.0;
::bitflags::__private::core::fmt::UpperHex::fmt(&inner, f)
}
}
impl ::bitflags::__private::core::ops::BitOr for
RemapPathScopeComponents {
type Output = Self;
#[inline]
fn bitor(self, other: RemapPathScopeComponents) -> Self {
self.union(other)
}
}
impl ::bitflags::__private::core::ops::BitOrAssign for
RemapPathScopeComponents {
#[inline]
fn bitor_assign(&mut self, other: Self) { self.insert(other); }
}
impl ::bitflags::__private::core::ops::BitXor for
RemapPathScopeComponents {
type Output = Self;
#[inline]
fn bitxor(self, other: Self) -> Self {
self.symmetric_difference(other)
}
}
impl ::bitflags::__private::core::ops::BitXorAssign for
RemapPathScopeComponents {
#[inline]
fn bitxor_assign(&mut self, other: Self) { self.toggle(other); }
}
impl ::bitflags::__private::core::ops::BitAnd for
RemapPathScopeComponents {
type Output = Self;
#[inline]
fn bitand(self, other: Self) -> Self { self.intersection(other) }
}
impl ::bitflags::__private::core::ops::BitAndAssign for
RemapPathScopeComponents {
#[inline]
fn bitand_assign(&mut self, other: Self) {
*self =
Self::from_bits_retain(self.bits()).intersection(other);
}
}
impl ::bitflags::__private::core::ops::Sub for
RemapPathScopeComponents {
type Output = Self;
#[inline]
fn sub(self, other: Self) -> Self { self.difference(other) }
}
impl ::bitflags::__private::core::ops::SubAssign for
RemapPathScopeComponents {
#[inline]
fn sub_assign(&mut self, other: Self) { self.remove(other); }
}
impl ::bitflags::__private::core::ops::Not for
RemapPathScopeComponents {
type Output = Self;
#[inline]
fn not(self) -> Self { self.complement() }
}
impl ::bitflags::__private::core::iter::Extend<RemapPathScopeComponents>
for RemapPathScopeComponents {
fn extend<T: ::bitflags::__private::core::iter::IntoIterator<Item
= Self>>(&mut self, iterator: T) {
for item in iterator { self.insert(item) }
}
}
impl ::bitflags::__private::core::iter::FromIterator<RemapPathScopeComponents>
for RemapPathScopeComponents {
fn from_iter<T: ::bitflags::__private::core::iter::IntoIterator<Item
= Self>>(iterator: T) -> Self {
use ::bitflags::__private::core::iter::Extend;
let mut result = Self::empty();
result.extend(iterator);
result
}
}
impl RemapPathScopeComponents {
#[inline]
pub const fn iter(&self)
-> ::bitflags::iter::Iter<RemapPathScopeComponents> {
::bitflags::iter::Iter::__private_const_new(<RemapPathScopeComponents
as ::bitflags::Flags>::FLAGS,
RemapPathScopeComponents::from_bits_retain(self.bits()),
RemapPathScopeComponents::from_bits_retain(self.bits()))
}
#[inline]
pub const fn iter_names(&self)
-> ::bitflags::iter::IterNames<RemapPathScopeComponents> {
::bitflags::iter::IterNames::__private_const_new(<RemapPathScopeComponents
as ::bitflags::Flags>::FLAGS,
RemapPathScopeComponents::from_bits_retain(self.bits()),
RemapPathScopeComponents::from_bits_retain(self.bits()))
}
}
impl ::bitflags::__private::core::iter::IntoIterator for
RemapPathScopeComponents {
type Item = RemapPathScopeComponents;
type IntoIter = ::bitflags::iter::Iter<RemapPathScopeComponents>;
fn into_iter(self) -> Self::IntoIter { self.iter() }
}
};bitflags::bitflags! {
224 #[derive(Debug, Eq, PartialEq, Clone, Copy, Ord, PartialOrd, Hash)]
226 pub struct RemapPathScopeComponents: u8 {
227 const MACRO = 1 << 0;
229 const DIAGNOSTICS = 1 << 1;
231 const DEBUGINFO = 1 << 3;
233 const COVERAGE = 1 << 4;
235 const DOCUMENTATION = 1 << 5;
237
238 const OBJECT = Self::MACRO.bits() | Self::DEBUGINFO.bits() | Self::COVERAGE.bits();
241 }
242}
243
// Serialize the flag set as its raw `u8` bit pattern.
impl<E: Encoder> Encodable<E> for RemapPathScopeComponents {
    #[inline]
    fn encode(&self, s: &mut E) {
        s.emit_u8(self.bits());
    }
}
250
// Deserialize from the raw `u8` bit pattern written by `encode`.
impl<D: Decoder> Decodable<D> for RemapPathScopeComponents {
    #[inline]
    fn decode(s: &mut D) -> RemapPathScopeComponents {
        // `from_bits` rejects unknown bits, so corrupted data panics here
        // instead of yielding a flag set with undefined components.
        RemapPathScopeComponents::from_bits(s.read_u8())
            .expect("invalid bits for RemapPathScopeComponents")
    }
}
258
259#[derive(#[automatically_derived]
impl ::core::fmt::Debug for RealFileName {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
::core::fmt::Formatter::debug_struct_field3_finish(f, "RealFileName",
"local", &self.local, "maybe_remapped", &self.maybe_remapped,
"scopes", &&self.scopes)
}
}Debug, #[automatically_derived]
impl ::core::cmp::Eq for RealFileName {
#[inline]
#[doc(hidden)]
#[coverage(off)]
fn assert_receiver_is_total_eq(&self) {
let _: ::core::cmp::AssertParamIsEq<Option<InnerRealFileName>>;
let _: ::core::cmp::AssertParamIsEq<InnerRealFileName>;
let _: ::core::cmp::AssertParamIsEq<RemapPathScopeComponents>;
}
}Eq, #[automatically_derived]
impl ::core::cmp::PartialEq for RealFileName {
#[inline]
fn eq(&self, other: &RealFileName) -> bool {
self.local == other.local &&
self.maybe_remapped == other.maybe_remapped &&
self.scopes == other.scopes
}
}PartialEq, #[automatically_derived]
impl ::core::clone::Clone for RealFileName {
#[inline]
fn clone(&self) -> RealFileName {
RealFileName {
local: ::core::clone::Clone::clone(&self.local),
maybe_remapped: ::core::clone::Clone::clone(&self.maybe_remapped),
scopes: ::core::clone::Clone::clone(&self.scopes),
}
}
}Clone, #[automatically_derived]
impl ::core::cmp::Ord for RealFileName {
#[inline]
fn cmp(&self, other: &RealFileName) -> ::core::cmp::Ordering {
match ::core::cmp::Ord::cmp(&self.local, &other.local) {
::core::cmp::Ordering::Equal =>
match ::core::cmp::Ord::cmp(&self.maybe_remapped,
&other.maybe_remapped) {
::core::cmp::Ordering::Equal =>
::core::cmp::Ord::cmp(&self.scopes, &other.scopes),
cmp => cmp,
},
cmp => cmp,
}
}
}Ord, #[automatically_derived]
impl ::core::cmp::PartialOrd for RealFileName {
#[inline]
fn partial_cmp(&self, other: &RealFileName)
-> ::core::option::Option<::core::cmp::Ordering> {
match ::core::cmp::PartialOrd::partial_cmp(&self.local, &other.local)
{
::core::option::Option::Some(::core::cmp::Ordering::Equal) =>
match ::core::cmp::PartialOrd::partial_cmp(&self.maybe_remapped,
&other.maybe_remapped) {
::core::option::Option::Some(::core::cmp::Ordering::Equal)
=>
::core::cmp::PartialOrd::partial_cmp(&self.scopes,
&other.scopes),
cmp => cmp,
},
cmp => cmp,
}
}
}PartialOrd, const _: () =
{
impl<__D: ::rustc_span::SpanDecoder> ::rustc_serialize::Decodable<__D>
for RealFileName {
fn decode(__decoder: &mut __D) -> Self {
RealFileName {
local: ::rustc_serialize::Decodable::decode(__decoder),
maybe_remapped: ::rustc_serialize::Decodable::decode(__decoder),
scopes: ::rustc_serialize::Decodable::decode(__decoder),
}
}
}
};Decodable, const _: () =
{
impl<__E: ::rustc_span::SpanEncoder> ::rustc_serialize::Encodable<__E>
for RealFileName {
fn encode(&self, __encoder: &mut __E) {
match *self {
RealFileName {
local: ref __binding_0,
maybe_remapped: ref __binding_1,
scopes: ref __binding_2 } => {
::rustc_serialize::Encodable::<__E>::encode(__binding_0,
__encoder);
::rustc_serialize::Encodable::<__E>::encode(__binding_1,
__encoder);
::rustc_serialize::Encodable::<__E>::encode(__binding_2,
__encoder);
}
}
}
}
};Encodable)]
291pub struct RealFileName {
292 local: Option<InnerRealFileName>,
294 maybe_remapped: InnerRealFileName,
296 scopes: RemapPathScopeComponents,
298}
299
300#[derive(#[automatically_derived]
impl ::core::fmt::Debug for InnerRealFileName {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
::core::fmt::Formatter::debug_struct_field3_finish(f,
"InnerRealFileName", "name", &self.name, "working_directory",
&self.working_directory, "embeddable_name",
&&self.embeddable_name)
}
}Debug, #[automatically_derived]
impl ::core::cmp::Eq for InnerRealFileName {
#[inline]
#[doc(hidden)]
#[coverage(off)]
fn assert_receiver_is_total_eq(&self) {
let _: ::core::cmp::AssertParamIsEq<PathBuf>;
}
}Eq, #[automatically_derived]
impl ::core::cmp::PartialEq for InnerRealFileName {
#[inline]
fn eq(&self, other: &InnerRealFileName) -> bool {
self.name == other.name &&
self.working_directory == other.working_directory &&
self.embeddable_name == other.embeddable_name
}
}PartialEq, #[automatically_derived]
impl ::core::clone::Clone for InnerRealFileName {
#[inline]
fn clone(&self) -> InnerRealFileName {
InnerRealFileName {
name: ::core::clone::Clone::clone(&self.name),
working_directory: ::core::clone::Clone::clone(&self.working_directory),
embeddable_name: ::core::clone::Clone::clone(&self.embeddable_name),
}
}
}Clone, #[automatically_derived]
impl ::core::cmp::Ord for InnerRealFileName {
#[inline]
fn cmp(&self, other: &InnerRealFileName) -> ::core::cmp::Ordering {
match ::core::cmp::Ord::cmp(&self.name, &other.name) {
::core::cmp::Ordering::Equal =>
match ::core::cmp::Ord::cmp(&self.working_directory,
&other.working_directory) {
::core::cmp::Ordering::Equal =>
::core::cmp::Ord::cmp(&self.embeddable_name,
&other.embeddable_name),
cmp => cmp,
},
cmp => cmp,
}
}
}Ord, #[automatically_derived]
impl ::core::cmp::PartialOrd for InnerRealFileName {
#[inline]
fn partial_cmp(&self, other: &InnerRealFileName)
-> ::core::option::Option<::core::cmp::Ordering> {
match ::core::cmp::PartialOrd::partial_cmp(&self.name, &other.name) {
::core::option::Option::Some(::core::cmp::Ordering::Equal) =>
match ::core::cmp::PartialOrd::partial_cmp(&self.working_directory,
&other.working_directory) {
::core::option::Option::Some(::core::cmp::Ordering::Equal)
=>
::core::cmp::PartialOrd::partial_cmp(&self.embeddable_name,
&other.embeddable_name),
cmp => cmp,
},
cmp => cmp,
}
}
}PartialOrd, const _: () =
{
impl<__D: ::rustc_span::SpanDecoder> ::rustc_serialize::Decodable<__D>
for InnerRealFileName {
fn decode(__decoder: &mut __D) -> Self {
InnerRealFileName {
name: ::rustc_serialize::Decodable::decode(__decoder),
working_directory: ::rustc_serialize::Decodable::decode(__decoder),
embeddable_name: ::rustc_serialize::Decodable::decode(__decoder),
}
}
}
};Decodable, const _: () =
{
impl<__E: ::rustc_span::SpanEncoder> ::rustc_serialize::Encodable<__E>
for InnerRealFileName {
fn encode(&self, __encoder: &mut __E) {
match *self {
InnerRealFileName {
name: ref __binding_0,
working_directory: ref __binding_1,
embeddable_name: ref __binding_2 } => {
::rustc_serialize::Encodable::<__E>::encode(__binding_0,
__encoder);
::rustc_serialize::Encodable::<__E>::encode(__binding_1,
__encoder);
::rustc_serialize::Encodable::<__E>::encode(__binding_2,
__encoder);
}
}
}
}
};Encodable, #[automatically_derived]
impl ::core::hash::Hash for InnerRealFileName {
#[inline]
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) {
::core::hash::Hash::hash(&self.name, state);
::core::hash::Hash::hash(&self.working_directory, state);
::core::hash::Hash::hash(&self.embeddable_name, state)
}
}Hash)]
304struct InnerRealFileName {
305 name: PathBuf,
307 working_directory: PathBuf,
309 embeddable_name: PathBuf,
311}
312
313impl Hash for RealFileName {
314 #[inline]
315 fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
316 if !self.was_fully_remapped() {
321 self.local.hash(state);
322 }
323 self.maybe_remapped.hash(state);
324 self.scopes.bits().hash(state);
325 }
326}
327
328impl RealFileName {
329 #[inline]
335 pub fn path(&self, scope: RemapPathScopeComponents) -> &Path {
336 if !(scope.bits().count_ones() == 1) {
{
::core::panicking::panic_fmt(format_args!("one and only one scope should be passed to `RealFileName::path`: {0:?}",
scope));
}
};assert!(
337 scope.bits().count_ones() == 1,
338 "one and only one scope should be passed to `RealFileName::path`: {scope:?}"
339 );
340 if !self.scopes.contains(scope)
341 && let Some(local_name) = &self.local
342 {
343 local_name.name.as_path()
344 } else {
345 self.maybe_remapped.name.as_path()
346 }
347 }
348
349 #[inline]
360 pub fn embeddable_name(&self, scope: RemapPathScopeComponents) -> (&Path, &Path) {
361 if !(scope.bits().count_ones() == 1) {
{
::core::panicking::panic_fmt(format_args!("one and only one scope should be passed to `RealFileName::embeddable_path`: {0:?}",
scope));
}
};assert!(
362 scope.bits().count_ones() == 1,
363 "one and only one scope should be passed to `RealFileName::embeddable_path`: {scope:?}"
364 );
365 if !self.scopes.contains(scope)
366 && let Some(local_name) = &self.local
367 {
368 (&local_name.working_directory, &local_name.embeddable_name)
369 } else {
370 (&self.maybe_remapped.working_directory, &self.maybe_remapped.embeddable_name)
371 }
372 }
373
374 #[inline]
381 pub fn local_path(&self) -> Option<&Path> {
382 if self.was_not_remapped() {
383 Some(&self.maybe_remapped.name)
384 } else if let Some(local) = &self.local {
385 Some(&local.name)
386 } else {
387 None
388 }
389 }
390
391 #[inline]
398 pub fn into_local_path(self) -> Option<PathBuf> {
399 if self.was_not_remapped() {
400 Some(self.maybe_remapped.name)
401 } else if let Some(local) = self.local {
402 Some(local.name)
403 } else {
404 None
405 }
406 }
407
408 #[inline]
410 pub(crate) fn was_remapped(&self) -> bool {
411 !self.scopes.is_empty()
412 }
413
414 #[inline]
416 fn was_fully_remapped(&self) -> bool {
417 self.scopes.is_all()
418 }
419
420 #[inline]
422 fn was_not_remapped(&self) -> bool {
423 self.scopes.is_empty()
424 }
425
426 #[inline]
430 pub fn empty() -> RealFileName {
431 RealFileName {
432 local: Some(InnerRealFileName {
433 name: PathBuf::new(),
434 working_directory: PathBuf::new(),
435 embeddable_name: PathBuf::new(),
436 }),
437 maybe_remapped: InnerRealFileName {
438 name: PathBuf::new(),
439 working_directory: PathBuf::new(),
440 embeddable_name: PathBuf::new(),
441 },
442 scopes: RemapPathScopeComponents::empty(),
443 }
444 }
445
446 pub fn from_virtual_path(path: &Path) -> RealFileName {
450 let name = InnerRealFileName {
451 name: path.to_owned(),
452 embeddable_name: path.to_owned(),
453 working_directory: PathBuf::new(),
454 };
455 RealFileName { local: None, maybe_remapped: name, scopes: RemapPathScopeComponents::all() }
456 }
457
458 #[inline]
463 pub fn update_for_crate_metadata(&mut self) {
464 if self.was_fully_remapped() || self.was_not_remapped() {
465 self.local = None;
470 }
471 }
472
473 fn to_string_lossy<'a>(&'a self, display_pref: FileNameDisplayPreference) -> Cow<'a, str> {
477 match display_pref {
478 FileNameDisplayPreference::Remapped => self.maybe_remapped.name.to_string_lossy(),
479 FileNameDisplayPreference::Local => {
480 self.local.as_ref().unwrap_or(&self.maybe_remapped).name.to_string_lossy()
481 }
482 FileNameDisplayPreference::Short => self
483 .maybe_remapped
484 .name
485 .file_name()
486 .map_or_else(|| "".into(), |f| f.to_string_lossy()),
487 FileNameDisplayPreference::Scope(scope) => self.path(scope).to_string_lossy(),
488 }
489 }
490}
491
492#[derive(#[automatically_derived]
impl ::core::fmt::Debug for FileName {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
match self {
FileName::Real(__self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f, "Real",
&__self_0),
FileName::CfgSpec(__self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f,
"CfgSpec", &__self_0),
FileName::Anon(__self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f, "Anon",
&__self_0),
FileName::MacroExpansion(__self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f,
"MacroExpansion", &__self_0),
FileName::ProcMacroSourceCode(__self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f,
"ProcMacroSourceCode", &__self_0),
FileName::CliCrateAttr(__self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f,
"CliCrateAttr", &__self_0),
FileName::Custom(__self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f, "Custom",
&__self_0),
FileName::DocTest(__self_0, __self_1) =>
::core::fmt::Formatter::debug_tuple_field2_finish(f,
"DocTest", __self_0, &__self_1),
FileName::InlineAsm(__self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f,
"InlineAsm", &__self_0),
}
}
}Debug, #[automatically_derived]
impl ::core::cmp::Eq for FileName {
#[inline]
#[doc(hidden)]
#[coverage(off)]
fn assert_receiver_is_total_eq(&self) {
let _: ::core::cmp::AssertParamIsEq<RealFileName>;
let _: ::core::cmp::AssertParamIsEq<Hash64>;
let _: ::core::cmp::AssertParamIsEq<String>;
let _: ::core::cmp::AssertParamIsEq<PathBuf>;
let _: ::core::cmp::AssertParamIsEq<isize>;
}
}Eq, #[automatically_derived]
impl ::core::cmp::PartialEq for FileName {
#[inline]
fn eq(&self, other: &FileName) -> bool {
let __self_discr = ::core::intrinsics::discriminant_value(self);
let __arg1_discr = ::core::intrinsics::discriminant_value(other);
__self_discr == __arg1_discr &&
match (self, other) {
(FileName::Real(__self_0), FileName::Real(__arg1_0)) =>
__self_0 == __arg1_0,
(FileName::CfgSpec(__self_0), FileName::CfgSpec(__arg1_0)) =>
__self_0 == __arg1_0,
(FileName::Anon(__self_0), FileName::Anon(__arg1_0)) =>
__self_0 == __arg1_0,
(FileName::MacroExpansion(__self_0),
FileName::MacroExpansion(__arg1_0)) => __self_0 == __arg1_0,
(FileName::ProcMacroSourceCode(__self_0),
FileName::ProcMacroSourceCode(__arg1_0)) =>
__self_0 == __arg1_0,
(FileName::CliCrateAttr(__self_0),
FileName::CliCrateAttr(__arg1_0)) => __self_0 == __arg1_0,
(FileName::Custom(__self_0), FileName::Custom(__arg1_0)) =>
__self_0 == __arg1_0,
(FileName::DocTest(__self_0, __self_1),
FileName::DocTest(__arg1_0, __arg1_1)) =>
__self_0 == __arg1_0 && __self_1 == __arg1_1,
(FileName::InlineAsm(__self_0), FileName::InlineAsm(__arg1_0))
=> __self_0 == __arg1_0,
_ => unsafe { ::core::intrinsics::unreachable() }
}
}
}PartialEq, #[automatically_derived]
impl ::core::clone::Clone for FileName {
#[inline]
fn clone(&self) -> FileName {
match self {
FileName::Real(__self_0) =>
FileName::Real(::core::clone::Clone::clone(__self_0)),
FileName::CfgSpec(__self_0) =>
FileName::CfgSpec(::core::clone::Clone::clone(__self_0)),
FileName::Anon(__self_0) =>
FileName::Anon(::core::clone::Clone::clone(__self_0)),
FileName::MacroExpansion(__self_0) =>
FileName::MacroExpansion(::core::clone::Clone::clone(__self_0)),
FileName::ProcMacroSourceCode(__self_0) =>
FileName::ProcMacroSourceCode(::core::clone::Clone::clone(__self_0)),
FileName::CliCrateAttr(__self_0) =>
FileName::CliCrateAttr(::core::clone::Clone::clone(__self_0)),
FileName::Custom(__self_0) =>
FileName::Custom(::core::clone::Clone::clone(__self_0)),
FileName::DocTest(__self_0, __self_1) =>
FileName::DocTest(::core::clone::Clone::clone(__self_0),
::core::clone::Clone::clone(__self_1)),
FileName::InlineAsm(__self_0) =>
FileName::InlineAsm(::core::clone::Clone::clone(__self_0)),
}
}
}Clone, #[automatically_derived]
impl ::core::cmp::Ord for FileName {
#[inline]
fn cmp(&self, other: &FileName) -> ::core::cmp::Ordering {
let __self_discr = ::core::intrinsics::discriminant_value(self);
let __arg1_discr = ::core::intrinsics::discriminant_value(other);
match ::core::cmp::Ord::cmp(&__self_discr, &__arg1_discr) {
::core::cmp::Ordering::Equal =>
match (self, other) {
(FileName::Real(__self_0), FileName::Real(__arg1_0)) =>
::core::cmp::Ord::cmp(__self_0, __arg1_0),
(FileName::CfgSpec(__self_0), FileName::CfgSpec(__arg1_0))
=> ::core::cmp::Ord::cmp(__self_0, __arg1_0),
(FileName::Anon(__self_0), FileName::Anon(__arg1_0)) =>
::core::cmp::Ord::cmp(__self_0, __arg1_0),
(FileName::MacroExpansion(__self_0),
FileName::MacroExpansion(__arg1_0)) =>
::core::cmp::Ord::cmp(__self_0, __arg1_0),
(FileName::ProcMacroSourceCode(__self_0),
FileName::ProcMacroSourceCode(__arg1_0)) =>
::core::cmp::Ord::cmp(__self_0, __arg1_0),
(FileName::CliCrateAttr(__self_0),
FileName::CliCrateAttr(__arg1_0)) =>
::core::cmp::Ord::cmp(__self_0, __arg1_0),
(FileName::Custom(__self_0), FileName::Custom(__arg1_0)) =>
::core::cmp::Ord::cmp(__self_0, __arg1_0),
(FileName::DocTest(__self_0, __self_1),
FileName::DocTest(__arg1_0, __arg1_1)) =>
match ::core::cmp::Ord::cmp(__self_0, __arg1_0) {
::core::cmp::Ordering::Equal =>
::core::cmp::Ord::cmp(__self_1, __arg1_1),
cmp => cmp,
},
(FileName::InlineAsm(__self_0),
FileName::InlineAsm(__arg1_0)) =>
::core::cmp::Ord::cmp(__self_0, __arg1_0),
_ => unsafe { ::core::intrinsics::unreachable() }
},
cmp => cmp,
}
}
}Ord, #[automatically_derived]
impl ::core::cmp::PartialOrd for FileName {
#[inline]
fn partial_cmp(&self, other: &FileName)
-> ::core::option::Option<::core::cmp::Ordering> {
let __self_discr = ::core::intrinsics::discriminant_value(self);
let __arg1_discr = ::core::intrinsics::discriminant_value(other);
match (self, other) {
(FileName::Real(__self_0), FileName::Real(__arg1_0)) =>
::core::cmp::PartialOrd::partial_cmp(__self_0, __arg1_0),
(FileName::CfgSpec(__self_0), FileName::CfgSpec(__arg1_0)) =>
::core::cmp::PartialOrd::partial_cmp(__self_0, __arg1_0),
(FileName::Anon(__self_0), FileName::Anon(__arg1_0)) =>
::core::cmp::PartialOrd::partial_cmp(__self_0, __arg1_0),
(FileName::MacroExpansion(__self_0),
FileName::MacroExpansion(__arg1_0)) =>
::core::cmp::PartialOrd::partial_cmp(__self_0, __arg1_0),
(FileName::ProcMacroSourceCode(__self_0),
FileName::ProcMacroSourceCode(__arg1_0)) =>
::core::cmp::PartialOrd::partial_cmp(__self_0, __arg1_0),
(FileName::CliCrateAttr(__self_0),
FileName::CliCrateAttr(__arg1_0)) =>
::core::cmp::PartialOrd::partial_cmp(__self_0, __arg1_0),
(FileName::Custom(__self_0), FileName::Custom(__arg1_0)) =>
::core::cmp::PartialOrd::partial_cmp(__self_0, __arg1_0),
(FileName::DocTest(__self_0, __self_1),
FileName::DocTest(__arg1_0, __arg1_1)) =>
match ::core::cmp::PartialOrd::partial_cmp(__self_0, __arg1_0)
{
::core::option::Option::Some(::core::cmp::Ordering::Equal)
=> ::core::cmp::PartialOrd::partial_cmp(__self_1, __arg1_1),
cmp => cmp,
},
(FileName::InlineAsm(__self_0), FileName::InlineAsm(__arg1_0)) =>
::core::cmp::PartialOrd::partial_cmp(__self_0, __arg1_0),
_ =>
::core::cmp::PartialOrd::partial_cmp(&__self_discr,
&__arg1_discr),
}
}
}PartialOrd, #[automatically_derived]
impl ::core::hash::Hash for FileName {
#[inline]
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) {
let __self_discr = ::core::intrinsics::discriminant_value(self);
::core::hash::Hash::hash(&__self_discr, state);
match self {
FileName::Real(__self_0) =>
::core::hash::Hash::hash(__self_0, state),
FileName::CfgSpec(__self_0) =>
::core::hash::Hash::hash(__self_0, state),
FileName::Anon(__self_0) =>
::core::hash::Hash::hash(__self_0, state),
FileName::MacroExpansion(__self_0) =>
::core::hash::Hash::hash(__self_0, state),
FileName::ProcMacroSourceCode(__self_0) =>
::core::hash::Hash::hash(__self_0, state),
FileName::CliCrateAttr(__self_0) =>
::core::hash::Hash::hash(__self_0, state),
FileName::Custom(__self_0) =>
::core::hash::Hash::hash(__self_0, state),
FileName::DocTest(__self_0, __self_1) => {
::core::hash::Hash::hash(__self_0, state);
::core::hash::Hash::hash(__self_1, state)
}
FileName::InlineAsm(__self_0) =>
::core::hash::Hash::hash(__self_0, state),
}
}
}Hash, const _: () =
{
impl<__D: ::rustc_span::SpanDecoder> ::rustc_serialize::Decodable<__D>
for FileName {
fn decode(__decoder: &mut __D) -> Self {
match ::rustc_serialize::Decoder::read_u8(__decoder) as usize
{
0usize => {
FileName::Real(::rustc_serialize::Decodable::decode(__decoder))
}
1usize => {
FileName::CfgSpec(::rustc_serialize::Decodable::decode(__decoder))
}
2usize => {
FileName::Anon(::rustc_serialize::Decodable::decode(__decoder))
}
3usize => {
FileName::MacroExpansion(::rustc_serialize::Decodable::decode(__decoder))
}
4usize => {
FileName::ProcMacroSourceCode(::rustc_serialize::Decodable::decode(__decoder))
}
5usize => {
FileName::CliCrateAttr(::rustc_serialize::Decodable::decode(__decoder))
}
6usize => {
FileName::Custom(::rustc_serialize::Decodable::decode(__decoder))
}
7usize => {
FileName::DocTest(::rustc_serialize::Decodable::decode(__decoder),
::rustc_serialize::Decodable::decode(__decoder))
}
8usize => {
FileName::InlineAsm(::rustc_serialize::Decodable::decode(__decoder))
}
n => {
::core::panicking::panic_fmt(format_args!("invalid enum variant tag while decoding `FileName`, expected 0..9, actual {0}",
n));
}
}
}
}
};Decodable, const _: () =
{
impl<__E: ::rustc_span::SpanEncoder> ::rustc_serialize::Encodable<__E>
for FileName {
fn encode(&self, __encoder: &mut __E) {
let disc =
match *self {
FileName::Real(ref __binding_0) => { 0usize }
FileName::CfgSpec(ref __binding_0) => { 1usize }
FileName::Anon(ref __binding_0) => { 2usize }
FileName::MacroExpansion(ref __binding_0) => { 3usize }
FileName::ProcMacroSourceCode(ref __binding_0) => { 4usize }
FileName::CliCrateAttr(ref __binding_0) => { 5usize }
FileName::Custom(ref __binding_0) => { 6usize }
FileName::DocTest(ref __binding_0, ref __binding_1) => {
7usize
}
FileName::InlineAsm(ref __binding_0) => { 8usize }
};
::rustc_serialize::Encoder::emit_u8(__encoder, disc as u8);
match *self {
FileName::Real(ref __binding_0) => {
::rustc_serialize::Encodable::<__E>::encode(__binding_0,
__encoder);
}
FileName::CfgSpec(ref __binding_0) => {
::rustc_serialize::Encodable::<__E>::encode(__binding_0,
__encoder);
}
FileName::Anon(ref __binding_0) => {
::rustc_serialize::Encodable::<__E>::encode(__binding_0,
__encoder);
}
FileName::MacroExpansion(ref __binding_0) => {
::rustc_serialize::Encodable::<__E>::encode(__binding_0,
__encoder);
}
FileName::ProcMacroSourceCode(ref __binding_0) => {
::rustc_serialize::Encodable::<__E>::encode(__binding_0,
__encoder);
}
FileName::CliCrateAttr(ref __binding_0) => {
::rustc_serialize::Encodable::<__E>::encode(__binding_0,
__encoder);
}
FileName::Custom(ref __binding_0) => {
::rustc_serialize::Encodable::<__E>::encode(__binding_0,
__encoder);
}
FileName::DocTest(ref __binding_0, ref __binding_1) => {
::rustc_serialize::Encodable::<__E>::encode(__binding_0,
__encoder);
::rustc_serialize::Encodable::<__E>::encode(__binding_1,
__encoder);
}
FileName::InlineAsm(ref __binding_0) => {
::rustc_serialize::Encodable::<__E>::encode(__binding_0,
__encoder);
}
}
}
}
};Encodable)]
494pub enum FileName {
495 Real(RealFileName),
496 CfgSpec(Hash64),
498 Anon(Hash64),
500 MacroExpansion(Hash64),
503 ProcMacroSourceCode(Hash64),
504 CliCrateAttr(Hash64),
506 Custom(String),
508 DocTest(PathBuf, isize),
509 InlineAsm(Hash64),
511}
512
513pub struct FileNameDisplay<'a> {
514 inner: &'a FileName,
515 display_pref: FileNameDisplayPreference,
516}
517
518#[derive(#[automatically_derived]
impl ::core::clone::Clone for FileNameDisplayPreference {
#[inline]
fn clone(&self) -> FileNameDisplayPreference {
let _: ::core::clone::AssertParamIsClone<RemapPathScopeComponents>;
*self
}
}Clone, #[automatically_derived]
impl ::core::marker::Copy for FileNameDisplayPreference { }Copy)]
520enum FileNameDisplayPreference {
521 Remapped,
522 Local,
523 Short,
524 Scope(RemapPathScopeComponents),
525}
526
527impl fmt::Display for FileNameDisplay<'_> {
528 fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
529 use FileName::*;
530 match *self.inner {
531 Real(ref name) => {
532 fmt.write_fmt(format_args!("{0}", name.to_string_lossy(self.display_pref)))write!(fmt, "{}", name.to_string_lossy(self.display_pref))
533 }
534 CfgSpec(_) => fmt.write_fmt(format_args!("<cfgspec>"))write!(fmt, "<cfgspec>"),
535 MacroExpansion(_) => fmt.write_fmt(format_args!("<macro expansion>"))write!(fmt, "<macro expansion>"),
536 Anon(_) => fmt.write_fmt(format_args!("<anon>"))write!(fmt, "<anon>"),
537 ProcMacroSourceCode(_) => fmt.write_fmt(format_args!("<proc-macro source code>"))write!(fmt, "<proc-macro source code>"),
538 CliCrateAttr(_) => fmt.write_fmt(format_args!("<crate attribute>"))write!(fmt, "<crate attribute>"),
539 Custom(ref s) => fmt.write_fmt(format_args!("<{0}>", s))write!(fmt, "<{s}>"),
540 DocTest(ref path, _) => fmt.write_fmt(format_args!("{0}", path.display()))write!(fmt, "{}", path.display()),
541 InlineAsm(_) => fmt.write_fmt(format_args!("<inline asm>"))write!(fmt, "<inline asm>"),
542 }
543 }
544}
545
546impl<'a> FileNameDisplay<'a> {
547 pub fn to_string_lossy(&self) -> Cow<'a, str> {
548 match self.inner {
549 FileName::Real(inner) => inner.to_string_lossy(self.display_pref),
550 _ => Cow::from(self.to_string()),
551 }
552 }
553}
554
555impl FileName {
556 pub fn is_real(&self) -> bool {
557 use FileName::*;
558 match *self {
559 Real(_) => true,
560 Anon(_)
561 | MacroExpansion(_)
562 | ProcMacroSourceCode(_)
563 | CliCrateAttr(_)
564 | Custom(_)
565 | CfgSpec(_)
566 | DocTest(_, _)
567 | InlineAsm(_) => false,
568 }
569 }
570
571 #[inline]
576 pub fn prefer_remapped_unconditionally(&self) -> FileNameDisplay<'_> {
577 FileNameDisplay { inner: self, display_pref: FileNameDisplayPreference::Remapped }
578 }
579
580 #[inline]
585 pub fn prefer_local_unconditionally(&self) -> FileNameDisplay<'_> {
586 FileNameDisplay { inner: self, display_pref: FileNameDisplayPreference::Local }
587 }
588
589 #[inline]
591 pub fn short(&self) -> FileNameDisplay<'_> {
592 FileNameDisplay { inner: self, display_pref: FileNameDisplayPreference::Short }
593 }
594
595 #[inline]
597 pub fn display(&self, scope: RemapPathScopeComponents) -> FileNameDisplay<'_> {
598 FileNameDisplay { inner: self, display_pref: FileNameDisplayPreference::Scope(scope) }
599 }
600
601 pub fn macro_expansion_source_code(src: &str) -> FileName {
602 let mut hasher = StableHasher::new();
603 src.hash(&mut hasher);
604 FileName::MacroExpansion(hasher.finish())
605 }
606
607 pub fn anon_source_code(src: &str) -> FileName {
608 let mut hasher = StableHasher::new();
609 src.hash(&mut hasher);
610 FileName::Anon(hasher.finish())
611 }
612
613 pub fn proc_macro_source_code(src: &str) -> FileName {
614 let mut hasher = StableHasher::new();
615 src.hash(&mut hasher);
616 FileName::ProcMacroSourceCode(hasher.finish())
617 }
618
619 pub fn cfg_spec_source_code(src: &str) -> FileName {
620 let mut hasher = StableHasher::new();
621 src.hash(&mut hasher);
622 FileName::CfgSpec(hasher.finish())
623 }
624
625 pub fn cli_crate_attr_source_code(src: &str) -> FileName {
626 let mut hasher = StableHasher::new();
627 src.hash(&mut hasher);
628 FileName::CliCrateAttr(hasher.finish())
629 }
630
631 pub fn doc_test_source_code(path: PathBuf, line: isize) -> FileName {
632 FileName::DocTest(path, line)
633 }
634
635 pub fn inline_asm_source_code(src: &str) -> FileName {
636 let mut hasher = StableHasher::new();
637 src.hash(&mut hasher);
638 FileName::InlineAsm(hasher.finish())
639 }
640
641 pub fn into_local_path(self) -> Option<PathBuf> {
646 match self {
647 FileName::Real(path) => path.into_local_path(),
648 FileName::DocTest(path, _) => Some(path),
649 _ => None,
650 }
651 }
652}
653
654#[derive(#[automatically_derived]
impl ::core::clone::Clone for SpanData {
#[inline]
fn clone(&self) -> SpanData {
let _: ::core::clone::AssertParamIsClone<BytePos>;
let _: ::core::clone::AssertParamIsClone<SyntaxContext>;
let _: ::core::clone::AssertParamIsClone<Option<LocalDefId>>;
*self
}
}Clone, #[automatically_derived]
impl ::core::marker::Copy for SpanData { }Copy, #[automatically_derived]
impl ::core::hash::Hash for SpanData {
#[inline]
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) {
::core::hash::Hash::hash(&self.lo, state);
::core::hash::Hash::hash(&self.hi, state);
::core::hash::Hash::hash(&self.ctxt, state);
::core::hash::Hash::hash(&self.parent, state)
}
}Hash, #[automatically_derived]
impl ::core::cmp::PartialEq for SpanData {
#[inline]
fn eq(&self, other: &SpanData) -> bool {
self.lo == other.lo && self.hi == other.hi && self.ctxt == other.ctxt
&& self.parent == other.parent
}
}PartialEq, #[automatically_derived]
impl ::core::cmp::Eq for SpanData {
#[inline]
#[doc(hidden)]
#[coverage(off)]
fn assert_receiver_is_total_eq(&self) {
let _: ::core::cmp::AssertParamIsEq<BytePos>;
let _: ::core::cmp::AssertParamIsEq<SyntaxContext>;
let _: ::core::cmp::AssertParamIsEq<Option<LocalDefId>>;
}
}Eq)]
670#[automatically_derived]
impl ::core::cmp::Ord for SpanData {
#[inline]
fn cmp(&self, __other: &Self) -> ::core::cmp::Ordering {
match (self, __other) {
(SpanData {
lo: ref __field_lo,
hi: ref __field_hi,
ctxt: ref __field_ctxt,
parent: ref __field_parent }, SpanData {
lo: ref __other_field_lo,
hi: ref __other_field_hi,
ctxt: ref __other_field_ctxt,
parent: ref __other_field_parent }) =>
match ::core::cmp::Ord::cmp(__field_lo, __other_field_lo) {
::core::cmp::Ordering::Equal =>
match ::core::cmp::Ord::cmp(__field_hi, __other_field_hi) {
::core::cmp::Ordering::Equal =>
::core::cmp::Ordering::Equal,
__cmp => __cmp,
},
__cmp => __cmp,
},
}
}
}#[derive_where(PartialOrd, Ord)]
671pub struct SpanData {
672 pub lo: BytePos,
673 pub hi: BytePos,
674 #[derive_where(skip)]
677 pub ctxt: SyntaxContext,
680 #[derive_where(skip)]
681 pub parent: Option<LocalDefId>,
684}
685
686impl SpanData {
687 #[inline]
688 pub fn span(&self) -> Span {
689 Span::new(self.lo, self.hi, self.ctxt, self.parent)
690 }
691 #[inline]
692 pub fn with_lo(&self, lo: BytePos) -> Span {
693 Span::new(lo, self.hi, self.ctxt, self.parent)
694 }
695 #[inline]
696 pub fn with_hi(&self, hi: BytePos) -> Span {
697 Span::new(self.lo, hi, self.ctxt, self.parent)
698 }
699 #[inline]
701 fn with_ctxt(&self, ctxt: SyntaxContext) -> Span {
702 Span::new(self.lo, self.hi, ctxt, self.parent)
703 }
704 #[inline]
706 fn with_parent(&self, parent: Option<LocalDefId>) -> Span {
707 Span::new(self.lo, self.hi, self.ctxt, parent)
708 }
709 #[inline]
711 pub fn is_dummy(self) -> bool {
712 self.lo.0 == 0 && self.hi.0 == 0
713 }
714 pub fn contains(self, other: Self) -> bool {
716 self.lo <= other.lo && other.hi <= self.hi
717 }
718}
719
720impl Default for SpanData {
721 fn default() -> Self {
722 Self { lo: BytePos(0), hi: BytePos(0), ctxt: SyntaxContext::root(), parent: None }
723 }
724}
725
726impl PartialOrd for Span {
727 fn partial_cmp(&self, rhs: &Self) -> Option<Ordering> {
728 PartialOrd::partial_cmp(&self.data(), &rhs.data())
729 }
730}
731impl Ord for Span {
732 fn cmp(&self, rhs: &Self) -> Ordering {
733 Ord::cmp(&self.data(), &rhs.data())
734 }
735}
736
737impl Span {
738 #[inline]
739 pub fn lo(self) -> BytePos {
740 self.data().lo
741 }
742 #[inline]
743 pub fn with_lo(self, lo: BytePos) -> Span {
744 self.data().with_lo(lo)
745 }
746 #[inline]
747 pub fn hi(self) -> BytePos {
748 self.data().hi
749 }
750 #[inline]
751 pub fn with_hi(self, hi: BytePos) -> Span {
752 self.data().with_hi(hi)
753 }
754 #[inline]
755 pub fn with_ctxt(self, ctxt: SyntaxContext) -> Span {
756 self.map_ctxt(|_| ctxt)
757 }
758
759 #[inline]
760 pub fn is_visible(self, sm: &SourceMap) -> bool {
761 !self.is_dummy() && sm.is_span_accessible(self)
762 }
763
764 #[inline]
769 pub fn in_external_macro(self, sm: &SourceMap) -> bool {
770 self.ctxt().in_external_macro(sm)
771 }
772
773 pub fn in_derive_expansion(self) -> bool {
775 #[allow(non_exhaustive_omitted_patterns)] match self.ctxt().outer_expn_data().kind
{
ExpnKind::Macro(MacroKind::Derive, _) => true,
_ => false,
}matches!(self.ctxt().outer_expn_data().kind, ExpnKind::Macro(MacroKind::Derive, _))
776 }
777
778 pub fn is_from_async_await(self) -> bool {
780 #[allow(non_exhaustive_omitted_patterns)] match self.ctxt().outer_expn_data().kind
{
ExpnKind::Desugaring(DesugaringKind::Async | DesugaringKind::Await) =>
true,
_ => false,
}matches!(
781 self.ctxt().outer_expn_data().kind,
782 ExpnKind::Desugaring(DesugaringKind::Async | DesugaringKind::Await),
783 )
784 }
785
786 pub fn can_be_used_for_suggestions(self) -> bool {
788 !self.from_expansion()
789 || (self.in_derive_expansion()
793 && self.parent_callsite().map(|p| (p.lo(), p.hi())) != Some((self.lo(), self.hi())))
794 }
795
796 #[inline]
797 pub fn with_root_ctxt(lo: BytePos, hi: BytePos) -> Span {
798 Span::new(lo, hi, SyntaxContext::root(), None)
799 }
800
801 #[inline]
803 pub fn shrink_to_lo(self) -> Span {
804 let span = self.data_untracked();
805 span.with_hi(span.lo)
806 }
807 #[inline]
809 pub fn shrink_to_hi(self) -> Span {
810 let span = self.data_untracked();
811 span.with_lo(span.hi)
812 }
813
814 #[inline]
815 pub fn is_empty(self) -> bool {
817 let span = self.data_untracked();
818 span.hi == span.lo
819 }
820
821 pub fn substitute_dummy(self, other: Span) -> Span {
823 if self.is_dummy() { other } else { self }
824 }
825
826 pub fn contains(self, other: Span) -> bool {
828 let span = self.data();
829 let other = other.data();
830 span.contains(other)
831 }
832
833 pub fn overlaps(self, other: Span) -> bool {
835 let span = self.data();
836 let other = other.data();
837 span.lo < other.hi && other.lo < span.hi
838 }
839
840 pub fn overlaps_or_adjacent(self, other: Span) -> bool {
842 let span = self.data();
843 let other = other.data();
844 span.lo <= other.hi && other.lo <= span.hi
845 }
846
847 pub fn source_equal(self, other: Span) -> bool {
852 let span = self.data();
853 let other = other.data();
854 span.lo == other.lo && span.hi == other.hi
855 }
856
857 pub fn trim_start(self, other: Span) -> Option<Span> {
859 let span = self.data();
860 let other = other.data();
861 if span.hi > other.hi { Some(span.with_lo(cmp::max(span.lo, other.hi))) } else { None }
862 }
863
864 pub fn trim_end(self, other: Span) -> Option<Span> {
866 let span = self.data();
867 let other = other.data();
868 if span.lo < other.lo { Some(span.with_hi(cmp::min(span.hi, other.lo))) } else { None }
869 }
870
871 pub fn source_callsite(self) -> Span {
874 let ctxt = self.ctxt();
875 if !ctxt.is_root() { ctxt.outer_expn_data().call_site.source_callsite() } else { self }
876 }
877
878 pub fn parent_callsite(self) -> Option<Span> {
881 let ctxt = self.ctxt();
882 (!ctxt.is_root()).then(|| ctxt.outer_expn_data().call_site)
883 }
884
885 pub fn find_ancestor_inside(mut self, outer: Span) -> Option<Span> {
898 while !outer.contains(self) {
899 self = self.parent_callsite()?;
900 }
901 Some(self)
902 }
903
904 pub fn find_ancestor_in_same_ctxt(mut self, other: Span) -> Option<Span> {
917 while !self.eq_ctxt(other) {
918 self = self.parent_callsite()?;
919 }
920 Some(self)
921 }
922
923 pub fn find_ancestor_inside_same_ctxt(mut self, outer: Span) -> Option<Span> {
936 while !outer.contains(self) || !self.eq_ctxt(outer) {
937 self = self.parent_callsite()?;
938 }
939 Some(self)
940 }
941
942 pub fn find_ancestor_not_from_extern_macro(mut self, sm: &SourceMap) -> Option<Span> {
956 while self.in_external_macro(sm) {
957 self = self.parent_callsite()?;
958 }
959 Some(self)
960 }
961
962 pub fn find_ancestor_not_from_macro(mut self) -> Option<Span> {
975 while self.from_expansion() {
976 self = self.parent_callsite()?;
977 }
978 Some(self)
979 }
980
981 pub fn edition(self) -> edition::Edition {
983 self.ctxt().edition()
984 }
985
    /// Returns `true` if this span's edition is Rust 2015.
    #[inline]
    pub fn is_rust_2015(self) -> bool {
        self.edition().is_rust_2015()
    }
991
    /// Returns `true` if this span's edition is Rust 2018 or later.
    #[inline]
    pub fn at_least_rust_2018(self) -> bool {
        self.edition().at_least_rust_2018()
    }
997
    /// Returns `true` if this span's edition is Rust 2021 or later.
    #[inline]
    pub fn at_least_rust_2021(self) -> bool {
        self.edition().at_least_rust_2021()
    }
1003
    /// Returns `true` if this span's edition is Rust 2024 or later.
    #[inline]
    pub fn at_least_rust_2024(self) -> bool {
        self.edition().at_least_rust_2024()
    }
1009
    /// Returns the `ExpnData` of the *outermost* expansion that produced
    /// this span (the macro call written directly in the source), or `None`
    /// when the span is not from an expansion.
    pub fn source_callee(self) -> Option<ExpnData> {
        let mut ctxt = self.ctxt();
        let mut opt_expn_data = None;
        // Walk outward through call sites; the last expansion seen before
        // reaching the root context is the outermost one.
        while !ctxt.is_root() {
            let expn_data = ctxt.outer_expn_data();
            ctxt = expn_data.call_site.ctxt();
            opt_expn_data = Some(expn_data);
        }
        opt_expn_data
    }
1025
1026 pub fn allows_unstable(self, feature: Symbol) -> bool {
1030 self.ctxt()
1031 .outer_expn_data()
1032 .allow_internal_unstable
1033 .is_some_and(|features| features.contains(&feature))
1034 }
1035
1036 pub fn is_desugaring(self, kind: DesugaringKind) -> bool {
1038 match self.ctxt().outer_expn_data().kind {
1039 ExpnKind::Desugaring(k) => k == kind,
1040 _ => false,
1041 }
1042 }
1043
1044 pub fn desugaring_kind(self) -> Option<DesugaringKind> {
1047 match self.ctxt().outer_expn_data().kind {
1048 ExpnKind::Desugaring(k) => Some(k),
1049 _ => None,
1050 }
1051 }
1052
    /// Returns `true` if the expansion that produced this span was marked
    /// `allow_internal_unsafe`.
    pub fn allows_unsafe(self) -> bool {
        self.ctxt().outer_expn_data().allow_internal_unsafe
    }
1059
    /// Returns an iterator over the expansions that led to this span: the
    /// `ExpnData` of each enclosing macro call, from innermost outward.
    pub fn macro_backtrace(mut self) -> impl Iterator<Item = ExpnData> {
        // Tracks the previous call site to detect recursive expansion.
        let mut prev_span = DUMMY_SP;
        iter::from_fn(move || {
            loop {
                let ctxt = self.ctxt();
                if ctxt.is_root() {
                    // Reached code that is not the result of any expansion.
                    return None;
                }

                let expn_data = ctxt.outer_expn_data();
                // A call site with the same byte range as the previous one
                // indicates recursion; skip it rather than emitting the
                // same frame repeatedly.
                let is_recursive = expn_data.call_site.source_equal(prev_span);

                prev_span = self;
                self = expn_data.call_site;

                if !is_recursive {
                    return Some(expn_data);
                }
            }
        })
    }
1082
1083 pub fn split_at(self, pos: u32) -> (Span, Span) {
1085 let len = self.hi().0 - self.lo().0;
1086 if true {
if !(pos <= len) {
::core::panicking::panic("assertion failed: pos <= len")
};
};debug_assert!(pos <= len);
1087
1088 let split_pos = BytePos(self.lo().0 + pos);
1089 (
1090 Span::new(self.lo(), split_pos, self.ctxt(), self.parent()),
1091 Span::new(split_pos, self.hi(), self.ctxt(), self.parent()),
1092 )
1093 }
1094
    /// Tries substituting the recorded metavariable spans for `a` and/or
    /// `b` when doing so gives the two spans a common syntax context,
    /// which makes them combinable; falls back to the originals otherwise.
    fn try_metavars(a: SpanData, b: SpanData, a_orig: Span, b_orig: Span) -> (SpanData, SpanData) {
        // Look up recorded metavariable spans for both original spans.
        match with_metavar_spans(|mspans| (mspans.get(a_orig), mspans.get(b_orig))) {
            (None, None) => {}
            (Some(meta_a), None) => {
                let meta_a = meta_a.data();
                if meta_a.ctxt == b.ctxt {
                    return (meta_a, b);
                }
            }
            (None, Some(meta_b)) => {
                let meta_b = meta_b.data();
                if a.ctxt == meta_b.ctxt {
                    return (a, meta_b);
                }
            }
            (Some(meta_a), Some(meta_b)) => {
                // Prefer substituting only one side; fall back to both.
                let meta_b = meta_b.data();
                if a.ctxt == meta_b.ctxt {
                    return (a, meta_b);
                }
                let meta_a = meta_a.data();
                if meta_a.ctxt == b.ctxt {
                    return (meta_a, b);
                } else if meta_a.ctxt == meta_b.ctxt {
                    return (meta_a, meta_b);
                }
            }
        }

        // No substitution produced matching contexts; keep the originals.
        (a, b)
    }
1127
    /// Prepares two spans for combination: `Ok` with both `SpanData`s (and
    /// their common parent, if equal) when they share a syntax context —
    /// possibly after metavariable substitution — otherwise `Err` with the
    /// span that should be used alone.
    fn prepare_to_combine(
        a_orig: Span,
        b_orig: Span,
    ) -> Result<(SpanData, SpanData, Option<LocalDefId>), Span> {
        let (a, b) = (a_orig.data(), b_orig.data());
        if a.ctxt == b.ctxt {
            return Ok((a, b, if a.parent == b.parent { a.parent } else { None }));
        }

        let (a, b) = Span::try_metavars(a, b, a_orig, b_orig);
        if a.ctxt == b.ctxt {
            return Ok((a, b, if a.parent == b.parent { a.parent } else { None }));
        }

        // Contexts still differ: return the span that is *not* at the call
        // site, i.e. the one deeper inside the expansion.
        let a_is_callsite = a.ctxt.is_root() || a.ctxt == b.span().source_callsite().ctxt();
        Err(if a_is_callsite { b_orig } else { a_orig })
    }
1153
1154 pub fn with_neighbor(self, neighbor: Span) -> Span {
1156 match Span::prepare_to_combine(self, neighbor) {
1157 Ok((this, ..)) => this.span(),
1158 Err(_) => self,
1159 }
1160 }
1161
    /// Returns a span enclosing both `self` and `end` (union of the byte
    /// ranges), using `self`'s context data when the two are combinable;
    /// falls back to one of the inputs otherwise.
    pub fn to(self, end: Span) -> Span {
        match Span::prepare_to_combine(self, end) {
            Ok((from, to, parent)) => {
                Span::new(cmp::min(from.lo, to.lo), cmp::max(from.hi, to.hi), from.ctxt, parent)
            }
            Err(fallback) => fallback,
        }
    }
1180
    /// Returns the span *between* `self` and `end`: from the smaller `hi`
    /// to the larger `lo` (note the deliberate hi/lo swap — this is the gap
    /// separating the two spans), when the two are combinable.
    pub fn between(self, end: Span) -> Span {
        match Span::prepare_to_combine(self, end) {
            Ok((from, to, parent)) => {
                Span::new(cmp::min(from.hi, to.hi), cmp::max(from.lo, to.lo), from.ctxt, parent)
            }
            Err(fallback) => fallback,
        }
    }
1196
    /// Returns the span from the start of `self` up to (but not including)
    /// the start of `end` — both endpoints are `lo` values — when the two
    /// are combinable.
    pub fn until(self, end: Span) -> Span {
        match Span::prepare_to_combine(self, end) {
            Ok((from, to, parent)) => {
                Span::new(cmp::min(from.lo, to.lo), cmp::max(from.lo, to.lo), from.ctxt, parent)
            }
            Err(fallback) => fallback,
        }
    }
1212
    /// Attempts to map `self` into the expansion that produced `within`,
    /// returning the remapped span when the combined data does not overlap
    /// `self`'s own range.
    pub fn within_macro(self, within: Span, sm: &SourceMap) -> Option<Span> {
        match Span::prepare_to_combine(self, within) {
            // NOTE(review): `&&` binds tighter than `||` here, so
            // `!sm.is_imported(within)` guards only the second disjunct —
            // confirm this precedence is intentional.
            Ok((self_, _, parent))
                if self_.hi < self.lo() || self.hi() < self_.lo && !sm.is_imported(within) =>
            {
                Some(Span::new(self_.lo, self_.hi, self_.ctxt, parent))
            }
            _ => None,
        }
    }
1243
1244 pub fn from_inner(self, inner: InnerSpan) -> Span {
1245 let span = self.data();
1246 Span::new(
1247 span.lo + BytePos::from_usize(inner.start),
1248 span.lo + BytePos::from_usize(inner.end),
1249 span.ctxt,
1250 span.parent,
1251 )
1252 }
1253
    /// Gives this span the def-site hygiene of `expn_id` (opaque
    /// transparency).
    pub fn with_def_site_ctxt(self, expn_id: ExpnId) -> Span {
        self.with_ctxt_from_mark(expn_id, Transparency::Opaque)
    }
1259
    /// Gives this span the call-site hygiene of `expn_id` (transparent
    /// transparency).
    pub fn with_call_site_ctxt(self, expn_id: ExpnId) -> Span {
        self.with_ctxt_from_mark(expn_id, Transparency::Transparent)
    }
1265
    /// Gives this span the mixed-site hygiene of `expn_id` (semi-opaque
    /// transparency).
    pub fn with_mixed_site_ctxt(self, expn_id: ExpnId) -> Span {
        self.with_ctxt_from_mark(expn_id, Transparency::SemiOpaque)
    }
1271
    /// Replaces this span's context with a fresh root context marked with
    /// `expn_id` at the given `transparency`.
    fn with_ctxt_from_mark(self, expn_id: ExpnId, transparency: Transparency) -> Span {
        self.with_ctxt(SyntaxContext::root().apply_mark(expn_id, transparency))
    }
1278
    /// Adds an expansion mark to this span's existing syntax context.
    #[inline]
    pub fn apply_mark(self, expn_id: ExpnId, transparency: Transparency) -> Span {
        self.map_ctxt(|ctxt| ctxt.apply_mark(expn_id, transparency))
    }
1283
    /// Removes the outermost expansion mark from this span's context
    /// in place and returns the removed `ExpnId`.
    #[inline]
    pub fn remove_mark(&mut self) -> ExpnId {
        let mut mark = ExpnId::root();
        // `map_ctxt` gives us the context by value; capture the removed
        // mark through the closure's environment.
        *self = self.map_ctxt(|mut ctxt| {
            mark = ctxt.remove_mark();
            ctxt
        });
        mark
    }
1293
    /// Adjusts this span's context relative to `expn_id` in place,
    /// returning the mark removed by the adjustment, if any (delegates to
    /// `SyntaxContext::adjust`).
    #[inline]
    pub fn adjust(&mut self, expn_id: ExpnId) -> Option<ExpnId> {
        let mut mark = None;
        *self = self.map_ctxt(|mut ctxt| {
            mark = ctxt.adjust(expn_id);
            ctxt
        });
        mark
    }
1303
    /// Normalizes this span's context to macros-2.0 hygiene and then
    /// adjusts it relative to `expn_id`, in place; returns the removed
    /// mark, if any.
    #[inline]
    pub fn normalize_to_macros_2_0_and_adjust(&mut self, expn_id: ExpnId) -> Option<ExpnId> {
        let mut mark = None;
        *self = self.map_ctxt(|mut ctxt| {
            mark = ctxt.normalize_to_macros_2_0_and_adjust(expn_id);
            ctxt
        });
        mark
    }
1313
    /// Returns this span with its context normalized to macros-2.0
    /// hygiene.
    #[inline]
    pub fn normalize_to_macros_2_0(self) -> Span {
        self.map_ctxt(|ctxt| ctxt.normalize_to_macros_2_0())
    }
1318
    /// Returns this span with its context normalized to `macro_rules`
    /// hygiene.
    #[inline]
    pub fn normalize_to_macro_rules(self) -> Span {
        self.map_ctxt(|ctxt| ctxt.normalize_to_macro_rules())
    }
1323}
1324
// The default span is the dummy span: a zero-length span with root context.
impl Default for Span {
    fn default() -> Self {
        DUMMY_SP
    }
}
1330
1331impl ::std::fmt::Debug for AttrId {
fn fmt(&self, fmt: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
fmt.write_fmt(format_args!("AttrId({0})", self.as_u32()))
}
}rustc_index::newtype_index! {
1332 #[orderable]
1333 #[debug_format = "AttrId({})"]
1334 pub struct AttrId {}
1335}
1336
/// An `Encoder` that additionally knows how to encode span-related types.
/// Each encoder decides its own representation (or panics for types it
/// cannot represent — see the `FileEncoder` impl below).
pub trait SpanEncoder: Encoder {
    fn encode_span(&mut self, span: Span);
    fn encode_symbol(&mut self, sym: Symbol);
    fn encode_byte_symbol(&mut self, byte_sym: ByteSymbol);
    fn encode_expn_id(&mut self, expn_id: ExpnId);
    fn encode_syntax_context(&mut self, syntax_context: SyntaxContext);
    fn encode_crate_num(&mut self, crate_num: CrateNum);
    fn encode_def_index(&mut self, def_index: DefIndex);
    fn encode_def_id(&mut self, def_id: DefId);
}
1351
1352impl SpanEncoder for FileEncoder {
1353 fn encode_span(&mut self, span: Span) {
1354 let span = span.data();
1355 span.lo.encode(self);
1356 span.hi.encode(self);
1357 }
1358
1359 fn encode_symbol(&mut self, sym: Symbol) {
1360 self.emit_str(sym.as_str());
1361 }
1362
1363 fn encode_byte_symbol(&mut self, byte_sym: ByteSymbol) {
1364 self.emit_byte_str(byte_sym.as_byte_str());
1365 }
1366
1367 fn encode_expn_id(&mut self, _expn_id: ExpnId) {
1368 {
::core::panicking::panic_fmt(format_args!("cannot encode `ExpnId` with `FileEncoder`"));
};panic!("cannot encode `ExpnId` with `FileEncoder`");
1369 }
1370
1371 fn encode_syntax_context(&mut self, _syntax_context: SyntaxContext) {
1372 {
::core::panicking::panic_fmt(format_args!("cannot encode `SyntaxContext` with `FileEncoder`"));
};panic!("cannot encode `SyntaxContext` with `FileEncoder`");
1373 }
1374
1375 fn encode_crate_num(&mut self, crate_num: CrateNum) {
1376 self.emit_u32(crate_num.as_u32());
1377 }
1378
1379 fn encode_def_index(&mut self, _def_index: DefIndex) {
1380 {
::core::panicking::panic_fmt(format_args!("cannot encode `DefIndex` with `FileEncoder`"));
};panic!("cannot encode `DefIndex` with `FileEncoder`");
1381 }
1382
1383 fn encode_def_id(&mut self, def_id: DefId) {
1384 def_id.krate.encode(self);
1385 def_id.index.encode(self);
1386 }
1387}
1388
// Delegate to the encoder's span hook.
impl<E: SpanEncoder> Encodable<E> for Span {
    fn encode(&self, s: &mut E) {
        s.encode_span(*self);
    }
}
1394
// Delegate to the encoder's symbol hook.
impl<E: SpanEncoder> Encodable<E> for Symbol {
    fn encode(&self, s: &mut E) {
        s.encode_symbol(*self);
    }
}
1400
// Delegate to the encoder's byte-symbol hook.
impl<E: SpanEncoder> Encodable<E> for ByteSymbol {
    fn encode(&self, s: &mut E) {
        s.encode_byte_symbol(*self);
    }
}
1406
// Delegate to the encoder's expansion-id hook (may panic for encoders
// that cannot represent `ExpnId`).
impl<E: SpanEncoder> Encodable<E> for ExpnId {
    fn encode(&self, s: &mut E) {
        s.encode_expn_id(*self)
    }
}
1412
// Delegate to the encoder's syntax-context hook (may panic for encoders
// that cannot represent `SyntaxContext`).
impl<E: SpanEncoder> Encodable<E> for SyntaxContext {
    fn encode(&self, s: &mut E) {
        s.encode_syntax_context(*self)
    }
}
1418
// Delegate to the encoder's crate-number hook.
impl<E: SpanEncoder> Encodable<E> for CrateNum {
    fn encode(&self, s: &mut E) {
        s.encode_crate_num(*self)
    }
}
1424
// Delegate to the encoder's def-index hook (may panic for encoders that
// cannot represent `DefIndex`).
impl<E: SpanEncoder> Encodable<E> for DefIndex {
    fn encode(&self, s: &mut E) {
        s.encode_def_index(*self)
    }
}
1430
// Delegate to the encoder's def-id hook.
impl<E: SpanEncoder> Encodable<E> for DefId {
    fn encode(&self, s: &mut E) {
        s.encode_def_id(*self)
    }
}
1436
// Intentionally encodes nothing — the decode side (`decode_attr_id`)
// supplies the value. NOTE(review): presumably `AttrId`s are not stable
// across serialization and are regenerated on decode; confirm against the
// `SpanDecoder` implementations.
impl<E: SpanEncoder> Encodable<E> for AttrId {
    fn encode(&self, _s: &mut E) {
    }
}
1442
/// A `Decoder` that can decode the subset of span-related types that are
/// self-contained in a byte blob (no session/crate context needed).
pub trait BlobDecoder: Decoder {
    fn decode_symbol(&mut self) -> Symbol;
    fn decode_byte_symbol(&mut self) -> ByteSymbol;
    fn decode_def_index(&mut self) -> DefIndex;
}
1448
/// Extends `BlobDecoder` with the remaining span-related types; these may
/// require more context (or be unsupported — see the `MemDecoder` impl).
pub trait SpanDecoder: BlobDecoder {
    fn decode_span(&mut self) -> Span;
    fn decode_expn_id(&mut self) -> ExpnId;
    fn decode_syntax_context(&mut self) -> SyntaxContext;
    fn decode_crate_num(&mut self) -> CrateNum;
    fn decode_def_id(&mut self) -> DefId;
    fn decode_attr_id(&mut self) -> AttrId;
}
1473
1474impl BlobDecoder for MemDecoder<'_> {
1475 fn decode_symbol(&mut self) -> Symbol {
1476 Symbol::intern(self.read_str())
1477 }
1478
1479 fn decode_byte_symbol(&mut self) -> ByteSymbol {
1480 ByteSymbol::intern(self.read_byte_str())
1481 }
1482
1483 fn decode_def_index(&mut self) -> DefIndex {
1484 {
::core::panicking::panic_fmt(format_args!("cannot decode `DefIndex` with `MemDecoder`"));
};panic!("cannot decode `DefIndex` with `MemDecoder`");
1485 }
1486}
1487
1488impl SpanDecoder for MemDecoder<'_> {
1489 fn decode_span(&mut self) -> Span {
1490 let lo = Decodable::decode(self);
1491 let hi = Decodable::decode(self);
1492
1493 Span::new(lo, hi, SyntaxContext::root(), None)
1494 }
1495
1496 fn decode_expn_id(&mut self) -> ExpnId {
1497 {
::core::panicking::panic_fmt(format_args!("cannot decode `ExpnId` with `MemDecoder`"));
};panic!("cannot decode `ExpnId` with `MemDecoder`");
1498 }
1499
1500 fn decode_syntax_context(&mut self) -> SyntaxContext {
1501 {
::core::panicking::panic_fmt(format_args!("cannot decode `SyntaxContext` with `MemDecoder`"));
};panic!("cannot decode `SyntaxContext` with `MemDecoder`");
1502 }
1503
1504 fn decode_crate_num(&mut self) -> CrateNum {
1505 CrateNum::from_u32(self.read_u32())
1506 }
1507
1508 fn decode_def_id(&mut self) -> DefId {
1509 DefId { krate: Decodable::decode(self), index: Decodable::decode(self) }
1510 }
1511
1512 fn decode_attr_id(&mut self) -> AttrId {
1513 {
::core::panicking::panic_fmt(format_args!("cannot decode `AttrId` with `MemDecoder`"));
};panic!("cannot decode `AttrId` with `MemDecoder`");
1514 }
1515}
1516
// Delegate to the decoder's span hook.
impl<D: SpanDecoder> Decodable<D> for Span {
    fn decode(s: &mut D) -> Span {
        s.decode_span()
    }
}
1522
// Delegate to the decoder's symbol hook.
impl<D: BlobDecoder> Decodable<D> for Symbol {
    fn decode(s: &mut D) -> Symbol {
        s.decode_symbol()
    }
}
1528
// Delegate to the decoder's byte-symbol hook.
impl<D: BlobDecoder> Decodable<D> for ByteSymbol {
    fn decode(s: &mut D) -> ByteSymbol {
        s.decode_byte_symbol()
    }
}
1534
// Delegate to the decoder's expansion-id hook (may panic for decoders
// that cannot reconstruct `ExpnId`).
impl<D: SpanDecoder> Decodable<D> for ExpnId {
    fn decode(s: &mut D) -> ExpnId {
        s.decode_expn_id()
    }
}
1540
// Delegate to the decoder's syntax-context hook (may panic for decoders
// that cannot reconstruct `SyntaxContext`).
impl<D: SpanDecoder> Decodable<D> for SyntaxContext {
    fn decode(s: &mut D) -> SyntaxContext {
        s.decode_syntax_context()
    }
}
1546
// Delegate to the decoder's crate-number hook.
impl<D: SpanDecoder> Decodable<D> for CrateNum {
    fn decode(s: &mut D) -> CrateNum {
        s.decode_crate_num()
    }
}
1552
// Delegate to the decoder's def-index hook (may panic for decoders that
// cannot reconstruct `DefIndex`).
impl<D: BlobDecoder> Decodable<D> for DefIndex {
    fn decode(s: &mut D) -> DefIndex {
        s.decode_def_index()
    }
}
1558
// Delegate to the decoder's def-id hook.
impl<D: SpanDecoder> Decodable<D> for DefId {
    fn decode(s: &mut D) -> DefId {
        s.decode_def_id()
    }
}
1564
// Delegate to the decoder's attr-id hook (may panic for decoders that
// cannot reconstruct `AttrId`).
impl<D: SpanDecoder> Decodable<D> for AttrId {
    fn decode(s: &mut D) -> AttrId {
        s.decode_attr_id()
    }
}
1570
1571impl fmt::Debug for Span {
1572 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1573 fn fallback(span: Span, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1577 f.debug_struct("Span")
1578 .field("lo", &span.lo())
1579 .field("hi", &span.hi())
1580 .field("ctxt", &span.ctxt())
1581 .finish()
1582 }
1583
1584 if SESSION_GLOBALS.is_set() {
1585 with_session_globals(|session_globals| {
1586 if let Some(source_map) = &session_globals.source_map {
1587 f.write_fmt(format_args!("{0} ({1:?})",
source_map.span_to_diagnostic_string(*self), self.ctxt()))write!(f, "{} ({:?})", source_map.span_to_diagnostic_string(*self), self.ctxt())
1588 } else {
1589 fallback(*self, f)
1590 }
1591 })
1592 } else {
1593 fallback(*self, f)
1594 }
1595 }
1596}
1597
// `SpanData` debugs identically to the `Span` it interns to.
impl fmt::Debug for SpanData {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(&self.span(), f)
    }
}
1603
1604#[derive(#[automatically_derived]
impl ::core::marker::Copy for MultiByteChar { }Copy, #[automatically_derived]
impl ::core::clone::Clone for MultiByteChar {
#[inline]
fn clone(&self) -> MultiByteChar {
let _: ::core::clone::AssertParamIsClone<RelativeBytePos>;
let _: ::core::clone::AssertParamIsClone<u8>;
*self
}
}Clone, const _: () =
{
impl<__E: ::rustc_span::SpanEncoder> ::rustc_serialize::Encodable<__E>
for MultiByteChar {
fn encode(&self, __encoder: &mut __E) {
match *self {
MultiByteChar { pos: ref __binding_0, bytes: ref __binding_1
} => {
::rustc_serialize::Encodable::<__E>::encode(__binding_0,
__encoder);
::rustc_serialize::Encodable::<__E>::encode(__binding_1,
__encoder);
}
}
}
}
};Encodable, const _: () =
{
impl<__D: ::rustc_span::SpanDecoder> ::rustc_serialize::Decodable<__D>
for MultiByteChar {
fn decode(__decoder: &mut __D) -> Self {
MultiByteChar {
pos: ::rustc_serialize::Decodable::decode(__decoder),
bytes: ::rustc_serialize::Decodable::decode(__decoder),
}
}
}
};Decodable, #[automatically_derived]
impl ::core::cmp::Eq for MultiByteChar {
#[inline]
#[doc(hidden)]
#[coverage(off)]
fn assert_receiver_is_total_eq(&self) {
let _: ::core::cmp::AssertParamIsEq<RelativeBytePos>;
let _: ::core::cmp::AssertParamIsEq<u8>;
}
}Eq, #[automatically_derived]
impl ::core::cmp::PartialEq for MultiByteChar {
#[inline]
fn eq(&self, other: &MultiByteChar) -> bool {
self.bytes == other.bytes && self.pos == other.pos
}
}PartialEq, #[automatically_derived]
impl ::core::fmt::Debug for MultiByteChar {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
::core::fmt::Formatter::debug_struct_field2_finish(f, "MultiByteChar",
"pos", &self.pos, "bytes", &&self.bytes)
}
}Debug, const _: () =
{
impl<__CTX> ::rustc_data_structures::stable_hasher::HashStable<__CTX>
for MultiByteChar where __CTX: crate::HashStableContext {
#[inline]
fn hash_stable(&self, __hcx: &mut __CTX,
__hasher:
&mut ::rustc_data_structures::stable_hasher::StableHasher) {
match *self {
MultiByteChar { pos: ref __binding_0, bytes: ref __binding_1
} => {
{ __binding_0.hash_stable(__hcx, __hasher); }
{ __binding_1.hash_stable(__hcx, __hasher); }
}
}
}
}
};HashStable_Generic)]
1606pub struct MultiByteChar {
1607 pub pos: RelativeBytePos,
1609 pub bytes: u8,
1611}
1612
1613#[derive(#[automatically_derived]
impl ::core::marker::Copy for NormalizedPos { }Copy, #[automatically_derived]
impl ::core::clone::Clone for NormalizedPos {
#[inline]
fn clone(&self) -> NormalizedPos {
let _: ::core::clone::AssertParamIsClone<RelativeBytePos>;
let _: ::core::clone::AssertParamIsClone<u32>;
*self
}
}Clone, const _: () =
{
impl<__E: ::rustc_span::SpanEncoder> ::rustc_serialize::Encodable<__E>
for NormalizedPos {
fn encode(&self, __encoder: &mut __E) {
match *self {
NormalizedPos { pos: ref __binding_0, diff: ref __binding_1
} => {
::rustc_serialize::Encodable::<__E>::encode(__binding_0,
__encoder);
::rustc_serialize::Encodable::<__E>::encode(__binding_1,
__encoder);
}
}
}
}
};Encodable, const _: () =
{
impl<__D: ::rustc_span::SpanDecoder> ::rustc_serialize::Decodable<__D>
for NormalizedPos {
fn decode(__decoder: &mut __D) -> Self {
NormalizedPos {
pos: ::rustc_serialize::Decodable::decode(__decoder),
diff: ::rustc_serialize::Decodable::decode(__decoder),
}
}
}
};Decodable, #[automatically_derived]
impl ::core::cmp::Eq for NormalizedPos {
#[inline]
#[doc(hidden)]
#[coverage(off)]
fn assert_receiver_is_total_eq(&self) {
let _: ::core::cmp::AssertParamIsEq<RelativeBytePos>;
let _: ::core::cmp::AssertParamIsEq<u32>;
}
}Eq, #[automatically_derived]
impl ::core::cmp::PartialEq for NormalizedPos {
#[inline]
fn eq(&self, other: &NormalizedPos) -> bool {
self.diff == other.diff && self.pos == other.pos
}
}PartialEq, #[automatically_derived]
impl ::core::fmt::Debug for NormalizedPos {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
::core::fmt::Formatter::debug_struct_field2_finish(f, "NormalizedPos",
"pos", &self.pos, "diff", &&self.diff)
}
}Debug, const _: () =
{
impl<__CTX> ::rustc_data_structures::stable_hasher::HashStable<__CTX>
for NormalizedPos where __CTX: crate::HashStableContext {
#[inline]
fn hash_stable(&self, __hcx: &mut __CTX,
__hasher:
&mut ::rustc_data_structures::stable_hasher::StableHasher) {
match *self {
NormalizedPos { pos: ref __binding_0, diff: ref __binding_1
} => {
{ __binding_0.hash_stable(__hcx, __hasher); }
{ __binding_1.hash_stable(__hcx, __hasher); }
}
}
}
}
};HashStable_Generic)]
1615pub struct NormalizedPos {
1616 pub pos: RelativeBytePos,
1618 pub diff: u32,
1620}
1621
1622#[derive(#[automatically_derived]
impl ::core::cmp::PartialEq for ExternalSource {
#[inline]
fn eq(&self, other: &ExternalSource) -> bool {
let __self_discr = ::core::intrinsics::discriminant_value(self);
let __arg1_discr = ::core::intrinsics::discriminant_value(other);
__self_discr == __arg1_discr &&
match (self, other) {
(ExternalSource::Foreign {
kind: __self_0, metadata_index: __self_1 },
ExternalSource::Foreign {
kind: __arg1_0, metadata_index: __arg1_1 }) =>
__self_1 == __arg1_1 && __self_0 == __arg1_0,
_ => true,
}
}
}PartialEq, #[automatically_derived]
impl ::core::cmp::Eq for ExternalSource {
#[inline]
#[doc(hidden)]
#[coverage(off)]
fn assert_receiver_is_total_eq(&self) {
let _: ::core::cmp::AssertParamIsEq<ExternalSourceKind>;
let _: ::core::cmp::AssertParamIsEq<u32>;
}
}Eq, #[automatically_derived]
impl ::core::clone::Clone for ExternalSource {
#[inline]
fn clone(&self) -> ExternalSource {
match self {
ExternalSource::Unneeded => ExternalSource::Unneeded,
ExternalSource::Foreign { kind: __self_0, metadata_index: __self_1
} =>
ExternalSource::Foreign {
kind: ::core::clone::Clone::clone(__self_0),
metadata_index: ::core::clone::Clone::clone(__self_1),
},
}
}
}Clone, #[automatically_derived]
impl ::core::fmt::Debug for ExternalSource {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
match self {
ExternalSource::Unneeded =>
::core::fmt::Formatter::write_str(f, "Unneeded"),
ExternalSource::Foreign { kind: __self_0, metadata_index: __self_1
} =>
::core::fmt::Formatter::debug_struct_field2_finish(f,
"Foreign", "kind", __self_0, "metadata_index", &__self_1),
}
}
}Debug)]
1623pub enum ExternalSource {
1624 Unneeded,
1626 Foreign {
1627 kind: ExternalSourceKind,
1628 metadata_index: u32,
1630 },
1631}
1632
/// The load state of a foreign `ExternalSource`.
#[derive(PartialEq, Eq, Clone, Debug)]
pub enum ExternalSourceKind {
    /// The source text has been loaded and is available.
    Present(Arc<String>),
    /// The source is absent, which is acceptable.
    AbsentOk,
    /// The source is absent due to an error.
    AbsentErr,
}
1643
1644impl ExternalSource {
1645 pub fn get_source(&self) -> Option<&str> {
1646 match self {
1647 ExternalSource::Foreign { kind: ExternalSourceKind::Present(src), .. } => Some(src),
1648 _ => None,
1649 }
1650 }
1651}
1652
/// Error type signalling that a byte-position offset computation
/// overflowed.
#[derive(Debug)]
pub struct OffsetOverflowError;
1655
1656#[derive(#[automatically_derived]
impl ::core::marker::Copy for SourceFileHashAlgorithm { }Copy, #[automatically_derived]
impl ::core::clone::Clone for SourceFileHashAlgorithm {
#[inline]
fn clone(&self) -> SourceFileHashAlgorithm { *self }
}Clone, #[automatically_derived]
impl ::core::fmt::Debug for SourceFileHashAlgorithm {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
::core::fmt::Formatter::write_str(f,
match self {
SourceFileHashAlgorithm::Md5 => "Md5",
SourceFileHashAlgorithm::Sha1 => "Sha1",
SourceFileHashAlgorithm::Sha256 => "Sha256",
SourceFileHashAlgorithm::Blake3 => "Blake3",
})
}
}Debug, #[automatically_derived]
impl ::core::cmp::PartialEq for SourceFileHashAlgorithm {
#[inline]
fn eq(&self, other: &SourceFileHashAlgorithm) -> bool {
let __self_discr = ::core::intrinsics::discriminant_value(self);
let __arg1_discr = ::core::intrinsics::discriminant_value(other);
__self_discr == __arg1_discr
}
}PartialEq, #[automatically_derived]
impl ::core::cmp::Eq for SourceFileHashAlgorithm {
#[inline]
#[doc(hidden)]
#[coverage(off)]
fn assert_receiver_is_total_eq(&self) {}
}Eq, #[automatically_derived]
impl ::core::cmp::PartialOrd for SourceFileHashAlgorithm {
#[inline]
fn partial_cmp(&self, other: &SourceFileHashAlgorithm)
-> ::core::option::Option<::core::cmp::Ordering> {
let __self_discr = ::core::intrinsics::discriminant_value(self);
let __arg1_discr = ::core::intrinsics::discriminant_value(other);
::core::cmp::PartialOrd::partial_cmp(&__self_discr, &__arg1_discr)
}
}PartialOrd, #[automatically_derived]
impl ::core::cmp::Ord for SourceFileHashAlgorithm {
#[inline]
fn cmp(&self, other: &SourceFileHashAlgorithm) -> ::core::cmp::Ordering {
let __self_discr = ::core::intrinsics::discriminant_value(self);
let __arg1_discr = ::core::intrinsics::discriminant_value(other);
::core::cmp::Ord::cmp(&__self_discr, &__arg1_discr)
}
}Ord, #[automatically_derived]
impl ::core::hash::Hash for SourceFileHashAlgorithm {
#[inline]
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) {
let __self_discr = ::core::intrinsics::discriminant_value(self);
::core::hash::Hash::hash(&__self_discr, state)
}
}Hash, const _: () =
{
impl<__E: ::rustc_span::SpanEncoder> ::rustc_serialize::Encodable<__E>
for SourceFileHashAlgorithm {
fn encode(&self, __encoder: &mut __E) {
let disc =
match *self {
SourceFileHashAlgorithm::Md5 => { 0usize }
SourceFileHashAlgorithm::Sha1 => { 1usize }
SourceFileHashAlgorithm::Sha256 => { 2usize }
SourceFileHashAlgorithm::Blake3 => { 3usize }
};
::rustc_serialize::Encoder::emit_u8(__encoder, disc as u8);
match *self {
SourceFileHashAlgorithm::Md5 => {}
SourceFileHashAlgorithm::Sha1 => {}
SourceFileHashAlgorithm::Sha256 => {}
SourceFileHashAlgorithm::Blake3 => {}
}
}
}
};Encodable, const _: () =
{
impl<__D: ::rustc_span::SpanDecoder> ::rustc_serialize::Decodable<__D>
for SourceFileHashAlgorithm {
fn decode(__decoder: &mut __D) -> Self {
match ::rustc_serialize::Decoder::read_u8(__decoder) as usize
{
0usize => { SourceFileHashAlgorithm::Md5 }
1usize => { SourceFileHashAlgorithm::Sha1 }
2usize => { SourceFileHashAlgorithm::Sha256 }
3usize => { SourceFileHashAlgorithm::Blake3 }
n => {
::core::panicking::panic_fmt(format_args!("invalid enum variant tag while decoding `SourceFileHashAlgorithm`, expected 0..4, actual {0}",
n));
}
}
}
}
};Decodable)]
1657#[derive(const _: () =
{
impl<__CTX> ::rustc_data_structures::stable_hasher::HashStable<__CTX>
for SourceFileHashAlgorithm where __CTX: crate::HashStableContext
{
#[inline]
fn hash_stable(&self, __hcx: &mut __CTX,
__hasher:
&mut ::rustc_data_structures::stable_hasher::StableHasher) {
::std::mem::discriminant(self).hash_stable(__hcx, __hasher);
match *self {
SourceFileHashAlgorithm::Md5 => {}
SourceFileHashAlgorithm::Sha1 => {}
SourceFileHashAlgorithm::Sha256 => {}
SourceFileHashAlgorithm::Blake3 => {}
}
}
}
};HashStable_Generic)]
1658pub enum SourceFileHashAlgorithm {
1659 Md5,
1660 Sha1,
1661 Sha256,
1662 Blake3,
1663}
1664
1665impl Display for SourceFileHashAlgorithm {
1666 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1667 f.write_str(match self {
1668 Self::Md5 => "md5",
1669 Self::Sha1 => "sha1",
1670 Self::Sha256 => "sha256",
1671 Self::Blake3 => "blake3",
1672 })
1673 }
1674}
1675
1676impl FromStr for SourceFileHashAlgorithm {
1677 type Err = ();
1678
1679 fn from_str(s: &str) -> Result<SourceFileHashAlgorithm, ()> {
1680 match s {
1681 "md5" => Ok(SourceFileHashAlgorithm::Md5),
1682 "sha1" => Ok(SourceFileHashAlgorithm::Sha1),
1683 "sha256" => Ok(SourceFileHashAlgorithm::Sha256),
1684 "blake3" => Ok(SourceFileHashAlgorithm::Blake3),
1685 _ => Err(()),
1686 }
1687 }
1688}
1689
1690#[derive(#[automatically_derived]
impl ::core::marker::Copy for SourceFileHash { }Copy, #[automatically_derived]
impl ::core::clone::Clone for SourceFileHash {
#[inline]
fn clone(&self) -> SourceFileHash {
let _: ::core::clone::AssertParamIsClone<SourceFileHashAlgorithm>;
let _: ::core::clone::AssertParamIsClone<[u8; 32]>;
*self
}
}Clone, #[automatically_derived]
impl ::core::cmp::PartialEq for SourceFileHash {
#[inline]
fn eq(&self, other: &SourceFileHash) -> bool {
self.kind == other.kind && self.value == other.value
}
}PartialEq, #[automatically_derived]
impl ::core::cmp::Eq for SourceFileHash {
#[inline]
#[doc(hidden)]
#[coverage(off)]
fn assert_receiver_is_total_eq(&self) {
let _: ::core::cmp::AssertParamIsEq<SourceFileHashAlgorithm>;
let _: ::core::cmp::AssertParamIsEq<[u8; 32]>;
}
}Eq, #[automatically_derived]
impl ::core::fmt::Debug for SourceFileHash {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
::core::fmt::Formatter::debug_struct_field2_finish(f,
"SourceFileHash", "kind", &self.kind, "value", &&self.value)
}
}Debug, #[automatically_derived]
impl ::core::hash::Hash for SourceFileHash {
#[inline]
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) {
::core::hash::Hash::hash(&self.kind, state);
::core::hash::Hash::hash(&self.value, state)
}
}Hash)]
1692#[derive(const _: () =
{
impl<__CTX> ::rustc_data_structures::stable_hasher::HashStable<__CTX>
for SourceFileHash where __CTX: crate::HashStableContext {
#[inline]
fn hash_stable(&self, __hcx: &mut __CTX,
__hasher:
&mut ::rustc_data_structures::stable_hasher::StableHasher) {
match *self {
SourceFileHash {
kind: ref __binding_0, value: ref __binding_1 } => {
{ __binding_0.hash_stable(__hcx, __hasher); }
{ __binding_1.hash_stable(__hcx, __hasher); }
}
}
}
}
};HashStable_Generic, const _: () =
{
impl<__E: ::rustc_span::SpanEncoder> ::rustc_serialize::Encodable<__E>
for SourceFileHash {
fn encode(&self, __encoder: &mut __E) {
match *self {
SourceFileHash {
kind: ref __binding_0, value: ref __binding_1 } => {
::rustc_serialize::Encodable::<__E>::encode(__binding_0,
__encoder);
::rustc_serialize::Encodable::<__E>::encode(__binding_1,
__encoder);
}
}
}
}
};Encodable, const _: () =
{
impl<__D: ::rustc_span::SpanDecoder> ::rustc_serialize::Decodable<__D>
for SourceFileHash {
fn decode(__decoder: &mut __D) -> Self {
SourceFileHash {
kind: ::rustc_serialize::Decodable::decode(__decoder),
value: ::rustc_serialize::Decodable::decode(__decoder),
}
}
}
};Decodable)]
/// A hash of a source file's contents, tagged with the algorithm that
/// produced it.
pub struct SourceFileHash {
    /// The algorithm that produced `value`.
    pub kind: SourceFileHashAlgorithm,
    /// Digest bytes. Only the first `hash_len()` bytes are meaningful
    /// (16 for MD5, 20 for SHA-1, 32 for SHA-256/BLAKE3); the remainder
    /// stays at its zeroed default.
    value: [u8; 32],
}
1697
1698impl Display for SourceFileHash {
1699 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1700 f.write_fmt(format_args!("{0}=", self.kind))write!(f, "{}=", self.kind)?;
1701 for byte in self.value[0..self.hash_len()].into_iter() {
1702 f.write_fmt(format_args!("{0:02x}", byte))write!(f, "{byte:02x}")?;
1703 }
1704 Ok(())
1705 }
1706}
1707
1708impl SourceFileHash {
1709 pub fn new_in_memory(kind: SourceFileHashAlgorithm, src: impl AsRef<[u8]>) -> SourceFileHash {
1710 let mut hash = SourceFileHash { kind, value: Default::default() };
1711 let len = hash.hash_len();
1712 let value = &mut hash.value[..len];
1713 let data = src.as_ref();
1714 match kind {
1715 SourceFileHashAlgorithm::Md5 => {
1716 value.copy_from_slice(&Md5::digest(data));
1717 }
1718 SourceFileHashAlgorithm::Sha1 => {
1719 value.copy_from_slice(&Sha1::digest(data));
1720 }
1721 SourceFileHashAlgorithm::Sha256 => {
1722 value.copy_from_slice(&Sha256::digest(data));
1723 }
1724 SourceFileHashAlgorithm::Blake3 => value.copy_from_slice(blake3::hash(data).as_bytes()),
1725 };
1726 hash
1727 }
1728
1729 pub fn new(kind: SourceFileHashAlgorithm, src: impl Read) -> Result<SourceFileHash, io::Error> {
1730 let mut hash = SourceFileHash { kind, value: Default::default() };
1731 let len = hash.hash_len();
1732 let value = &mut hash.value[..len];
1733 let mut buf = ::alloc::vec::from_elem(0, 16 * 1024)vec![0; 16 * 1024];
1736
1737 fn digest<T>(
1738 mut hasher: T,
1739 mut update: impl FnMut(&mut T, &[u8]),
1740 finish: impl FnOnce(T, &mut [u8]),
1741 mut src: impl Read,
1742 buf: &mut [u8],
1743 value: &mut [u8],
1744 ) -> Result<(), io::Error> {
1745 loop {
1746 let bytes_read = src.read(buf)?;
1747 if bytes_read == 0 {
1748 break;
1749 }
1750 update(&mut hasher, &buf[0..bytes_read]);
1751 }
1752 finish(hasher, value);
1753 Ok(())
1754 }
1755
1756 match kind {
1757 SourceFileHashAlgorithm::Sha256 => {
1758 digest(
1759 Sha256::new(),
1760 |h, b| {
1761 h.update(b);
1762 },
1763 |h, out| out.copy_from_slice(&h.finalize()),
1764 src,
1765 &mut buf,
1766 value,
1767 )?;
1768 }
1769 SourceFileHashAlgorithm::Sha1 => {
1770 digest(
1771 Sha1::new(),
1772 |h, b| {
1773 h.update(b);
1774 },
1775 |h, out| out.copy_from_slice(&h.finalize()),
1776 src,
1777 &mut buf,
1778 value,
1779 )?;
1780 }
1781 SourceFileHashAlgorithm::Md5 => {
1782 digest(
1783 Md5::new(),
1784 |h, b| {
1785 h.update(b);
1786 },
1787 |h, out| out.copy_from_slice(&h.finalize()),
1788 src,
1789 &mut buf,
1790 value,
1791 )?;
1792 }
1793 SourceFileHashAlgorithm::Blake3 => {
1794 digest(
1795 blake3::Hasher::new(),
1796 |h, b| {
1797 h.update(b);
1798 },
1799 |h, out| out.copy_from_slice(h.finalize().as_bytes()),
1800 src,
1801 &mut buf,
1802 value,
1803 )?;
1804 }
1805 }
1806 Ok(hash)
1807 }
1808
1809 pub fn matches(&self, src: &str) -> bool {
1811 Self::new_in_memory(self.kind, src.as_bytes()) == *self
1812 }
1813
1814 pub fn hash_bytes(&self) -> &[u8] {
1816 let len = self.hash_len();
1817 &self.value[..len]
1818 }
1819
1820 fn hash_len(&self) -> usize {
1821 match self.kind {
1822 SourceFileHashAlgorithm::Md5 => 16,
1823 SourceFileHashAlgorithm::Sha1 => 20,
1824 SourceFileHashAlgorithm::Sha256 | SourceFileHashAlgorithm::Blake3 => 32,
1825 }
1826 }
1827}
1828
1829#[derive(#[automatically_derived]
impl ::core::clone::Clone for SourceFileLines {
#[inline]
fn clone(&self) -> SourceFileLines {
match self {
SourceFileLines::Lines(__self_0) =>
SourceFileLines::Lines(::core::clone::Clone::clone(__self_0)),
SourceFileLines::Diffs(__self_0) =>
SourceFileLines::Diffs(::core::clone::Clone::clone(__self_0)),
}
}
}Clone)]
/// The line-start table of a `SourceFile`, in either expanded or
/// compressed form.
pub enum SourceFileLines {
    /// The line starts themselves, as byte positions relative to the file start.
    Lines(Vec<RelativeBytePos>),

    /// A compressed form (gaps between consecutive line starts); expanded
    /// into `Lines` on first use by `SourceFile::convert_diffs_to_lines_frozen`.
    Diffs(SourceFileDiffs),
}
1837
1838impl SourceFileLines {
1839 pub fn is_lines(&self) -> bool {
1840 #[allow(non_exhaustive_omitted_patterns)] match self {
SourceFileLines::Lines(_) => true,
_ => false,
}matches!(self, SourceFileLines::Lines(_))
1841 }
1842}
1843
1844#[derive(#[automatically_derived]
impl ::core::clone::Clone for SourceFileDiffs {
#[inline]
fn clone(&self) -> SourceFileDiffs {
SourceFileDiffs {
bytes_per_diff: ::core::clone::Clone::clone(&self.bytes_per_diff),
num_diffs: ::core::clone::Clone::clone(&self.num_diffs),
raw_diffs: ::core::clone::Clone::clone(&self.raw_diffs),
}
}
}Clone)]
/// Compressed storage for a `SourceFile`'s line starts: the gaps between
/// consecutive line starts, each stored little-endian in a fixed number of
/// bytes.
pub struct SourceFileDiffs {
    // Width in bytes (1, 2, or 4) of each encoded gap.
    bytes_per_diff: usize,

    // Number of encoded gaps; one fewer than the number of lines.
    num_diffs: usize,

    // Concatenated little-endian gap values; length is
    // `bytes_per_diff * num_diffs`.
    raw_diffs: Vec<u8>,
}
1869
/// A single source file, together with the derived tables (line starts,
/// multibyte characters, normalization records) the compiler needs for
/// position lookups.
pub struct SourceFile {
    /// The name of the file the source came from.
    pub name: FileName,
    /// The complete (normalized) source text, if kept in memory.
    pub src: Option<Arc<String>>,
    /// Hash of the source text, computed before normalization
    /// (see `SourceFile::new`).
    pub src_hash: SourceFileHash,
    /// Optional second hash, possibly under a different algorithm than
    /// `src_hash` (see `SourceFile::new`).
    pub checksum_hash: Option<SourceFileHash>,
    /// Source loaded on demand when `src` is absent; validated against
    /// `src_hash` in `add_external_src`.
    pub external_src: FreezeLock<ExternalSource>,
    /// Absolute position of this file's first byte within the source map.
    pub start_pos: BytePos,
    /// Length of the source after normalization, relative to `start_pos`.
    pub normalized_source_len: RelativeBytePos,
    /// Length in bytes of the source before normalization.
    pub unnormalized_source_len: u32,
    /// Line-start table, in expanded or compressed form.
    pub lines: FreezeLock<SourceFileLines>,
    /// Positions of characters that occupy more than one byte.
    pub multibyte_chars: Vec<MultiByteChar>,
    /// Records of the normalizations applied (BOM removal, newline
    /// normalization), used to map positions back to the original bytes.
    pub normalized_pos: Vec<NormalizedPos>,
    /// Identifier derived from the filename (and optionally a crate id);
    /// see `StableSourceFileId`.
    pub stable_id: StableSourceFileId,
    /// The crate this file belongs to.
    pub cnum: CrateNum,
}
1906
1907impl Clone for SourceFile {
1908 fn clone(&self) -> Self {
1909 Self {
1910 name: self.name.clone(),
1911 src: self.src.clone(),
1912 src_hash: self.src_hash,
1913 checksum_hash: self.checksum_hash,
1914 external_src: self.external_src.clone(),
1915 start_pos: self.start_pos,
1916 normalized_source_len: self.normalized_source_len,
1917 unnormalized_source_len: self.unnormalized_source_len,
1918 lines: self.lines.clone(),
1919 multibyte_chars: self.multibyte_chars.clone(),
1920 normalized_pos: self.normalized_pos.clone(),
1921 stable_id: self.stable_id,
1922 cnum: self.cnum,
1923 }
1924 }
1925}
1926
1927impl<S: SpanEncoder> Encodable<S> for SourceFile {
1928 fn encode(&self, s: &mut S) {
1929 self.name.encode(s);
1930 self.src_hash.encode(s);
1931 self.checksum_hash.encode(s);
1932 self.normalized_source_len.encode(s);
1934 self.unnormalized_source_len.encode(s);
1935
1936 if !self.lines.read().is_lines() {
::core::panicking::panic("assertion failed: self.lines.read().is_lines()")
};assert!(self.lines.read().is_lines());
1938 let lines = self.lines();
1939 s.emit_u32(lines.len() as u32);
1941
1942 if lines.len() != 0 {
1944 let max_line_length = if lines.len() == 1 {
1945 0
1946 } else {
1947 lines
1948 .array_windows()
1949 .map(|&[fst, snd]| snd - fst)
1950 .map(|bp| bp.to_usize())
1951 .max()
1952 .unwrap()
1953 };
1954
1955 let bytes_per_diff: usize = match max_line_length {
1956 0..=0xFF => 1,
1957 0x100..=0xFFFF => 2,
1958 _ => 4,
1959 };
1960
1961 s.emit_u8(bytes_per_diff as u8);
1963
1964 match (&lines[0], &RelativeBytePos(0)) {
(left_val, right_val) => {
if !(*left_val == *right_val) {
let kind = ::core::panicking::AssertKind::Eq;
::core::panicking::assert_failed(kind, &*left_val, &*right_val,
::core::option::Option::None);
}
}
};assert_eq!(lines[0], RelativeBytePos(0));
1966
1967 let diff_iter = lines.array_windows().map(|&[fst, snd]| snd - fst);
1969 let num_diffs = lines.len() - 1;
1970 let mut raw_diffs;
1971 match bytes_per_diff {
1972 1 => {
1973 raw_diffs = Vec::with_capacity(num_diffs);
1974 for diff in diff_iter {
1975 raw_diffs.push(diff.0 as u8);
1976 }
1977 }
1978 2 => {
1979 raw_diffs = Vec::with_capacity(bytes_per_diff * num_diffs);
1980 for diff in diff_iter {
1981 raw_diffs.extend_from_slice(&(diff.0 as u16).to_le_bytes());
1982 }
1983 }
1984 4 => {
1985 raw_diffs = Vec::with_capacity(bytes_per_diff * num_diffs);
1986 for diff in diff_iter {
1987 raw_diffs.extend_from_slice(&(diff.0).to_le_bytes());
1988 }
1989 }
1990 _ => ::core::panicking::panic("internal error: entered unreachable code")unreachable!(),
1991 }
1992 s.emit_raw_bytes(&raw_diffs);
1993 }
1994
1995 self.multibyte_chars.encode(s);
1996 self.stable_id.encode(s);
1997 self.normalized_pos.encode(s);
1998 self.cnum.encode(s);
1999 }
2000}
2001
2002impl<D: SpanDecoder> Decodable<D> for SourceFile {
2003 fn decode(d: &mut D) -> SourceFile {
2004 let name: FileName = Decodable::decode(d);
2005 let src_hash: SourceFileHash = Decodable::decode(d);
2006 let checksum_hash: Option<SourceFileHash> = Decodable::decode(d);
2007 let normalized_source_len: RelativeBytePos = Decodable::decode(d);
2008 let unnormalized_source_len = Decodable::decode(d);
2009 let lines = {
2010 let num_lines: u32 = Decodable::decode(d);
2011 if num_lines > 0 {
2012 let bytes_per_diff = d.read_u8() as usize;
2014
2015 let num_diffs = num_lines as usize - 1;
2017 let raw_diffs = d.read_raw_bytes(bytes_per_diff * num_diffs).to_vec();
2018 SourceFileLines::Diffs(SourceFileDiffs { bytes_per_diff, num_diffs, raw_diffs })
2019 } else {
2020 SourceFileLines::Lines(::alloc::vec::Vec::new()vec![])
2021 }
2022 };
2023 let multibyte_chars: Vec<MultiByteChar> = Decodable::decode(d);
2024 let stable_id = Decodable::decode(d);
2025 let normalized_pos: Vec<NormalizedPos> = Decodable::decode(d);
2026 let cnum: CrateNum = Decodable::decode(d);
2027 SourceFile {
2028 name,
2029 start_pos: BytePos::from_u32(0),
2030 normalized_source_len,
2031 unnormalized_source_len,
2032 src: None,
2033 src_hash,
2034 checksum_hash,
2035 external_src: FreezeLock::frozen(ExternalSource::Unneeded),
2038 lines: FreezeLock::new(lines),
2039 multibyte_chars,
2040 normalized_pos,
2041 stable_id,
2042 cnum,
2043 }
2044 }
2045}
2046
2047impl fmt::Debug for SourceFile {
2048 fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
2049 fmt.write_fmt(format_args!("SourceFile({0:?})", self.name))write!(fmt, "SourceFile({:?})", self.name)
2050 }
2051}
2052
2053#[derive(
2075 #[automatically_derived]
impl ::core::fmt::Debug for StableSourceFileId {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
::core::fmt::Formatter::debug_tuple_field1_finish(f,
"StableSourceFileId", &&self.0)
}
}Debug,
2076 #[automatically_derived]
impl ::core::clone::Clone for StableSourceFileId {
#[inline]
fn clone(&self) -> StableSourceFileId {
let _: ::core::clone::AssertParamIsClone<Hash128>;
*self
}
}Clone,
2077 #[automatically_derived]
impl ::core::marker::Copy for StableSourceFileId { }Copy,
2078 #[automatically_derived]
impl ::core::hash::Hash for StableSourceFileId {
#[inline]
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) {
::core::hash::Hash::hash(&self.0, state)
}
}Hash,
2079 #[automatically_derived]
impl ::core::cmp::PartialEq for StableSourceFileId {
#[inline]
fn eq(&self, other: &StableSourceFileId) -> bool { self.0 == other.0 }
}PartialEq,
2080 #[automatically_derived]
impl ::core::cmp::Eq for StableSourceFileId {
#[inline]
#[doc(hidden)]
#[coverage(off)]
fn assert_receiver_is_total_eq(&self) {
let _: ::core::cmp::AssertParamIsEq<Hash128>;
}
}Eq,
2081 const _: () =
{
impl<__CTX> ::rustc_data_structures::stable_hasher::HashStable<__CTX>
for StableSourceFileId where __CTX: crate::HashStableContext {
#[inline]
fn hash_stable(&self, __hcx: &mut __CTX,
__hasher:
&mut ::rustc_data_structures::stable_hasher::StableHasher) {
match *self {
StableSourceFileId(ref __binding_0) => {
{ __binding_0.hash_stable(__hcx, __hasher); }
}
}
}
}
};HashStable_Generic,
2082 const _: () =
{
impl<__E: ::rustc_span::SpanEncoder> ::rustc_serialize::Encodable<__E>
for StableSourceFileId {
fn encode(&self, __encoder: &mut __E) {
match *self {
StableSourceFileId(ref __binding_0) => {
::rustc_serialize::Encodable::<__E>::encode(__binding_0,
__encoder);
}
}
}
}
};Encodable,
2083 const _: () =
{
impl<__D: ::rustc_span::SpanDecoder> ::rustc_serialize::Decodable<__D>
for StableSourceFileId {
fn decode(__decoder: &mut __D) -> Self {
StableSourceFileId(::rustc_serialize::Decodable::decode(__decoder))
}
}
};Decodable,
2084 #[automatically_derived]
impl ::core::default::Default for StableSourceFileId {
#[inline]
fn default() -> StableSourceFileId {
StableSourceFileId(::core::default::Default::default())
}
}Default,
2085 #[automatically_derived]
impl ::core::cmp::PartialOrd for StableSourceFileId {
#[inline]
fn partial_cmp(&self, other: &StableSourceFileId)
-> ::core::option::Option<::core::cmp::Ordering> {
::core::cmp::PartialOrd::partial_cmp(&self.0, &other.0)
}
}PartialOrd,
2086 #[automatically_derived]
impl ::core::cmp::Ord for StableSourceFileId {
#[inline]
fn cmp(&self, other: &StableSourceFileId) -> ::core::cmp::Ordering {
::core::cmp::Ord::cmp(&self.0, &other.0)
}
}Ord
2087)]
/// A stable identifier for a `SourceFile`: a 128-bit stable hash of its
/// filename and, optionally, the owning crate's stable id (see the
/// constructors below).
pub struct StableSourceFileId(Hash128);
2089
2090impl StableSourceFileId {
2091 fn from_filename_in_current_crate(filename: &FileName) -> Self {
2092 Self::from_filename_and_stable_crate_id(filename, None)
2093 }
2094
2095 pub fn from_filename_for_export(
2096 filename: &FileName,
2097 local_crate_stable_crate_id: StableCrateId,
2098 ) -> Self {
2099 Self::from_filename_and_stable_crate_id(filename, Some(local_crate_stable_crate_id))
2100 }
2101
2102 fn from_filename_and_stable_crate_id(
2103 filename: &FileName,
2104 stable_crate_id: Option<StableCrateId>,
2105 ) -> Self {
2106 let mut hasher = StableHasher::new();
2107 filename.hash(&mut hasher);
2108 stable_crate_id.hash(&mut hasher);
2109 StableSourceFileId(hasher.finish())
2110 }
2111}
2112
2113impl SourceFile {
2114 const MAX_FILE_SIZE: u32 = u32::MAX - 1;
2115
2116 pub fn new(
2117 name: FileName,
2118 mut src: String,
2119 hash_kind: SourceFileHashAlgorithm,
2120 checksum_hash_kind: Option<SourceFileHashAlgorithm>,
2121 ) -> Result<Self, OffsetOverflowError> {
2122 let src_hash = SourceFileHash::new_in_memory(hash_kind, src.as_bytes());
2124 let checksum_hash = checksum_hash_kind.map(|checksum_hash_kind| {
2125 if checksum_hash_kind == hash_kind {
2126 src_hash
2127 } else {
2128 SourceFileHash::new_in_memory(checksum_hash_kind, src.as_bytes())
2129 }
2130 });
2131 let unnormalized_source_len = u32::try_from(src.len()).map_err(|_| OffsetOverflowError)?;
2133 if unnormalized_source_len > Self::MAX_FILE_SIZE {
2134 return Err(OffsetOverflowError);
2135 }
2136
2137 let normalized_pos = normalize_src(&mut src);
2138
2139 let stable_id = StableSourceFileId::from_filename_in_current_crate(&name);
2140 let normalized_source_len = u32::try_from(src.len()).map_err(|_| OffsetOverflowError)?;
2141 if normalized_source_len > Self::MAX_FILE_SIZE {
2142 return Err(OffsetOverflowError);
2143 }
2144
2145 let (lines, multibyte_chars) = analyze_source_file::analyze_source_file(&src);
2146
2147 Ok(SourceFile {
2148 name,
2149 src: Some(Arc::new(src)),
2150 src_hash,
2151 checksum_hash,
2152 external_src: FreezeLock::frozen(ExternalSource::Unneeded),
2153 start_pos: BytePos::from_u32(0),
2154 normalized_source_len: RelativeBytePos::from_u32(normalized_source_len),
2155 unnormalized_source_len,
2156 lines: FreezeLock::frozen(SourceFileLines::Lines(lines)),
2157 multibyte_chars,
2158 normalized_pos,
2159 stable_id,
2160 cnum: LOCAL_CRATE,
2161 })
2162 }
2163
2164 fn convert_diffs_to_lines_frozen(&self) {
2167 let mut guard = if let Some(guard) = self.lines.try_write() { guard } else { return };
2168
2169 let SourceFileDiffs { bytes_per_diff, num_diffs, raw_diffs } = match &*guard {
2170 SourceFileLines::Diffs(diffs) => diffs,
2171 SourceFileLines::Lines(..) => {
2172 FreezeWriteGuard::freeze(guard);
2173 return;
2174 }
2175 };
2176
2177 let num_lines = num_diffs + 1;
2179 let mut lines = Vec::with_capacity(num_lines);
2180 let mut line_start = RelativeBytePos(0);
2181 lines.push(line_start);
2182
2183 match (&*num_diffs, &(raw_diffs.len() / bytes_per_diff)) {
(left_val, right_val) => {
if !(*left_val == *right_val) {
let kind = ::core::panicking::AssertKind::Eq;
::core::panicking::assert_failed(kind, &*left_val, &*right_val,
::core::option::Option::None);
}
}
};assert_eq!(*num_diffs, raw_diffs.len() / bytes_per_diff);
2184 match bytes_per_diff {
2185 1 => {
2186 lines.extend(raw_diffs.into_iter().map(|&diff| {
2187 line_start = line_start + RelativeBytePos(diff as u32);
2188 line_start
2189 }));
2190 }
2191 2 => {
2192 lines.extend((0..*num_diffs).map(|i| {
2193 let pos = bytes_per_diff * i;
2194 let bytes = [raw_diffs[pos], raw_diffs[pos + 1]];
2195 let diff = u16::from_le_bytes(bytes);
2196 line_start = line_start + RelativeBytePos(diff as u32);
2197 line_start
2198 }));
2199 }
2200 4 => {
2201 lines.extend((0..*num_diffs).map(|i| {
2202 let pos = bytes_per_diff * i;
2203 let bytes = [
2204 raw_diffs[pos],
2205 raw_diffs[pos + 1],
2206 raw_diffs[pos + 2],
2207 raw_diffs[pos + 3],
2208 ];
2209 let diff = u32::from_le_bytes(bytes);
2210 line_start = line_start + RelativeBytePos(diff);
2211 line_start
2212 }));
2213 }
2214 _ => ::core::panicking::panic("internal error: entered unreachable code")unreachable!(),
2215 }
2216
2217 *guard = SourceFileLines::Lines(lines);
2218
2219 FreezeWriteGuard::freeze(guard);
2220 }
2221
2222 pub fn lines(&self) -> &[RelativeBytePos] {
2223 if let Some(SourceFileLines::Lines(lines)) = self.lines.get() {
2224 return &lines[..];
2225 }
2226
2227 outline(|| {
2228 self.convert_diffs_to_lines_frozen();
2229 if let Some(SourceFileLines::Lines(lines)) = self.lines.get() {
2230 return &lines[..];
2231 }
2232 ::core::panicking::panic("internal error: entered unreachable code")unreachable!()
2233 })
2234 }
2235
2236 pub fn line_begin_pos(&self, pos: BytePos) -> BytePos {
2238 let pos = self.relative_position(pos);
2239 let line_index = self.lookup_line(pos).unwrap();
2240 let line_start_pos = self.lines()[line_index];
2241 self.absolute_position(line_start_pos)
2242 }
2243
2244 pub fn add_external_src<F>(&self, get_src: F) -> bool
2249 where
2250 F: FnOnce() -> Option<String>,
2251 {
2252 if !self.external_src.is_frozen() {
2253 let src = get_src();
2254 let src = src.and_then(|mut src| {
2255 self.src_hash.matches(&src).then(|| {
2257 normalize_src(&mut src);
2258 src
2259 })
2260 });
2261
2262 self.external_src.try_write().map(|mut external_src| {
2263 if let ExternalSource::Foreign {
2264 kind: src_kind @ ExternalSourceKind::AbsentOk,
2265 ..
2266 } = &mut *external_src
2267 {
2268 *src_kind = if let Some(src) = src {
2269 ExternalSourceKind::Present(Arc::new(src))
2270 } else {
2271 ExternalSourceKind::AbsentErr
2272 };
2273 } else {
2274 {
::core::panicking::panic_fmt(format_args!("unexpected state {0:?}",
*external_src));
}panic!("unexpected state {:?}", *external_src)
2275 }
2276
2277 FreezeWriteGuard::freeze(external_src)
2279 });
2280 }
2281
2282 self.src.is_some() || self.external_src.read().get_source().is_some()
2283 }
2284
2285 pub fn get_line(&self, line_number: usize) -> Option<Cow<'_, str>> {
2288 fn get_until_newline(src: &str, begin: usize) -> &str {
2289 let slice = &src[begin..];
2293 match slice.find('\n') {
2294 Some(e) => &slice[..e],
2295 None => slice,
2296 }
2297 }
2298
2299 let begin = {
2300 let line = self.lines().get(line_number).copied()?;
2301 line.to_usize()
2302 };
2303
2304 if let Some(ref src) = self.src {
2305 Some(Cow::from(get_until_newline(src, begin)))
2306 } else {
2307 self.external_src
2308 .borrow()
2309 .get_source()
2310 .map(|src| Cow::Owned(String::from(get_until_newline(src, begin))))
2311 }
2312 }
2313
2314 pub fn is_real_file(&self) -> bool {
2315 self.name.is_real()
2316 }
2317
2318 #[inline]
2319 pub fn is_imported(&self) -> bool {
2320 self.src.is_none()
2321 }
2322
2323 pub fn count_lines(&self) -> usize {
2324 self.lines().len()
2325 }
2326
2327 #[inline]
2328 pub fn absolute_position(&self, pos: RelativeBytePos) -> BytePos {
2329 BytePos::from_u32(pos.to_u32() + self.start_pos.to_u32())
2330 }
2331
2332 #[inline]
2333 pub fn relative_position(&self, pos: BytePos) -> RelativeBytePos {
2334 RelativeBytePos::from_u32(pos.to_u32() - self.start_pos.to_u32())
2335 }
2336
2337 #[inline]
2338 pub fn end_position(&self) -> BytePos {
2339 self.absolute_position(self.normalized_source_len)
2340 }
2341
2342 pub fn lookup_line(&self, pos: RelativeBytePos) -> Option<usize> {
2347 self.lines().partition_point(|x| x <= &pos).checked_sub(1)
2348 }
2349
2350 pub fn line_bounds(&self, line_index: usize) -> Range<BytePos> {
2351 if self.is_empty() {
2352 return self.start_pos..self.start_pos;
2353 }
2354
2355 let lines = self.lines();
2356 if !(line_index < lines.len()) {
::core::panicking::panic("assertion failed: line_index < lines.len()")
};assert!(line_index < lines.len());
2357 if line_index == (lines.len() - 1) {
2358 self.absolute_position(lines[line_index])..self.end_position()
2359 } else {
2360 self.absolute_position(lines[line_index])..self.absolute_position(lines[line_index + 1])
2361 }
2362 }
2363
2364 #[inline]
2369 pub fn contains(&self, byte_pos: BytePos) -> bool {
2370 byte_pos >= self.start_pos && byte_pos <= self.end_position()
2371 }
2372
2373 #[inline]
2374 pub fn is_empty(&self) -> bool {
2375 self.normalized_source_len.to_u32() == 0
2376 }
2377
2378 pub fn original_relative_byte_pos(&self, pos: BytePos) -> RelativeBytePos {
2381 let pos = self.relative_position(pos);
2382
2383 let diff = match self.normalized_pos.binary_search_by(|np| np.pos.cmp(&pos)) {
2387 Ok(i) => self.normalized_pos[i].diff,
2388 Err(0) => 0,
2389 Err(i) => self.normalized_pos[i - 1].diff,
2390 };
2391
2392 RelativeBytePos::from_u32(pos.0 + diff)
2393 }
2394
2395 pub fn normalized_byte_pos(&self, offset: u32) -> BytePos {
2405 let diff = match self
2406 .normalized_pos
2407 .binary_search_by(|np| (np.pos.0 + np.diff).cmp(&(self.start_pos.0 + offset)))
2408 {
2409 Ok(i) => self.normalized_pos[i].diff,
2410 Err(0) => 0,
2411 Err(i) => self.normalized_pos[i - 1].diff,
2412 };
2413
2414 BytePos::from_u32(self.start_pos.0 + offset - diff)
2415 }
2416
2417 fn bytepos_to_file_charpos(&self, bpos: RelativeBytePos) -> CharPos {
2419 let mut total_extra_bytes = 0;
2421
2422 for mbc in self.multibyte_chars.iter() {
2423 {
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_span/src/lib.rs:2423",
"rustc_span", ::tracing::Level::DEBUG,
::tracing_core::__macro_support::Option::Some("compiler/rustc_span/src/lib.rs"),
::tracing_core::__macro_support::Option::Some(2423u32),
::tracing_core::__macro_support::Option::Some("rustc_span"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::DEBUG <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::DEBUG <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("{0}-byte char at {1:?}",
mbc.bytes, mbc.pos) as &dyn Value))])
});
} else { ; }
};debug!("{}-byte char at {:?}", mbc.bytes, mbc.pos);
2424 if mbc.pos < bpos {
2425 total_extra_bytes += mbc.bytes as u32 - 1;
2428 if !(bpos.to_u32() >= mbc.pos.to_u32() + mbc.bytes as u32) {
::core::panicking::panic("assertion failed: bpos.to_u32() >= mbc.pos.to_u32() + mbc.bytes as u32")
};assert!(bpos.to_u32() >= mbc.pos.to_u32() + mbc.bytes as u32);
2431 } else {
2432 break;
2433 }
2434 }
2435
2436 if !(total_extra_bytes <= bpos.to_u32()) {
::core::panicking::panic("assertion failed: total_extra_bytes <= bpos.to_u32()")
};assert!(total_extra_bytes <= bpos.to_u32());
2437 CharPos(bpos.to_usize() - total_extra_bytes as usize)
2438 }
2439
2440 fn lookup_file_pos(&self, pos: RelativeBytePos) -> (usize, CharPos) {
2443 let chpos = self.bytepos_to_file_charpos(pos);
2444 match self.lookup_line(pos) {
2445 Some(a) => {
2446 let line = a + 1; let linebpos = self.lines()[a];
2448 let linechpos = self.bytepos_to_file_charpos(linebpos);
2449 let col = chpos - linechpos;
2450 {
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_span/src/lib.rs:2450",
"rustc_span", ::tracing::Level::DEBUG,
::tracing_core::__macro_support::Option::Some("compiler/rustc_span/src/lib.rs"),
::tracing_core::__macro_support::Option::Some(2450u32),
::tracing_core::__macro_support::Option::Some("rustc_span"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::DEBUG <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::DEBUG <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("byte pos {0:?} is on the line at byte pos {1:?}",
pos, linebpos) as &dyn Value))])
});
} else { ; }
};debug!("byte pos {:?} is on the line at byte pos {:?}", pos, linebpos);
2451 {
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_span/src/lib.rs:2451",
"rustc_span", ::tracing::Level::DEBUG,
::tracing_core::__macro_support::Option::Some("compiler/rustc_span/src/lib.rs"),
::tracing_core::__macro_support::Option::Some(2451u32),
::tracing_core::__macro_support::Option::Some("rustc_span"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::DEBUG <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::DEBUG <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("char pos {0:?} is on the line at char pos {1:?}",
chpos, linechpos) as &dyn Value))])
});
} else { ; }
};debug!("char pos {:?} is on the line at char pos {:?}", chpos, linechpos);
2452 {
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_span/src/lib.rs:2452",
"rustc_span", ::tracing::Level::DEBUG,
::tracing_core::__macro_support::Option::Some("compiler/rustc_span/src/lib.rs"),
::tracing_core::__macro_support::Option::Some(2452u32),
::tracing_core::__macro_support::Option::Some("rustc_span"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::DEBUG <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::DEBUG <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("byte is on line: {0}",
line) as &dyn Value))])
});
} else { ; }
};debug!("byte is on line: {}", line);
2453 if !(chpos >= linechpos) {
::core::panicking::panic("assertion failed: chpos >= linechpos")
};assert!(chpos >= linechpos);
2454 (line, col)
2455 }
2456 None => (0, chpos),
2457 }
2458 }
2459
2460 pub fn lookup_file_pos_with_col_display(&self, pos: BytePos) -> (usize, CharPos, usize) {
2463 let pos = self.relative_position(pos);
2464 let (line, col_or_chpos) = self.lookup_file_pos(pos);
2465 if line > 0 {
2466 let Some(code) = self.get_line(line - 1) else {
2467 {
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_span/src/lib.rs:2474",
"rustc_span", ::tracing::Level::INFO,
::tracing_core::__macro_support::Option::Some("compiler/rustc_span/src/lib.rs"),
::tracing_core::__macro_support::Option::Some(2474u32),
::tracing_core::__macro_support::Option::Some("rustc_span"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::INFO <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::INFO <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("couldn\'t find line {1} {0:?}",
self.name, line) as &dyn Value))])
});
} else { ; }
};tracing::info!("couldn't find line {line} {:?}", self.name);
2475 return (line, col_or_chpos, col_or_chpos.0);
2476 };
2477 let display_col = code.chars().take(col_or_chpos.0).map(|ch| char_width(ch)).sum();
2478 (line, col_or_chpos, display_col)
2479 } else {
2480 (0, col_or_chpos, col_or_chpos.0)
2482 }
2483 }
2484}
2485
2486pub fn char_width(ch: char) -> usize {
2487 match ch {
2490 '\t' => 4,
2491 '\u{0000}' | '\u{0001}' | '\u{0002}' | '\u{0003}' | '\u{0004}' | '\u{0005}'
2495 | '\u{0006}' | '\u{0007}' | '\u{0008}' | '\u{000B}' | '\u{000C}' | '\u{000D}'
2496 | '\u{000E}' | '\u{000F}' | '\u{0010}' | '\u{0011}' | '\u{0012}' | '\u{0013}'
2497 | '\u{0014}' | '\u{0015}' | '\u{0016}' | '\u{0017}' | '\u{0018}' | '\u{0019}'
2498 | '\u{001A}' | '\u{001B}' | '\u{001C}' | '\u{001D}' | '\u{001E}' | '\u{001F}'
2499 | '\u{007F}' | '\u{202A}' | '\u{202B}' | '\u{202D}' | '\u{202E}' | '\u{2066}'
2500 | '\u{2067}' | '\u{2068}' | '\u{202C}' | '\u{2069}' => 1,
2501 _ => unicode_width::UnicodeWidthChar::width(ch).unwrap_or(1),
2502 }
2503}
2504
2505pub fn str_width(s: &str) -> usize {
2506 s.chars().map(char_width).sum()
2507}
2508
2509fn normalize_src(src: &mut String) -> Vec<NormalizedPos> {
2511 let mut normalized_pos = ::alloc::vec::Vec::new()vec![];
2512 remove_bom(src, &mut normalized_pos);
2513 normalize_newlines(src, &mut normalized_pos);
2514 normalized_pos
2515}
2516
2517fn remove_bom(src: &mut String, normalized_pos: &mut Vec<NormalizedPos>) {
2519 if src.starts_with('\u{feff}') {
2520 src.drain(..3);
2521 normalized_pos.push(NormalizedPos { pos: RelativeBytePos(0), diff: 3 });
2522 }
2523}
2524
2525fn normalize_newlines(src: &mut String, normalized_pos: &mut Vec<NormalizedPos>) {
2529 if !src.as_bytes().contains(&b'\r') {
2530 return;
2531 }
2532
2533 let mut buf = std::mem::replace(src, String::new()).into_bytes();
2539 let mut gap_len = 0;
2540 let mut tail = buf.as_mut_slice();
2541 let mut cursor = 0;
2542 let original_gap = normalized_pos.last().map_or(0, |l| l.diff);
2543 loop {
2544 let idx = match find_crlf(&tail[gap_len..]) {
2545 None => tail.len(),
2546 Some(idx) => idx + gap_len,
2547 };
2548 tail.copy_within(gap_len..idx, 0);
2549 tail = &mut tail[idx - gap_len..];
2550 if tail.len() == gap_len {
2551 break;
2552 }
2553 cursor += idx - gap_len;
2554 gap_len += 1;
2555 normalized_pos.push(NormalizedPos {
2556 pos: RelativeBytePos::from_usize(cursor + 1),
2557 diff: original_gap + gap_len as u32,
2558 });
2559 }
2560
2561 let new_len = buf.len() - gap_len;
2564 unsafe {
2565 buf.set_len(new_len);
2566 *src = String::from_utf8_unchecked(buf);
2567 }
2568
2569 fn find_crlf(src: &[u8]) -> Option<usize> {
2570 let mut search_idx = 0;
2571 while let Some(idx) = find_cr(&src[search_idx..]) {
2572 if src[search_idx..].get(idx + 1) != Some(&b'\n') {
2573 search_idx += idx + 1;
2574 continue;
2575 }
2576 return Some(search_idx + idx);
2577 }
2578 None
2579 }
2580
2581 fn find_cr(src: &[u8]) -> Option<usize> {
2582 src.iter().position(|&b| b == b'\r')
2583 }
2584}
2585
/// A trait for position types (byte/char offsets) that convert to and from
/// `usize` and `u32` representations.
pub trait Pos {
    fn from_usize(n: usize) -> Self;
    fn to_usize(&self) -> usize;
    fn from_u32(n: u32) -> Self;
    fn to_u32(&self) -> u32;
}

/// Declares one or more newtype position structs and implements `Pos`,
/// `Add` and `Sub` for each of them.
macro_rules! impl_pos {
    (
        $(
            $(#[$attr:meta])*
            $vis:vis struct $ident:ident($inner_vis:vis $inner_ty:ty);
        )*
    ) => {
        $(
            $(#[$attr])*
            $vis struct $ident($inner_vis $inner_ty);

            impl Pos for $ident {
                #[inline(always)]
                fn from_usize(n: usize) -> $ident {
                    $ident(n as $inner_ty)
                }

                #[inline(always)]
                fn to_usize(&self) -> usize {
                    self.0 as usize
                }

                #[inline(always)]
                fn from_u32(n: u32) -> $ident {
                    $ident(n as $inner_ty)
                }

                #[inline(always)]
                fn to_u32(&self) -> u32 {
                    self.0 as u32
                }
            }

            impl Add for $ident {
                type Output = $ident;

                #[inline(always)]
                fn add(self, rhs: $ident) -> $ident {
                    $ident(self.0 + rhs.0)
                }
            }

            impl Sub for $ident {
                type Output = $ident;

                #[inline(always)]
                fn sub(self, rhs: $ident) -> $ident {
                    $ident(self.0 - rhs.0)
                }
            }
        )*
    };
}

2651#[doc = r" A character offset."]
#[doc = r""]
#[doc = r" Because of multibyte UTF-8 characters, a byte offset"]
#[doc =
r" is not equivalent to a character offset. The [`SourceMap`] will convert [`BytePos`]"]
#[doc = r" values to `CharPos` values as necessary."]
pub struct CharPos(pub usize);
#[automatically_derived]
#[doc(hidden)]
unsafe impl ::core::clone::TrivialClone for CharPos { }
#[automatically_derived]
impl ::core::clone::Clone for CharPos {
#[inline]
fn clone(&self) -> CharPos {
let _: ::core::clone::AssertParamIsClone<usize>;
*self
}
}
#[automatically_derived]
impl ::core::marker::Copy for CharPos { }
#[automatically_derived]
impl ::core::marker::StructuralPartialEq for CharPos { }
#[automatically_derived]
impl ::core::cmp::PartialEq for CharPos {
#[inline]
fn eq(&self, other: &CharPos) -> bool { self.0 == other.0 }
}
#[automatically_derived]
impl ::core::cmp::Eq for CharPos {
#[inline]
#[doc(hidden)]
#[coverage(off)]
fn assert_receiver_is_total_eq(&self) {
let _: ::core::cmp::AssertParamIsEq<usize>;
}
}
#[automatically_derived]
impl ::core::cmp::PartialOrd for CharPos {
#[inline]
fn partial_cmp(&self, other: &CharPos)
-> ::core::option::Option<::core::cmp::Ordering> {
::core::cmp::PartialOrd::partial_cmp(&self.0, &other.0)
}
}
#[automatically_derived]
impl ::core::cmp::Ord for CharPos {
#[inline]
fn cmp(&self, other: &CharPos) -> ::core::cmp::Ordering {
::core::cmp::Ord::cmp(&self.0, &other.0)
}
}
#[automatically_derived]
impl ::core::fmt::Debug for CharPos {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
::core::fmt::Formatter::debug_tuple_field1_finish(f, "CharPos",
&&self.0)
}
}
impl Pos for CharPos {
#[inline(always)]
fn from_usize(n: usize) -> CharPos { CharPos(n as usize) }
#[inline(always)]
fn to_usize(&self) -> usize { self.0 as usize }
#[inline(always)]
fn from_u32(n: u32) -> CharPos { CharPos(n as usize) }
#[inline(always)]
fn to_u32(&self) -> u32 { self.0 as u32 }
}
impl Add for CharPos {
type Output = CharPos;
#[inline(always)]
fn add(self, rhs: CharPos) -> CharPos { CharPos(self.0 + rhs.0) }
}
impl Sub for CharPos {
type Output = CharPos;
#[inline(always)]
fn sub(self, rhs: CharPos) -> CharPos { CharPos(self.0 - rhs.0) }
}impl_pos! {
2652 #[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord, Debug)]
2656 pub struct BytePos(pub u32);
2657
2658 #[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord, Debug)]
2660 pub struct RelativeBytePos(pub u32);
2661
2662 #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
2668 pub struct CharPos(pub usize);
2669}
2670
2671impl<S: Encoder> Encodable<S> for BytePos {
2672 fn encode(&self, s: &mut S) {
2673 s.emit_u32(self.0);
2674 }
2675}
2676
2677impl<D: Decoder> Decodable<D> for BytePos {
2678 fn decode(d: &mut D) -> BytePos {
2679 BytePos(d.read_u32())
2680 }
2681}
2682
2683impl<H: HashStableContext> HashStable<H> for RelativeBytePos {
2684 fn hash_stable(&self, hcx: &mut H, hasher: &mut StableHasher) {
2685 self.0.hash_stable(hcx, hasher);
2686 }
2687}
2688
2689impl<S: Encoder> Encodable<S> for RelativeBytePos {
2690 fn encode(&self, s: &mut S) {
2691 s.emit_u32(self.0);
2692 }
2693}
2694
2695impl<D: Decoder> Decodable<D> for RelativeBytePos {
2696 fn decode(d: &mut D) -> RelativeBytePos {
2697 RelativeBytePos(d.read_u32())
2698 }
2699}
2700
2701#[derive(#[automatically_derived]
impl ::core::fmt::Debug for Loc {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
::core::fmt::Formatter::debug_struct_field4_finish(f, "Loc", "file",
&self.file, "line", &self.line, "col", &self.col, "col_display",
&&self.col_display)
}
}Debug, #[automatically_derived]
impl ::core::clone::Clone for Loc {
#[inline]
fn clone(&self) -> Loc {
Loc {
file: ::core::clone::Clone::clone(&self.file),
line: ::core::clone::Clone::clone(&self.line),
col: ::core::clone::Clone::clone(&self.col),
col_display: ::core::clone::Clone::clone(&self.col_display),
}
}
}Clone)]
2707pub struct Loc {
2708 pub file: Arc<SourceFile>,
2710 pub line: usize,
2712 pub col: CharPos,
2714 pub col_display: usize,
2716}
2717
2718#[derive(#[automatically_derived]
impl ::core::fmt::Debug for SourceFileAndLine {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
::core::fmt::Formatter::debug_struct_field2_finish(f,
"SourceFileAndLine", "sf", &self.sf, "line", &&self.line)
}
}Debug)]
2720pub struct SourceFileAndLine {
2721 pub sf: Arc<SourceFile>,
2722 pub line: usize,
2724}
2725#[derive(#[automatically_derived]
impl ::core::fmt::Debug for SourceFileAndBytePos {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
::core::fmt::Formatter::debug_struct_field2_finish(f,
"SourceFileAndBytePos", "sf", &self.sf, "pos", &&self.pos)
}
}Debug)]
2726pub struct SourceFileAndBytePos {
2727 pub sf: Arc<SourceFile>,
2728 pub pos: BytePos,
2729}
2730
2731#[derive(#[automatically_derived]
impl ::core::marker::Copy for LineInfo { }Copy, #[automatically_derived]
impl ::core::clone::Clone for LineInfo {
#[inline]
fn clone(&self) -> LineInfo {
let _: ::core::clone::AssertParamIsClone<usize>;
let _: ::core::clone::AssertParamIsClone<CharPos>;
*self
}
}Clone, #[automatically_derived]
impl ::core::fmt::Debug for LineInfo {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
::core::fmt::Formatter::debug_struct_field3_finish(f, "LineInfo",
"line_index", &self.line_index, "start_col", &self.start_col,
"end_col", &&self.end_col)
}
}Debug, #[automatically_derived]
impl ::core::cmp::PartialEq for LineInfo {
#[inline]
fn eq(&self, other: &LineInfo) -> bool {
self.line_index == other.line_index &&
self.start_col == other.start_col &&
self.end_col == other.end_col
}
}PartialEq, #[automatically_derived]
impl ::core::cmp::Eq for LineInfo {
#[inline]
#[doc(hidden)]
#[coverage(off)]
fn assert_receiver_is_total_eq(&self) {
let _: ::core::cmp::AssertParamIsEq<usize>;
let _: ::core::cmp::AssertParamIsEq<CharPos>;
}
}Eq)]
2732pub struct LineInfo {
2733 pub line_index: usize,
2735
2736 pub start_col: CharPos,
2738
2739 pub end_col: CharPos,
2741}
2742
2743pub struct FileLines {
2744 pub file: Arc<SourceFile>,
2745 pub lines: Vec<LineInfo>,
2746}
2747
2748pub static SPAN_TRACK: AtomicRef<fn(LocalDefId)> = AtomicRef::new(&((|_| {}) as fn(_)));
2749
2750pub type FileLinesResult = Result<FileLines, SpanLinesError>;
2755
2756#[derive(#[automatically_derived]
impl ::core::clone::Clone for SpanLinesError {
#[inline]
fn clone(&self) -> SpanLinesError {
match self {
SpanLinesError::DistinctSources(__self_0) =>
SpanLinesError::DistinctSources(::core::clone::Clone::clone(__self_0)),
}
}
}Clone, #[automatically_derived]
impl ::core::cmp::PartialEq for SpanLinesError {
#[inline]
fn eq(&self, other: &SpanLinesError) -> bool {
match (self, other) {
(SpanLinesError::DistinctSources(__self_0),
SpanLinesError::DistinctSources(__arg1_0)) =>
__self_0 == __arg1_0,
}
}
}PartialEq, #[automatically_derived]
impl ::core::cmp::Eq for SpanLinesError {
#[inline]
#[doc(hidden)]
#[coverage(off)]
fn assert_receiver_is_total_eq(&self) {
let _: ::core::cmp::AssertParamIsEq<Box<DistinctSources>>;
}
}Eq, #[automatically_derived]
impl ::core::fmt::Debug for SpanLinesError {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
match self {
SpanLinesError::DistinctSources(__self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f,
"DistinctSources", &__self_0),
}
}
}Debug)]
2757pub enum SpanLinesError {
2758 DistinctSources(Box<DistinctSources>),
2759}
2760
2761#[derive(#[automatically_derived]
impl ::core::clone::Clone for SpanSnippetError {
#[inline]
fn clone(&self) -> SpanSnippetError {
match self {
SpanSnippetError::IllFormedSpan(__self_0) =>
SpanSnippetError::IllFormedSpan(::core::clone::Clone::clone(__self_0)),
SpanSnippetError::DistinctSources(__self_0) =>
SpanSnippetError::DistinctSources(::core::clone::Clone::clone(__self_0)),
SpanSnippetError::MalformedForSourcemap(__self_0) =>
SpanSnippetError::MalformedForSourcemap(::core::clone::Clone::clone(__self_0)),
SpanSnippetError::SourceNotAvailable { filename: __self_0 } =>
SpanSnippetError::SourceNotAvailable {
filename: ::core::clone::Clone::clone(__self_0),
},
}
}
}Clone, #[automatically_derived]
impl ::core::cmp::PartialEq for SpanSnippetError {
#[inline]
fn eq(&self, other: &SpanSnippetError) -> bool {
let __self_discr = ::core::intrinsics::discriminant_value(self);
let __arg1_discr = ::core::intrinsics::discriminant_value(other);
__self_discr == __arg1_discr &&
match (self, other) {
(SpanSnippetError::IllFormedSpan(__self_0),
SpanSnippetError::IllFormedSpan(__arg1_0)) =>
__self_0 == __arg1_0,
(SpanSnippetError::DistinctSources(__self_0),
SpanSnippetError::DistinctSources(__arg1_0)) =>
__self_0 == __arg1_0,
(SpanSnippetError::MalformedForSourcemap(__self_0),
SpanSnippetError::MalformedForSourcemap(__arg1_0)) =>
__self_0 == __arg1_0,
(SpanSnippetError::SourceNotAvailable { filename: __self_0 },
SpanSnippetError::SourceNotAvailable { filename: __arg1_0 })
=> __self_0 == __arg1_0,
_ => unsafe { ::core::intrinsics::unreachable() }
}
}
}PartialEq, #[automatically_derived]
impl ::core::cmp::Eq for SpanSnippetError {
#[inline]
#[doc(hidden)]
#[coverage(off)]
fn assert_receiver_is_total_eq(&self) {
let _: ::core::cmp::AssertParamIsEq<Span>;
let _: ::core::cmp::AssertParamIsEq<Box<DistinctSources>>;
let _: ::core::cmp::AssertParamIsEq<MalformedSourceMapPositions>;
let _: ::core::cmp::AssertParamIsEq<FileName>;
}
}Eq, #[automatically_derived]
impl ::core::fmt::Debug for SpanSnippetError {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
match self {
SpanSnippetError::IllFormedSpan(__self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f,
"IllFormedSpan", &__self_0),
SpanSnippetError::DistinctSources(__self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f,
"DistinctSources", &__self_0),
SpanSnippetError::MalformedForSourcemap(__self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f,
"MalformedForSourcemap", &__self_0),
SpanSnippetError::SourceNotAvailable { filename: __self_0 } =>
::core::fmt::Formatter::debug_struct_field1_finish(f,
"SourceNotAvailable", "filename", &__self_0),
}
}
}Debug)]
2762pub enum SpanSnippetError {
2763 IllFormedSpan(Span),
2764 DistinctSources(Box<DistinctSources>),
2765 MalformedForSourcemap(MalformedSourceMapPositions),
2766 SourceNotAvailable { filename: FileName },
2767}
2768
2769#[derive(#[automatically_derived]
impl ::core::clone::Clone for DistinctSources {
#[inline]
fn clone(&self) -> DistinctSources {
DistinctSources {
begin: ::core::clone::Clone::clone(&self.begin),
end: ::core::clone::Clone::clone(&self.end),
}
}
}Clone, #[automatically_derived]
impl ::core::cmp::PartialEq for DistinctSources {
#[inline]
fn eq(&self, other: &DistinctSources) -> bool {
self.begin == other.begin && self.end == other.end
}
}PartialEq, #[automatically_derived]
impl ::core::cmp::Eq for DistinctSources {
#[inline]
#[doc(hidden)]
#[coverage(off)]
fn assert_receiver_is_total_eq(&self) {
let _: ::core::cmp::AssertParamIsEq<(FileName, BytePos)>;
let _: ::core::cmp::AssertParamIsEq<(FileName, BytePos)>;
}
}Eq, #[automatically_derived]
impl ::core::fmt::Debug for DistinctSources {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
::core::fmt::Formatter::debug_struct_field2_finish(f,
"DistinctSources", "begin", &self.begin, "end", &&self.end)
}
}Debug)]
2770pub struct DistinctSources {
2771 pub begin: (FileName, BytePos),
2772 pub end: (FileName, BytePos),
2773}
2774
2775#[derive(#[automatically_derived]
impl ::core::clone::Clone for MalformedSourceMapPositions {
#[inline]
fn clone(&self) -> MalformedSourceMapPositions {
MalformedSourceMapPositions {
name: ::core::clone::Clone::clone(&self.name),
source_len: ::core::clone::Clone::clone(&self.source_len),
begin_pos: ::core::clone::Clone::clone(&self.begin_pos),
end_pos: ::core::clone::Clone::clone(&self.end_pos),
}
}
}Clone, #[automatically_derived]
impl ::core::cmp::PartialEq for MalformedSourceMapPositions {
#[inline]
fn eq(&self, other: &MalformedSourceMapPositions) -> bool {
self.name == other.name && self.source_len == other.source_len &&
self.begin_pos == other.begin_pos &&
self.end_pos == other.end_pos
}
}PartialEq, #[automatically_derived]
impl ::core::cmp::Eq for MalformedSourceMapPositions {
#[inline]
#[doc(hidden)]
#[coverage(off)]
fn assert_receiver_is_total_eq(&self) {
let _: ::core::cmp::AssertParamIsEq<FileName>;
let _: ::core::cmp::AssertParamIsEq<usize>;
let _: ::core::cmp::AssertParamIsEq<BytePos>;
}
}Eq, #[automatically_derived]
impl ::core::fmt::Debug for MalformedSourceMapPositions {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
::core::fmt::Formatter::debug_struct_field4_finish(f,
"MalformedSourceMapPositions", "name", &self.name, "source_len",
&self.source_len, "begin_pos", &self.begin_pos, "end_pos",
&&self.end_pos)
}
}Debug)]
2776pub struct MalformedSourceMapPositions {
2777 pub name: FileName,
2778 pub source_len: usize,
2779 pub begin_pos: BytePos,
2780 pub end_pos: BytePos,
2781}
2782
/// Range inside a larger region of source code (a byte `start..end` pair).
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub struct InnerSpan {
    pub start: usize,
    pub end: usize,
}

2790impl InnerSpan {
2791 pub fn new(start: usize, end: usize) -> InnerSpan {
2792 InnerSpan { start, end }
2793 }
2794}
2795
2796pub trait HashStableContext {
2801 fn def_path_hash(&self, def_id: DefId) -> DefPathHash;
2802 fn hash_spans(&self) -> bool;
2803 fn unstable_opts_incremental_ignore_spans(&self) -> bool;
2806 fn def_span(&self, def_id: LocalDefId) -> Span;
2807 fn span_data_to_lines_and_cols(
2808 &mut self,
2809 span: &SpanData,
2810 ) -> Option<(&SourceFile, usize, BytePos, usize, BytePos)>;
2811 fn hashing_controls(&self) -> HashingControls;
2812}
2813
2814impl<CTX> HashStable<CTX> for Span
2815where
2816 CTX: HashStableContext,
2817{
2818 fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) {
2831 const TAG_VALID_SPAN: u8 = 0;
2832 const TAG_INVALID_SPAN: u8 = 1;
2833 const TAG_RELATIVE_SPAN: u8 = 2;
2834
2835 if !ctx.hash_spans() {
2836 return;
2837 }
2838
2839 let span = self.data_untracked();
2840 span.ctxt.hash_stable(ctx, hasher);
2841 span.parent.hash_stable(ctx, hasher);
2842
2843 if span.is_dummy() {
2844 Hash::hash(&TAG_INVALID_SPAN, hasher);
2845 return;
2846 }
2847
2848 let parent = span.parent.map(|parent| ctx.def_span(parent).data_untracked());
2849 if let Some(parent) = parent
2850 && parent.contains(span)
2851 {
2852 Hash::hash(&TAG_RELATIVE_SPAN, hasher);
2856 (span.lo - parent.lo).to_u32().hash_stable(ctx, hasher);
2857 (span.hi - parent.lo).to_u32().hash_stable(ctx, hasher);
2858 return;
2859 }
2860
2861 let Some((file, line_lo, col_lo, line_hi, col_hi)) = ctx.span_data_to_lines_and_cols(&span)
2865 else {
2866 Hash::hash(&TAG_INVALID_SPAN, hasher);
2867 return;
2868 };
2869
2870 if let Some(parent) = parent
2871 && file.contains(parent.lo)
2872 {
2873 Hash::hash(&TAG_RELATIVE_SPAN, hasher);
2876 Hash::hash(&(span.lo.0.wrapping_sub(parent.lo.0)), hasher);
2877 Hash::hash(&(span.hi.0.wrapping_sub(parent.lo.0)), hasher);
2878 return;
2879 }
2880
2881 Hash::hash(&TAG_VALID_SPAN, hasher);
2882 Hash::hash(&file.stable_id, hasher);
2883
2884 let col_lo_trunc = (col_lo.0 as u64) & 0xFF;
2894 let line_lo_trunc = ((line_lo as u64) & 0xFF_FF_FF) << 8;
2895 let col_hi_trunc = (col_hi.0 as u64) & 0xFF << 32;
2896 let line_hi_trunc = ((line_hi as u64) & 0xFF_FF_FF) << 40;
2897 let col_line = col_lo_trunc | line_lo_trunc | col_hi_trunc | line_hi_trunc;
2898 let len = (span.hi - span.lo).0;
2899 Hash::hash(&col_line, hasher);
2900 Hash::hash(&len, hasher);
2901 }
2902}
2903
2904#[derive(#[automatically_derived]
impl ::core::clone::Clone for ErrorGuaranteed {
#[inline]
fn clone(&self) -> ErrorGuaranteed {
let _: ::core::clone::AssertParamIsClone<()>;
*self
}
}Clone, #[automatically_derived]
impl ::core::marker::Copy for ErrorGuaranteed { }Copy, #[automatically_derived]
impl ::core::fmt::Debug for ErrorGuaranteed {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
::core::fmt::Formatter::debug_tuple_field1_finish(f,
"ErrorGuaranteed", &&self.0)
}
}Debug, #[automatically_derived]
impl ::core::hash::Hash for ErrorGuaranteed {
#[inline]
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) {
::core::hash::Hash::hash(&self.0, state)
}
}Hash, #[automatically_derived]
impl ::core::cmp::PartialEq for ErrorGuaranteed {
#[inline]
fn eq(&self, other: &ErrorGuaranteed) -> bool { self.0 == other.0 }
}PartialEq, #[automatically_derived]
impl ::core::cmp::Eq for ErrorGuaranteed {
#[inline]
#[doc(hidden)]
#[coverage(off)]
fn assert_receiver_is_total_eq(&self) {
let _: ::core::cmp::AssertParamIsEq<()>;
}
}Eq, #[automatically_derived]
impl ::core::cmp::PartialOrd for ErrorGuaranteed {
#[inline]
fn partial_cmp(&self, other: &ErrorGuaranteed)
-> ::core::option::Option<::core::cmp::Ordering> {
::core::cmp::PartialOrd::partial_cmp(&self.0, &other.0)
}
}PartialOrd, #[automatically_derived]
impl ::core::cmp::Ord for ErrorGuaranteed {
#[inline]
fn cmp(&self, other: &ErrorGuaranteed) -> ::core::cmp::Ordering {
::core::cmp::Ord::cmp(&self.0, &other.0)
}
}Ord)]
2910#[derive(const _: () =
{
impl<__CTX> ::rustc_data_structures::stable_hasher::HashStable<__CTX>
for ErrorGuaranteed where __CTX: crate::HashStableContext {
#[inline]
fn hash_stable(&self, __hcx: &mut __CTX,
__hasher:
&mut ::rustc_data_structures::stable_hasher::StableHasher) {
match *self {
ErrorGuaranteed(ref __binding_0) => {
{ __binding_0.hash_stable(__hcx, __hasher); }
}
}
}
}
};HashStable_Generic)]
2911pub struct ErrorGuaranteed(());
2912
2913impl ErrorGuaranteed {
2914 #[deprecated = "should only be used in `DiagCtxtInner::emit_diagnostic`"]
2916 pub fn unchecked_error_guaranteed() -> Self {
2917 ErrorGuaranteed(())
2918 }
2919
2920 pub fn raise_fatal(self) -> ! {
2921 FatalError.raise()
2922 }
2923}
2924
2925impl<E: rustc_serialize::Encoder> Encodable<E> for ErrorGuaranteed {
2926 #[inline]
2927 fn encode(&self, _e: &mut E) {
2928 {
::core::panicking::panic_fmt(format_args!("should never serialize an `ErrorGuaranteed`, as we do not write metadata or incremental caches in case errors occurred"));
}panic!(
2929 "should never serialize an `ErrorGuaranteed`, as we do not write metadata or \
2930 incremental caches in case errors occurred"
2931 )
2932 }
2933}
2934impl<D: rustc_serialize::Decoder> Decodable<D> for ErrorGuaranteed {
2935 #[inline]
2936 fn decode(_d: &mut D) -> ErrorGuaranteed {
2937 {
::core::panicking::panic_fmt(format_args!("`ErrorGuaranteed` should never have been serialized to metadata or incremental caches"));
}panic!(
2938 "`ErrorGuaranteed` should never have been serialized to metadata or incremental caches"
2939 )
2940 }
2941}