1use std::fmt;
2use std::hash::{BuildHasherDefault, Hash, Hasher};
34use rustc_data_structures::AtomicRef;
5use rustc_data_structures::fingerprint::Fingerprint;
6use rustc_data_structures::stable_hasher::{HashStable, StableHasher, StableOrd, ToStableHashKey};
7use rustc_data_structures::unhash::Unhasher;
8use rustc_hashes::Hash64;
9use rustc_index::Idx;
10use rustc_macros::{BlobDecodable, Decodable, Encodable, HashStable_Generic};
11use rustc_serialize::{Decodable, Encodable};
1213use crate::{HashStableContext, SpanDecoder, SpanEncoder, Symbol};
1415pub type StableCrateIdMap =
16 indexmap::IndexMap<StableCrateId, CrateNum, BuildHasherDefault<Unhasher>>;
1718impl ::std::fmt::Debug for CrateNum {
fn fmt(&self, fmt: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
fmt.write_fmt(format_args!("crate{0}", self.as_u32()))
}
}rustc_index::newtype_index! {
19#[orderable]
20 #[debug_format = "crate{}"]
21pub struct CrateNum {}
22}2324/// Item definitions in the currently-compiled crate would have the `CrateNum`
25/// `LOCAL_CRATE` in their `DefId`.
26pub const LOCAL_CRATE: CrateNum = CrateNum::ZERO;
2728impl CrateNum {
29#[inline]
30pub fn new(x: usize) -> CrateNum {
31CrateNum::from_usize(x)
32 }
3334// FIXME(typed_def_id): Replace this with `as_mod_def_id`.
35#[inline]
36pub fn as_def_id(self) -> DefId {
37DefId { krate: self, index: CRATE_DEF_INDEX }
38 }
3940#[inline]
41pub fn as_mod_def_id(self) -> ModDefId {
42ModDefId::new_unchecked(DefId { krate: self, index: CRATE_DEF_INDEX })
43 }
44}
4546impl fmt::Displayfor CrateNum {
47fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
48 fmt::Display::fmt(&self.as_u32(), f)
49 }
50}
5152/// A `DefPathHash` is a fixed-size representation of a `DefPath` that is
53/// stable across crate and compilation session boundaries. It consists of two
54/// separate 64-bit hashes. The first uniquely identifies the crate this
55/// `DefPathHash` originates from (see [StableCrateId]), and the second
56/// uniquely identifies the corresponding `DefPath` within that crate. Together
57/// they form a unique identifier within an entire crate graph.
58///
59/// There is a very small chance of hash collisions, which would mean that two
60/// different `DefPath`s map to the same `DefPathHash`. Proceeding compilation
61/// with such a hash collision would very probably lead to an ICE, and in the
62/// worst case lead to a silent mis-compilation. The compiler therefore actively
63/// and exhaustively checks for such hash collisions and aborts compilation if
64/// it finds one.
65///
66/// `DefPathHash` uses 64-bit hashes for both the crate-id part and the
67/// crate-internal part, even though it is likely that there are many more
68/// `LocalDefId`s in a single crate than there are individual crates in a crate
69/// graph. Since we use the same number of bits in both cases, the collision
70/// probability for the crate-local part will be quite a bit higher (though
71/// still very small).
72///
73/// This imbalance is not by accident: A hash collision in the
74/// crate-local part of a `DefPathHash` will be detected and reported while
75/// compiling the crate in question. Such a collision does not depend on
76/// outside factors and can be easily fixed by the crate maintainer (e.g. by
77/// renaming the item in question or by bumping the crate version in a harmless
78/// way).
79///
80/// A collision between crate-id hashes on the other hand is harder to fix
81/// because it depends on the set of crates in the entire crate graph of a
82/// compilation session. Again, using the same crate with a different version
83/// number would fix the issue with a high probability -- but that might be
84/// easier said then done if the crates in questions are dependencies of
85/// third-party crates.
86///
87/// That being said, given a high quality hash function, the collision
88/// probabilities in question are very small. For example, for a big crate like
89/// `rustc_middle` (with ~50000 `LocalDefId`s as of the time of writing) there
90/// is a probability of roughly 1 in 14,750,000,000 of a crate-internal
91/// collision occurring. For a big crate graph with 1000 crates in it, there is
92/// a probability of 1 in 36,890,000,000,000 of a `StableCrateId` collision.
93#[derive(#[automatically_derived]
impl ::core::marker::Copy for DefPathHash { }Copy, #[automatically_derived]
impl ::core::clone::Clone for DefPathHash {
#[inline]
fn clone(&self) -> DefPathHash {
let _: ::core::clone::AssertParamIsClone<Fingerprint>;
*self
}
}Clone, #[automatically_derived]
impl ::core::hash::Hash for DefPathHash {
#[inline]
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () {
::core::hash::Hash::hash(&self.0, state)
}
}Hash, #[automatically_derived]
impl ::core::cmp::PartialEq for DefPathHash {
#[inline]
fn eq(&self, other: &DefPathHash) -> bool { self.0 == other.0 }
}PartialEq, #[automatically_derived]
impl ::core::cmp::Eq for DefPathHash {
#[inline]
#[doc(hidden)]
#[coverage(off)]
fn assert_receiver_is_total_eq(&self) -> () {
let _: ::core::cmp::AssertParamIsEq<Fingerprint>;
}
}Eq, #[automatically_derived]
impl ::core::cmp::PartialOrd for DefPathHash {
#[inline]
fn partial_cmp(&self, other: &DefPathHash)
-> ::core::option::Option<::core::cmp::Ordering> {
::core::cmp::PartialOrd::partial_cmp(&self.0, &other.0)
}
}PartialOrd, #[automatically_derived]
impl ::core::cmp::Ord for DefPathHash {
#[inline]
fn cmp(&self, other: &DefPathHash) -> ::core::cmp::Ordering {
::core::cmp::Ord::cmp(&self.0, &other.0)
}
}Ord, #[automatically_derived]
impl ::core::fmt::Debug for DefPathHash {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
::core::fmt::Formatter::debug_tuple_field1_finish(f, "DefPathHash",
&&self.0)
}
}Debug)]
94#[derive(const _: () =
{
impl<__CTX> ::rustc_data_structures::stable_hasher::HashStable<__CTX>
for DefPathHash where __CTX: crate::HashStableContext {
#[inline]
fn hash_stable(&self, __hcx: &mut __CTX,
__hasher:
&mut ::rustc_data_structures::stable_hasher::StableHasher) {
match *self {
DefPathHash(ref __binding_0) => {
{ __binding_0.hash_stable(__hcx, __hasher); }
}
}
}
}
};HashStable_Generic, const _: () =
{
impl<__E: ::rustc_span::SpanEncoder> ::rustc_serialize::Encodable<__E>
for DefPathHash {
fn encode(&self, __encoder: &mut __E) {
match *self {
DefPathHash(ref __binding_0) => {
::rustc_serialize::Encodable::<__E>::encode(__binding_0,
__encoder);
}
}
}
}
};Encodable, const _: () =
{
impl<__D: ::rustc_span::SpanDecoder> ::rustc_serialize::Decodable<__D>
for DefPathHash {
fn decode(__decoder: &mut __D) -> Self {
DefPathHash(::rustc_serialize::Decodable::decode(__decoder))
}
}
};Decodable)]
95pub struct DefPathHash(pub Fingerprint);
9697impl DefPathHash {
98/// Returns the [StableCrateId] identifying the crate this [DefPathHash]
99 /// originates from.
100#[inline]
101pub fn stable_crate_id(&self) -> StableCrateId {
102StableCrateId(self.0.split().0)
103 }
104105/// Returns the crate-local part of the [DefPathHash].
106#[inline]
107pub fn local_hash(&self) -> Hash64 {
108self.0.split().1
109}
110111/// Builds a new [DefPathHash] with the given [StableCrateId] and
112 /// `local_hash`, where `local_hash` must be unique within its crate.
113#[inline]
114pub fn new(stable_crate_id: StableCrateId, local_hash: Hash64) -> DefPathHash {
115DefPathHash(Fingerprint::new(stable_crate_id.0, local_hash))
116 }
117}
118119impl Defaultfor DefPathHash {
120fn default() -> Self {
121DefPathHash(Fingerprint::ZERO)
122 }
123}
124125impl StableOrdfor DefPathHash {
126const CAN_USE_UNSTABLE_SORT: bool = true;
127128// `DefPathHash` sort order is not affected by (de)serialization.
129const THIS_IMPLEMENTATION_HAS_BEEN_TRIPLE_CHECKED: () = ();
130}
131132/// A [`StableCrateId`] is a 64-bit hash of a crate name, together with all
133/// `-Cmetadata` arguments, and some other data. It is to [`CrateNum`] what [`DefPathHash`] is to
134/// [`DefId`]. It is stable across compilation sessions.
135///
136/// Since the ID is a hash value, there is a small chance that two crates
137/// end up with the same [`StableCrateId`]. The compiler will check for such
138/// collisions when loading crates and abort compilation in order to avoid
139/// further trouble.
140///
141/// For more information on the possibility of hash collisions in rustc,
142/// see the discussion in [`DefId`].
143#[derive(#[automatically_derived]
impl ::core::marker::Copy for StableCrateId { }Copy, #[automatically_derived]
impl ::core::clone::Clone for StableCrateId {
#[inline]
fn clone(&self) -> StableCrateId {
let _: ::core::clone::AssertParamIsClone<Hash64>;
*self
}
}Clone, #[automatically_derived]
impl ::core::cmp::PartialEq for StableCrateId {
#[inline]
fn eq(&self, other: &StableCrateId) -> bool { self.0 == other.0 }
}PartialEq, #[automatically_derived]
impl ::core::cmp::Eq for StableCrateId {
#[inline]
#[doc(hidden)]
#[coverage(off)]
fn assert_receiver_is_total_eq(&self) -> () {
let _: ::core::cmp::AssertParamIsEq<Hash64>;
}
}Eq, #[automatically_derived]
impl ::core::cmp::PartialOrd for StableCrateId {
#[inline]
fn partial_cmp(&self, other: &StableCrateId)
-> ::core::option::Option<::core::cmp::Ordering> {
::core::cmp::PartialOrd::partial_cmp(&self.0, &other.0)
}
}PartialOrd, #[automatically_derived]
impl ::core::cmp::Ord for StableCrateId {
#[inline]
fn cmp(&self, other: &StableCrateId) -> ::core::cmp::Ordering {
::core::cmp::Ord::cmp(&self.0, &other.0)
}
}Ord, #[automatically_derived]
impl ::core::fmt::Debug for StableCrateId {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
::core::fmt::Formatter::debug_tuple_field1_finish(f, "StableCrateId",
&&self.0)
}
}Debug)]
144#[derive(#[automatically_derived]
impl ::core::hash::Hash for StableCrateId {
#[inline]
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () {
::core::hash::Hash::hash(&self.0, state)
}
}Hash, const _: () =
{
impl<__CTX> ::rustc_data_structures::stable_hasher::HashStable<__CTX>
for StableCrateId where __CTX: crate::HashStableContext {
#[inline]
fn hash_stable(&self, __hcx: &mut __CTX,
__hasher:
&mut ::rustc_data_structures::stable_hasher::StableHasher) {
match *self {
StableCrateId(ref __binding_0) => {
{ __binding_0.hash_stable(__hcx, __hasher); }
}
}
}
}
};HashStable_Generic, const _: () =
{
impl<__E: ::rustc_span::SpanEncoder> ::rustc_serialize::Encodable<__E>
for StableCrateId {
fn encode(&self, __encoder: &mut __E) {
match *self {
StableCrateId(ref __binding_0) => {
::rustc_serialize::Encodable::<__E>::encode(__binding_0,
__encoder);
}
}
}
}
};Encodable, const _: () =
{
impl<__D: ::rustc_span::BlobDecoder> ::rustc_serialize::Decodable<__D>
for StableCrateId {
fn decode(__decoder: &mut __D) -> Self {
StableCrateId(::rustc_serialize::Decodable::decode(__decoder))
}
}
};BlobDecodable)]
145pub struct StableCrateId(pub(crate) Hash64);
146147impl StableCrateId {
148/// Computes the stable ID for a crate with the given name and
149 /// `-Cmetadata` arguments.
150pub fn new(
151 crate_name: Symbol,
152 is_exe: bool,
153mut metadata: Vec<String>,
154 cfg_version: &'static str,
155 ) -> StableCrateId {
156let mut hasher = StableHasher::new();
157// We must hash the string text of the crate name, not the id, as the id is not stable
158 // across builds.
159crate_name.as_str().hash(&mut hasher);
160161// We don't want the stable crate ID to depend on the order of
162 // -C metadata arguments, so sort them:
163metadata.sort();
164// Every distinct -C metadata value is only incorporated once:
165metadata.dedup();
166167hasher.write(b"metadata");
168for s in &metadata {
169// Also incorporate the length of a metadata string, so that we generate
170 // different values for `-Cmetadata=ab -Cmetadata=c` and
171 // `-Cmetadata=a -Cmetadata=bc`
172hasher.write_usize(s.len());
173 hasher.write(s.as_bytes());
174 }
175176// Also incorporate crate type, so that we don't get symbol conflicts when
177 // linking against a library of the same name, if this is an executable.
178hasher.write(if is_exe { b"exe" } else { b"lib" });
179180// Also incorporate the rustc version. Otherwise, with -Zsymbol-mangling-version=v0
181 // and no -Cmetadata, symbols from the same crate compiled with different versions of
182 // rustc are named the same.
183 //
184 // RUSTC_FORCE_RUSTC_VERSION is used to inject rustc version information
185 // during testing.
186if let Some(val) = std::env::var_os("RUSTC_FORCE_RUSTC_VERSION") {
187hasher.write(val.to_string_lossy().into_owned().as_bytes())
188 } else {
189hasher.write(cfg_version.as_bytes())
190 }
191192StableCrateId(hasher.finish())
193 }
194195#[inline]
196pub fn as_u64(self) -> u64 {
197self.0.as_u64()
198 }
199}
200201impl fmt::LowerHexfor StableCrateId {
202fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
203 fmt::LowerHex::fmt(&self.0, f)
204 }
205}
206207impl ::std::fmt::Debug for DefIndex {
fn fmt(&self, fmt: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
fmt.write_fmt(format_args!("DefIndex({0})", self.as_u32()))
}
}rustc_index::newtype_index! {
208/// A DefIndex is an index into the hir-map for a crate, identifying a
209 /// particular definition. It should really be considered an interned
210 /// shorthand for a particular DefPath.
211#[orderable]
212 #[debug_format = "DefIndex({})"]
213pub struct DefIndex {
214/// The crate root is always assigned index 0 by the AST Map code,
215 /// thanks to `NodeCollector::new`.
216const CRATE_DEF_INDEX = 0;
217 }
218}219220/// A `DefId` identifies a particular *definition*, by combining a crate
221/// index and a def index.
222///
223/// You can create a `DefId` from a `LocalDefId` using `local_def_id.to_def_id()`.
224#[derive(#[automatically_derived]
impl ::core::clone::Clone for DefId {
#[inline]
fn clone(&self) -> DefId {
let _: ::core::clone::AssertParamIsClone<DefIndex>;
let _: ::core::clone::AssertParamIsClone<CrateNum>;
*self
}
}Clone, #[automatically_derived]
impl ::core::cmp::PartialEq for DefId {
#[inline]
fn eq(&self, other: &DefId) -> bool {
self.index == other.index && self.krate == other.krate
}
}PartialEq, #[automatically_derived]
impl ::core::cmp::Eq for DefId {
#[inline]
#[doc(hidden)]
#[coverage(off)]
fn assert_receiver_is_total_eq(&self) -> () {
let _: ::core::cmp::AssertParamIsEq<DefIndex>;
let _: ::core::cmp::AssertParamIsEq<CrateNum>;
}
}Eq, #[automatically_derived]
impl ::core::marker::Copy for DefId { }Copy)]
225// On below-64 bit systems we can simply use the derived `Hash` impl
226#[cfg_attr(not(target_pointer_width = "64"), derive(Hash))]
227#[repr(C)]
228#[rustc_pass_by_value]
229// We guarantee field order. Note that the order is essential here, see below why.
230pub struct DefId {
231// cfg-ing the order of fields so that the `DefIndex` which is high entropy always ends up in
232 // the lower bits no matter the endianness. This allows the compiler to turn that `Hash` impl
233 // into a direct call to `u64::hash(_)`.
234#[cfg(not(all(target_pointer_width = "64", target_endian = "big")))]
235pub index: DefIndex,
236pub krate: CrateNum,
237#[cfg(all(target_pointer_width = "64", target_endian = "big"))]
238pub index: DefIndex,
239}
240241// To ensure correctness of incremental compilation,
242// `DefId` must not implement `Ord` or `PartialOrd`.
243// See https://github.com/rust-lang/rust/issues/90317.
244impl !Ordfor DefId {}
245impl !PartialOrdfor DefId {}
246247// On 64-bit systems, we can hash the whole `DefId` as one `u64` instead of two `u32`s. This
248// improves performance without impairing `FxHash` quality. So the below code gets compiled to a
249// noop on little endian systems because the memory layout of `DefId` is as follows:
250//
251// ```
252// +-1--------------31-+-32-------------63-+
253// ! index ! krate !
254// +-------------------+-------------------+
255// ```
256//
257// The order here has direct impact on `FxHash` quality because we have far more `DefIndex` per
258// crate than we have `Crate`s within one compilation. Or in other words, this arrangement puts
259// more entropy in the low bits than the high bits. The reason this matters is that `FxHash`, which
260// is used throughout rustc, has problems distributing the entropy from the high bits, so reversing
261// the order would lead to a large number of collisions and thus far worse performance.
262//
263// On 64-bit big-endian systems, this compiles to a 64-bit rotation by 32 bits, which is still
264// faster than another `FxHash` round.
265#[cfg(target_pointer_width = "64")]
266impl Hashfor DefId {
267fn hash<H: Hasher>(&self, h: &mut H) {
268 (((self.krate.as_u32() as u64) << 32) | (self.index.as_u32() as u64)).hash(h)
269 }
270}
271272impl DefId {
273/// Makes a local `DefId` from the given `DefIndex`.
274#[inline]
275pub fn local(index: DefIndex) -> DefId {
276DefId { krate: LOCAL_CRATE, index }
277 }
278279/// Returns whether the item is defined in the crate currently being compiled.
280#[inline]
281pub fn is_local(self) -> bool {
282self.krate == LOCAL_CRATE283 }
284285#[inline]
286pub fn as_local(self) -> Option<LocalDefId> {
287self.is_local().then(|| LocalDefId { local_def_index: self.index })
288 }
289290#[inline]
291 #[track_caller]
292pub fn expect_local(self) -> LocalDefId {
293// NOTE: `match` below is required to apply `#[track_caller]`,
294 // i.e. don't use closures.
295match self.as_local() {
296Some(local_def_id) => local_def_id,
297None => {
::core::panicking::panic_fmt(format_args!("DefId::expect_local: `{0:?}` isn\'t local",
self));
}panic!("DefId::expect_local: `{self:?}` isn't local"),
298 }
299 }
300301#[inline]
302pub fn is_crate_root(self) -> bool {
303self.index == CRATE_DEF_INDEX304 }
305306#[inline]
307pub fn as_crate_root(self) -> Option<CrateNum> {
308self.is_crate_root().then_some(self.krate)
309 }
310311#[inline]
312pub fn is_top_level_module(self) -> bool {
313self.is_local() && self.is_crate_root()
314 }
315}
316317impl From<LocalDefId> for DefId {
318fn from(local: LocalDefId) -> DefId {
319local.to_def_id()
320 }
321}
322323pub fn default_def_id_debug(def_id: DefId, f: &mut fmt::Formatter<'_>) -> fmt::Result {
324f.debug_struct("DefId").field("krate", &def_id.krate).field("index", &def_id.index).finish()
325}
326327pub static DEF_ID_DEBUG: AtomicRef<fn(DefId, &mut fmt::Formatter<'_>) -> fmt::Result> =
328AtomicRef::new(&(default_def_id_debugas fn(_, &mut fmt::Formatter<'_>) -> _));
329330impl fmt::Debugfor DefId {
331fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
332 (*DEF_ID_DEBUG)(*self, f)
333 }
334}
335336pub type DefIdMap<T> = ::rustc_data_structures::unord::UnordMap<DefId, T>;
pub type DefIdSet = ::rustc_data_structures::unord::UnordSet<DefId>;
pub type DefIdMapEntry<'a, T> =
::rustc_data_structures::fx::StdEntry<'a, DefId, T>;rustc_data_structures::define_id_collections!(DefIdMap, DefIdSet, DefIdMapEntry, DefId);
337338/// A `LocalDefId` is equivalent to a `DefId` with `krate == LOCAL_CRATE`. Since
339/// we encode this information in the type, we can ensure at compile time that
340/// no `DefId`s from upstream crates get thrown into the mix. There are quite a
341/// few cases where we know that only `DefId`s from the local crate are expected;
342/// a `DefId` from a different crate would signify a bug somewhere. This
343/// is when `LocalDefId` comes in handy.
344#[derive(#[automatically_derived]
impl ::core::clone::Clone for LocalDefId {
#[inline]
fn clone(&self) -> LocalDefId {
let _: ::core::clone::AssertParamIsClone<DefIndex>;
*self
}
}Clone, #[automatically_derived]
impl ::core::marker::Copy for LocalDefId { }Copy, #[automatically_derived]
impl ::core::cmp::PartialEq for LocalDefId {
#[inline]
fn eq(&self, other: &LocalDefId) -> bool {
self.local_def_index == other.local_def_index
}
}PartialEq, #[automatically_derived]
impl ::core::cmp::Eq for LocalDefId {
#[inline]
#[doc(hidden)]
#[coverage(off)]
fn assert_receiver_is_total_eq(&self) -> () {
let _: ::core::cmp::AssertParamIsEq<DefIndex>;
}
}Eq, #[automatically_derived]
impl ::core::hash::Hash for LocalDefId {
#[inline]
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () {
::core::hash::Hash::hash(&self.local_def_index, state)
}
}Hash)]
345pub struct LocalDefId {
346pub local_def_index: DefIndex,
347}
348349// To ensure correctness of incremental compilation,
350// `LocalDefId` must not implement `Ord` or `PartialOrd`.
351// See https://github.com/rust-lang/rust/issues/90317.
352impl !Ordfor LocalDefId {}
353impl !PartialOrdfor LocalDefId {}
354355pub const CRATE_DEF_ID: LocalDefId = LocalDefId { local_def_index: CRATE_DEF_INDEX };
356357impl Idxfor LocalDefId {
358#[inline]
359fn new(idx: usize) -> Self {
360LocalDefId { local_def_index: Idx::new(idx) }
361 }
362#[inline]
363fn index(self) -> usize {
364self.local_def_index.index()
365 }
366}
367368impl LocalDefId {
369#[inline]
370pub fn to_def_id(self) -> DefId {
371DefId { krate: LOCAL_CRATE, index: self.local_def_index }
372 }
373374#[inline]
375pub fn is_top_level_module(self) -> bool {
376self == CRATE_DEF_ID377 }
378}
379380impl fmt::Debugfor LocalDefId {
381fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
382self.to_def_id().fmt(f)
383 }
384}
385386impl<E: SpanEncoder> Encodable<E> for LocalDefId {
387fn encode(&self, s: &mut E) {
388self.to_def_id().encode(s);
389 }
390}
391392impl<D: SpanDecoder> Decodable<D> for LocalDefId {
393fn decode(d: &mut D) -> LocalDefId {
394DefId::decode(d).expect_local()
395 }
396}
397398pub type LocalDefIdMap<T> =
::rustc_data_structures::unord::UnordMap<LocalDefId, T>;
pub type LocalDefIdSet = ::rustc_data_structures::unord::UnordSet<LocalDefId>;
pub type LocalDefIdMapEntry<'a, T> =
::rustc_data_structures::fx::StdEntry<'a, LocalDefId, T>;rustc_data_structures::define_id_collections!(
399LocalDefIdMap,
400LocalDefIdSet,
401LocalDefIdMapEntry,
402 LocalDefId
403);
404405impl<CTX: HashStableContext> HashStable<CTX> for DefId {
406#[inline]
407fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) {
408hcx.def_path_hash(*self).hash_stable(hcx, hasher);
409 }
410}
411412impl<CTX: HashStableContext> HashStable<CTX> for LocalDefId {
413#[inline]
414fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) {
415hcx.def_path_hash(self.to_def_id()).local_hash().hash_stable(hcx, hasher);
416 }
417}
418419impl<CTX: HashStableContext> HashStable<CTX> for CrateNum {
420#[inline]
421fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) {
422self.as_def_id().to_stable_hash_key(hcx).stable_crate_id().hash_stable(hcx, hasher);
423 }
424}
425426impl<CTX: HashStableContext> ToStableHashKey<CTX> for DefId {
427type KeyType = DefPathHash;
428429#[inline]
430fn to_stable_hash_key(&self, hcx: &CTX) -> DefPathHash {
431hcx.def_path_hash(*self)
432 }
433}
434435impl<CTX: HashStableContext> ToStableHashKey<CTX> for LocalDefId {
436type KeyType = DefPathHash;
437438#[inline]
439fn to_stable_hash_key(&self, hcx: &CTX) -> DefPathHash {
440hcx.def_path_hash(self.to_def_id())
441 }
442}
443444impl<CTX: HashStableContext> ToStableHashKey<CTX> for CrateNum {
445type KeyType = DefPathHash;
446447#[inline]
448fn to_stable_hash_key(&self, hcx: &CTX) -> DefPathHash {
449self.as_def_id().to_stable_hash_key(hcx)
450 }
451}
452453impl<CTX: HashStableContext> ToStableHashKey<CTX> for DefPathHash {
454type KeyType = DefPathHash;
455456#[inline]
457fn to_stable_hash_key(&self, _: &CTX) -> DefPathHash {
458*self459 }
460}
/// Declares a pair of newtype wrappers (`$Name` over `DefId`, `$LocalName`
/// over `LocalDefId`) with the usual conversions between them. The local
/// wrapper opts out of `Ord`/`PartialOrd` for the same incr-comp reasons as
/// `LocalDefId` itself.
macro_rules! typed_def_id {
    ($Name:ident, $LocalName:ident) => {
        #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Encodable, Decodable, HashStable_Generic)]
        pub struct $Name(DefId);

        impl $Name {
            /// Wraps a `DefId` without checking that it actually refers to
            /// the expected kind of item.
            #[inline]
            pub const fn new_unchecked(def_id: DefId) -> Self {
                Self(def_id)
            }

            #[inline]
            pub fn to_def_id(self) -> DefId {
                self.into()
            }

            #[inline]
            pub fn is_local(self) -> bool {
                self.0.is_local()
            }

            #[inline]
            pub fn as_local(self) -> Option<$LocalName> {
                self.0.as_local().map($LocalName::new_unchecked)
            }
        }

        impl From<$LocalName> for $Name {
            #[inline]
            fn from(local: $LocalName) -> Self {
                Self(local.0.to_def_id())
            }
        }

        impl From<$Name> for DefId {
            #[inline]
            fn from(typed: $Name) -> Self {
                typed.0
            }
        }

        #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Encodable, Decodable, HashStable_Generic)]
        pub struct $LocalName(LocalDefId);

        impl !Ord for $LocalName {}
        impl !PartialOrd for $LocalName {}

        impl $LocalName {
            /// Wraps a `LocalDefId` without checking that it actually refers
            /// to the expected kind of item.
            #[inline]
            pub const fn new_unchecked(def_id: LocalDefId) -> Self {
                Self(def_id)
            }

            #[inline]
            pub fn to_def_id(self) -> DefId {
                self.0.into()
            }

            #[inline]
            pub fn to_local_def_id(self) -> LocalDefId {
                self.0
            }
        }

        impl From<$LocalName> for LocalDefId {
            #[inline]
            fn from(typed: $LocalName) -> Self {
                typed.0
            }
        }

        impl From<$LocalName> for DefId {
            #[inline]
            fn from(typed: $LocalName) -> Self {
                typed.0.into()
            }
        }
    };
}
541542// N.B.: when adding new typed `DefId`s update the corresponding trait impls in
543// `rustc_middle::dep_graph::def_node` for `DepNodeParams`.
544pub struct ModDefId(DefId);
#[automatically_derived]
impl ::core::fmt::Debug for ModDefId {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
::core::fmt::Formatter::debug_tuple_field1_finish(f, "ModDefId",
&&self.0)
}
}
#[automatically_derived]
#[doc(hidden)]
unsafe impl ::core::clone::TrivialClone for ModDefId { }
#[automatically_derived]
impl ::core::clone::Clone for ModDefId {
#[inline]
fn clone(&self) -> ModDefId {
let _: ::core::clone::AssertParamIsClone<DefId>;
*self
}
}
#[automatically_derived]
impl ::core::marker::Copy for ModDefId { }
#[automatically_derived]
impl ::core::marker::StructuralPartialEq for ModDefId { }
#[automatically_derived]
impl ::core::cmp::PartialEq for ModDefId {
#[inline]
fn eq(&self, other: &ModDefId) -> bool { self.0 == other.0 }
}
#[automatically_derived]
impl ::core::cmp::Eq for ModDefId {
#[inline]
#[doc(hidden)]
#[coverage(off)]
fn assert_receiver_is_total_eq(&self) -> () {
let _: ::core::cmp::AssertParamIsEq<DefId>;
}
}
#[automatically_derived]
impl ::core::hash::Hash for ModDefId {
#[inline]
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () {
::core::hash::Hash::hash(&self.0, state)
}
}
const _: () =
{
impl<__E: ::rustc_span::SpanEncoder> ::rustc_serialize::Encodable<__E>
for ModDefId {
fn encode(&self, __encoder: &mut __E) {
match *self {
ModDefId(ref __binding_0) => {
::rustc_serialize::Encodable::<__E>::encode(__binding_0,
__encoder);
}
}
}
}
};
const _: () =
{
impl<__D: ::rustc_span::SpanDecoder> ::rustc_serialize::Decodable<__D>
for ModDefId {
fn decode(__decoder: &mut __D) -> Self {
ModDefId(::rustc_serialize::Decodable::decode(__decoder))
}
}
};
const _: () =
{
impl<__CTX> ::rustc_data_structures::stable_hasher::HashStable<__CTX>
for ModDefId where __CTX: crate::HashStableContext {
#[inline]
fn hash_stable(&self, __hcx: &mut __CTX,
__hasher:
&mut ::rustc_data_structures::stable_hasher::StableHasher) {
match *self {
ModDefId(ref __binding_0) => {
{ __binding_0.hash_stable(__hcx, __hasher); }
}
}
}
}
};
impl ModDefId {
#[inline]
pub const fn new_unchecked(def_id: DefId) -> Self { Self(def_id) }
#[inline]
pub fn to_def_id(self) -> DefId { self.into() }
#[inline]
pub fn is_local(self) -> bool { self.0.is_local() }
#[inline]
pub fn as_local(self) -> Option<LocalModDefId> {
self.0.as_local().map(LocalModDefId::new_unchecked)
}
}
impl From<LocalModDefId> for ModDefId {
#[inline]
fn from(local: LocalModDefId) -> Self { Self(local.0.to_def_id()) }
}
impl From<ModDefId> for DefId {
#[inline]
fn from(typed: ModDefId) -> Self { typed.0 }
}
pub struct LocalModDefId(LocalDefId);
#[automatically_derived]
impl ::core::fmt::Debug for LocalModDefId {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
::core::fmt::Formatter::debug_tuple_field1_finish(f, "LocalModDefId",
&&self.0)
}
}
#[automatically_derived]
#[doc(hidden)]
unsafe impl ::core::clone::TrivialClone for LocalModDefId { }
#[automatically_derived]
impl ::core::clone::Clone for LocalModDefId {
#[inline]
fn clone(&self) -> LocalModDefId {
let _: ::core::clone::AssertParamIsClone<LocalDefId>;
*self
}
}
#[automatically_derived]
impl ::core::marker::Copy for LocalModDefId { }
#[automatically_derived]
impl ::core::marker::StructuralPartialEq for LocalModDefId { }
#[automatically_derived]
impl ::core::cmp::PartialEq for LocalModDefId {
#[inline]
fn eq(&self, other: &LocalModDefId) -> bool { self.0 == other.0 }
}
#[automatically_derived]
impl ::core::cmp::Eq for LocalModDefId {
#[inline]
#[doc(hidden)]
#[coverage(off)]
fn assert_receiver_is_total_eq(&self) -> () {
let _: ::core::cmp::AssertParamIsEq<LocalDefId>;
}
}
#[automatically_derived]
impl ::core::hash::Hash for LocalModDefId {
#[inline]
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () {
::core::hash::Hash::hash(&self.0, state)
}
}
const _: () =
{
impl<__E: ::rustc_span::SpanEncoder> ::rustc_serialize::Encodable<__E>
for LocalModDefId {
fn encode(&self, __encoder: &mut __E) {
match *self {
LocalModDefId(ref __binding_0) => {
::rustc_serialize::Encodable::<__E>::encode(__binding_0,
__encoder);
}
}
}
}
};
const _: () =
{
impl<__D: ::rustc_span::SpanDecoder> ::rustc_serialize::Decodable<__D>
for LocalModDefId {
fn decode(__decoder: &mut __D) -> Self {
LocalModDefId(::rustc_serialize::Decodable::decode(__decoder))
}
}
};
const _: () =
{
impl<__CTX> ::rustc_data_structures::stable_hasher::HashStable<__CTX>
for LocalModDefId where __CTX: crate::HashStableContext {
#[inline]
fn hash_stable(&self, __hcx: &mut __CTX,
__hasher:
&mut ::rustc_data_structures::stable_hasher::StableHasher) {
match *self {
LocalModDefId(ref __binding_0) => {
{ __binding_0.hash_stable(__hcx, __hasher); }
}
}
}
}
};
impl !Ord for LocalModDefId {}
impl !PartialOrd for LocalModDefId {}
impl LocalModDefId {
#[inline]
pub const fn new_unchecked(def_id: LocalDefId) -> Self { Self(def_id) }
#[inline]
pub fn to_def_id(self) -> DefId { self.0.into() }
#[inline]
pub fn to_local_def_id(self) -> LocalDefId { self.0 }
}
impl From<LocalModDefId> for LocalDefId {
#[inline]
fn from(typed: LocalModDefId) -> Self { typed.0 }
}
impl From<LocalModDefId> for DefId {
#[inline]
fn from(typed: LocalModDefId) -> Self { typed.0.into() }
}typed_def_id! { ModDefId, LocalModDefId }545546impl LocalModDefId {
547pub const CRATE_DEF_ID: Self = Self::new_unchecked(CRATE_DEF_ID);
548}
549550impl ModDefId {
551pub fn is_top_level_module(self) -> bool {
552self.0.is_top_level_module()
553 }
554}
555556impl LocalModDefId {
557pub fn is_top_level_module(self) -> bool {
558self.0.is_top_level_module()
559 }
560}