use std::borrow::Cow;
use std::fmt;
use std::hash::Hash;

use rustc_data_structures::base_n::{BaseNString, CASE_INSENSITIVE, ToBaseN};
use rustc_data_structures::fingerprint::Fingerprint;
use rustc_data_structures::fx::FxIndexMap;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher, ToStableHashKey};
use rustc_data_structures::unord::UnordMap;
use rustc_hashes::Hash128;
use rustc_hir::ItemId;
use rustc_hir::attrs::{InlineAttr, Linkage};
use rustc_hir::def_id::{CrateNum, DefId, DefIdSet, LOCAL_CRATE};
use rustc_macros::{HashStable, TyDecodable, TyEncodable};
use rustc_query_system::ich::StableHashingContext;
use rustc_session::config::OptLevel;
use rustc_span::{Span, Symbol};
use rustc_target::spec::SymbolVisibility;
use tracing::debug;

use crate::dep_graph::{DepNode, WorkProduct, WorkProductId};
use crate::middle::codegen_fn_attrs::CodegenFnAttrFlags;
use crate::ty::{self, GenericArgs, Instance, InstanceKind, SymbolName, Ty, TyCtxt};
/// Describes how a monomorphization will be instantiated in object files.
#[derive(PartialEq)]
pub enum InstantiationMode {
    /// There will be exactly one instance of the given MonoItem. It will have
    /// external linkage so that it can be linked to from other codegen units.
    GloballyShared {
        /// In some compilation scenarios we may decide to take functions that
        /// are typically `LocalCopy` and instead move them to `GloballyShared`
        /// to avoid codegenning them a bunch of times. In this situation,
        /// however, our local copy may conflict with other crates also
        /// inlining the same function.
        ///
        /// This flag indicates that this situation is occurring, and informs
        /// symbol name calculation that some extra mangling is needed to
        /// avoid conflicts. Note that this may eventually go away entirely if
        /// ThinLTO enables us to *always* have a globally shared instance of a
        /// function within one crate's compilation.
        may_conflict: bool,
    },

    /// Each codegen unit containing a reference to the given MonoItem will
    /// have its own private copy of the function (with internal linkage).
    LocalCopy,
}
4950#[derive(#[automatically_derived]
impl ::core::cmp::PartialEq for NormalizationErrorInMono {
#[inline]
fn eq(&self, other: &NormalizationErrorInMono) -> bool { true }
}PartialEq, #[automatically_derived]
impl ::core::cmp::Eq for NormalizationErrorInMono {
#[inline]
#[doc(hidden)]
#[coverage(off)]
fn assert_receiver_is_total_eq(&self) -> () {}
}Eq, #[automatically_derived]
impl ::core::clone::Clone for NormalizationErrorInMono {
#[inline]
fn clone(&self) -> NormalizationErrorInMono { *self }
}Clone, #[automatically_derived]
impl ::core::marker::Copy for NormalizationErrorInMono { }Copy, #[automatically_derived]
impl ::core::fmt::Debug for NormalizationErrorInMono {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
::core::fmt::Formatter::write_str(f, "NormalizationErrorInMono")
}
}Debug, #[automatically_derived]
impl ::core::hash::Hash for NormalizationErrorInMono {
#[inline]
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () {}
}Hash, const _: () =
{
impl<'__ctx>
::rustc_data_structures::stable_hasher::HashStable<::rustc_query_system::ich::StableHashingContext<'__ctx>>
for NormalizationErrorInMono {
#[inline]
fn hash_stable(&self,
__hcx:
&mut ::rustc_query_system::ich::StableHashingContext<'__ctx>,
__hasher:
&mut ::rustc_data_structures::stable_hasher::StableHasher) {
match *self { NormalizationErrorInMono => {} }
}
}
};HashStable, const _: () =
{
impl<'tcx, __E: ::rustc_middle::ty::codec::TyEncoder<'tcx>>
::rustc_serialize::Encodable<__E> for NormalizationErrorInMono {
fn encode(&self, __encoder: &mut __E) {
match *self { NormalizationErrorInMono => {} }
}
}
};TyEncodable, const _: () =
{
impl<'tcx, __D: ::rustc_middle::ty::codec::TyDecoder<'tcx>>
::rustc_serialize::Decodable<__D> for NormalizationErrorInMono {
fn decode(__decoder: &mut __D) -> Self {
NormalizationErrorInMono
}
}
};TyDecodable)]
51pub struct NormalizationErrorInMono;
5253#[derive(#[automatically_derived]
impl<'tcx> ::core::cmp::PartialEq for MonoItem<'tcx> {
#[inline]
fn eq(&self, other: &MonoItem<'tcx>) -> bool {
let __self_discr = ::core::intrinsics::discriminant_value(self);
let __arg1_discr = ::core::intrinsics::discriminant_value(other);
__self_discr == __arg1_discr &&
match (self, other) {
(MonoItem::Fn(__self_0), MonoItem::Fn(__arg1_0)) =>
__self_0 == __arg1_0,
(MonoItem::Static(__self_0), MonoItem::Static(__arg1_0)) =>
__self_0 == __arg1_0,
(MonoItem::GlobalAsm(__self_0), MonoItem::GlobalAsm(__arg1_0))
=> __self_0 == __arg1_0,
_ => unsafe { ::core::intrinsics::unreachable() }
}
}
}PartialEq, #[automatically_derived]
impl<'tcx> ::core::cmp::Eq for MonoItem<'tcx> {
#[inline]
#[doc(hidden)]
#[coverage(off)]
fn assert_receiver_is_total_eq(&self) -> () {
let _: ::core::cmp::AssertParamIsEq<Instance<'tcx>>;
let _: ::core::cmp::AssertParamIsEq<DefId>;
let _: ::core::cmp::AssertParamIsEq<ItemId>;
}
}Eq, #[automatically_derived]
impl<'tcx> ::core::clone::Clone for MonoItem<'tcx> {
#[inline]
fn clone(&self) -> MonoItem<'tcx> {
let _: ::core::clone::AssertParamIsClone<Instance<'tcx>>;
let _: ::core::clone::AssertParamIsClone<DefId>;
let _: ::core::clone::AssertParamIsClone<ItemId>;
*self
}
}Clone, #[automatically_derived]
impl<'tcx> ::core::marker::Copy for MonoItem<'tcx> { }Copy, #[automatically_derived]
impl<'tcx> ::core::fmt::Debug for MonoItem<'tcx> {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
match self {
MonoItem::Fn(__self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f, "Fn",
&__self_0),
MonoItem::Static(__self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f, "Static",
&__self_0),
MonoItem::GlobalAsm(__self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f,
"GlobalAsm", &__self_0),
}
}
}Debug, #[automatically_derived]
impl<'tcx> ::core::hash::Hash for MonoItem<'tcx> {
#[inline]
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () {
let __self_discr = ::core::intrinsics::discriminant_value(self);
::core::hash::Hash::hash(&__self_discr, state);
match self {
MonoItem::Fn(__self_0) =>
::core::hash::Hash::hash(__self_0, state),
MonoItem::Static(__self_0) =>
::core::hash::Hash::hash(__self_0, state),
MonoItem::GlobalAsm(__self_0) =>
::core::hash::Hash::hash(__self_0, state),
}
}
}Hash, const _: () =
{
impl<'tcx, '__ctx>
::rustc_data_structures::stable_hasher::HashStable<::rustc_query_system::ich::StableHashingContext<'__ctx>>
for MonoItem<'tcx> {
#[inline]
fn hash_stable(&self,
__hcx:
&mut ::rustc_query_system::ich::StableHashingContext<'__ctx>,
__hasher:
&mut ::rustc_data_structures::stable_hasher::StableHasher) {
::std::mem::discriminant(self).hash_stable(__hcx, __hasher);
match *self {
MonoItem::Fn(ref __binding_0) => {
{ __binding_0.hash_stable(__hcx, __hasher); }
}
MonoItem::Static(ref __binding_0) => {
{ __binding_0.hash_stable(__hcx, __hasher); }
}
MonoItem::GlobalAsm(ref __binding_0) => {
{ __binding_0.hash_stable(__hcx, __hasher); }
}
}
}
}
};HashStable, const _: () =
{
impl<'tcx, __E: ::rustc_middle::ty::codec::TyEncoder<'tcx>>
::rustc_serialize::Encodable<__E> for MonoItem<'tcx> {
fn encode(&self, __encoder: &mut __E) {
let disc =
match *self {
MonoItem::Fn(ref __binding_0) => { 0usize }
MonoItem::Static(ref __binding_0) => { 1usize }
MonoItem::GlobalAsm(ref __binding_0) => { 2usize }
};
::rustc_serialize::Encoder::emit_u8(__encoder, disc as u8);
match *self {
MonoItem::Fn(ref __binding_0) => {
::rustc_serialize::Encodable::<__E>::encode(__binding_0,
__encoder);
}
MonoItem::Static(ref __binding_0) => {
::rustc_serialize::Encodable::<__E>::encode(__binding_0,
__encoder);
}
MonoItem::GlobalAsm(ref __binding_0) => {
::rustc_serialize::Encodable::<__E>::encode(__binding_0,
__encoder);
}
}
}
}
};TyEncodable, const _: () =
{
impl<'tcx, __D: ::rustc_middle::ty::codec::TyDecoder<'tcx>>
::rustc_serialize::Decodable<__D> for MonoItem<'tcx> {
fn decode(__decoder: &mut __D) -> Self {
match ::rustc_serialize::Decoder::read_u8(__decoder) as usize
{
0usize => {
MonoItem::Fn(::rustc_serialize::Decodable::decode(__decoder))
}
1usize => {
MonoItem::Static(::rustc_serialize::Decodable::decode(__decoder))
}
2usize => {
MonoItem::GlobalAsm(::rustc_serialize::Decodable::decode(__decoder))
}
n => {
::core::panicking::panic_fmt(format_args!("invalid enum variant tag while decoding `MonoItem`, expected 0..3, actual {0}",
n));
}
}
}
}
};TyDecodable)]
54pub enum MonoItem<'tcx> {
55 Fn(Instance<'tcx>),
56 Static(DefId),
57 GlobalAsm(ItemId),
58}
5960fn opt_incr_drop_glue_mode<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> InstantiationMode {
61// Non-ADTs can't have a Drop impl. This case is mostly hit by closures whose captures require
62 // dropping.
63let ty::Adt(adt_def, _) = ty.kind() else {
64return InstantiationMode::LocalCopy;
65 };
6667// Types that don't have a direct Drop impl, but have fields that require dropping.
68let Some(dtor) = adt_def.destructor(tcx) else {
69// We use LocalCopy for drops of enums only; this code is inherited from
70 // https://github.com/rust-lang/rust/pull/67332 and the theory is that we get to optimize
71 // out code like drop_in_place(Option::None) before crate-local ThinLTO, which improves
72 // compile time. At the time of writing, simply removing this entire check does seem to
73 // regress incr-opt compile times. But it sure seems like a more sophisticated check could
74 // do better here.
75if adt_def.is_enum() {
76return InstantiationMode::LocalCopy;
77 } else {
78return InstantiationMode::GloballyShared { may_conflict: true };
79 }
80 };
8182// We've gotten to a drop_in_place for a type that directly implements Drop.
83 // The drop glue is a wrapper for the Drop::drop impl, and we are an optimized build, so in an
84 // effort to coordinate with the mode that the actual impl will get, we make the glue also
85 // LocalCopy.
86if tcx.cross_crate_inlinable(dtor.did) {
87 InstantiationMode::LocalCopy88 } else {
89 InstantiationMode::GloballyShared { may_conflict: true }
90 }
91}
9293impl<'tcx> MonoItem<'tcx> {
94/// Returns `true` if the mono item is user-defined (i.e. not compiler-generated, like shims).
95pub fn is_user_defined(&self) -> bool {
96match *self {
97 MonoItem::Fn(instance) => #[allow(non_exhaustive_omitted_patterns)] match instance.def {
InstanceKind::Item(..) => true,
_ => false,
}matches!(instance.def, InstanceKind::Item(..)),
98 MonoItem::Static(..) | MonoItem::GlobalAsm(..) => true,
99 }
100 }
101102// Note: if you change how item size estimates work, you might need to
103 // change NON_INCR_MIN_CGU_SIZE as well.
104pub fn size_estimate(&self, tcx: TyCtxt<'tcx>) -> usize {
105match *self {
106 MonoItem::Fn(instance) => tcx.size_estimate(instance),
107// Conservatively estimate the size of a static declaration or
108 // assembly item to be 1.
109MonoItem::Static(_) | MonoItem::GlobalAsm(_) => 1,
110 }
111 }
112113pub fn is_generic_fn(&self) -> bool {
114match self {
115 MonoItem::Fn(instance) => instance.args.non_erasable_generics().next().is_some(),
116 MonoItem::Static(..) | MonoItem::GlobalAsm(..) => false,
117 }
118 }
119120pub fn symbol_name(&self, tcx: TyCtxt<'tcx>) -> SymbolName<'tcx> {
121match *self {
122 MonoItem::Fn(instance) => tcx.symbol_name(instance),
123 MonoItem::Static(def_id) => tcx.symbol_name(Instance::mono(tcx, def_id)),
124 MonoItem::GlobalAsm(item_id) => {
125SymbolName::new(tcx, &::alloc::__export::must_use({
::alloc::fmt::format(format_args!("global_asm_{0:?}",
item_id.owner_id))
})format!("global_asm_{:?}", item_id.owner_id))
126 }
127 }
128 }
129130pub fn instantiation_mode(&self, tcx: TyCtxt<'tcx>) -> InstantiationMode {
131// The case handling here is written in the same style as cross_crate_inlinable, we first
132 // handle the cases where we must use a particular instantiation mode, then cascade down
133 // through a sequence of heuristics.
134135 // The first thing we do is detect MonoItems which we must instantiate exactly once in the
136 // whole program.
137138 // Statics and global_asm! must be instantiated exactly once.
139let instance = match *self {
140 MonoItem::Fn(instance) => instance,
141 MonoItem::Static(..) | MonoItem::GlobalAsm(..) => {
142return InstantiationMode::GloballyShared { may_conflict: false };
143 }
144 };
145146// Similarly, the executable entrypoint must be instantiated exactly once.
147if tcx.is_entrypoint(instance.def_id()) {
148return InstantiationMode::GloballyShared { may_conflict: false };
149 }
150151// If the function is #[naked] or contains any other attribute that requires exactly-once
152 // instantiation:
153 // We emit an unused_attributes lint for this case, which should be kept in sync if possible.
154let codegen_fn_attrs = tcx.codegen_instance_attrs(instance.def);
155if codegen_fn_attrs.contains_extern_indicator()
156 || codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::NAKED)
157 {
158return InstantiationMode::GloballyShared { may_conflict: false };
159 }
160161// This is technically a heuristic even though it's in the "not a heuristic" part of
162 // instantiation mode selection.
163 // It is surely possible to untangle this; the root problem is that the way we instantiate
164 // InstanceKind other than Item is very complicated.
165 //
166 // The fallback case is to give everything else GloballyShared at OptLevel::No and
167 // LocalCopy at all other opt levels. This is a good default, except for one specific build
168 // configuration: Optimized incremental builds.
169 // In the current compiler architecture there is a fundamental tension between
170 // optimizations (which want big CGUs with as many things LocalCopy as possible) and
171 // incrementality (which wants small CGUs with as many things GloballyShared as possible).
172 // The heuristics implemented here do better than a completely naive approach in the
173 // compiler benchmark suite, but there is no reason to believe they are optimal.
174if let InstanceKind::DropGlue(_, Some(ty)) = instance.def {
175if tcx.sess.opts.optimize == OptLevel::No {
176return InstantiationMode::GloballyShared { may_conflict: false };
177 }
178if tcx.sess.opts.incremental.is_none() {
179return InstantiationMode::LocalCopy;
180 }
181return opt_incr_drop_glue_mode(tcx, ty);
182 }
183184// We need to ensure that we do not decide the InstantiationMode of an exported symbol is
185 // LocalCopy. Since exported symbols are computed based on the output of
186 // cross_crate_inlinable, we are beholden to our previous decisions.
187 //
188 // Note that just like above, this check for requires_inline is technically a heuristic
189 // even though it's in the "not a heuristic" part of instantiation mode selection.
190if !tcx.cross_crate_inlinable(instance.def_id()) && !instance.def.requires_inline(tcx) {
191return InstantiationMode::GloballyShared { may_conflict: false };
192 }
193194// Beginning of heuristics. The handling of link-dead-code and inline(always) are QoL only,
195 // the compiler should not crash and linkage should work, but codegen may be undesirable.
196197 // -Clink-dead-code was given an unfortunate name; the point of the flag is to assist
198 // coverage tools which rely on having every function in the program appear in the
199 // generated code. If we select LocalCopy, functions which are not used because they are
200 // missing test coverage will disappear from such coverage reports, defeating the point.
201 // Note that -Cinstrument-coverage does not require such assistance from us, only coverage
202 // tools implemented without compiler support ironically require a special compiler flag.
203if tcx.sess.link_dead_code() {
204return InstantiationMode::GloballyShared { may_conflict: true };
205 }
206207// To ensure that #[inline(always)] can be inlined as much as possible, especially in unoptimized
208 // builds, we always select LocalCopy.
209if codegen_fn_attrs.inline.always() {
210return InstantiationMode::LocalCopy;
211 }
212213// #[inline(never)] functions in general are poor candidates for inlining and thus since
214 // LocalCopy generally increases code size for the benefit of optimizations from inlining,
215 // we want to give them GloballyShared codegen.
216 // The slight problem is that generic functions need to always support cross-crate
217 // compilation, so all previous stages of the compiler are obligated to treat generic
218 // functions the same as those that unconditionally get LocalCopy codegen. It's only when
219 // we get here that we can at least not codegen a #[inline(never)] generic function in all
220 // of our CGUs.
221if let InlineAttr::Never = codegen_fn_attrs.inline
222 && self.is_generic_fn()
223 {
224return InstantiationMode::GloballyShared { may_conflict: true };
225 }
226227// The fallthrough case is to generate LocalCopy for all optimized builds, and
228 // GloballyShared with conflict prevention when optimizations are disabled.
229match tcx.sess.opts.optimize {
230 OptLevel::No => InstantiationMode::GloballyShared { may_conflict: true },
231_ => InstantiationMode::LocalCopy,
232 }
233 }
234235pub fn explicit_linkage(&self, tcx: TyCtxt<'tcx>) -> Option<Linkage> {
236let instance_kind = match *self {
237 MonoItem::Fn(ref instance) => instance.def,
238 MonoItem::Static(def_id) => InstanceKind::Item(def_id),
239 MonoItem::GlobalAsm(..) => return None,
240 };
241242tcx.codegen_instance_attrs(instance_kind).linkage
243 }
244245/// Returns `true` if this instance is instantiable - whether it has no unsatisfied
246 /// predicates.
247 ///
248 /// In order to codegen an item, all of its predicates must hold, because
249 /// otherwise the item does not make sense. Type-checking ensures that
250 /// the predicates of every item that is *used by* a valid item *do*
251 /// hold, so we can rely on that.
252 ///
253 /// However, we codegen collector roots (reachable items) and functions
254 /// in vtables when they are seen, even if they are not used, and so they
255 /// might not be instantiable. For example, a programmer can define this
256 /// public function:
257 ///
258 /// pub fn foo<'a>(s: &'a mut ()) where &'a mut (): Clone {
259 /// <&mut () as Clone>::clone(&s);
260 /// }
261 ///
262 /// That function can't be codegened, because the method `<&mut () as Clone>::clone`
263 /// does not exist. Luckily for us, that function can't ever be used,
264 /// because that would require for `&'a mut (): Clone` to hold, so we
265 /// can just not emit any code, or even a linker reference for it.
266 ///
267 /// Similarly, if a vtable method has such a signature, and therefore can't
268 /// be used, we can just not emit it and have a placeholder (a null pointer,
269 /// which will never be accessed) in its place.
270pub fn is_instantiable(&self, tcx: TyCtxt<'tcx>) -> bool {
271{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_middle/src/mir/mono.rs:271",
"rustc_middle::mir::mono", ::tracing::Level::DEBUG,
::tracing_core::__macro_support::Option::Some("compiler/rustc_middle/src/mir/mono.rs"),
::tracing_core::__macro_support::Option::Some(271u32),
::tracing_core::__macro_support::Option::Some("rustc_middle::mir::mono"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::DEBUG <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::DEBUG <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("is_instantiable({0:?})",
self) as &dyn Value))])
});
} else { ; }
};debug!("is_instantiable({:?})", self);
272let (def_id, args) = match *self {
273 MonoItem::Fn(ref instance) => (instance.def_id(), instance.args),
274 MonoItem::Static(def_id) => (def_id, GenericArgs::empty()),
275// global asm never has predicates
276MonoItem::GlobalAsm(..) => return true,
277 };
278279 !tcx.instantiate_and_check_impossible_predicates((def_id, &args))
280 }
281282pub fn local_span(&self, tcx: TyCtxt<'tcx>) -> Option<Span> {
283match *self {
284 MonoItem::Fn(Instance { def, .. }) => def.def_id().as_local(),
285 MonoItem::Static(def_id) => def_id.as_local(),
286 MonoItem::GlobalAsm(item_id) => Some(item_id.owner_id.def_id),
287 }
288 .map(|def_id| tcx.def_span(def_id))
289 }
290291// Only used by rustc_codegen_cranelift
292pub fn codegen_dep_node(&self, tcx: TyCtxt<'tcx>) -> DepNode {
293crate::dep_graph::make_compile_mono_item(tcx, self)
294 }
295296/// Returns the item's `CrateNum`
297pub fn krate(&self) -> CrateNum {
298match self {
299 MonoItem::Fn(instance) => instance.def_id().krate,
300 MonoItem::Static(def_id) => def_id.krate,
301 MonoItem::GlobalAsm(..) => LOCAL_CRATE,
302 }
303 }
304305/// Returns the item's `DefId`
306pub fn def_id(&self) -> DefId {
307match *self {
308 MonoItem::Fn(Instance { def, .. }) => def.def_id(),
309 MonoItem::Static(def_id) => def_id,
310 MonoItem::GlobalAsm(item_id) => item_id.owner_id.to_def_id(),
311 }
312 }
313}
314315impl<'tcx> fmt::Displayfor MonoItem<'tcx> {
316fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
317match *self {
318 MonoItem::Fn(instance) => f.write_fmt(format_args!("fn {0}", instance))write!(f, "fn {instance}"),
319 MonoItem::Static(def_id) => {
320f.write_fmt(format_args!("static {0}",
Instance::new_raw(def_id, GenericArgs::empty())))write!(f, "static {}", Instance::new_raw(def_id, GenericArgs::empty()))321 }
322 MonoItem::GlobalAsm(..) => f.write_fmt(format_args!("global_asm"))write!(f, "global_asm"),
323 }
324 }
325}
326327impl ToStableHashKey<StableHashingContext<'_>> for MonoItem<'_> {
328type KeyType = Fingerprint;
329330fn to_stable_hash_key(&self, hcx: &StableHashingContext<'_>) -> Self::KeyType {
331let mut hasher = StableHasher::new();
332self.hash_stable(&mut hcx.clone(), &mut hasher);
333hasher.finish()
334 }
335}
336337#[derive(#[automatically_derived]
impl<'tcx> ::core::fmt::Debug for MonoItemPartitions<'tcx> {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
::core::fmt::Formatter::debug_struct_field2_finish(f,
"MonoItemPartitions", "codegen_units", &self.codegen_units,
"all_mono_items", &&self.all_mono_items)
}
}Debug, const _: () =
{
impl<'tcx, '__ctx>
::rustc_data_structures::stable_hasher::HashStable<::rustc_query_system::ich::StableHashingContext<'__ctx>>
for MonoItemPartitions<'tcx> {
#[inline]
fn hash_stable(&self,
__hcx:
&mut ::rustc_query_system::ich::StableHashingContext<'__ctx>,
__hasher:
&mut ::rustc_data_structures::stable_hasher::StableHasher) {
match *self {
MonoItemPartitions {
codegen_units: ref __binding_0,
all_mono_items: ref __binding_1 } => {
{ __binding_0.hash_stable(__hcx, __hasher); }
{ __binding_1.hash_stable(__hcx, __hasher); }
}
}
}
}
};HashStable, #[automatically_derived]
impl<'tcx> ::core::marker::Copy for MonoItemPartitions<'tcx> { }Copy, #[automatically_derived]
impl<'tcx> ::core::clone::Clone for MonoItemPartitions<'tcx> {
#[inline]
fn clone(&self) -> MonoItemPartitions<'tcx> {
let _: ::core::clone::AssertParamIsClone<&'tcx [CodegenUnit<'tcx>]>;
let _: ::core::clone::AssertParamIsClone<&'tcx DefIdSet>;
*self
}
}Clone)]
338pub struct MonoItemPartitions<'tcx> {
339pub codegen_units: &'tcx [CodegenUnit<'tcx>],
340pub all_mono_items: &'tcx DefIdSet,
341}
342343#[derive(#[automatically_derived]
impl<'tcx> ::core::fmt::Debug for CodegenUnit<'tcx> {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
::core::fmt::Formatter::debug_struct_field5_finish(f, "CodegenUnit",
"name", &self.name, "items", &self.items, "size_estimate",
&self.size_estimate, "primary", &self.primary,
"is_code_coverage_dead_code_cgu",
&&self.is_code_coverage_dead_code_cgu)
}
}Debug, const _: () =
{
impl<'tcx, '__ctx>
::rustc_data_structures::stable_hasher::HashStable<::rustc_query_system::ich::StableHashingContext<'__ctx>>
for CodegenUnit<'tcx> {
#[inline]
fn hash_stable(&self,
__hcx:
&mut ::rustc_query_system::ich::StableHashingContext<'__ctx>,
__hasher:
&mut ::rustc_data_structures::stable_hasher::StableHasher) {
match *self {
CodegenUnit {
name: ref __binding_0,
items: ref __binding_1,
size_estimate: ref __binding_2,
primary: ref __binding_3,
is_code_coverage_dead_code_cgu: ref __binding_4 } => {
{ __binding_0.hash_stable(__hcx, __hasher); }
{ __binding_1.hash_stable(__hcx, __hasher); }
{ __binding_2.hash_stable(__hcx, __hasher); }
{ __binding_3.hash_stable(__hcx, __hasher); }
{ __binding_4.hash_stable(__hcx, __hasher); }
}
}
}
}
};HashStable)]
344pub struct CodegenUnit<'tcx> {
345/// A name for this CGU. Incremental compilation requires that
346 /// name be unique amongst **all** crates. Therefore, it should
347 /// contain something unique to this crate (e.g., a module path)
348 /// as well as the crate name and disambiguator.
349name: Symbol,
350 items: FxIndexMap<MonoItem<'tcx>, MonoItemData>,
351 size_estimate: usize,
352 primary: bool,
353/// True if this is CGU is used to hold code coverage information for dead code,
354 /// false otherwise.
355is_code_coverage_dead_code_cgu: bool,
356}
357358/// Auxiliary info about a `MonoItem`.
359#[derive(#[automatically_derived]
impl ::core::marker::Copy for MonoItemData { }Copy, #[automatically_derived]
impl ::core::clone::Clone for MonoItemData {
#[inline]
fn clone(&self) -> MonoItemData {
let _: ::core::clone::AssertParamIsClone<bool>;
let _: ::core::clone::AssertParamIsClone<Linkage>;
let _: ::core::clone::AssertParamIsClone<Visibility>;
let _: ::core::clone::AssertParamIsClone<usize>;
*self
}
}Clone, #[automatically_derived]
impl ::core::cmp::PartialEq for MonoItemData {
#[inline]
fn eq(&self, other: &MonoItemData) -> bool {
self.inlined == other.inlined && self.linkage == other.linkage &&
self.visibility == other.visibility &&
self.size_estimate == other.size_estimate
}
}PartialEq, #[automatically_derived]
impl ::core::fmt::Debug for MonoItemData {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
::core::fmt::Formatter::debug_struct_field4_finish(f, "MonoItemData",
"inlined", &self.inlined, "linkage", &self.linkage, "visibility",
&self.visibility, "size_estimate", &&self.size_estimate)
}
}Debug, const _: () =
{
impl<'__ctx>
::rustc_data_structures::stable_hasher::HashStable<::rustc_query_system::ich::StableHashingContext<'__ctx>>
for MonoItemData {
#[inline]
fn hash_stable(&self,
__hcx:
&mut ::rustc_query_system::ich::StableHashingContext<'__ctx>,
__hasher:
&mut ::rustc_data_structures::stable_hasher::StableHasher) {
match *self {
MonoItemData {
inlined: ref __binding_0,
linkage: ref __binding_1,
visibility: ref __binding_2,
size_estimate: ref __binding_3 } => {
{ __binding_0.hash_stable(__hcx, __hasher); }
{ __binding_1.hash_stable(__hcx, __hasher); }
{ __binding_2.hash_stable(__hcx, __hasher); }
{ __binding_3.hash_stable(__hcx, __hasher); }
}
}
}
}
};HashStable)]
360pub struct MonoItemData {
361/// A cached copy of the result of `MonoItem::instantiation_mode`, where
362 /// `GloballyShared` maps to `false` and `LocalCopy` maps to `true`.
363pub inlined: bool,
364365pub linkage: Linkage,
366pub visibility: Visibility,
367368/// A cached copy of the result of `MonoItem::size_estimate`.
369pub size_estimate: usize,
370}
371372/// Specifies the symbol visibility with regards to dynamic linking.
373///
374/// Visibility doesn't have any effect when linkage is internal.
375///
376/// DSO means dynamic shared object, that is a dynamically linked executable or dylib.
377#[derive(#[automatically_derived]
impl ::core::marker::Copy for Visibility { }Copy, #[automatically_derived]
impl ::core::clone::Clone for Visibility {
#[inline]
fn clone(&self) -> Visibility { *self }
}Clone, #[automatically_derived]
impl ::core::cmp::PartialEq for Visibility {
#[inline]
fn eq(&self, other: &Visibility) -> bool {
let __self_discr = ::core::intrinsics::discriminant_value(self);
let __arg1_discr = ::core::intrinsics::discriminant_value(other);
__self_discr == __arg1_discr
}
}PartialEq, #[automatically_derived]
impl ::core::fmt::Debug for Visibility {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
::core::fmt::Formatter::write_str(f,
match self {
Visibility::Default => "Default",
Visibility::Hidden => "Hidden",
Visibility::Protected => "Protected",
})
}
}Debug, const _: () =
{
impl<'__ctx>
::rustc_data_structures::stable_hasher::HashStable<::rustc_query_system::ich::StableHashingContext<'__ctx>>
for Visibility {
#[inline]
fn hash_stable(&self,
__hcx:
&mut ::rustc_query_system::ich::StableHashingContext<'__ctx>,
__hasher:
&mut ::rustc_data_structures::stable_hasher::StableHasher) {
::std::mem::discriminant(self).hash_stable(__hcx, __hasher);
match *self {
Visibility::Default => {}
Visibility::Hidden => {}
Visibility::Protected => {}
}
}
}
};HashStable, const _: () =
{
impl<'tcx, __E: ::rustc_middle::ty::codec::TyEncoder<'tcx>>
::rustc_serialize::Encodable<__E> for Visibility {
fn encode(&self, __encoder: &mut __E) {
let disc =
match *self {
Visibility::Default => { 0usize }
Visibility::Hidden => { 1usize }
Visibility::Protected => { 2usize }
};
::rustc_serialize::Encoder::emit_u8(__encoder, disc as u8);
match *self {
Visibility::Default => {}
Visibility::Hidden => {}
Visibility::Protected => {}
}
}
}
};TyEncodable, const _: () =
{
impl<'tcx, __D: ::rustc_middle::ty::codec::TyDecoder<'tcx>>
::rustc_serialize::Decodable<__D> for Visibility {
fn decode(__decoder: &mut __D) -> Self {
match ::rustc_serialize::Decoder::read_u8(__decoder) as usize
{
0usize => { Visibility::Default }
1usize => { Visibility::Hidden }
2usize => { Visibility::Protected }
n => {
::core::panicking::panic_fmt(format_args!("invalid enum variant tag while decoding `Visibility`, expected 0..3, actual {0}",
n));
}
}
}
}
};TyDecodable)]
378pub enum Visibility {
379/// Export the symbol from the DSO and apply overrides of the symbol by outside DSOs to within
380 /// the DSO if the object file format supports this.
381Default,
382/// Hide the symbol outside of the defining DSO even when external linkage is used to export it
383 /// from the object file.
384Hidden,
385/// Export the symbol from the DSO, but don't apply overrides of the symbol by outside DSOs to
386 /// within the DSO. Equivalent to default visibility with object file formats that don't support
387 /// overriding exported symbols by another DSO.
388Protected,
389}
390391impl From<SymbolVisibility> for Visibility {
392fn from(value: SymbolVisibility) -> Self {
393match value {
394 SymbolVisibility::Hidden => Visibility::Hidden,
395 SymbolVisibility::Protected => Visibility::Protected,
396 SymbolVisibility::Interposable => Visibility::Default,
397 }
398 }
399}
400401impl<'tcx> CodegenUnit<'tcx> {
402#[inline]
403pub fn new(name: Symbol) -> CodegenUnit<'tcx> {
404CodegenUnit {
405name,
406 items: Default::default(),
407 size_estimate: 0,
408 primary: false,
409 is_code_coverage_dead_code_cgu: false,
410 }
411 }
412413pub fn name(&self) -> Symbol {
414self.name
415 }
416417pub fn set_name(&mut self, name: Symbol) {
418self.name = name;
419 }
420421pub fn is_primary(&self) -> bool {
422self.primary
423 }
424425pub fn make_primary(&mut self) {
426self.primary = true;
427 }
428429pub fn items(&self) -> &FxIndexMap<MonoItem<'tcx>, MonoItemData> {
430&self.items
431 }
432433pub fn items_mut(&mut self) -> &mut FxIndexMap<MonoItem<'tcx>, MonoItemData> {
434&mut self.items
435 }
436437pub fn is_code_coverage_dead_code_cgu(&self) -> bool {
438self.is_code_coverage_dead_code_cgu
439 }
440441/// Marks this CGU as the one used to contain code coverage information for dead code.
442pub fn make_code_coverage_dead_code_cgu(&mut self) {
443self.is_code_coverage_dead_code_cgu = true;
444 }
445446pub fn mangle_name(human_readable_name: &str) -> BaseNString {
447let mut hasher = StableHasher::new();
448human_readable_name.hash(&mut hasher);
449let hash: Hash128 = hasher.finish();
450hash.as_u128().to_base_fixed_len(CASE_INSENSITIVE)
451 }
452453pub fn shorten_name(human_readable_name: &str) -> Cow<'_, str> {
454// Set a limit a somewhat below the common platform limits for file names.
455const MAX_CGU_NAME_LENGTH: usize = 200;
456const TRUNCATED_NAME_PREFIX: &str = "-trunc-";
457if human_readable_name.len() > MAX_CGU_NAME_LENGTH {
458let mangled_name = Self::mangle_name(human_readable_name);
459// Determine a safe byte offset to truncate the name to
460let truncate_to = human_readable_name.floor_char_boundary(
461MAX_CGU_NAME_LENGTH - TRUNCATED_NAME_PREFIX.len() - mangled_name.len(),
462 );
463::alloc::__export::must_use({
::alloc::fmt::format(format_args!("{0}{1}{2}",
&human_readable_name[..truncate_to], TRUNCATED_NAME_PREFIX,
mangled_name))
})format!(
464"{}{}{}",
465&human_readable_name[..truncate_to],
466 TRUNCATED_NAME_PREFIX,
467 mangled_name
468 )469 .into()
470 } else {
471// If the name is short enough, we can just return it as is.
472human_readable_name.into()
473 }
474 }
475476pub fn compute_size_estimate(&mut self) {
477// The size of a codegen unit as the sum of the sizes of the items
478 // within it.
479self.size_estimate = self.items.values().map(|data| data.size_estimate).sum();
480 }
481482/// Should only be called if [`compute_size_estimate`] has previously been called.
483 ///
484 /// [`compute_size_estimate`]: Self::compute_size_estimate
485#[inline]
486pub fn size_estimate(&self) -> usize {
487// Items are never zero-sized, so if we have items the estimate must be
488 // non-zero, unless we forgot to call `compute_size_estimate` first.
489if !(self.items.is_empty() || self.size_estimate != 0) {
::core::panicking::panic("assertion failed: self.items.is_empty() || self.size_estimate != 0")
};assert!(self.items.is_empty() || self.size_estimate != 0);
490self.size_estimate
491 }
492493pub fn contains_item(&self, item: &MonoItem<'tcx>) -> bool {
494self.items().contains_key(item)
495 }
496497pub fn work_product_id(&self) -> WorkProductId {
498WorkProductId::from_cgu_name(self.name().as_str())
499 }
500501pub fn previous_work_product(&self, tcx: TyCtxt<'_>) -> WorkProduct {
502let work_product_id = self.work_product_id();
503tcx.dep_graph
504 .previous_work_product(&work_product_id)
505 .unwrap_or_else(|| {
::core::panicking::panic_fmt(format_args!("Could not find work-product for CGU `{0}`",
self.name()));
}panic!("Could not find work-product for CGU `{}`", self.name()))
506 }
507508pub fn items_in_deterministic_order(
509&self,
510 tcx: TyCtxt<'tcx>,
511 ) -> Vec<(MonoItem<'tcx>, MonoItemData)> {
512// The codegen tests rely on items being process in the same order as
513 // they appear in the file, so for local items, we sort by span first
514#[derive(#[automatically_derived]
impl<'tcx> ::core::cmp::PartialEq for ItemSortKey<'tcx> {
#[inline]
fn eq(&self, other: &ItemSortKey<'tcx>) -> bool {
self.0 == other.0 && self.1 == other.1
}
}PartialEq, #[automatically_derived]
impl<'tcx> ::core::cmp::Eq for ItemSortKey<'tcx> {
#[inline]
#[doc(hidden)]
#[coverage(off)]
fn assert_receiver_is_total_eq(&self) -> () {
let _: ::core::cmp::AssertParamIsEq<Option<Span>>;
let _: ::core::cmp::AssertParamIsEq<SymbolName<'tcx>>;
}
}Eq, #[automatically_derived]
impl<'tcx> ::core::cmp::PartialOrd for ItemSortKey<'tcx> {
#[inline]
fn partial_cmp(&self, other: &ItemSortKey<'tcx>)
-> ::core::option::Option<::core::cmp::Ordering> {
match ::core::cmp::PartialOrd::partial_cmp(&self.0, &other.0) {
::core::option::Option::Some(::core::cmp::Ordering::Equal) =>
::core::cmp::PartialOrd::partial_cmp(&self.1, &other.1),
cmp => cmp,
}
}
}PartialOrd, #[automatically_derived]
impl<'tcx> ::core::cmp::Ord for ItemSortKey<'tcx> {
#[inline]
fn cmp(&self, other: &ItemSortKey<'tcx>) -> ::core::cmp::Ordering {
match ::core::cmp::Ord::cmp(&self.0, &other.0) {
::core::cmp::Ordering::Equal =>
::core::cmp::Ord::cmp(&self.1, &other.1),
cmp => cmp,
}
}
}Ord)]
515struct ItemSortKey<'tcx>(Option<Span>, SymbolName<'tcx>);
516517// We only want to take HirIds of user-defines instances into account.
518 // The others don't matter for the codegen tests and can even make item
519 // order unstable.
520fn local_item_id<'tcx>(item: MonoItem<'tcx>) -> Option<DefId> {
521match item {
522 MonoItem::Fn(ref instance) => match instance.def {
523 InstanceKind::Item(def) => def.as_local().map(|_| def),
524 InstanceKind::VTableShim(..)
525 | InstanceKind::ReifyShim(..)
526 | InstanceKind::Intrinsic(..)
527 | InstanceKind::FnPtrShim(..)
528 | InstanceKind::Virtual(..)
529 | InstanceKind::ClosureOnceShim { .. }
530 | InstanceKind::ConstructCoroutineInClosureShim { .. }
531 | InstanceKind::DropGlue(..)
532 | InstanceKind::CloneShim(..)
533 | InstanceKind::ThreadLocalShim(..)
534 | InstanceKind::FnPtrAddrShim(..)
535 | InstanceKind::AsyncDropGlue(..)
536 | InstanceKind::FutureDropPollShim(..)
537 | InstanceKind::AsyncDropGlueCtorShim(..) => None,
538 },
539 MonoItem::Static(def_id) => def_id.as_local().map(|_| def_id),
540 MonoItem::GlobalAsm(item_id) => Some(item_id.owner_id.def_id.to_def_id()),
541 }
542 }
543fn item_sort_key<'tcx>(tcx: TyCtxt<'tcx>, item: MonoItem<'tcx>) -> ItemSortKey<'tcx> {
544ItemSortKey(
545local_item_id(item)
546 .map(|def_id| tcx.def_span(def_id).find_ancestor_not_from_macro())
547 .flatten(),
548item.symbol_name(tcx),
549 )
550 }
551552let mut items: Vec<_> = self.items().iter().map(|(&i, &data)| (i, data)).collect();
553if !tcx.sess.opts.unstable_opts.codegen_source_order {
554// In this case, we do not need to keep the items in any specific order, as the input
555 // is already deterministic.
556 //
557 // However, it seems that moving related things (such as different
558 // monomorphizations of the same function) close to one another is actually beneficial
559 // for LLVM performance.
560 // LLVM will codegen the items in the order we pass them to it, and when it handles
561 // similar things in succession, it seems that it leads to better cache utilization,
562 // less branch mispredictions and in general to better performance.
563 // For example, if we have functions `a`, `c::<u32>`, `b`, `c::<i16>`, `d` and
564 // `c::<bool>`, it seems that it helps LLVM's performance to codegen the three `c`
565 // instantiations right after one another, as they will likely reference similar types,
566 // call similar functions, etc.
567 //
568 // See https://github.com/rust-lang/rust/pull/145358 for more details.
569 //
570 // Sorting by symbol name should not incur any new non-determinism.
571items.sort_by_cached_key(|&(i, _)| i.symbol_name(tcx));
572 } else {
573items.sort_by_cached_key(|&(i, _)| item_sort_key(tcx, i));
574 }
575items576 }
577578pub fn codegen_dep_node(&self, tcx: TyCtxt<'tcx>) -> DepNode {
579crate::dep_graph::make_compile_codegen_unit(tcx, self.name())
580 }
581}
582583impl ToStableHashKey<StableHashingContext<'_>> for CodegenUnit<'_> {
584type KeyType = String;
585586fn to_stable_hash_key(&self, _: &StableHashingContext<'_>) -> Self::KeyType {
587// Codegen unit names are conceptually required to be stable across
588 // compilation session so that object file names match up.
589self.name.to_string()
590 }
591}
/// Helper for constructing CGU names; see [`CodegenUnitNameBuilder::build_cgu_name`]
/// for the naming scheme and its requirements.
pub struct CodegenUnitNameBuilder<'tcx> {
    tcx: TyCtxt<'tcx>,
    // Memoizes the per-crate name prefix so repeated build_cgu_name calls for
    // the same crate don't recompute it.
    cache: UnordMap<CrateNum, String>,
}
597598impl<'tcx> CodegenUnitNameBuilder<'tcx> {
599pub fn new(tcx: TyCtxt<'tcx>) -> Self {
600CodegenUnitNameBuilder { tcx, cache: Default::default() }
601 }
602603/// CGU names should fulfill the following requirements:
604 /// - They should be able to act as a file name on any kind of file system
605 /// - They should not collide with other CGU names, even for different versions
606 /// of the same crate.
607 ///
608 /// Consequently, we don't use special characters except for '.' and '-' and we
609 /// prefix each name with the crate-name and crate-disambiguator.
610 ///
611 /// This function will build CGU names of the form:
612 ///
613 /// ```text
614 /// <crate-name>.<crate-disambiguator>[-in-<local-crate-id>](-<component>)*[.<special-suffix>]
615 /// <local-crate-id> = <local-crate-name>.<local-crate-disambiguator>
616 /// ```
617 ///
618 /// The '.' before `<special-suffix>` makes sure that names with a special
619 /// suffix can never collide with a name built out of regular Rust
620 /// identifiers (e.g., module paths).
621pub fn build_cgu_name<I, C, S>(
622&mut self,
623 cnum: CrateNum,
624 components: I,
625 special_suffix: Option<S>,
626 ) -> Symbol627where
628I: IntoIterator<Item = C>,
629 C: fmt::Display,
630 S: fmt::Display,
631 {
632let cgu_name = self.build_cgu_name_no_mangle(cnum, components, special_suffix);
633634if self.tcx.sess.opts.unstable_opts.human_readable_cgu_names {
635Symbol::intern(&CodegenUnit::shorten_name(cgu_name.as_str()))
636 } else {
637Symbol::intern(&CodegenUnit::mangle_name(cgu_name.as_str()))
638 }
639 }
640641/// Same as `CodegenUnit::build_cgu_name()` but will never mangle the
642 /// resulting name.
643pub fn build_cgu_name_no_mangle<I, C, S>(
644&mut self,
645 cnum: CrateNum,
646 components: I,
647 special_suffix: Option<S>,
648 ) -> Symbol649where
650I: IntoIterator<Item = C>,
651 C: fmt::Display,
652 S: fmt::Display,
653 {
654use std::fmt::Write;
655656let mut cgu_name = String::with_capacity(64);
657658// Start out with the crate name and disambiguator
659let tcx = self.tcx;
660let crate_prefix = self.cache.entry(cnum).or_insert_with(|| {
661// Whenever the cnum is not LOCAL_CRATE we also mix in the
662 // local crate's ID. Otherwise there can be collisions between CGUs
663 // instantiating stuff for upstream crates.
664let local_crate_id = if cnum != LOCAL_CRATE {
665let local_stable_crate_id = tcx.stable_crate_id(LOCAL_CRATE);
666::alloc::__export::must_use({
::alloc::fmt::format(format_args!("-in-{0}.{1:08x}",
tcx.crate_name(LOCAL_CRATE), local_stable_crate_id))
})format!("-in-{}.{:08x}", tcx.crate_name(LOCAL_CRATE), local_stable_crate_id)667 } else {
668String::new()
669 };
670671let stable_crate_id = tcx.stable_crate_id(LOCAL_CRATE);
672::alloc::__export::must_use({
::alloc::fmt::format(format_args!("{0}.{1:08x}{2}",
tcx.crate_name(cnum), stable_crate_id, local_crate_id))
})format!("{}.{:08x}{}", tcx.crate_name(cnum), stable_crate_id, local_crate_id)673 });
674675cgu_name.write_fmt(format_args!("{0}", crate_prefix))write!(cgu_name, "{crate_prefix}").unwrap();
676677// Add the components
678for component in components {
679cgu_name.write_fmt(format_args!("-{0}", component))write!(cgu_name, "-{component}").unwrap();
680 }
681682if let Some(special_suffix) = special_suffix {
683// We add a dot in here so it cannot clash with anything in a regular
684 // Rust identifier
685cgu_name.write_fmt(format_args!(".{0}", special_suffix))write!(cgu_name, ".{special_suffix}").unwrap();
686 }
687688Symbol::intern(&cgu_name)
689 }
690}
691692/// See module-level docs of `rustc_monomorphize::collector` on some context for "mentioned" items.
693#[derive(#[automatically_derived]
impl ::core::marker::Copy for CollectionMode { }Copy, #[automatically_derived]
impl ::core::clone::Clone for CollectionMode {
#[inline]
fn clone(&self) -> CollectionMode { *self }
}Clone, #[automatically_derived]
impl ::core::fmt::Debug for CollectionMode {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
::core::fmt::Formatter::write_str(f,
match self {
CollectionMode::UsedItems => "UsedItems",
CollectionMode::MentionedItems => "MentionedItems",
})
}
}Debug, #[automatically_derived]
impl ::core::cmp::PartialEq for CollectionMode {
#[inline]
fn eq(&self, other: &CollectionMode) -> bool {
let __self_discr = ::core::intrinsics::discriminant_value(self);
let __arg1_discr = ::core::intrinsics::discriminant_value(other);
__self_discr == __arg1_discr
}
}PartialEq, #[automatically_derived]
impl ::core::cmp::Eq for CollectionMode {
#[inline]
#[doc(hidden)]
#[coverage(off)]
fn assert_receiver_is_total_eq(&self) -> () {}
}Eq, #[automatically_derived]
impl ::core::hash::Hash for CollectionMode {
#[inline]
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () {
let __self_discr = ::core::intrinsics::discriminant_value(self);
::core::hash::Hash::hash(&__self_discr, state)
}
}Hash, const _: () =
{
impl<'__ctx>
::rustc_data_structures::stable_hasher::HashStable<::rustc_query_system::ich::StableHashingContext<'__ctx>>
for CollectionMode {
#[inline]
fn hash_stable(&self,
__hcx:
&mut ::rustc_query_system::ich::StableHashingContext<'__ctx>,
__hasher:
&mut ::rustc_data_structures::stable_hasher::StableHasher) {
::std::mem::discriminant(self).hash_stable(__hcx, __hasher);
match *self {
CollectionMode::UsedItems => {}
CollectionMode::MentionedItems => {}
}
}
}
};HashStable)]
694pub enum CollectionMode {
695/// Collect items that are used, i.e., actually needed for codegen.
696 ///
697 /// Which items are used can depend on optimization levels, as MIR optimizations can remove
698 /// uses.
699UsedItems,
700/// Collect items that are mentioned. The goal of this mode is that it is independent of
701 /// optimizations: the set of "mentioned" items is computed before optimizations are run.
702 ///
703 /// The exact contents of this set are *not* a stable guarantee. (For instance, it is currently
704 /// computed after drop-elaboration. If we ever do some optimizations even in debug builds, we
705 /// might decide to run them before computing mentioned items.) The key property of this set is
706 /// that it is optimization-independent.
707MentionedItems,
708}