1use std::borrow::Cow;
2use std::fmt;
3use std::hash::Hash;
45use rustc_data_structures::base_n::{BaseNString, CASE_INSENSITIVE, ToBaseN};
6use rustc_data_structures::fingerprint::Fingerprint;
7use rustc_data_structures::fx::FxIndexMap;
8use rustc_data_structures::stable_hasher::{HashStable, StableHasher, ToStableHashKey};
9use rustc_data_structures::unord::UnordMap;
10use rustc_hashes::Hash128;
11use rustc_hir::ItemId;
12use rustc_hir::attrs::{InlineAttr, Linkage};
13use rustc_hir::def_id::{CrateNum, DefId, DefIdSet, LOCAL_CRATE};
14use rustc_macros::{HashStable, TyDecodable, TyEncodable};
15use rustc_session::config::OptLevel;
16use rustc_span::{Span, Symbol};
17use rustc_target::spec::SymbolVisibility;
18use tracing::debug;
1920use crate::dep_graph::dep_node::{make_compile_codegen_unit, make_compile_mono_item};
21use crate::dep_graph::{DepNode, WorkProduct, WorkProductId};
22use crate::ich::StableHashingContext;
23use crate::middle::codegen_fn_attrs::CodegenFnAttrFlags;
24use crate::ty::{self, GenericArgs, Instance, InstanceKind, SymbolName, Ty, TyCtxt};
/// Describes how a monomorphization will be instantiated in object files.
#[derive(PartialEq)]
pub enum InstantiationMode {
    /// There will be exactly one instance of the given MonoItem. It will have
    /// external linkage so that it can be linked to from other codegen units.
    GloballyShared {
        /// In some compilation scenarios we may decide to take functions that
        /// are typically `LocalCopy` and instead move them to `GloballyShared`
        /// to avoid codegenning them a bunch of times. In this situation,
        /// however, our local copy may conflict with other crates also
        /// inlining the same function.
        ///
        /// This flag indicates that this situation is occurring, and informs
        /// symbol name calculation that some extra mangling is needed to
        /// avoid conflicts. Note that this may eventually go away entirely if
        /// ThinLTO enables us to *always* have a globally shared instance of a
        /// function within one crate's compilation.
        may_conflict: bool,
    },

    /// Each codegen unit containing a reference to the given MonoItem will
    /// have its own private copy of the function (with internal linkage).
    LocalCopy,
}
5051#[derive(#[automatically_derived]
impl ::core::cmp::PartialEq for NormalizationErrorInMono {
#[inline]
fn eq(&self, other: &NormalizationErrorInMono) -> bool { true }
}PartialEq, #[automatically_derived]
impl ::core::cmp::Eq for NormalizationErrorInMono {
#[inline]
#[doc(hidden)]
#[coverage(off)]
fn assert_receiver_is_total_eq(&self) {}
}Eq, #[automatically_derived]
impl ::core::clone::Clone for NormalizationErrorInMono {
#[inline]
fn clone(&self) -> NormalizationErrorInMono { *self }
}Clone, #[automatically_derived]
impl ::core::marker::Copy for NormalizationErrorInMono { }Copy, #[automatically_derived]
impl ::core::fmt::Debug for NormalizationErrorInMono {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
::core::fmt::Formatter::write_str(f, "NormalizationErrorInMono")
}
}Debug, #[automatically_derived]
impl ::core::hash::Hash for NormalizationErrorInMono {
#[inline]
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) {}
}Hash, const _: () =
{
impl<'__ctx>
::rustc_data_structures::stable_hasher::HashStable<::rustc_middle::ich::StableHashingContext<'__ctx>>
for NormalizationErrorInMono {
#[inline]
fn hash_stable(&self,
__hcx: &mut ::rustc_middle::ich::StableHashingContext<'__ctx>,
__hasher:
&mut ::rustc_data_structures::stable_hasher::StableHasher) {
match *self { NormalizationErrorInMono => {} }
}
}
};HashStable, const _: () =
{
impl<'tcx, __E: ::rustc_middle::ty::codec::TyEncoder<'tcx>>
::rustc_serialize::Encodable<__E> for NormalizationErrorInMono {
fn encode(&self, __encoder: &mut __E) {
match *self { NormalizationErrorInMono => {} }
}
}
};TyEncodable, const _: () =
{
impl<'tcx, __D: ::rustc_middle::ty::codec::TyDecoder<'tcx>>
::rustc_serialize::Decodable<__D> for NormalizationErrorInMono {
fn decode(__decoder: &mut __D) -> Self {
NormalizationErrorInMono
}
}
};TyDecodable)]
52pub struct NormalizationErrorInMono;
5354#[derive(#[automatically_derived]
impl<'tcx> ::core::cmp::PartialEq for MonoItem<'tcx> {
#[inline]
fn eq(&self, other: &MonoItem<'tcx>) -> bool {
let __self_discr = ::core::intrinsics::discriminant_value(self);
let __arg1_discr = ::core::intrinsics::discriminant_value(other);
__self_discr == __arg1_discr &&
match (self, other) {
(MonoItem::Fn(__self_0), MonoItem::Fn(__arg1_0)) =>
__self_0 == __arg1_0,
(MonoItem::Static(__self_0), MonoItem::Static(__arg1_0)) =>
__self_0 == __arg1_0,
(MonoItem::GlobalAsm(__self_0), MonoItem::GlobalAsm(__arg1_0))
=> __self_0 == __arg1_0,
_ => unsafe { ::core::intrinsics::unreachable() }
}
}
}PartialEq, #[automatically_derived]
impl<'tcx> ::core::cmp::Eq for MonoItem<'tcx> {
#[inline]
#[doc(hidden)]
#[coverage(off)]
fn assert_receiver_is_total_eq(&self) {
let _: ::core::cmp::AssertParamIsEq<Instance<'tcx>>;
let _: ::core::cmp::AssertParamIsEq<DefId>;
let _: ::core::cmp::AssertParamIsEq<ItemId>;
}
}Eq, #[automatically_derived]
impl<'tcx> ::core::clone::Clone for MonoItem<'tcx> {
#[inline]
fn clone(&self) -> MonoItem<'tcx> {
let _: ::core::clone::AssertParamIsClone<Instance<'tcx>>;
let _: ::core::clone::AssertParamIsClone<DefId>;
let _: ::core::clone::AssertParamIsClone<ItemId>;
*self
}
}Clone, #[automatically_derived]
impl<'tcx> ::core::marker::Copy for MonoItem<'tcx> { }Copy, #[automatically_derived]
impl<'tcx> ::core::fmt::Debug for MonoItem<'tcx> {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
match self {
MonoItem::Fn(__self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f, "Fn",
&__self_0),
MonoItem::Static(__self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f, "Static",
&__self_0),
MonoItem::GlobalAsm(__self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f,
"GlobalAsm", &__self_0),
}
}
}Debug, #[automatically_derived]
impl<'tcx> ::core::hash::Hash for MonoItem<'tcx> {
#[inline]
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) {
let __self_discr = ::core::intrinsics::discriminant_value(self);
::core::hash::Hash::hash(&__self_discr, state);
match self {
MonoItem::Fn(__self_0) =>
::core::hash::Hash::hash(__self_0, state),
MonoItem::Static(__self_0) =>
::core::hash::Hash::hash(__self_0, state),
MonoItem::GlobalAsm(__self_0) =>
::core::hash::Hash::hash(__self_0, state),
}
}
}Hash, const _: () =
{
impl<'tcx, '__ctx>
::rustc_data_structures::stable_hasher::HashStable<::rustc_middle::ich::StableHashingContext<'__ctx>>
for MonoItem<'tcx> {
#[inline]
fn hash_stable(&self,
__hcx: &mut ::rustc_middle::ich::StableHashingContext<'__ctx>,
__hasher:
&mut ::rustc_data_structures::stable_hasher::StableHasher) {
::std::mem::discriminant(self).hash_stable(__hcx, __hasher);
match *self {
MonoItem::Fn(ref __binding_0) => {
{ __binding_0.hash_stable(__hcx, __hasher); }
}
MonoItem::Static(ref __binding_0) => {
{ __binding_0.hash_stable(__hcx, __hasher); }
}
MonoItem::GlobalAsm(ref __binding_0) => {
{ __binding_0.hash_stable(__hcx, __hasher); }
}
}
}
}
};HashStable, const _: () =
{
impl<'tcx, __E: ::rustc_middle::ty::codec::TyEncoder<'tcx>>
::rustc_serialize::Encodable<__E> for MonoItem<'tcx> {
fn encode(&self, __encoder: &mut __E) {
let disc =
match *self {
MonoItem::Fn(ref __binding_0) => { 0usize }
MonoItem::Static(ref __binding_0) => { 1usize }
MonoItem::GlobalAsm(ref __binding_0) => { 2usize }
};
::rustc_serialize::Encoder::emit_u8(__encoder, disc as u8);
match *self {
MonoItem::Fn(ref __binding_0) => {
::rustc_serialize::Encodable::<__E>::encode(__binding_0,
__encoder);
}
MonoItem::Static(ref __binding_0) => {
::rustc_serialize::Encodable::<__E>::encode(__binding_0,
__encoder);
}
MonoItem::GlobalAsm(ref __binding_0) => {
::rustc_serialize::Encodable::<__E>::encode(__binding_0,
__encoder);
}
}
}
}
};TyEncodable, const _: () =
{
impl<'tcx, __D: ::rustc_middle::ty::codec::TyDecoder<'tcx>>
::rustc_serialize::Decodable<__D> for MonoItem<'tcx> {
fn decode(__decoder: &mut __D) -> Self {
match ::rustc_serialize::Decoder::read_u8(__decoder) as usize
{
0usize => {
MonoItem::Fn(::rustc_serialize::Decodable::decode(__decoder))
}
1usize => {
MonoItem::Static(::rustc_serialize::Decodable::decode(__decoder))
}
2usize => {
MonoItem::GlobalAsm(::rustc_serialize::Decodable::decode(__decoder))
}
n => {
::core::panicking::panic_fmt(format_args!("invalid enum variant tag while decoding `MonoItem`, expected 0..3, actual {0}",
n));
}
}
}
}
};TyDecodable)]
55pub enum MonoItem<'tcx> {
56 Fn(Instance<'tcx>),
57 Static(DefId),
58 GlobalAsm(ItemId),
59}
6061fn opt_incr_drop_glue_mode<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> InstantiationMode {
62// Non-ADTs can't have a Drop impl. This case is mostly hit by closures whose captures require
63 // dropping.
64let ty::Adt(adt_def, _) = ty.kind() else {
65return InstantiationMode::LocalCopy;
66 };
6768// Types that don't have a direct Drop impl, but have fields that require dropping.
69let Some(dtor) = adt_def.destructor(tcx) else {
70// We use LocalCopy for drops of enums only; this code is inherited from
71 // https://github.com/rust-lang/rust/pull/67332 and the theory is that we get to optimize
72 // out code like drop_in_place(Option::None) before crate-local ThinLTO, which improves
73 // compile time. At the time of writing, simply removing this entire check does seem to
74 // regress incr-opt compile times. But it sure seems like a more sophisticated check could
75 // do better here.
76if adt_def.is_enum() {
77return InstantiationMode::LocalCopy;
78 } else {
79return InstantiationMode::GloballyShared { may_conflict: true };
80 }
81 };
8283// We've gotten to a drop_in_place for a type that directly implements Drop.
84 // The drop glue is a wrapper for the Drop::drop impl, and we are an optimized build, so in an
85 // effort to coordinate with the mode that the actual impl will get, we make the glue also
86 // LocalCopy.
87if tcx.cross_crate_inlinable(dtor.did) {
88 InstantiationMode::LocalCopy89 } else {
90 InstantiationMode::GloballyShared { may_conflict: true }
91 }
92}
9394impl<'tcx> MonoItem<'tcx> {
95/// Returns `true` if the mono item is user-defined (i.e. not compiler-generated, like shims).
96pub fn is_user_defined(&self) -> bool {
97match *self {
98 MonoItem::Fn(instance) => #[allow(non_exhaustive_omitted_patterns)] match instance.def {
InstanceKind::Item(..) => true,
_ => false,
}matches!(instance.def, InstanceKind::Item(..)),
99 MonoItem::Static(..) | MonoItem::GlobalAsm(..) => true,
100 }
101 }
102103// Note: if you change how item size estimates work, you might need to
104 // change NON_INCR_MIN_CGU_SIZE as well.
105pub fn size_estimate(&self, tcx: TyCtxt<'tcx>) -> usize {
106match *self {
107 MonoItem::Fn(instance) => tcx.size_estimate(instance),
108// Conservatively estimate the size of a static declaration or
109 // assembly item to be 1.
110MonoItem::Static(_) | MonoItem::GlobalAsm(_) => 1,
111 }
112 }
113114pub fn is_generic_fn(&self) -> bool {
115match self {
116 MonoItem::Fn(instance) => instance.args.non_erasable_generics().next().is_some(),
117 MonoItem::Static(..) | MonoItem::GlobalAsm(..) => false,
118 }
119 }
120121pub fn symbol_name(&self, tcx: TyCtxt<'tcx>) -> SymbolName<'tcx> {
122match *self {
123 MonoItem::Fn(instance) => tcx.symbol_name(instance),
124 MonoItem::Static(def_id) => tcx.symbol_name(Instance::mono(tcx, def_id)),
125 MonoItem::GlobalAsm(item_id) => {
126SymbolName::new(tcx, &::alloc::__export::must_use({
::alloc::fmt::format(format_args!("global_asm_{0:?}",
item_id.owner_id))
})format!("global_asm_{:?}", item_id.owner_id))
127 }
128 }
129 }
130131pub fn instantiation_mode(&self, tcx: TyCtxt<'tcx>) -> InstantiationMode {
132// The case handling here is written in the same style as cross_crate_inlinable, we first
133 // handle the cases where we must use a particular instantiation mode, then cascade down
134 // through a sequence of heuristics.
135136 // The first thing we do is detect MonoItems which we must instantiate exactly once in the
137 // whole program.
138139 // Statics and global_asm! must be instantiated exactly once.
140let instance = match *self {
141 MonoItem::Fn(instance) => instance,
142 MonoItem::Static(..) | MonoItem::GlobalAsm(..) => {
143return InstantiationMode::GloballyShared { may_conflict: false };
144 }
145 };
146147// Similarly, the executable entrypoint must be instantiated exactly once.
148if tcx.is_entrypoint(instance.def_id()) {
149return InstantiationMode::GloballyShared { may_conflict: false };
150 }
151152// If the function is #[naked] or contains any other attribute that requires exactly-once
153 // instantiation:
154 // We emit an unused_attributes lint for this case, which should be kept in sync if possible.
155let codegen_fn_attrs = tcx.codegen_instance_attrs(instance.def);
156if codegen_fn_attrs.contains_extern_indicator()
157 || codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::NAKED)
158 {
159return InstantiationMode::GloballyShared { may_conflict: false };
160 }
161162// This is technically a heuristic even though it's in the "not a heuristic" part of
163 // instantiation mode selection.
164 // It is surely possible to untangle this; the root problem is that the way we instantiate
165 // InstanceKind other than Item is very complicated.
166 //
167 // The fallback case is to give everything else GloballyShared at OptLevel::No and
168 // LocalCopy at all other opt levels. This is a good default, except for one specific build
169 // configuration: Optimized incremental builds.
170 // In the current compiler architecture there is a fundamental tension between
171 // optimizations (which want big CGUs with as many things LocalCopy as possible) and
172 // incrementality (which wants small CGUs with as many things GloballyShared as possible).
173 // The heuristics implemented here do better than a completely naive approach in the
174 // compiler benchmark suite, but there is no reason to believe they are optimal.
175if let InstanceKind::DropGlue(_, Some(ty)) = instance.def {
176if tcx.sess.opts.optimize == OptLevel::No {
177return InstantiationMode::GloballyShared { may_conflict: false };
178 }
179if tcx.sess.opts.incremental.is_none() {
180return InstantiationMode::LocalCopy;
181 }
182return opt_incr_drop_glue_mode(tcx, ty);
183 }
184185// We need to ensure that we do not decide the InstantiationMode of an exported symbol is
186 // LocalCopy. Since exported symbols are computed based on the output of
187 // cross_crate_inlinable, we are beholden to our previous decisions.
188 //
189 // Note that just like above, this check for requires_inline is technically a heuristic
190 // even though it's in the "not a heuristic" part of instantiation mode selection.
191if !tcx.cross_crate_inlinable(instance.def_id()) && !instance.def.requires_inline(tcx) {
192return InstantiationMode::GloballyShared { may_conflict: false };
193 }
194195// Beginning of heuristics. The handling of link-dead-code and inline(always) are QoL only,
196 // the compiler should not crash and linkage should work, but codegen may be undesirable.
197198 // -Clink-dead-code was given an unfortunate name; the point of the flag is to assist
199 // coverage tools which rely on having every function in the program appear in the
200 // generated code. If we select LocalCopy, functions which are not used because they are
201 // missing test coverage will disappear from such coverage reports, defeating the point.
202 // Note that -Cinstrument-coverage does not require such assistance from us, only coverage
203 // tools implemented without compiler support ironically require a special compiler flag.
204if tcx.sess.link_dead_code() {
205return InstantiationMode::GloballyShared { may_conflict: true };
206 }
207208// To ensure that #[inline(always)] can be inlined as much as possible, especially in unoptimized
209 // builds, we always select LocalCopy.
210if codegen_fn_attrs.inline.always() {
211return InstantiationMode::LocalCopy;
212 }
213214// #[inline(never)] functions in general are poor candidates for inlining and thus since
215 // LocalCopy generally increases code size for the benefit of optimizations from inlining,
216 // we want to give them GloballyShared codegen.
217 // The slight problem is that generic functions need to always support cross-crate
218 // compilation, so all previous stages of the compiler are obligated to treat generic
219 // functions the same as those that unconditionally get LocalCopy codegen. It's only when
220 // we get here that we can at least not codegen a #[inline(never)] generic function in all
221 // of our CGUs.
222if let InlineAttr::Never = codegen_fn_attrs.inline
223 && self.is_generic_fn()
224 {
225return InstantiationMode::GloballyShared { may_conflict: true };
226 }
227228// The fallthrough case is to generate LocalCopy for all optimized builds, and
229 // GloballyShared with conflict prevention when optimizations are disabled.
230match tcx.sess.opts.optimize {
231 OptLevel::No => InstantiationMode::GloballyShared { may_conflict: true },
232_ => InstantiationMode::LocalCopy,
233 }
234 }
235236pub fn explicit_linkage(&self, tcx: TyCtxt<'tcx>) -> Option<Linkage> {
237let instance_kind = match *self {
238 MonoItem::Fn(ref instance) => instance.def,
239 MonoItem::Static(def_id) => InstanceKind::Item(def_id),
240 MonoItem::GlobalAsm(..) => return None,
241 };
242243tcx.codegen_instance_attrs(instance_kind).linkage
244 }
245246/// Returns `true` if this instance is instantiable - whether it has no unsatisfied
247 /// predicates.
248 ///
249 /// In order to codegen an item, all of its predicates must hold, because
250 /// otherwise the item does not make sense. Type-checking ensures that
251 /// the predicates of every item that is *used by* a valid item *do*
252 /// hold, so we can rely on that.
253 ///
254 /// However, we codegen collector roots (reachable items) and functions
255 /// in vtables when they are seen, even if they are not used, and so they
256 /// might not be instantiable. For example, a programmer can define this
257 /// public function:
258 ///
259 /// pub fn foo<'a>(s: &'a mut ()) where &'a mut (): Clone {
260 /// <&mut () as Clone>::clone(&s);
261 /// }
262 ///
263 /// That function can't be codegened, because the method `<&mut () as Clone>::clone`
264 /// does not exist. Luckily for us, that function can't ever be used,
265 /// because that would require for `&'a mut (): Clone` to hold, so we
266 /// can just not emit any code, or even a linker reference for it.
267 ///
268 /// Similarly, if a vtable method has such a signature, and therefore can't
269 /// be used, we can just not emit it and have a placeholder (a null pointer,
270 /// which will never be accessed) in its place.
271pub fn is_instantiable(&self, tcx: TyCtxt<'tcx>) -> bool {
272{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_middle/src/mir/mono.rs:272",
"rustc_middle::mir::mono", ::tracing::Level::DEBUG,
::tracing_core::__macro_support::Option::Some("compiler/rustc_middle/src/mir/mono.rs"),
::tracing_core::__macro_support::Option::Some(272u32),
::tracing_core::__macro_support::Option::Some("rustc_middle::mir::mono"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::DEBUG <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::DEBUG <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("is_instantiable({0:?})",
self) as &dyn Value))])
});
} else { ; }
};debug!("is_instantiable({:?})", self);
273let (def_id, args) = match *self {
274 MonoItem::Fn(ref instance) => (instance.def_id(), instance.args),
275 MonoItem::Static(def_id) => (def_id, GenericArgs::empty()),
276// global asm never has predicates
277MonoItem::GlobalAsm(..) => return true,
278 };
279280 !tcx.instantiate_and_check_impossible_predicates((def_id, &args))
281 }
282283pub fn local_span(&self, tcx: TyCtxt<'tcx>) -> Option<Span> {
284match *self {
285 MonoItem::Fn(Instance { def, .. }) => def.def_id().as_local(),
286 MonoItem::Static(def_id) => def_id.as_local(),
287 MonoItem::GlobalAsm(item_id) => Some(item_id.owner_id.def_id),
288 }
289 .map(|def_id| tcx.def_span(def_id))
290 }
291292// Only used by rustc_codegen_cranelift
293pub fn codegen_dep_node(&self, tcx: TyCtxt<'tcx>) -> DepNode {
294make_compile_mono_item(tcx, self)
295 }
296297/// Returns the item's `CrateNum`
298pub fn krate(&self) -> CrateNum {
299match self {
300 MonoItem::Fn(instance) => instance.def_id().krate,
301 MonoItem::Static(def_id) => def_id.krate,
302 MonoItem::GlobalAsm(..) => LOCAL_CRATE,
303 }
304 }
305306/// Returns the item's `DefId`
307pub fn def_id(&self) -> DefId {
308match *self {
309 MonoItem::Fn(Instance { def, .. }) => def.def_id(),
310 MonoItem::Static(def_id) => def_id,
311 MonoItem::GlobalAsm(item_id) => item_id.owner_id.to_def_id(),
312 }
313 }
314}
315316impl<'tcx> fmt::Displayfor MonoItem<'tcx> {
317fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
318match *self {
319 MonoItem::Fn(instance) => f.write_fmt(format_args!("fn {0}", instance))write!(f, "fn {instance}"),
320 MonoItem::Static(def_id) => {
321f.write_fmt(format_args!("static {0}",
Instance::new_raw(def_id, GenericArgs::empty())))write!(f, "static {}", Instance::new_raw(def_id, GenericArgs::empty()))322 }
323 MonoItem::GlobalAsm(..) => f.write_fmt(format_args!("global_asm"))write!(f, "global_asm"),
324 }
325 }
326}
327328impl ToStableHashKey<StableHashingContext<'_>> for MonoItem<'_> {
329type KeyType = Fingerprint;
330331fn to_stable_hash_key(&self, hcx: &StableHashingContext<'_>) -> Self::KeyType {
332let mut hasher = StableHasher::new();
333self.hash_stable(&mut hcx.clone(), &mut hasher);
334hasher.finish()
335 }
336}
337338#[derive(#[automatically_derived]
impl<'tcx> ::core::fmt::Debug for MonoItemPartitions<'tcx> {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
::core::fmt::Formatter::debug_struct_field2_finish(f,
"MonoItemPartitions", "codegen_units", &self.codegen_units,
"all_mono_items", &&self.all_mono_items)
}
}Debug, const _: () =
{
impl<'tcx, '__ctx>
::rustc_data_structures::stable_hasher::HashStable<::rustc_middle::ich::StableHashingContext<'__ctx>>
for MonoItemPartitions<'tcx> {
#[inline]
fn hash_stable(&self,
__hcx: &mut ::rustc_middle::ich::StableHashingContext<'__ctx>,
__hasher:
&mut ::rustc_data_structures::stable_hasher::StableHasher) {
match *self {
MonoItemPartitions {
codegen_units: ref __binding_0,
all_mono_items: ref __binding_1 } => {
{ __binding_0.hash_stable(__hcx, __hasher); }
{ __binding_1.hash_stable(__hcx, __hasher); }
}
}
}
}
};HashStable, #[automatically_derived]
impl<'tcx> ::core::marker::Copy for MonoItemPartitions<'tcx> { }Copy, #[automatically_derived]
impl<'tcx> ::core::clone::Clone for MonoItemPartitions<'tcx> {
#[inline]
fn clone(&self) -> MonoItemPartitions<'tcx> {
let _: ::core::clone::AssertParamIsClone<&'tcx [CodegenUnit<'tcx>]>;
let _: ::core::clone::AssertParamIsClone<&'tcx DefIdSet>;
*self
}
}Clone)]
339pub struct MonoItemPartitions<'tcx> {
340pub codegen_units: &'tcx [CodegenUnit<'tcx>],
341pub all_mono_items: &'tcx DefIdSet,
342}
343344#[derive(#[automatically_derived]
impl<'tcx> ::core::fmt::Debug for CodegenUnit<'tcx> {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
::core::fmt::Formatter::debug_struct_field5_finish(f, "CodegenUnit",
"name", &self.name, "items", &self.items, "size_estimate",
&self.size_estimate, "primary", &self.primary,
"is_code_coverage_dead_code_cgu",
&&self.is_code_coverage_dead_code_cgu)
}
}Debug, const _: () =
{
impl<'tcx, '__ctx>
::rustc_data_structures::stable_hasher::HashStable<::rustc_middle::ich::StableHashingContext<'__ctx>>
for CodegenUnit<'tcx> {
#[inline]
fn hash_stable(&self,
__hcx: &mut ::rustc_middle::ich::StableHashingContext<'__ctx>,
__hasher:
&mut ::rustc_data_structures::stable_hasher::StableHasher) {
match *self {
CodegenUnit {
name: ref __binding_0,
items: ref __binding_1,
size_estimate: ref __binding_2,
primary: ref __binding_3,
is_code_coverage_dead_code_cgu: ref __binding_4 } => {
{ __binding_0.hash_stable(__hcx, __hasher); }
{ __binding_1.hash_stable(__hcx, __hasher); }
{ __binding_2.hash_stable(__hcx, __hasher); }
{ __binding_3.hash_stable(__hcx, __hasher); }
{ __binding_4.hash_stable(__hcx, __hasher); }
}
}
}
}
};HashStable)]
345pub struct CodegenUnit<'tcx> {
346/// A name for this CGU. Incremental compilation requires that
347 /// name be unique amongst **all** crates. Therefore, it should
348 /// contain something unique to this crate (e.g., a module path)
349 /// as well as the crate name and disambiguator.
350name: Symbol,
351 items: FxIndexMap<MonoItem<'tcx>, MonoItemData>,
352 size_estimate: usize,
353 primary: bool,
354/// True if this is CGU is used to hold code coverage information for dead code,
355 /// false otherwise.
356is_code_coverage_dead_code_cgu: bool,
357}
358359/// Auxiliary info about a `MonoItem`.
360#[derive(#[automatically_derived]
impl ::core::marker::Copy for MonoItemData { }Copy, #[automatically_derived]
impl ::core::clone::Clone for MonoItemData {
#[inline]
fn clone(&self) -> MonoItemData {
let _: ::core::clone::AssertParamIsClone<bool>;
let _: ::core::clone::AssertParamIsClone<Linkage>;
let _: ::core::clone::AssertParamIsClone<Visibility>;
let _: ::core::clone::AssertParamIsClone<usize>;
*self
}
}Clone, #[automatically_derived]
impl ::core::cmp::PartialEq for MonoItemData {
#[inline]
fn eq(&self, other: &MonoItemData) -> bool {
self.inlined == other.inlined && self.linkage == other.linkage &&
self.visibility == other.visibility &&
self.size_estimate == other.size_estimate
}
}PartialEq, #[automatically_derived]
impl ::core::fmt::Debug for MonoItemData {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
::core::fmt::Formatter::debug_struct_field4_finish(f, "MonoItemData",
"inlined", &self.inlined, "linkage", &self.linkage, "visibility",
&self.visibility, "size_estimate", &&self.size_estimate)
}
}Debug, const _: () =
{
impl<'__ctx>
::rustc_data_structures::stable_hasher::HashStable<::rustc_middle::ich::StableHashingContext<'__ctx>>
for MonoItemData {
#[inline]
fn hash_stable(&self,
__hcx: &mut ::rustc_middle::ich::StableHashingContext<'__ctx>,
__hasher:
&mut ::rustc_data_structures::stable_hasher::StableHasher) {
match *self {
MonoItemData {
inlined: ref __binding_0,
linkage: ref __binding_1,
visibility: ref __binding_2,
size_estimate: ref __binding_3 } => {
{ __binding_0.hash_stable(__hcx, __hasher); }
{ __binding_1.hash_stable(__hcx, __hasher); }
{ __binding_2.hash_stable(__hcx, __hasher); }
{ __binding_3.hash_stable(__hcx, __hasher); }
}
}
}
}
};HashStable)]
361pub struct MonoItemData {
362/// A cached copy of the result of `MonoItem::instantiation_mode`, where
363 /// `GloballyShared` maps to `false` and `LocalCopy` maps to `true`.
364pub inlined: bool,
365366pub linkage: Linkage,
367pub visibility: Visibility,
368369/// A cached copy of the result of `MonoItem::size_estimate`.
370pub size_estimate: usize,
371}
372373/// Specifies the symbol visibility with regards to dynamic linking.
374///
375/// Visibility doesn't have any effect when linkage is internal.
376///
377/// DSO means dynamic shared object, that is a dynamically linked executable or dylib.
378#[derive(#[automatically_derived]
impl ::core::marker::Copy for Visibility { }Copy, #[automatically_derived]
impl ::core::clone::Clone for Visibility {
#[inline]
fn clone(&self) -> Visibility { *self }
}Clone, #[automatically_derived]
impl ::core::cmp::PartialEq for Visibility {
#[inline]
fn eq(&self, other: &Visibility) -> bool {
let __self_discr = ::core::intrinsics::discriminant_value(self);
let __arg1_discr = ::core::intrinsics::discriminant_value(other);
__self_discr == __arg1_discr
}
}PartialEq, #[automatically_derived]
impl ::core::fmt::Debug for Visibility {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
::core::fmt::Formatter::write_str(f,
match self {
Visibility::Default => "Default",
Visibility::Hidden => "Hidden",
Visibility::Protected => "Protected",
})
}
}Debug, const _: () =
{
impl<'__ctx>
::rustc_data_structures::stable_hasher::HashStable<::rustc_middle::ich::StableHashingContext<'__ctx>>
for Visibility {
#[inline]
fn hash_stable(&self,
__hcx: &mut ::rustc_middle::ich::StableHashingContext<'__ctx>,
__hasher:
&mut ::rustc_data_structures::stable_hasher::StableHasher) {
::std::mem::discriminant(self).hash_stable(__hcx, __hasher);
match *self {
Visibility::Default => {}
Visibility::Hidden => {}
Visibility::Protected => {}
}
}
}
};HashStable, const _: () =
{
impl<'tcx, __E: ::rustc_middle::ty::codec::TyEncoder<'tcx>>
::rustc_serialize::Encodable<__E> for Visibility {
fn encode(&self, __encoder: &mut __E) {
let disc =
match *self {
Visibility::Default => { 0usize }
Visibility::Hidden => { 1usize }
Visibility::Protected => { 2usize }
};
::rustc_serialize::Encoder::emit_u8(__encoder, disc as u8);
match *self {
Visibility::Default => {}
Visibility::Hidden => {}
Visibility::Protected => {}
}
}
}
};TyEncodable, const _: () =
{
impl<'tcx, __D: ::rustc_middle::ty::codec::TyDecoder<'tcx>>
::rustc_serialize::Decodable<__D> for Visibility {
fn decode(__decoder: &mut __D) -> Self {
match ::rustc_serialize::Decoder::read_u8(__decoder) as usize
{
0usize => { Visibility::Default }
1usize => { Visibility::Hidden }
2usize => { Visibility::Protected }
n => {
::core::panicking::panic_fmt(format_args!("invalid enum variant tag while decoding `Visibility`, expected 0..3, actual {0}",
n));
}
}
}
}
};TyDecodable)]
379pub enum Visibility {
380/// Export the symbol from the DSO and apply overrides of the symbol by outside DSOs to within
381 /// the DSO if the object file format supports this.
382Default,
383/// Hide the symbol outside of the defining DSO even when external linkage is used to export it
384 /// from the object file.
385Hidden,
386/// Export the symbol from the DSO, but don't apply overrides of the symbol by outside DSOs to
387 /// within the DSO. Equivalent to default visibility with object file formats that don't support
388 /// overriding exported symbols by another DSO.
389Protected,
390}
391392impl From<SymbolVisibility> for Visibility {
393fn from(value: SymbolVisibility) -> Self {
394match value {
395 SymbolVisibility::Hidden => Visibility::Hidden,
396 SymbolVisibility::Protected => Visibility::Protected,
397 SymbolVisibility::Interposable => Visibility::Default,
398 }
399 }
400}
401402impl<'tcx> CodegenUnit<'tcx> {
403#[inline]
404pub fn new(name: Symbol) -> CodegenUnit<'tcx> {
405CodegenUnit {
406name,
407 items: Default::default(),
408 size_estimate: 0,
409 primary: false,
410 is_code_coverage_dead_code_cgu: false,
411 }
412 }
413414pub fn name(&self) -> Symbol {
415self.name
416 }
417418pub fn set_name(&mut self, name: Symbol) {
419self.name = name;
420 }
421422pub fn is_primary(&self) -> bool {
423self.primary
424 }
425426pub fn make_primary(&mut self) {
427self.primary = true;
428 }
429430pub fn items(&self) -> &FxIndexMap<MonoItem<'tcx>, MonoItemData> {
431&self.items
432 }
433434pub fn items_mut(&mut self) -> &mut FxIndexMap<MonoItem<'tcx>, MonoItemData> {
435&mut self.items
436 }
437438pub fn is_code_coverage_dead_code_cgu(&self) -> bool {
439self.is_code_coverage_dead_code_cgu
440 }
441442/// Marks this CGU as the one used to contain code coverage information for dead code.
443pub fn make_code_coverage_dead_code_cgu(&mut self) {
444self.is_code_coverage_dead_code_cgu = true;
445 }
446447pub fn mangle_name(human_readable_name: &str) -> BaseNString {
448let mut hasher = StableHasher::new();
449human_readable_name.hash(&mut hasher);
450let hash: Hash128 = hasher.finish();
451hash.as_u128().to_base_fixed_len(CASE_INSENSITIVE)
452 }
453454pub fn shorten_name(human_readable_name: &str) -> Cow<'_, str> {
455// Set a limit a somewhat below the common platform limits for file names.
456const MAX_CGU_NAME_LENGTH: usize = 200;
457const TRUNCATED_NAME_PREFIX: &str = "-trunc-";
458if human_readable_name.len() > MAX_CGU_NAME_LENGTH {
459let mangled_name = Self::mangle_name(human_readable_name);
460// Determine a safe byte offset to truncate the name to
461let truncate_to = human_readable_name.floor_char_boundary(
462MAX_CGU_NAME_LENGTH - TRUNCATED_NAME_PREFIX.len() - mangled_name.len(),
463 );
464::alloc::__export::must_use({
::alloc::fmt::format(format_args!("{0}{1}{2}",
&human_readable_name[..truncate_to], TRUNCATED_NAME_PREFIX,
mangled_name))
})format!(
465"{}{}{}",
466&human_readable_name[..truncate_to],
467 TRUNCATED_NAME_PREFIX,
468 mangled_name
469 )470 .into()
471 } else {
472// If the name is short enough, we can just return it as is.
473human_readable_name.into()
474 }
475 }
476477pub fn compute_size_estimate(&mut self) {
478// The size of a codegen unit as the sum of the sizes of the items
479 // within it.
480self.size_estimate = self.items.values().map(|data| data.size_estimate).sum();
481 }
482483/// Should only be called if [`compute_size_estimate`] has previously been called.
484 ///
485 /// [`compute_size_estimate`]: Self::compute_size_estimate
486#[inline]
487pub fn size_estimate(&self) -> usize {
488// Items are never zero-sized, so if we have items the estimate must be
489 // non-zero, unless we forgot to call `compute_size_estimate` first.
490if !(self.items.is_empty() || self.size_estimate != 0) {
::core::panicking::panic("assertion failed: self.items.is_empty() || self.size_estimate != 0")
};assert!(self.items.is_empty() || self.size_estimate != 0);
491self.size_estimate
492 }
493494pub fn contains_item(&self, item: &MonoItem<'tcx>) -> bool {
495self.items().contains_key(item)
496 }
497498pub fn work_product_id(&self) -> WorkProductId {
499WorkProductId::from_cgu_name(self.name().as_str())
500 }
501502pub fn previous_work_product(&self, tcx: TyCtxt<'_>) -> WorkProduct {
503let work_product_id = self.work_product_id();
504tcx.dep_graph
505 .previous_work_product(&work_product_id)
506 .unwrap_or_else(|| {
::core::panicking::panic_fmt(format_args!("Could not find work-product for CGU `{0}`",
self.name()));
}panic!("Could not find work-product for CGU `{}`", self.name()))
507 }
508509pub fn items_in_deterministic_order(
510&self,
511 tcx: TyCtxt<'tcx>,
512 ) -> Vec<(MonoItem<'tcx>, MonoItemData)> {
513// The codegen tests rely on items being process in the same order as
514 // they appear in the file, so for local items, we sort by span first
515#[derive(#[automatically_derived]
impl<'tcx> ::core::cmp::PartialEq for ItemSortKey<'tcx> {
#[inline]
fn eq(&self, other: &ItemSortKey<'tcx>) -> bool {
self.0 == other.0 && self.1 == other.1
}
}PartialEq, #[automatically_derived]
impl<'tcx> ::core::cmp::Eq for ItemSortKey<'tcx> {
#[inline]
#[doc(hidden)]
#[coverage(off)]
fn assert_receiver_is_total_eq(&self) {
let _: ::core::cmp::AssertParamIsEq<Option<Span>>;
let _: ::core::cmp::AssertParamIsEq<SymbolName<'tcx>>;
}
}Eq, #[automatically_derived]
impl<'tcx> ::core::cmp::PartialOrd for ItemSortKey<'tcx> {
#[inline]
fn partial_cmp(&self, other: &ItemSortKey<'tcx>)
-> ::core::option::Option<::core::cmp::Ordering> {
match ::core::cmp::PartialOrd::partial_cmp(&self.0, &other.0) {
::core::option::Option::Some(::core::cmp::Ordering::Equal) =>
::core::cmp::PartialOrd::partial_cmp(&self.1, &other.1),
cmp => cmp,
}
}
}PartialOrd, #[automatically_derived]
impl<'tcx> ::core::cmp::Ord for ItemSortKey<'tcx> {
#[inline]
fn cmp(&self, other: &ItemSortKey<'tcx>) -> ::core::cmp::Ordering {
match ::core::cmp::Ord::cmp(&self.0, &other.0) {
::core::cmp::Ordering::Equal =>
::core::cmp::Ord::cmp(&self.1, &other.1),
cmp => cmp,
}
}
}Ord)]
516struct ItemSortKey<'tcx>(Option<Span>, SymbolName<'tcx>);
517518// We only want to take HirIds of user-defines instances into account.
519 // The others don't matter for the codegen tests and can even make item
520 // order unstable.
521fn local_item_id<'tcx>(item: MonoItem<'tcx>) -> Option<DefId> {
522match item {
523 MonoItem::Fn(ref instance) => match instance.def {
524 InstanceKind::Item(def) => def.as_local().map(|_| def),
525 InstanceKind::VTableShim(..)
526 | InstanceKind::ReifyShim(..)
527 | InstanceKind::Intrinsic(..)
528 | InstanceKind::FnPtrShim(..)
529 | InstanceKind::Virtual(..)
530 | InstanceKind::ClosureOnceShim { .. }
531 | InstanceKind::ConstructCoroutineInClosureShim { .. }
532 | InstanceKind::DropGlue(..)
533 | InstanceKind::CloneShim(..)
534 | InstanceKind::ThreadLocalShim(..)
535 | InstanceKind::FnPtrAddrShim(..)
536 | InstanceKind::AsyncDropGlue(..)
537 | InstanceKind::FutureDropPollShim(..)
538 | InstanceKind::AsyncDropGlueCtorShim(..) => None,
539 },
540 MonoItem::Static(def_id) => def_id.as_local().map(|_| def_id),
541 MonoItem::GlobalAsm(item_id) => Some(item_id.owner_id.def_id.to_def_id()),
542 }
543 }
544fn item_sort_key<'tcx>(tcx: TyCtxt<'tcx>, item: MonoItem<'tcx>) -> ItemSortKey<'tcx> {
545ItemSortKey(
546local_item_id(item)
547 .map(|def_id| tcx.def_span(def_id).find_ancestor_not_from_macro())
548 .flatten(),
549item.symbol_name(tcx),
550 )
551 }
552553let mut items: Vec<_> = self.items().iter().map(|(&i, &data)| (i, data)).collect();
554if !tcx.sess.opts.unstable_opts.codegen_source_order {
555// In this case, we do not need to keep the items in any specific order, as the input
556 // is already deterministic.
557 //
558 // However, it seems that moving related things (such as different
559 // monomorphizations of the same function) close to one another is actually beneficial
560 // for LLVM performance.
561 // LLVM will codegen the items in the order we pass them to it, and when it handles
562 // similar things in succession, it seems that it leads to better cache utilization,
563 // less branch mispredictions and in general to better performance.
564 // For example, if we have functions `a`, `c::<u32>`, `b`, `c::<i16>`, `d` and
565 // `c::<bool>`, it seems that it helps LLVM's performance to codegen the three `c`
566 // instantiations right after one another, as they will likely reference similar types,
567 // call similar functions, etc.
568 //
569 // See https://github.com/rust-lang/rust/pull/145358 for more details.
570 //
571 // Sorting by symbol name should not incur any new non-determinism.
572items.sort_by_cached_key(|&(i, _)| i.symbol_name(tcx));
573 } else {
574items.sort_by_cached_key(|&(i, _)| item_sort_key(tcx, i));
575 }
576items577 }
578579pub fn codegen_dep_node(&self, tcx: TyCtxt<'tcx>) -> DepNode {
580make_compile_codegen_unit(tcx, self.name())
581 }
582}
583584impl ToStableHashKey<StableHashingContext<'_>> for CodegenUnit<'_> {
585type KeyType = String;
586587fn to_stable_hash_key(&self, _: &StableHashingContext<'_>) -> Self::KeyType {
588// Codegen unit names are conceptually required to be stable across
589 // compilation session so that object file names match up.
590self.name.to_string()
591 }
592}
593594pub struct CodegenUnitNameBuilder<'tcx> {
595 tcx: TyCtxt<'tcx>,
596 cache: UnordMap<CrateNum, String>,
597}
598599impl<'tcx> CodegenUnitNameBuilder<'tcx> {
600pub fn new(tcx: TyCtxt<'tcx>) -> Self {
601CodegenUnitNameBuilder { tcx, cache: Default::default() }
602 }
603604/// CGU names should fulfill the following requirements:
605 /// - They should be able to act as a file name on any kind of file system
606 /// - They should not collide with other CGU names, even for different versions
607 /// of the same crate.
608 ///
609 /// Consequently, we don't use special characters except for '.' and '-' and we
610 /// prefix each name with the crate-name and crate-disambiguator.
611 ///
612 /// This function will build CGU names of the form:
613 ///
614 /// ```text
615 /// <crate-name>.<crate-disambiguator>[-in-<local-crate-id>](-<component>)*[.<special-suffix>]
616 /// <local-crate-id> = <local-crate-name>.<local-crate-disambiguator>
617 /// ```
618 ///
619 /// The '.' before `<special-suffix>` makes sure that names with a special
620 /// suffix can never collide with a name built out of regular Rust
621 /// identifiers (e.g., module paths).
622pub fn build_cgu_name<I, C, S>(
623&mut self,
624 cnum: CrateNum,
625 components: I,
626 special_suffix: Option<S>,
627 ) -> Symbol628where
629I: IntoIterator<Item = C>,
630 C: fmt::Display,
631 S: fmt::Display,
632 {
633let cgu_name = self.build_cgu_name_no_mangle(cnum, components, special_suffix);
634635if self.tcx.sess.opts.unstable_opts.human_readable_cgu_names {
636Symbol::intern(&CodegenUnit::shorten_name(cgu_name.as_str()))
637 } else {
638Symbol::intern(&CodegenUnit::mangle_name(cgu_name.as_str()))
639 }
640 }
641642/// Same as `CodegenUnit::build_cgu_name()` but will never mangle the
643 /// resulting name.
644pub fn build_cgu_name_no_mangle<I, C, S>(
645&mut self,
646 cnum: CrateNum,
647 components: I,
648 special_suffix: Option<S>,
649 ) -> Symbol650where
651I: IntoIterator<Item = C>,
652 C: fmt::Display,
653 S: fmt::Display,
654 {
655use std::fmt::Write;
656657let mut cgu_name = String::with_capacity(64);
658659// Start out with the crate name and disambiguator
660let tcx = self.tcx;
661let crate_prefix = self.cache.entry(cnum).or_insert_with(|| {
662// Whenever the cnum is not LOCAL_CRATE we also mix in the
663 // local crate's ID. Otherwise there can be collisions between CGUs
664 // instantiating stuff for upstream crates.
665let local_crate_id = if cnum != LOCAL_CRATE {
666let local_stable_crate_id = tcx.stable_crate_id(LOCAL_CRATE);
667::alloc::__export::must_use({
::alloc::fmt::format(format_args!("-in-{0}.{1:08x}",
tcx.crate_name(LOCAL_CRATE), local_stable_crate_id))
})format!("-in-{}.{:08x}", tcx.crate_name(LOCAL_CRATE), local_stable_crate_id)668 } else {
669String::new()
670 };
671672let stable_crate_id = tcx.stable_crate_id(LOCAL_CRATE);
673::alloc::__export::must_use({
::alloc::fmt::format(format_args!("{0}.{1:08x}{2}",
tcx.crate_name(cnum), stable_crate_id, local_crate_id))
})format!("{}.{:08x}{}", tcx.crate_name(cnum), stable_crate_id, local_crate_id)674 });
675676cgu_name.write_fmt(format_args!("{0}", crate_prefix))write!(cgu_name, "{crate_prefix}").unwrap();
677678// Add the components
679for component in components {
680cgu_name.write_fmt(format_args!("-{0}", component))write!(cgu_name, "-{component}").unwrap();
681 }
682683if let Some(special_suffix) = special_suffix {
684// We add a dot in here so it cannot clash with anything in a regular
685 // Rust identifier
686cgu_name.write_fmt(format_args!(".{0}", special_suffix))write!(cgu_name, ".{special_suffix}").unwrap();
687 }
688689Symbol::intern(&cgu_name)
690 }
691}
692693/// See module-level docs of `rustc_monomorphize::collector` on some context for "mentioned" items.
694#[derive(#[automatically_derived]
impl ::core::marker::Copy for CollectionMode { }Copy, #[automatically_derived]
impl ::core::clone::Clone for CollectionMode {
#[inline]
fn clone(&self) -> CollectionMode { *self }
}Clone, #[automatically_derived]
impl ::core::fmt::Debug for CollectionMode {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
::core::fmt::Formatter::write_str(f,
match self {
CollectionMode::UsedItems => "UsedItems",
CollectionMode::MentionedItems => "MentionedItems",
})
}
}Debug, #[automatically_derived]
impl ::core::cmp::PartialEq for CollectionMode {
#[inline]
fn eq(&self, other: &CollectionMode) -> bool {
let __self_discr = ::core::intrinsics::discriminant_value(self);
let __arg1_discr = ::core::intrinsics::discriminant_value(other);
__self_discr == __arg1_discr
}
}PartialEq, #[automatically_derived]
impl ::core::cmp::Eq for CollectionMode {
#[inline]
#[doc(hidden)]
#[coverage(off)]
fn assert_receiver_is_total_eq(&self) {}
}Eq, #[automatically_derived]
impl ::core::hash::Hash for CollectionMode {
#[inline]
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) {
let __self_discr = ::core::intrinsics::discriminant_value(self);
::core::hash::Hash::hash(&__self_discr, state)
}
}Hash, const _: () =
{
impl<'__ctx>
::rustc_data_structures::stable_hasher::HashStable<::rustc_middle::ich::StableHashingContext<'__ctx>>
for CollectionMode {
#[inline]
fn hash_stable(&self,
__hcx: &mut ::rustc_middle::ich::StableHashingContext<'__ctx>,
__hasher:
&mut ::rustc_data_structures::stable_hasher::StableHasher) {
::std::mem::discriminant(self).hash_stable(__hcx, __hasher);
match *self {
CollectionMode::UsedItems => {}
CollectionMode::MentionedItems => {}
}
}
}
};HashStable)]
695pub enum CollectionMode {
696/// Collect items that are used, i.e., actually needed for codegen.
697 ///
698 /// Which items are used can depend on optimization levels, as MIR optimizations can remove
699 /// uses.
700UsedItems,
701/// Collect items that are mentioned. The goal of this mode is that it is independent of
702 /// optimizations: the set of "mentioned" items is computed before optimizations are run.
703 ///
704 /// The exact contents of this set are *not* a stable guarantee. (For instance, it is currently
705 /// computed after drop-elaboration. If we ever do some optimizations even in debug builds, we
706 /// might decide to run them before computing mentioned items.) The key property of this set is
707 /// that it is optimization-independent.
708MentionedItems,
709}