//! Coroutine layout logic.
//!
//! When laying out coroutines, we divide our saved local fields into two
//! categories: overlap-eligible and overlap-ineligible.
//!
//! Those fields which are ineligible for overlap go in a "prefix" at the
//! beginning of the layout, and always have space reserved for them.
//!
//! Overlap-eligible fields are only assigned to one variant, so we lay
//! those fields out for each variant and put them right after the
//! prefix.
//!
//! Finally, in the layout details, we point to the fields from the
//! variants they are assigned to. It is possible for some fields to be
//! included in multiple variants. No field ever "moves around" in the
//! layout; its offset is always the same.
//!
//! Also included in the layout are the upvars and the discriminant.
//! These are included as fields on the "outer" layout; they are not part
//! of any variant.
use std::iter;

use rustc_index::bit_set::{BitMatrix, DenseBitSet};
use rustc_index::{Idx, IndexSlice, IndexVec};
use tracing::{debug, trace};

use crate::{
    BackendRepr, FieldsShape, HasDataLayout, Integer, LayoutData, Primitive, ReprOptions, Scalar,
    StructKind, TagEncoding, Variants, WrappingRange,
};
/// Overlap eligibility and variant assignment for each CoroutineSavedLocal.
#[derive(Clone, Debug, PartialEq)]
enum SavedLocalEligibility<VariantIdx, FieldIdx> {
    /// Not yet seen in any variant; still a candidate for overlap.
    Unassigned,
    /// Seen in exactly one variant so far; eligible for overlap and laid
    /// out only inside that variant.
    Assigned(VariantIdx),
    /// Ineligible for overlap; will be "promoted" to the prefix. The
    /// `Option` holds the prefix field index once the prefix order has
    /// been decided (`None` until then).
    Ineligible(Option<FieldIdx>),
}
4041/// Compute the eligibility and assignment of each local.
42fn coroutine_saved_local_eligibility<VariantIdx: Idx, FieldIdx: Idx, LocalIdx: Idx>(
43 nb_locals: usize,
44 variant_fields: &IndexSlice<VariantIdx, IndexVec<FieldIdx, LocalIdx>>,
45 storage_conflicts: &BitMatrix<LocalIdx, LocalIdx>,
46) -> (DenseBitSet<LocalIdx>, IndexVec<LocalIdx, SavedLocalEligibility<VariantIdx, FieldIdx>>) {
47use SavedLocalEligibility::*;
4849let mut assignments: IndexVec<LocalIdx, _> = IndexVec::from_elem_n(Unassigned, nb_locals);
5051// The saved locals not eligible for overlap. These will get
52 // "promoted" to the prefix of our coroutine.
53let mut ineligible_locals = DenseBitSet::new_empty(nb_locals);
5455// Figure out which of our saved locals are fields in only
56 // one variant. The rest are deemed ineligible for overlap.
57for (variant_index, fields) in variant_fields.iter_enumerated() {
58for local in fields {
59match assignments[*local] {
60 Unassigned => {
61 assignments[*local] = Assigned(variant_index);
62 }
63 Assigned(idx) => {
64// We've already seen this local at another suspension
65 // point, so it is no longer a candidate.
66{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_abi/src/layout/coroutine.rs:66",
"rustc_abi::layout::coroutine", ::tracing::Level::TRACE,
::tracing_core::__macro_support::Option::Some("compiler/rustc_abi/src/layout/coroutine.rs"),
::tracing_core::__macro_support::Option::Some(66u32),
::tracing_core::__macro_support::Option::Some("rustc_abi::layout::coroutine"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::TRACE <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::TRACE <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("removing local {0:?} in >1 variant ({1:?}, {2:?})",
local, variant_index, idx) as &dyn Value))])
});
} else { ; }
};trace!(
67"removing local {:?} in >1 variant ({:?}, {:?})",
68 local, variant_index, idx
69 );
70 ineligible_locals.insert(*local);
71 assignments[*local] = Ineligible(None);
72 }
73 Ineligible(_) => {}
74 }
75 }
76 }
7778// Next, check every pair of eligible locals to see if they
79 // conflict.
80for local_a in storage_conflicts.rows() {
81let conflicts_a = storage_conflicts.count(local_a);
82if ineligible_locals.contains(local_a) {
83continue;
84 }
8586for local_b in storage_conflicts.iter(local_a) {
87// local_a and local_b are storage live at the same time, therefore they
88 // cannot overlap in the coroutine layout. The only way to guarantee
89 // this is if they are in the same variant, or one is ineligible
90 // (which means it is stored in every variant).
91if ineligible_locals.contains(local_b) || assignments[local_a] == assignments[local_b] {
92continue;
93 }
9495// If they conflict, we will choose one to make ineligible.
96 // This is not always optimal; it's just a greedy heuristic that
97 // seems to produce good results most of the time.
98let conflicts_b = storage_conflicts.count(local_b);
99let (remove, other) =
100if conflicts_a > conflicts_b { (local_a, local_b) } else { (local_b, local_a) };
101 ineligible_locals.insert(remove);
102 assignments[remove] = Ineligible(None);
103{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_abi/src/layout/coroutine.rs:103",
"rustc_abi::layout::coroutine", ::tracing::Level::TRACE,
::tracing_core::__macro_support::Option::Some("compiler/rustc_abi/src/layout/coroutine.rs"),
::tracing_core::__macro_support::Option::Some(103u32),
::tracing_core::__macro_support::Option::Some("rustc_abi::layout::coroutine"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::TRACE <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::TRACE <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("removing local {0:?} due to conflict with {1:?}",
remove, other) as &dyn Value))])
});
} else { ; }
};trace!("removing local {:?} due to conflict with {:?}", remove, other);
104 }
105 }
106107// Count the number of variants in use. If only one of them, then it is
108 // impossible to overlap any locals in our layout. In this case it's
109 // always better to make the remaining locals ineligible, so we can
110 // lay them out with the other locals in the prefix and eliminate
111 // unnecessary padding bytes.
112{
113let mut used_variants = DenseBitSet::new_empty(variant_fields.len());
114for assignment in &assignments {
115if let Assigned(idx) = assignment {
116 used_variants.insert(*idx);
117 }
118 }
119if used_variants.count() < 2 {
120for assignment in assignments.iter_mut() {
121*assignment = Ineligible(None);
122 }
123ineligible_locals.insert_all();
124 }
125 }
126127// Write down the order of our locals that will be promoted to the prefix.
128{
129for (idx, local) in ineligible_locals.iter().enumerate() {
130 assignments[local] = Ineligible(Some(FieldIdx::new(idx)));
131 }
132 }
133{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_abi/src/layout/coroutine.rs:133",
"rustc_abi::layout::coroutine", ::tracing::Level::DEBUG,
::tracing_core::__macro_support::Option::Some("compiler/rustc_abi/src/layout/coroutine.rs"),
::tracing_core::__macro_support::Option::Some(133u32),
::tracing_core::__macro_support::Option::Some("rustc_abi::layout::coroutine"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::DEBUG <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::DEBUG <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("coroutine saved local assignments: {0:?}",
assignments) as &dyn Value))])
});
} else { ; }
};debug!("coroutine saved local assignments: {:?}", assignments);
134135 (ineligible_locals, assignments)
136}
137138/// Compute the full coroutine layout.
139pub(super) fn layout<
140'a,
141 F: core::ops::Deref<Target = &'a LayoutData<FieldIdx, VariantIdx>> + core::fmt::Debug + Copy,
142 VariantIdx: Idx,
143 FieldIdx: Idx,
144 LocalIdx: Idx,
145>(
146 calc: &super::LayoutCalculator<impl HasDataLayout>,
147 local_layouts: &IndexSlice<LocalIdx, F>,
148mut prefix_layouts: IndexVec<FieldIdx, F>,
149 variant_fields: &IndexSlice<VariantIdx, IndexVec<FieldIdx, LocalIdx>>,
150 storage_conflicts: &BitMatrix<LocalIdx, LocalIdx>,
151 tag_to_layout: impl Fn(Scalar) -> F,
152) -> super::LayoutCalculatorResult<FieldIdx, VariantIdx, F> {
153use SavedLocalEligibility::*;
154155let (ineligible_locals, assignments) =
156coroutine_saved_local_eligibility(local_layouts.len(), variant_fields, storage_conflicts);
157158// Build a prefix layout, including "promoting" all ineligible
159 // locals as part of the prefix. We compute the layout of all of
160 // these fields at once to get optimal packing.
161let tag_index = prefix_layouts.next_index();
162163// `variant_fields` already accounts for the reserved variants, so no need to add them.
164let max_discr = (variant_fields.len() - 1) as u128;
165let discr_int = Integer::fit_unsigned(max_discr);
166let tag = Scalar::Initialized {
167 value: Primitive::Int(discr_int, /* signed = */ false),
168 valid_range: WrappingRange { start: 0, end: max_discr },
169 };
170171let promoted_layouts = ineligible_locals.iter().map(|local| local_layouts[local]);
172prefix_layouts.push(tag_to_layout(tag));
173prefix_layouts.extend(promoted_layouts);
174let prefix =
175calc.univariant(&prefix_layouts, &ReprOptions::default(), StructKind::AlwaysSized)?;
176177let (prefix_size, prefix_align) = (prefix.size, prefix.align);
178179// Split the prefix layout into the "outer" fields (upvars and
180 // discriminant) and the "promoted" fields. Promoted fields will
181 // get included in each variant that requested them in
182 // CoroutineLayout.
183{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_abi/src/layout/coroutine.rs:183",
"rustc_abi::layout::coroutine", ::tracing::Level::DEBUG,
::tracing_core::__macro_support::Option::Some("compiler/rustc_abi/src/layout/coroutine.rs"),
::tracing_core::__macro_support::Option::Some(183u32),
::tracing_core::__macro_support::Option::Some("rustc_abi::layout::coroutine"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::DEBUG <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::DEBUG <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("prefix = {0:#?}",
prefix) as &dyn Value))])
});
} else { ; }
};debug!("prefix = {:#?}", prefix);
184let (outer_fields, promoted_offsets, promoted_memory_index) = match prefix.fields {
185 FieldsShape::Arbitrary { mut offsets, in_memory_order } => {
186// "a" (`0..b_start`) and "b" (`b_start..`) correspond to
187 // "outer" and "promoted" fields respectively.
188let b_start = tag_index.plus(1);
189let offsets_b = IndexVec::from_raw(offsets.raw.split_off(b_start.index()));
190let offsets_a = offsets;
191192// Disentangle the "a" and "b" components of `in_memory_order`
193 // by preserving the order but keeping only one disjoint "half" each.
194 // FIXME(eddyb) build a better abstraction for permutations, if possible.
195let mut in_memory_order_a = IndexVec::<u32, FieldIdx>::new();
196let mut in_memory_order_b = IndexVec::<u32, FieldIdx>::new();
197for i in in_memory_order {
198if let Some(j) = i.index().checked_sub(b_start.index()) {
199 in_memory_order_b.push(FieldIdx::new(j));
200 } else {
201 in_memory_order_a.push(i);
202 }
203 }
204205let outer_fields =
206 FieldsShape::Arbitrary { offsets: offsets_a, in_memory_order: in_memory_order_a };
207 (outer_fields, offsets_b, in_memory_order_b.invert_bijective_mapping())
208 }
209_ => ::core::panicking::panic("internal error: entered unreachable code")unreachable!(),
210 };
211212let mut size = prefix.size;
213let mut align = prefix.align;
214let variants = variant_fields215 .iter_enumerated()
216 .map(|(index, variant_fields)| {
217// Only include overlap-eligible fields when we compute our variant layout.
218let variant_only_tys = variant_fields219 .iter()
220 .filter(|local| match assignments[**local] {
221Unassigned => ::core::panicking::panic("internal error: entered unreachable code")unreachable!(),
222Assigned(v) if v == index => true,
223Assigned(_) => {
::core::panicking::panic_fmt(format_args!("internal error: entered unreachable code: {0}",
format_args!("assignment does not match variant")));
}unreachable!("assignment does not match variant"),
224Ineligible(_) => false,
225 })
226 .map(|local| local_layouts[*local]);
227228let mut variant = calc.univariant(
229&variant_only_tys.collect::<IndexVec<_, _>>(),
230&ReprOptions::default(),
231 StructKind::Prefixed(prefix_size, prefix_align.abi),
232 )?;
233variant.variants = Variants::Single { index };
234235let FieldsShape::Arbitrary { offsets, in_memory_order } = variant.fields else {
236::core::panicking::panic("internal error: entered unreachable code");unreachable!();
237 };
238239// Now, stitch the promoted and variant-only fields back together in
240 // the order they are mentioned by our CoroutineLayout.
241 // Because we only use some subset (that can differ between variants)
242 // of the promoted fields, we can't just pick those elements of the
243 // `promoted_memory_index` (as we'd end up with gaps).
244 // So instead, we build an "inverse memory_index", as if all of the
245 // promoted fields were being used, but leave the elements not in the
246 // subset as `invalid_field_idx`, which we can filter out later to
247 // obtain a valid (bijective) mapping.
248let memory_index = in_memory_order.invert_bijective_mapping();
249let invalid_field_idx = promoted_memory_index.len() + memory_index.len();
250let mut combined_in_memory_order =
251IndexVec::from_elem_n(FieldIdx::new(invalid_field_idx), invalid_field_idx);
252253let mut offsets_and_memory_index = iter::zip(offsets, memory_index);
254let combined_offsets = variant_fields255 .iter_enumerated()
256 .map(|(i, local)| {
257let (offset, memory_index) = match assignments[*local] {
258Unassigned => ::core::panicking::panic("internal error: entered unreachable code")unreachable!(),
259Assigned(_) => {
260let (offset, memory_index) = offsets_and_memory_index.next().unwrap();
261 (offset, promoted_memory_index.len() as u32 + memory_index)
262 }
263Ineligible(field_idx) => {
264let field_idx = field_idx.unwrap();
265 (promoted_offsets[field_idx], promoted_memory_index[field_idx])
266 }
267 };
268combined_in_memory_order[memory_index] = i;
269offset270 })
271 .collect();
272273// Remove the unused slots to obtain the combined `in_memory_order`
274 // (also see previous comment).
275combined_in_memory_order.raw.retain(|&i| i.index() != invalid_field_idx);
276277variant.fields = FieldsShape::Arbitrary {
278 offsets: combined_offsets,
279 in_memory_order: combined_in_memory_order,
280 };
281282size = size.max(variant.size);
283align = align.max(variant.align);
284Ok(variant)
285 })
286 .collect::<Result<IndexVec<VariantIdx, _>, _>>()?;
287288size = size.align_to(align.abi);
289290let uninhabited = prefix.uninhabited || variants.iter().all(|v| v.is_uninhabited());
291let abi = BackendRepr::Memory { sized: true };
292293Ok(LayoutData {
294 variants: Variants::Multiple {
295tag,
296 tag_encoding: TagEncoding::Direct,
297 tag_field: tag_index,
298variants,
299 },
300 fields: outer_fields,
301 backend_repr: abi,
302// Suppress niches inside coroutines. If the niche is inside a field that is aliased (due to
303 // self-referentiality), getting the discriminant can cause aliasing violations.
304 // `UnsafeCell` blocks niches for the same reason, but we don't yet have `UnsafePinned` that
305 // would do the same for us here.
306 // See <https://github.com/rust-lang/rust/issues/63818>, <https://github.com/rust-lang/miri/issues/3780>.
307 // FIXME: Remove when <https://github.com/rust-lang/rust/issues/125735> is implemented and aliased coroutine fields are wrapped in `UnsafePinned`.
308largest_niche: None,
309uninhabited,
310size,
311align,
312 max_repr_align: None,
313 unadjusted_abi_align: align.abi,
314 randomization_seed: Default::default(),
315 })
316}