//! Coroutine layout logic: saved locals are split into overlap-eligible
//! fields, which are laid out separately for each variant, and
//! overlap-ineligible fields, which are promoted to a common prefix shared
//! by every variant. No field ever changes offset between variants.

use std::iter;

use rustc_index::bit_set::{BitMatrix, DenseBitSet};
use rustc_index::{Idx, IndexSlice, IndexVec};
use tracing::{debug, trace};

use crate::{
    BackendRepr, FieldsShape, HasDataLayout, Integer, LayoutData, Primitive, ReprOptions, Scalar,
    StructKind, TagEncoding, Variants, WrappingRange,
};

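/// Overlap eligibility and variant assignment for each saved local.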
#[derive(Clone, Debug, PartialEq)]
enum SavedLocalEligibility<VariantIdx, FieldIdx> {
    Unassigned,
    Assigned(VariantIdx),
    Ineligible(Option<FieldIdx>),
}

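/// Determine which saved locals are eligible for overlap, i.e. appear in
/// exactly one variant, and which must be promoted to the shared prefix.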
fn coroutine_saved_local_eligibility<VariantIdx: Idx, FieldIdx: Idx, LocalIdx: Idx>(
    nb_locals: usize,
    variant_fields: &IndexSlice<VariantIdx, IndexVec<FieldIdx, LocalIdx>>,
    storage_conflicts: &BitMatrix<LocalIdx, LocalIdx>,
) -> (DenseBitSet<LocalIdx>, IndexVec<LocalIdx, SavedLocalEligibility<VariantIdx, FieldIdx>>) {
    use SavedLocalEligibility::*;

    let mut assignments: IndexVec<LocalIdx, _> = IndexVec::from_elem_n(Unassigned, nb_locals);

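    // The saved locals not eligible for overlap; these will get promoted to
    // the prefix of the coroutine layout.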
    let mut ineligible_locals = DenseBitSet::new_empty(nb_locals);

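    // First pass: assign each local to the single variant that mentions it;
    // a local mentioned by more than one variant is ineligible for overlap.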
    for (variant_index, fields) in variant_fields.iter_enumerated() {
        for local in fields {
            match assignments[*local] {
                Unassigned => {
                    assignments[*local] = Assigned(variant_index);
                }
                Assigned(idx) => {
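                    // We've already seen this local in another variant, so
                    // it is no longer a candidate for overlap.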
                    trace!(
                        "removing local {:?} in >1 variant ({:?}, {:?})",
                        local, variant_index, idx
                    );
                    ineligible_locals.insert(*local);
                    assignments[*local] = Ineligible(None);
                }
                Ineligible(_) => {}
            }
        }
    }

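    // Next, check every pair of eligible locals to see if their storage
    // conflicts, i.e. whether they can be live at the same time.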
    for local_a in storage_conflicts.rows() {
        let conflicts_a = storage_conflicts.count(local_a);
        if ineligible_locals.contains(local_a) {
            continue;
        }

        for local_b in storage_conflicts.iter(local_a) {
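            // `local_a` and `local_b` are storage-live at the same time, so
            // they cannot overlap in the layout. That is already guaranteed
            // if they are assigned to the same variant, or if one of them is
            // ineligible (and hence stored in every variant).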
            if ineligible_locals.contains(local_b) || assignments[local_a] == assignments[local_b]
            {
                continue;
            }

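            // Otherwise, greedily make one of the two ineligible, preferring
            // the local with more conflicts. This is a heuristic and not
            // necessarily optimal.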
            let conflicts_b = storage_conflicts.count(local_b);
            let (remove, other) =
                if conflicts_a > conflicts_b { (local_a, local_b) } else { (local_b, local_a) };
            ineligible_locals.insert(remove);
            assignments[remove] = Ineligible(None);
            trace!("removing local {:?} due to conflict with {:?}", remove, other);
        }
    }

    {
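        // If fewer than two variants have assigned locals, overlap cannot
        // save anything; make every local ineligible so they are all laid
        // out together in the prefix, avoiding unnecessary padding.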
        let mut used_variants = DenseBitSet::new_empty(variant_fields.len());
        for assignment in &assignments {
            if let Assigned(idx) = assignment {
                used_variants.insert(*idx);
            }
        }
        if used_variants.count() < 2 {
            for assignment in assignments.iter_mut() {
                *assignment = Ineligible(None);
            }
            ineligible_locals.insert_all();
        }
    }

    {
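        // Record the order in which the ineligible locals will be promoted
        // into the prefix.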
        for (idx, local) in ineligible_locals.iter().enumerate() {
            assignments[local] = Ineligible(Some(FieldIdx::new(idx)));
        }
    }
    debug!("coroutine saved local assignments: {:?}", assignments);

    (ineligible_locals, assignments)
}

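/// Compute the full coroutine layout: a shared prefix (tag plus promoted
/// locals) followed by per-variant layouts of the overlap-eligible locals.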
pub(super) fn layout<
    'a,
    F: core::ops::Deref<Target = &'a LayoutData<FieldIdx, VariantIdx>> + core::fmt::Debug + Copy,
    VariantIdx: Idx,
    FieldIdx: Idx,
    LocalIdx: Idx,
>(
    calc: &super::LayoutCalculator<impl HasDataLayout>,
    local_layouts: &IndexSlice<LocalIdx, F>,
    mut prefix_layouts: IndexVec<FieldIdx, F>,
    variant_fields: &IndexSlice<VariantIdx, IndexVec<FieldIdx, LocalIdx>>,
    storage_conflicts: &BitMatrix<LocalIdx, LocalIdx>,
    tag_to_layout: impl Fn(Scalar) -> F,
) -> super::LayoutCalculatorResult<FieldIdx, VariantIdx, F> {
    use SavedLocalEligibility::*;

    let (ineligible_locals, assignments) =
        coroutine_saved_local_eligibility(local_layouts.len(), variant_fields, storage_conflicts);

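    // Build a prefix layout consisting of the caller-provided prefix fields,
    // the tag, and all the promoted (ineligible) locals, laid out together
    // for optimal packing.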
    let tag_index = prefix_layouts.len();

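    // The tag is an unsigned integer just wide enough to hold one value per
    // variant; its valid range is exactly `0..=max_discr`.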
    let max_discr = (variant_fields.len() - 1) as u128;
    let discr_int = Integer::fit_unsigned(max_discr);
    let tag = Scalar::Initialized {
        value: Primitive::Int(discr_int, false),
        valid_range: WrappingRange { start: 0, end: max_discr },
    };

    let promoted_layouts = ineligible_locals.iter().map(|local| local_layouts[local]);
    prefix_layouts.push(tag_to_layout(tag));
    prefix_layouts.extend(promoted_layouts);
    let prefix =
        calc.univariant(&prefix_layouts, &ReprOptions::default(), StructKind::AlwaysSized)?;

    let (prefix_size, prefix_align) = (prefix.size, prefix.align);

    debug!("prefix = {:#?}", prefix);
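    // Split the prefix layout into the "outer" fields (everything up to and
    // including the tag) and the promoted fields, which each variant will
    // later splice back in at these fixed offsets.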
    let (outer_fields, promoted_offsets, promoted_memory_index) = match prefix.fields {
        FieldsShape::Arbitrary { mut offsets, memory_index } => {
            let mut inverse_memory_index = memory_index.invert_bijective_mapping();

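            // "a" (`..b_start`) and "b" (`b_start..`) correspond to the
            // "outer" and "promoted" fields, respectively.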
            let b_start = FieldIdx::new(tag_index + 1);
            let offsets_b = IndexVec::from_raw(offsets.raw.split_off(b_start.index()));
            let offsets_a = offsets;

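            // Disentangle the "a" and "b" components of `inverse_memory_index`
            // by preserving their relative order while keeping only one
            // disjoint "half" in each.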
            let inverse_memory_index_b: IndexVec<u32, FieldIdx> = inverse_memory_index
                .iter()
                .filter_map(|&i| i.index().checked_sub(b_start.index()).map(FieldIdx::new))
                .collect();
            inverse_memory_index.raw.retain(|&i| i.index() < b_start.index());
            let inverse_memory_index_a = inverse_memory_index;

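            // Since `inverse_memory_index_{a,b}` each only refer to their
            // respective fields, they can be safely inverted.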
            let memory_index_a = inverse_memory_index_a.invert_bijective_mapping();
            let memory_index_b = inverse_memory_index_b.invert_bijective_mapping();

            let outer_fields =
                FieldsShape::Arbitrary { offsets: offsets_a, memory_index: memory_index_a };
            (outer_fields, offsets_b, memory_index_b)
        }
        _ => unreachable!(),
    };

    let mut size = prefix.size;
    let mut align = prefix.align;
    let variants = variant_fields
        .iter_enumerated()
        .map(|(index, variant_fields)| {
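            // Only the overlap-eligible fields assigned to this variant take
            // part in its own layout; promoted fields are spliced in later.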
            let variant_only_tys = variant_fields
                .iter()
                .filter(|local| match assignments[**local] {
                    Unassigned => unreachable!(),
                    Assigned(v) if v == index => true,
                    Assigned(_) => unreachable!("assignment does not match variant"),
                    Ineligible(_) => false,
                })
                .map(|local| local_layouts[*local]);

            let mut variant = calc.univariant(
                &variant_only_tys.collect::<IndexVec<_, _>>(),
                &ReprOptions::default(),
                StructKind::Prefixed(prefix_size, prefix_align.abi),
            )?;
            variant.variants = Variants::Single { index };

            let FieldsShape::Arbitrary { offsets, memory_index } = variant.fields else {
                unreachable!();
            };

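            // Stitch the promoted and variant-only fields back together in
            // the order the variant mentions them. Each variant may use only
            // a subset of the promoted fields, so simply reusing entries of
            // `promoted_memory_index` would leave gaps. Instead, build an
            // inverse memory index sized as if every promoted field were
            // used, mark unused slots with an out-of-range sentinel, and
            // filter those out afterwards to recover a bijective mapping.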
            let invalid_field_idx = promoted_memory_index.len() + memory_index.len();
            let mut combined_inverse_memory_index =
                IndexVec::from_elem_n(FieldIdx::new(invalid_field_idx), invalid_field_idx);

            let mut offsets_and_memory_index = iter::zip(offsets, memory_index);
            let combined_offsets = variant_fields
                .iter_enumerated()
                .map(|(i, local)| {
                    let (offset, memory_index) = match assignments[*local] {
                        Unassigned => unreachable!(),
                        Assigned(_) => {
                            let (offset, memory_index) = offsets_and_memory_index.next().unwrap();
                            (offset, promoted_memory_index.len() as u32 + memory_index)
                        }
                        Ineligible(field_idx) => {
                            let field_idx = field_idx.unwrap();
                            (promoted_offsets[field_idx], promoted_memory_index[field_idx])
                        }
                    };
                    combined_inverse_memory_index[memory_index] = i;
                    offset
                })
                .collect();

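            // Remove the unused sentinel slots and invert the mapping to
            // obtain the combined `memory_index`.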
            combined_inverse_memory_index.raw.retain(|&i| i.index() != invalid_field_idx);
            let combined_memory_index = combined_inverse_memory_index.invert_bijective_mapping();

            variant.fields = FieldsShape::Arbitrary {
                offsets: combined_offsets,
                memory_index: combined_memory_index,
            };

            size = size.max(variant.size);
            align = align.max(variant.align);
            Ok(variant)
        })
        .collect::<Result<IndexVec<VariantIdx, _>, _>>()?;

    size = size.align_to(align.abi);

    let uninhabited = prefix.uninhabited || variants.iter().all(|v| v.is_uninhabited());
    let abi = BackendRepr::Memory { sized: true };

    Ok(LayoutData {
        variants: Variants::Multiple {
            tag,
            tag_encoding: TagEncoding::Direct,
            tag_field: tag_index,
            variants,
        },
        fields: outer_fields,
        backend_repr: abi,
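        // No niche is exposed from a coroutine: a niche could end up inside
        // a field that is aliased through the coroutine's self-references,
        // which would make reads of the discriminant unsound.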
        largest_niche: None,
        uninhabited,
        size,
        align,
        max_repr_align: None,
        unadjusted_abi_align: align.abi,
        randomization_seed: Default::default(),
    })
}