1use std::fmt::{self, Write};
2use std::ops::{Bound, Deref};
3use std::{cmp, iter};
4
5use rustc_hashes::Hash64;
6use rustc_index::Idx;
7use rustc_index::bit_set::BitMatrix;
8use tracing::debug;
9
10use crate::{
11 AbiAndPrefAlign, Align, BackendRepr, FieldsShape, HasDataLayout, IndexSlice, IndexVec, Integer,
12 LayoutData, Niche, NonZeroUsize, Primitive, ReprOptions, Scalar, Size, StructKind, TagEncoding,
13 Variants, WrappingRange,
14};
15
16mod coroutine;
17mod simple;
18
19#[cfg(feature = "nightly")]
20mod ty;
21
22#[cfg(feature = "nightly")]
23pub use ty::{FIRST_VARIANT, FieldIdx, Layout, TyAbiInterface, TyAndLayout, VariantIdx};
24
25fn absent<'a, FieldIdx, VariantIdx, F>(fields: &IndexSlice<FieldIdx, F>) -> bool
31where
32 FieldIdx: Idx,
33 VariantIdx: Idx,
34 F: Deref<Target = &'a LayoutData<FieldIdx, VariantIdx>> + fmt::Debug,
35{
36 let uninhabited = fields.iter().any(|f| f.is_uninhabited());
37 let is_1zst = fields.iter().all(|f| f.is_1zst());
40 uninhabited && is_1zst
41}
42
/// Controls which end of the layout the field-ordering logic in
/// `univariant_biased` tries to keep the largest niche close to.
enum NicheBias {
    /// Prefer placing the largest niche near the start (offset 0).
    Start,
    /// Prefer placing the largest niche near the end of the layout.
    End,
}
48
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum LayoutCalculatorError<F> {
    /// An unsized type was found where a sized type was expected; carries the
    /// offending field as payload.
    UnexpectedUnsized(F),

    /// The computed size would overflow (see the `checked_mul`/object-size
    /// checks in the methods below).
    SizeOverflow,

    /// A union was defined with no fields.
    EmptyUnion,

    /// The type's repr attributes are in conflict (e.g. a packed enum,
    /// rejected in `layout_of_enum`).
    ReprConflict,

    /// A SIMD vector type was declared with zero lanes.
    ZeroLengthSimdType,

    /// A SIMD vector type exceeds the maximum supported number of lanes.
    OversizedSimdType { max_lanes: u64 },

    /// A SIMD vector's element type is not a scalar primitive; carries the
    /// element as payload.
    NonPrimitiveSimdType(F),
}
77
78impl<F> LayoutCalculatorError<F> {
79 pub fn without_payload(&self) -> LayoutCalculatorError<()> {
80 use LayoutCalculatorError::*;
81 match *self {
82 UnexpectedUnsized(_) => UnexpectedUnsized(()),
83 SizeOverflow => SizeOverflow,
84 EmptyUnion => EmptyUnion,
85 ReprConflict => ReprConflict,
86 ZeroLengthSimdType => ZeroLengthSimdType,
87 OversizedSimdType { max_lanes } => OversizedSimdType { max_lanes },
88 NonPrimitiveSimdType(_) => NonPrimitiveSimdType(()),
89 }
90 }
91
92 pub fn fallback_fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
96 use LayoutCalculatorError::*;
97 f.write_str(match self {
98 UnexpectedUnsized(_) => "an unsized type was found where a sized type was expected",
99 SizeOverflow => "size overflow",
100 EmptyUnion => "type is a union with no fields",
101 ReprConflict => "type has an invalid repr",
102 ZeroLengthSimdType | OversizedSimdType { .. } | NonPrimitiveSimdType(_) => {
103 "invalid simd type definition"
104 }
105 })
106 }
107}
108
/// Shorthand for a layout computation's outcome: the computed `LayoutData`
/// on success, or a `LayoutCalculatorError` carrying payload `F` on failure.
type LayoutCalculatorResult<FieldIdx, VariantIdx, F> =
    Result<LayoutData<FieldIdx, VariantIdx>, LayoutCalculatorError<F>>;
111
/// Computes type layouts from their constituent parts, using a context `Cx`
/// that provides the target's data layout (`HasDataLayout`).
#[derive(Clone, Copy, Debug)]
pub struct LayoutCalculator<Cx> {
    pub cx: Cx,
}
116
117impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
    /// Creates a calculator that answers data-layout queries through `cx`.
    pub fn new(cx: Cx) -> Self {
        Self { cx }
    }
121
122 pub fn array_like<FieldIdx: Idx, VariantIdx: Idx, F>(
123 &self,
124 element: &LayoutData<FieldIdx, VariantIdx>,
125 count_if_sized: Option<u64>, ) -> LayoutCalculatorResult<FieldIdx, VariantIdx, F> {
127 let count = count_if_sized.unwrap_or(0);
128 let size =
129 element.size.checked_mul(count, &self.cx).ok_or(LayoutCalculatorError::SizeOverflow)?;
130
131 Ok(LayoutData {
132 variants: Variants::Single { index: VariantIdx::new(0) },
133 fields: FieldsShape::Array { stride: element.size, count },
134 backend_repr: BackendRepr::Memory { sized: count_if_sized.is_some() },
135 largest_niche: element.largest_niche.filter(|_| count != 0),
136 uninhabited: element.uninhabited && count != 0,
137 align: element.align,
138 size,
139 max_repr_align: None,
140 unadjusted_abi_align: element.align.abi,
141 randomization_seed: element.randomization_seed.wrapping_add(Hash64::new(count)),
142 })
143 }
144
145 pub fn simd_type<
146 FieldIdx: Idx,
147 VariantIdx: Idx,
148 F: AsRef<LayoutData<FieldIdx, VariantIdx>> + fmt::Debug,
149 >(
150 &self,
151 element: F,
152 count: u64,
153 repr_packed: bool,
154 ) -> LayoutCalculatorResult<FieldIdx, VariantIdx, F> {
155 let elt = element.as_ref();
156 if count == 0 {
157 return Err(LayoutCalculatorError::ZeroLengthSimdType);
158 } else if count > crate::MAX_SIMD_LANES {
159 return Err(LayoutCalculatorError::OversizedSimdType {
160 max_lanes: crate::MAX_SIMD_LANES,
161 });
162 }
163
164 let BackendRepr::Scalar(e_repr) = elt.backend_repr else {
165 return Err(LayoutCalculatorError::NonPrimitiveSimdType(element));
166 };
167
168 let dl = self.cx.data_layout();
170 let size =
171 elt.size.checked_mul(count, dl).ok_or_else(|| LayoutCalculatorError::SizeOverflow)?;
172 let (repr, align) = if repr_packed && !count.is_power_of_two() {
173 (
177 BackendRepr::Memory { sized: true },
178 AbiAndPrefAlign {
179 abi: Align::max_aligned_factor(size),
180 pref: dl.llvmlike_vector_align(size).pref,
181 },
182 )
183 } else {
184 (BackendRepr::SimdVector { element: e_repr, count }, dl.llvmlike_vector_align(size))
185 };
186 let size = size.align_to(align.abi);
187
188 Ok(LayoutData {
189 variants: Variants::Single { index: VariantIdx::new(0) },
190 fields: FieldsShape::Arbitrary {
191 offsets: [Size::ZERO].into(),
192 memory_index: [0].into(),
193 },
194 backend_repr: repr,
195 largest_niche: elt.largest_niche,
196 uninhabited: false,
197 size,
198 align,
199 max_repr_align: None,
200 unadjusted_abi_align: elt.align.abi,
201 randomization_seed: elt.randomization_seed.wrapping_add(Hash64::new(count)),
202 })
203 }
204
    /// Computes the layout of a coroutine's state type by delegating to the
    /// `coroutine` submodule.
    ///
    /// `local_layouts` gives the layout of each saved local, `prefix_layouts`
    /// the fields shared by every variant (presumably captured upvars — see
    /// the `coroutine` module for the exact contract), `variant_fields` maps
    /// each state variant to the locals it stores, `storage_conflicts` records
    /// which locals may not overlap in memory, and `tag_to_layout` converts a
    /// tag scalar into a field layout.
    pub fn coroutine<
        'a,
        F: Deref<Target = &'a LayoutData<FieldIdx, VariantIdx>> + fmt::Debug + Copy,
        VariantIdx: Idx,
        FieldIdx: Idx,
        LocalIdx: Idx,
    >(
        &self,
        local_layouts: &IndexSlice<LocalIdx, F>,
        prefix_layouts: IndexVec<FieldIdx, F>,
        variant_fields: &IndexSlice<VariantIdx, IndexVec<FieldIdx, LocalIdx>>,
        storage_conflicts: &BitMatrix<LocalIdx, LocalIdx>,
        tag_to_layout: impl Fn(Scalar) -> F,
    ) -> LayoutCalculatorResult<FieldIdx, VariantIdx, F> {
        coroutine::layout(
            self,
            local_layouts,
            prefix_layouts,
            variant_fields,
            storage_conflicts,
            tag_to_layout,
        )
    }
232
    /// Computes a single-variant (struct/tuple) layout for `fields`.
    ///
    /// First lays fields out biased towards placing niches at the start
    /// (`NicheBias::Start`). If that leaves the largest niche at a nonzero
    /// offset, a second end-biased pass is tried and kept only when it leaves
    /// strictly more head space than both the original head and tail space.
    pub fn univariant<
        'a,
        FieldIdx: Idx,
        VariantIdx: Idx,
        F: Deref<Target = &'a LayoutData<FieldIdx, VariantIdx>> + fmt::Debug + Copy,
    >(
        &self,
        fields: &IndexSlice<FieldIdx, F>,
        repr: &ReprOptions,
        kind: StructKind,
    ) -> LayoutCalculatorResult<FieldIdx, VariantIdx, F> {
        let dl = self.cx.data_layout();
        let layout = self.univariant_biased(fields, repr, kind, NicheBias::Start);
        if let Ok(layout) = &layout {
            // Only retry for definitely-sized structs; a maybe-unsized tail
            // pins the field order.
            if !matches!(kind, StructKind::MaybeUnsized) {
                if let Some(niche) = layout.largest_niche {
                    // Space before the niche, the niche itself, and after it.
                    let head_space = niche.offset.bytes();
                    let niche_len = niche.value.size(dl).bytes();
                    let tail_space = layout.size.bytes() - head_space - niche_len;

                    // Reordering can only help with multiple fields, a niche
                    // not already at offset 0, and room after the niche.
                    if fields.len() > 1 && head_space != 0 && tail_space > 0 {
                        let alt_layout = self
                            .univariant_biased(fields, repr, kind, NicheBias::End)
                            .expect("alt layout should always work");
                        let alt_niche = alt_layout
                            .largest_niche
                            .expect("alt layout should have a niche like the regular one");
                        let alt_head_space = alt_niche.offset.bytes();
                        let alt_niche_len = alt_niche.value.size(dl).bytes();
                        let alt_tail_space =
                            alt_layout.size.bytes() - alt_head_space - alt_niche_len;

                        // Both passes must agree on total size.
                        debug_assert_eq!(layout.size.bytes(), alt_layout.size.bytes());

                        // Take the alternative only when it strictly beats the
                        // default layout's head *and* tail space.
                        let prefer_alt_layout =
                            alt_head_space > head_space && alt_head_space > tail_space;

                        debug!(
                            "sz: {}, default_niche_at: {}+{}, default_tail_space: {}, alt_niche_at/head_space: {}+{}, alt_tail: {}, num_fields: {}, better: {}\n\
                            layout: {}\n\
                            alt_layout: {}\n",
                            layout.size.bytes(),
                            head_space,
                            niche_len,
                            tail_space,
                            alt_head_space,
                            alt_niche_len,
                            alt_tail_space,
                            layout.fields.count(),
                            prefer_alt_layout,
                            self.format_field_niches(layout, fields),
                            self.format_field_niches(&alt_layout, fields),
                        );

                        if prefer_alt_layout {
                            return Ok(alt_layout);
                        }
                    }
                }
            }
        }
        layout
    }
307
    /// Entry point for computing the layout of a struct or enum.
    ///
    /// Dispatches to `layout_of_struct` for structs, and for enums with at
    /// most one present variant when layout optimizations are permitted;
    /// everything else goes through `layout_of_enum`.
    pub fn layout_of_struct_or_enum<
        'a,
        FieldIdx: Idx,
        VariantIdx: Idx,
        F: Deref<Target = &'a LayoutData<FieldIdx, VariantIdx>> + fmt::Debug + Copy,
    >(
        &self,
        repr: &ReprOptions,
        variants: &IndexSlice<VariantIdx, IndexVec<FieldIdx, F>>,
        is_enum: bool,
        is_unsafe_cell: bool,
        scalar_valid_range: (Bound<u128>, Bound<u128>),
        discr_range_of_repr: impl Fn(i128, i128) -> (Integer, bool),
        discriminants: impl Iterator<Item = (VariantIdx, i128)>,
        dont_niche_optimize_enum: bool,
        always_sized: bool,
    ) -> LayoutCalculatorResult<FieldIdx, VariantIdx, F> {
        // Find the first two present (non-absent) variants; two iterator
        // pulls suffice to distinguish zero / one / many. Under `repr(C)`
        // every variant is kept present.
        let (present_first, present_second) = {
            let mut present_variants = variants
                .iter_enumerated()
                .filter_map(|(i, v)| if !repr.c() && absent(v) { None } else { Some(i) });
            (present_variants.next(), present_variants.next())
        };
        let present_first = match present_first {
            Some(present_first) => present_first,
            // Uninhabited because it has no variants, or only absent ones.
            None if is_enum => {
                return Ok(LayoutData::never_type(&self.cx));
            }
            // For a struct, still compute a layout so field offsets exist.
            None => VariantIdx::new(0),
        };

        // Structs always take the struct path; enums do too when only one
        // variant is present and repr does not inhibit layout optimization.
        if !is_enum ||
            (present_second.is_none() && !repr.inhibit_enum_layout_opt())
        {
            self.layout_of_struct(
                repr,
                variants,
                is_enum,
                is_unsafe_cell,
                scalar_valid_range,
                always_sized,
                present_first,
            )
        } else {
            // At least two present variants: full enum layout.
            assert!(is_enum);
            self.layout_of_enum(
                repr,
                variants,
                discr_range_of_repr,
                discriminants,
                dont_niche_optimize_enum,
            )
        }
    }
370
    /// Computes the layout of a union.
    ///
    /// Every field starts at offset 0; the union's size is the maximum field
    /// size rounded up to the final alignment. If all non-ZST fields agree on
    /// a common (union-form) backend repr compatible with that alignment, the
    /// union inherits it; otherwise it is plain `Memory`.
    pub fn layout_of_union<
        'a,
        FieldIdx: Idx,
        VariantIdx: Idx,
        F: Deref<Target = &'a LayoutData<FieldIdx, VariantIdx>> + fmt::Debug + Copy,
    >(
        &self,
        repr: &ReprOptions,
        variants: &IndexSlice<VariantIdx, IndexVec<FieldIdx, F>>,
    ) -> LayoutCalculatorResult<FieldIdx, VariantIdx, F> {
        let dl = self.cx.data_layout();
        // `repr(packed)` starts alignment at 1 byte; fields can raise it only
        // up to the pack limit applied further down.
        let mut align = if repr.pack.is_some() { dl.i8_align } else { dl.aggregate_align };
        let mut max_repr_align = repr.align;

        // Marker: the fields do not share a usable common ABI.
        struct AbiMismatch;
        let mut common_non_zst_repr_and_align = if repr.inhibits_union_abi_opt() {
            Err(AbiMismatch)
        } else {
            Ok(None)
        };

        let mut size = Size::ZERO;
        // A union has exactly one "variant" holding all of its fields.
        let only_variant_idx = VariantIdx::new(0);
        let only_variant = &variants[only_variant_idx];
        for field in only_variant {
            if field.is_unsized() {
                return Err(LayoutCalculatorError::UnexpectedUnsized(*field));
            }

            align = align.max(field.align);
            max_repr_align = max_repr_align.max(field.max_repr_align);
            size = cmp::max(size, field.size);

            // ZST fields do not constrain the common ABI.
            if field.is_zst() {
                continue;
            }

            if let Ok(common) = common_non_zst_repr_and_align {
                // Compare field ABIs in their union (`to_union`) form.
                let field_abi = field.backend_repr.to_union();

                if let Some((common_abi, common_align)) = common {
                    if common_abi != field_abi {
                        // Fields disagree: give up on a scalar-like ABI.
                        common_non_zst_repr_and_align = Err(AbiMismatch);
                    } else {
                        // Matching non-Memory ABIs must agree on alignment.
                        if !matches!(common_abi, BackendRepr::Memory { .. }) {
                            assert_eq!(
                                common_align, field.align.abi,
                                "non-Aggregate field with matching ABI but differing alignment"
                            );
                        }
                    }
                } else {
                    // First non-ZST field defines the candidate common ABI.
                    common_non_zst_repr_and_align = Ok(Some((field_abi, field.align.abi)));
                }
            }
        }

        if let Some(pack) = repr.pack {
            align = align.min(AbiAndPrefAlign::new(pack));
        }
        // Record the alignment before `repr(align)` raises it: the
        // "unadjusted" value excludes that attribute.
        let unadjusted_abi_align = align.abi;
        if let Some(repr_align) = repr.align {
            align = align.max(AbiAndPrefAlign::new(repr_align));
        }
        let align = align;

        // Keep the common field ABI only if it is consistent with the final
        // union alignment; otherwise fall back to plain memory.
        let backend_repr = match common_non_zst_repr_and_align {
            Err(AbiMismatch) | Ok(None) => BackendRepr::Memory { sized: true },
            Ok(Some((repr, _))) => match repr {
                BackendRepr::Scalar(_) | BackendRepr::ScalarPair(_, _)
                    if repr.scalar_align(dl).unwrap() != align.abi =>
                {
                    BackendRepr::Memory { sized: true }
                }
                BackendRepr::SimdVector { element, count: _ }
                    if element.align(dl).abi > align.abi =>
                {
                    BackendRepr::Memory { sized: true }
                }
                BackendRepr::Scalar(..)
                | BackendRepr::ScalarPair(..)
                | BackendRepr::SimdVector { .. }
                | BackendRepr::Memory { .. } => repr,
            },
        };

        let Some(union_field_count) = NonZeroUsize::new(only_variant.len()) else {
            return Err(LayoutCalculatorError::EmptyUnion);
        };

        // Mix every field's randomization seed into the repr's shuffle seed
        // so downstream layout randomization is deterministic per type.
        let combined_seed = only_variant
            .iter()
            .map(|v| v.randomization_seed)
            .fold(repr.field_shuffle_seed, |acc, seed| acc.wrapping_add(seed));

        Ok(LayoutData {
            variants: Variants::Single { index: only_variant_idx },
            fields: FieldsShape::Union(union_field_count),
            backend_repr,
            largest_niche: None,
            uninhabited: false,
            align,
            size: size.align_to(align.abi),
            max_repr_align,
            unadjusted_abi_align,
            randomization_seed: combined_seed,
        })
    }
496
    /// Computes a struct layout (also used for enums reduced to a single
    /// present variant).
    ///
    /// On top of the basic `univariant` computation this handles niche hiding
    /// for `UnsafeCell` and the `layout_scalar_valid_range` restriction.
    fn layout_of_struct<
        'a,
        FieldIdx: Idx,
        VariantIdx: Idx,
        F: Deref<Target = &'a LayoutData<FieldIdx, VariantIdx>> + fmt::Debug + Copy,
    >(
        &self,
        repr: &ReprOptions,
        variants: &IndexSlice<VariantIdx, IndexVec<FieldIdx, F>>,
        is_enum: bool,
        is_unsafe_cell: bool,
        scalar_valid_range: (Bound<u128>, Bound<u128>),
        always_sized: bool,
        present_first: VariantIdx,
    ) -> LayoutCalculatorResult<FieldIdx, VariantIdx, F> {
        let dl = self.cx.data_layout();
        let v = present_first;
        // A possibly-unsized struct must keep its tail field last, so the
        // field-reordering logic is told which case it is in.
        let kind = if is_enum || variants[v].is_empty() || always_sized {
            StructKind::AlwaysSized
        } else {
            StructKind::MaybeUnsized
        };

        let mut st = self.univariant(&variants[v], repr, kind)?;
        st.variants = Variants::Single { index: v };

        if is_unsafe_cell {
            // Interior mutability: widen every scalar's valid range to full so
            // no niche inside the cell is exploited by enclosing types.
            let hide_niches = |scalar: &mut _| match scalar {
                Scalar::Initialized { value, valid_range } => {
                    *valid_range = WrappingRange::full(value.size(dl))
                }
                // Union scalars already have no validity constraint.
                Scalar::Union { .. } => {}
            };
            match &mut st.backend_repr {
                BackendRepr::Scalar(scalar) => hide_niches(scalar),
                BackendRepr::ScalarPair(a, b) => {
                    hide_niches(a);
                    hide_niches(b);
                }
                BackendRepr::SimdVector { element, count: _ } => hide_niches(element),
                BackendRepr::Memory { sized: _ } => {}
            }
            st.largest_niche = None;
            return Ok(st);
        }

        // Apply any `layout_scalar_valid_range` bounds to the first scalar.
        let (start, end) = scalar_valid_range;
        match st.backend_repr {
            BackendRepr::Scalar(ref mut scalar) | BackendRepr::ScalarPair(ref mut scalar, _) => {
                let max_value = scalar.size(dl).unsigned_int_max();
                if let Bound::Included(start) = start {
                    assert!(start <= max_value, "{start} > {max_value}");
                    scalar.valid_range_mut().start = start;
                }
                if let Bound::Included(end) = end {
                    assert!(end <= max_value, "{end} > {max_value}");
                    scalar.valid_range_mut().end = end;
                }

                // The narrowed range may expose a larger niche than any field
                // did; prefer it on ties since it sits at offset 0.
                let niche = Niche::from_scalar(dl, Size::ZERO, *scalar);
                if let Some(niche) = niche {
                    match st.largest_niche {
                        Some(largest_niche) => {
                            if largest_niche.available(dl) <= niche.available(dl) {
                                st.largest_niche = Some(niche);
                            }
                        }
                        None => st.largest_niche = Some(niche),
                    }
                }
            }
            // Valid-range attributes only make sense on scalar layouts.
            _ => assert!(
                start == Bound::Unbounded && end == Bound::Unbounded,
                "nonscalar layout for layout_scalar_valid_range type: {st:#?}",
            ),
        }

        Ok(st)
    }
596
    /// Computes the layout of a multi-variant enum.
    ///
    /// Two candidate layouts are produced: a niche-filling layout (variants
    /// packed into an existing niche of the largest variant, when possible)
    /// and a classic tagged layout (explicit discriminant prefix). The
    /// smaller one wins; on equal size, the one with the larger leftover
    /// niche wins.
    fn layout_of_enum<
        'a,
        FieldIdx: Idx,
        VariantIdx: Idx,
        F: Deref<Target = &'a LayoutData<FieldIdx, VariantIdx>> + fmt::Debug + Copy,
    >(
        &self,
        repr: &ReprOptions,
        variants: &IndexSlice<VariantIdx, IndexVec<FieldIdx, F>>,
        discr_range_of_repr: impl Fn(i128, i128) -> (Integer, bool),
        discriminants: impl Iterator<Item = (VariantIdx, i128)>,
        dont_niche_optimize_enum: bool,
    ) -> LayoutCalculatorResult<FieldIdx, VariantIdx, F> {
        // A candidate enum layout together with its per-variant layouts,
        // kept separate until the winner is chosen.
        struct TmpLayout<FieldIdx: Idx, VariantIdx: Idx> {
            layout: LayoutData<FieldIdx, VariantIdx>,
            variants: IndexVec<VariantIdx, LayoutData<FieldIdx, VariantIdx>>,
        }

        let dl = self.cx.data_layout();
        // Packed enums are rejected outright.
        if repr.packed() {
            return Err(LayoutCalculatorError::ReprConflict);
        }

        // Try to encode the discriminant inside a niche of the largest
        // variant instead of adding a separate tag field. Returns `None`
        // whenever the optimization does not apply or does not fit.
        let calculate_niche_filling_layout = || -> Option<TmpLayout<FieldIdx, VariantIdx>> {
            if dont_niche_optimize_enum {
                return None;
            }

            if variants.len() < 2 {
                return None;
            }

            let mut align = dl.aggregate_align;
            let mut max_repr_align = repr.align;
            let mut unadjusted_abi_align = align.abi;

            // Lay out every variant as an always-sized struct, accumulating
            // the enum-wide alignment maxima along the way.
            let mut variant_layouts = variants
                .iter_enumerated()
                .map(|(j, v)| {
                    let mut st = self.univariant(v, repr, StructKind::AlwaysSized).ok()?;
                    st.variants = Variants::Single { index: j };

                    align = align.max(st.align);
                    max_repr_align = max_repr_align.max(st.max_repr_align);
                    unadjusted_abi_align = unadjusted_abi_align.max(st.unadjusted_abi_align);

                    Some(st)
                })
                .collect::<Option<IndexVec<VariantIdx, _>>>()?;

            // The untagged variant is the largest one; all others must be
            // encodable in its niche.
            let largest_variant_index = variant_layouts
                .iter_enumerated()
                .max_by_key(|(_i, layout)| layout.size.bytes())
                .map(|(i, _layout)| i)?;

            // The contiguous index range of variants that actually need a
            // discriminant value (absent ones and the untagged one do not).
            let all_indices = variants.indices();
            let needs_disc =
                |index: VariantIdx| index != largest_variant_index && !absent(&variants[index]);
            let niche_variants = all_indices.clone().find(|v| needs_disc(*v)).unwrap()
                ..=all_indices.rev().find(|v| needs_disc(*v)).unwrap();

            let count =
                (niche_variants.end().index() as u128 - niche_variants.start().index() as u128) + 1;

            // Reserve `count` values out of the largest variant's niche.
            let niche = variant_layouts[largest_variant_index].largest_niche?;
            let (niche_start, niche_scalar) = niche.reserve(dl, count)?;
            let niche_offset = niche.offset;
            let niche_size = niche.value.size(dl);
            let size = variant_layouts[largest_variant_index].size.align_to(align.abi);

            // Every other variant must fit either entirely before the niche,
            // or (shifted) entirely after it, within the enum's size.
            let all_variants_fit = variant_layouts.iter_enumerated_mut().all(|(i, layout)| {
                if i == largest_variant_index {
                    return true;
                }

                layout.largest_niche = None;

                if layout.size <= niche_offset {
                    // The variant ends before the niche begins: no shift.
                    return true;
                }

                // Shift the variant to just past the niche, re-aligned.
                let this_align = layout.align.abi;
                let this_offset = (niche_offset + niche_size).align_to(this_align);

                if this_offset + layout.size > size {
                    return false;
                }

                // Rebase all of the variant's field offsets by the shift.
                match layout.fields {
                    FieldsShape::Arbitrary { ref mut offsets, .. } => {
                        for offset in offsets.iter_mut() {
                            *offset += this_offset;
                        }
                    }
                    FieldsShape::Primitive | FieldsShape::Array { .. } | FieldsShape::Union(..) => {
                        panic!("Layout of fields should be Arbitrary for variants")
                    }
                }

                // A shifted variant no longer matches a scalar-style ABI.
                if !layout.is_uninhabited() {
                    layout.backend_repr = BackendRepr::Memory { sized: true };
                }
                layout.size += this_offset;

                true
            });

            if !all_variants_fit {
                return None;
            }

            let largest_niche = Niche::from_scalar(dl, niche_offset, niche_scalar);

            // The enum may inherit the largest variant's scalar ABI only when
            // the other variants contribute nothing (all ZST) and size/align
            // are unchanged from the largest variant.
            let others_zst = variant_layouts
                .iter_enumerated()
                .all(|(i, layout)| i == largest_variant_index || layout.size == Size::ZERO);
            let same_size = size == variant_layouts[largest_variant_index].size;
            let same_align = align == variant_layouts[largest_variant_index].align;

            let uninhabited = variant_layouts.iter().all(|v| v.is_uninhabited());
            let abi = if same_size && same_align && others_zst {
                match variant_layouts[largest_variant_index].backend_repr {
                    BackendRepr::Scalar(_) => BackendRepr::Scalar(niche_scalar),
                    BackendRepr::ScalarPair(first, second) => {
                        // Replace whichever half contains the niche with the
                        // niche scalar; the other half loses validity info.
                        if niche_offset == Size::ZERO {
                            BackendRepr::ScalarPair(niche_scalar, second.to_union())
                        } else {
                            BackendRepr::ScalarPair(first.to_union(), niche_scalar)
                        }
                    }
                    _ => BackendRepr::Memory { sized: true },
                }
            } else {
                BackendRepr::Memory { sized: true }
            };

            let combined_seed = variant_layouts
                .iter()
                .map(|v| v.randomization_seed)
                .fold(repr.field_shuffle_seed, |acc, seed| acc.wrapping_add(seed));

            let layout = LayoutData {
                variants: Variants::Multiple {
                    tag: niche_scalar,
                    tag_encoding: TagEncoding::Niche {
                        untagged_variant: largest_variant_index,
                        niche_variants,
                        niche_start,
                    },
                    tag_field: 0,
                    variants: IndexVec::new(),
                },
                fields: FieldsShape::Arbitrary {
                    offsets: [niche_offset].into(),
                    memory_index: [0].into(),
                },
                backend_repr: abi,
                largest_niche,
                uninhabited,
                size,
                align,
                max_repr_align,
                unadjusted_abi_align,
                randomization_seed: combined_seed,
            };

            Some(TmpLayout { layout, variants: variant_layouts })
        };

        let niche_filling_layout = calculate_niche_filling_layout();

        // --- Tagged layout: determine the discriminant value range. ---
        let (mut min, mut max) = (i128::MAX, i128::MIN);
        let discr_type = repr.discr_type();
        let bits = Integer::from_attr(dl, discr_type).size().bits();
        for (i, mut val) in discriminants {
            // Skip uninhabited variants (unless repr(C) pins them).
            if !repr.c() && variants[i].iter().any(|f| f.is_uninhabited()) {
                continue;
            }
            if discr_type.is_signed() {
                // Sign-extend the value from the declared discriminant width.
                val = (val << (128 - bits)) >> (128 - bits);
            }
            if val < min {
                min = val;
            }
            if val > max {
                max = val;
            }
        }
        // No used discriminant values at all: pretend the range is 0..=0.
        if (min, max) == (i128::MAX, i128::MIN) {
            min = 0;
            max = 0;
        }
        assert!(min <= max, "discriminant range is {min}...{max}");
        // Smallest integer able to represent the range, per the repr.
        let (min_ity, signed) = discr_range_of_repr(min, max); let mut align = dl.aggregate_align;
        let mut max_repr_align = repr.align;
        let mut unadjusted_abi_align = align.abi;

        let mut size = Size::ZERO;

        // Sentinel: larger than any integer alignment the target supports,
        // verified by the assertion below.
        let mut start_align = Align::from_bytes(256).unwrap();
        assert_eq!(Integer::for_align(dl, start_align), None);

        // The tag prefix must be aligned for the tag itself; repr(C)
        // additionally aligns it for every field of every variant.
        let mut prefix_align = min_ity.align(dl).abi;
        if repr.c() {
            for fields in variants {
                for field in fields {
                    prefix_align = prefix_align.max(field.align.abi);
                }
            }
        }

        // Lay out each variant with room for the tag prefix, tracking the
        // smallest alignment of any leading non-1-ZST field (`start_align`)
        // and the enum-wide size/alignment maxima.
        let mut layout_variants = variants
            .iter_enumerated()
            .map(|(i, field_layouts)| {
                let mut st = self.univariant(
                    field_layouts,
                    repr,
                    StructKind::Prefixed(min_ity.size(), prefix_align),
                )?;
                st.variants = Variants::Single { index: i };
                for field_idx in st.fields.index_by_increasing_offset() {
                    let field = &field_layouts[FieldIdx::new(field_idx)];
                    if !field.is_1zst() {
                        start_align = start_align.min(field.align.abi);
                        break;
                    }
                }
                size = cmp::max(size, st.size);
                align = align.max(st.align);
                max_repr_align = max_repr_align.max(st.max_repr_align);
                unadjusted_abi_align = unadjusted_abi_align.max(st.unadjusted_abi_align);
                Ok(st)
            })
            .collect::<Result<IndexVec<VariantIdx, _>, _>>()?;

        size = size.align_to(align.abi);

        if size.bytes() >= dl.obj_size_bound() {
            return Err(LayoutCalculatorError::SizeOverflow);
        }

        // Sanity check: layout must never pick a wider tag than typeck did.
        let typeck_ity = Integer::from_attr(dl, repr.discr_type());
        if typeck_ity < min_ity {
            panic!(
                "layout decided on a larger discriminant type ({min_ity:?}) than typeck ({typeck_ity:?})"
            );
        }

        // Without an explicit repr, the tag may be widened to match the
        // alignment of the fields that follow it, avoiding padding.
        let mut ity = if repr.c() || repr.int.is_some() {
            min_ity
        } else {
            Integer::for_align(dl, start_align).unwrap_or(min_ity)
        };

        if ity <= min_ity {
            ity = min_ity;
        } else {
            // The tag grew: push every field that sat right after the old tag
            // out to the new tag size, and grow tag-only variants with it.
            let old_ity_size = min_ity.size();
            let new_ity_size = ity.size();
            for variant in &mut layout_variants {
                match variant.fields {
                    FieldsShape::Arbitrary { ref mut offsets, .. } => {
                        for i in offsets {
                            if *i <= old_ity_size {
                                assert_eq!(*i, old_ity_size);
                                *i = new_ity_size;
                            }
                        }
                        if variant.size <= old_ity_size {
                            variant.size = new_ity_size;
                        }
                    }
                    FieldsShape::Primitive | FieldsShape::Array { .. } | FieldsShape::Union(..) => {
                        panic!("encountered a non-arbitrary layout during enum layout")
                    }
                }
            }
        }

        // The tag scalar's valid range is exactly the used discriminant
        // values, truncated to the tag width.
        let tag_mask = ity.size().unsigned_int_max();
        let tag = Scalar::Initialized {
            value: Primitive::Int(ity, signed),
            valid_range: WrappingRange {
                start: (min as u128 & tag_mask),
                end: (max as u128 & tag_mask),
            },
        };
        let mut abi = BackendRepr::Memory { sized: true };

        let uninhabited = layout_variants.iter().all(|v| v.is_uninhabited());
        if tag.size(dl) == size {
            // The enum is nothing but its tag: use a scalar ABI.
            abi = BackendRepr::Scalar(tag);
        } else {
            // Try a ScalarPair(tag, common-primitive) ABI: every inhabited
            // variant must have exactly one non-ZST field, all of the same
            // primitive at the same offset.
            let mut common_prim = None;
            let mut common_prim_initialized_in_all_variants = true;
            for (field_layouts, layout_variant) in iter::zip(variants, &layout_variants) {
                let FieldsShape::Arbitrary { ref offsets, .. } = layout_variant.fields else {
                    panic!("encountered a non-arbitrary layout during enum layout");
                };
                let mut fields = iter::zip(field_layouts, offsets).filter(|p| !p.0.is_zst());
                let (field, offset) = match (fields.next(), fields.next()) {
                    (None, None) => {
                        // Fieldless variant: the data half stays uninit here.
                        common_prim_initialized_in_all_variants = false;
                        continue;
                    }
                    (Some(pair), None) => pair,
                    _ => {
                        // More than one non-ZST field: no pair ABI.
                        common_prim = None;
                        break;
                    }
                };
                let prim = match field.backend_repr {
                    BackendRepr::Scalar(scalar) => {
                        common_prim_initialized_in_all_variants &=
                            matches!(scalar, Scalar::Initialized { .. });
                        scalar.primitive()
                    }
                    _ => {
                        common_prim = None;
                        break;
                    }
                };
                if let Some((old_prim, common_offset)) = common_prim {
                    if offset != common_offset {
                        common_prim = None;
                        break;
                    }
                    // Unify the primitives across variants where compatible.
                    let new_prim = match (old_prim, prim) {
                        // Identical primitives trivially unify.
                        (x, y) if x == y => x,
                        // Same-width integers unify regardless of signedness.
                        (p @ Primitive::Int(x, _), Primitive::Int(y, _)) if x == y => p,
                        // A pointer and an integer of identical size and
                        // alignment unify to the pointer.
                        (p @ Primitive::Pointer(_), i @ Primitive::Int(..))
                        | (i @ Primitive::Int(..), p @ Primitive::Pointer(_))
                            if p.size(dl) == i.size(dl) && p.align(dl) == i.align(dl) =>
                        {
                            p
                        }
                        _ => {
                            common_prim = None;
                            break;
                        }
                    };
                    common_prim = Some((new_prim, common_offset));
                } else {
                    common_prim = Some((prim, offset));
                }
            }
            if let Some((prim, offset)) = common_prim {
                let prim_scalar = if common_prim_initialized_in_all_variants {
                    let size = prim.size(dl);
                    assert!(size.bits() <= 128);
                    Scalar::Initialized { value: prim, valid_range: WrappingRange::full(size) }
                } else {
                    // Some variant leaves the data half uninitialized.
                    Scalar::Union { value: prim }
                };
                let pair =
                    LayoutData::<FieldIdx, VariantIdx>::scalar_pair(&self.cx, tag, prim_scalar);
                let pair_offsets = match pair.fields {
                    FieldsShape::Arbitrary { ref offsets, ref memory_index } => {
                        assert_eq!(memory_index.raw, [0, 1]);
                        offsets
                    }
                    _ => panic!("encountered a non-arbitrary layout during enum layout"),
                };
                // Only adopt the pair ABI when it reproduces the already
                // computed offsets, size, and alignment exactly.
                if pair_offsets[FieldIdx::new(0)] == Size::ZERO
                    && pair_offsets[FieldIdx::new(1)] == *offset
                    && align == pair.align
                    && size == pair.size
                {
                    abi = pair.backend_repr;
                }
            }
        }

        // Propagate a scalar-ish enum ABI down into memory-repr variants so
        // variant and enum ABIs stay mutually consistent.
        if matches!(abi, BackendRepr::Scalar(..) | BackendRepr::ScalarPair(..)) {
            for variant in &mut layout_variants {
                if variant.fields.count() > 0
                    && matches!(variant.backend_repr, BackendRepr::Memory { .. })
                {
                    variant.backend_repr = abi;
                    variant.size = cmp::max(variant.size, size);
                    variant.align.abi = cmp::max(variant.align.abi, align.abi);
                }
            }
        }

        // In the tagged layout, the tag at offset 0 is the enum's niche.
        let largest_niche = Niche::from_scalar(dl, Size::ZERO, tag);

        let combined_seed = layout_variants
            .iter()
            .map(|v| v.randomization_seed)
            .fold(repr.field_shuffle_seed, |acc, seed| acc.wrapping_add(seed));

        let tagged_layout = LayoutData {
            variants: Variants::Multiple {
                tag,
                tag_encoding: TagEncoding::Direct,
                tag_field: 0,
                variants: IndexVec::new(),
            },
            fields: FieldsShape::Arbitrary {
                offsets: [Size::ZERO].into(),
                memory_index: [0].into(),
            },
            largest_niche,
            uninhabited,
            backend_repr: abi,
            align,
            size,
            max_repr_align,
            unadjusted_abi_align,
            randomization_seed: combined_seed,
        };

        let tagged_layout = TmpLayout { layout: tagged_layout, variants: layout_variants };

        // Pick the winner: smaller size first; on a tie, the layout exposing
        // the larger leftover niche.
        let mut best_layout = match (tagged_layout, niche_filling_layout) {
            (tl, Some(nl)) => {
                use cmp::Ordering::*;
                let niche_size = |tmp_l: &TmpLayout<FieldIdx, VariantIdx>| {
                    tmp_l.layout.largest_niche.map_or(0, |n| n.available(dl))
                };
                match (tl.layout.size.cmp(&nl.layout.size), niche_size(&tl).cmp(&niche_size(&nl))) {
                    (Greater, _) => nl,
                    (Equal, Less) => nl,
                    _ => tl,
                }
            }
            (tl, None) => tl,
        };

        // Attach the winning candidate's per-variant layouts.
        best_layout.layout.variants = match best_layout.layout.variants {
            Variants::Multiple { tag, tag_encoding, tag_field, .. } => {
                Variants::Multiple { tag, tag_encoding, tag_field, variants: best_layout.variants }
            }
            Variants::Single { .. } | Variants::Empty => {
                panic!("encountered a single-variant or empty enum during multi-variant layout")
            }
        };
        Ok(best_layout.layout)
    }
1123
1124 fn univariant_biased<
1125 'a,
1126 FieldIdx: Idx,
1127 VariantIdx: Idx,
1128 F: Deref<Target = &'a LayoutData<FieldIdx, VariantIdx>> + fmt::Debug + Copy,
1129 >(
1130 &self,
1131 fields: &IndexSlice<FieldIdx, F>,
1132 repr: &ReprOptions,
1133 kind: StructKind,
1134 niche_bias: NicheBias,
1135 ) -> LayoutCalculatorResult<FieldIdx, VariantIdx, F> {
1136 let dl = self.cx.data_layout();
1137 let pack = repr.pack;
1138 let mut align = if pack.is_some() { dl.i8_align } else { dl.aggregate_align };
1139 let mut max_repr_align = repr.align;
1140 let mut inverse_memory_index: IndexVec<u32, FieldIdx> = fields.indices().collect();
1141 let optimize_field_order = !repr.inhibit_struct_field_reordering();
1142 let end = if let StructKind::MaybeUnsized = kind { fields.len() - 1 } else { fields.len() };
1143 let optimizing = &mut inverse_memory_index.raw[..end];
1144 let fields_excluding_tail = &fields.raw[..end];
1145 let field_seed = fields_excluding_tail
1147 .iter()
1148 .fold(Hash64::ZERO, |acc, f| acc.wrapping_add(f.randomization_seed));
1149
1150 if optimize_field_order && fields.len() > 1 {
1151 if repr.can_randomize_type_layout() && cfg!(feature = "randomize") {
1155 #[cfg(feature = "randomize")]
1156 {
1157 use rand::SeedableRng;
1158 use rand::seq::SliceRandom;
1159 let mut rng = rand_xoshiro::Xoshiro128StarStar::seed_from_u64(
1162 field_seed.wrapping_add(repr.field_shuffle_seed).as_u64(),
1163 );
1164
1165 optimizing.shuffle(&mut rng);
1167 }
1168 } else {
1170 let max_field_align =
1173 fields_excluding_tail.iter().map(|f| f.align.abi.bytes()).max().unwrap_or(1);
1174 let largest_niche_size = fields_excluding_tail
1175 .iter()
1176 .filter_map(|f| f.largest_niche)
1177 .map(|n| n.available(dl))
1178 .max()
1179 .unwrap_or(0);
1180
1181 let alignment_group_key = |layout: &F| {
1184 if let Some(pack) = pack {
1188 layout.align.abi.min(pack).bytes()
1190 } else {
1191 let align = layout.align.abi.bytes();
1194 let size = layout.size.bytes();
1195 let niche_size = layout.largest_niche.map(|n| n.available(dl)).unwrap_or(0);
1196 let size_as_align = align.max(size).trailing_zeros();
1198 let size_as_align = if largest_niche_size > 0 {
1199 match niche_bias {
1200 NicheBias::Start => {
1204 max_field_align.trailing_zeros().min(size_as_align)
1205 }
1206 NicheBias::End if niche_size == largest_niche_size => {
1210 align.trailing_zeros()
1211 }
1212 NicheBias::End => size_as_align,
1213 }
1214 } else {
1215 size_as_align
1216 };
1217 size_as_align as u64
1218 }
1219 };
1220
1221 match kind {
1222 StructKind::AlwaysSized | StructKind::MaybeUnsized => {
1223 optimizing.sort_by_key(|&x| {
1232 let f = &fields[x];
1233 let field_size = f.size.bytes();
1234 let niche_size = f.largest_niche.map_or(0, |n| n.available(dl));
1235 let niche_size_key = match niche_bias {
1236 NicheBias::Start => !niche_size,
1238 NicheBias::End => niche_size,
1240 };
1241 let inner_niche_offset_key = match niche_bias {
1242 NicheBias::Start => f.largest_niche.map_or(0, |n| n.offset.bytes()),
1243 NicheBias::End => f.largest_niche.map_or(0, |n| {
1244 !(field_size - n.value.size(dl).bytes() - n.offset.bytes())
1245 }),
1246 };
1247
1248 (
1249 cmp::Reverse(alignment_group_key(f)),
1251 niche_size_key,
1254 inner_niche_offset_key,
1257 )
1258 });
1259 }
1260
1261 StructKind::Prefixed(..) => {
1262 optimizing.sort_by_key(|&x| {
1267 let f = &fields[x];
1268 let niche_size = f.largest_niche.map_or(0, |n| n.available(dl));
1269 (alignment_group_key(f), niche_size)
1270 });
1271 }
1272 }
1273
1274 }
1277 }
1278 let mut unsized_field = None::<&F>;
1285 let mut offsets = IndexVec::from_elem(Size::ZERO, fields);
1286 let mut offset = Size::ZERO;
1287 let mut largest_niche = None;
1288 let mut largest_niche_available = 0;
1289 if let StructKind::Prefixed(prefix_size, prefix_align) = kind {
1290 let prefix_align =
1291 if let Some(pack) = pack { prefix_align.min(pack) } else { prefix_align };
1292 align = align.max(AbiAndPrefAlign::new(prefix_align));
1293 offset = prefix_size.align_to(prefix_align);
1294 }
1295 for &i in &inverse_memory_index {
1296 let field = &fields[i];
1297 if let Some(unsized_field) = unsized_field {
1298 return Err(LayoutCalculatorError::UnexpectedUnsized(*unsized_field));
1299 }
1300
1301 if field.is_unsized() {
1302 if let StructKind::MaybeUnsized = kind {
1303 unsized_field = Some(field);
1304 } else {
1305 return Err(LayoutCalculatorError::UnexpectedUnsized(*field));
1306 }
1307 }
1308
1309 let field_align = if let Some(pack) = pack {
1311 field.align.min(AbiAndPrefAlign::new(pack))
1312 } else {
1313 field.align
1314 };
1315 offset = offset.align_to(field_align.abi);
1316 align = align.max(field_align);
1317 max_repr_align = max_repr_align.max(field.max_repr_align);
1318
1319 debug!("univariant offset: {:?} field: {:#?}", offset, field);
1320 offsets[i] = offset;
1321
1322 if let Some(mut niche) = field.largest_niche {
1323 let available = niche.available(dl);
1324 let prefer_new_niche = match niche_bias {
1326 NicheBias::Start => available > largest_niche_available,
1327 NicheBias::End => available >= largest_niche_available,
1329 };
1330 if prefer_new_niche {
1331 largest_niche_available = available;
1332 niche.offset += offset;
1333 largest_niche = Some(niche);
1334 }
1335 }
1336
1337 offset =
1338 offset.checked_add(field.size, dl).ok_or(LayoutCalculatorError::SizeOverflow)?;
1339 }
1340
1341 let unadjusted_abi_align = align.abi;
1344 if let Some(repr_align) = repr.align {
1345 align = align.max(AbiAndPrefAlign::new(repr_align));
1346 }
1347 let align = align;
1349
1350 debug!("univariant min_size: {:?}", offset);
1351 let min_size = offset;
1352 let memory_index = if optimize_field_order {
1359 inverse_memory_index.invert_bijective_mapping()
1360 } else {
1361 debug_assert!(inverse_memory_index.iter().copied().eq(fields.indices()));
1362 inverse_memory_index.into_iter().map(|it| it.index() as u32).collect()
1363 };
1364 let size = min_size.align_to(align.abi);
1365 if size.bytes() >= dl.obj_size_bound() {
1367 return Err(LayoutCalculatorError::SizeOverflow);
1368 }
1369 let mut layout_of_single_non_zst_field = None;
1370 let sized = unsized_field.is_none();
1371 let mut abi = BackendRepr::Memory { sized };
1372
1373 let optimize_abi = !repr.inhibit_newtype_abi_optimization();
1374
1375 if sized && size.bytes() > 0 {
1377 let mut non_zst_fields = fields.iter_enumerated().filter(|&(_, f)| !f.is_zst());
1380
1381 match (non_zst_fields.next(), non_zst_fields.next(), non_zst_fields.next()) {
1382 (Some((i, field)), None, None) => {
1384 layout_of_single_non_zst_field = Some(field);
1385
1386 if offsets[i].bytes() == 0 && align.abi == field.align.abi && size == field.size
1388 {
1389 match field.backend_repr {
1390 BackendRepr::Scalar(_) | BackendRepr::SimdVector { .. }
1393 if optimize_abi =>
1394 {
1395 abi = field.backend_repr;
1396 }
1397 BackendRepr::ScalarPair(..) => {
1400 abi = field.backend_repr;
1401 }
1402 _ => {}
1403 }
1404 }
1405 }
1406
1407 (Some((i, a)), Some((j, b)), None) => {
1409 match (a.backend_repr, b.backend_repr) {
1410 (BackendRepr::Scalar(a), BackendRepr::Scalar(b)) => {
1411 let ((i, a), (j, b)) = if offsets[i] < offsets[j] {
1413 ((i, a), (j, b))
1414 } else {
1415 ((j, b), (i, a))
1416 };
1417 let pair =
1418 LayoutData::<FieldIdx, VariantIdx>::scalar_pair(&self.cx, a, b);
1419 let pair_offsets = match pair.fields {
1420 FieldsShape::Arbitrary { ref offsets, ref memory_index } => {
1421 assert_eq!(memory_index.raw, [0, 1]);
1422 offsets
1423 }
1424 FieldsShape::Primitive
1425 | FieldsShape::Array { .. }
1426 | FieldsShape::Union(..) => {
1427 panic!("encountered a non-arbitrary layout during enum layout")
1428 }
1429 };
1430 if offsets[i] == pair_offsets[FieldIdx::new(0)]
1431 && offsets[j] == pair_offsets[FieldIdx::new(1)]
1432 && align == pair.align
1433 && size == pair.size
1434 {
1435 abi = pair.backend_repr;
1438 }
1439 }
1440 _ => {}
1441 }
1442 }
1443
1444 _ => {}
1445 }
1446 }
1447 let uninhabited = fields.iter().any(|f| f.is_uninhabited());
1448
1449 let unadjusted_abi_align = if repr.transparent() {
1450 match layout_of_single_non_zst_field {
1451 Some(l) => l.unadjusted_abi_align,
1452 None => {
1453 align.abi
1455 }
1456 }
1457 } else {
1458 unadjusted_abi_align
1459 };
1460
1461 let seed = field_seed.wrapping_add(repr.field_shuffle_seed);
1462
1463 Ok(LayoutData {
1464 variants: Variants::Single { index: VariantIdx::new(0) },
1465 fields: FieldsShape::Arbitrary { offsets, memory_index },
1466 backend_repr: abi,
1467 largest_niche,
1468 uninhabited,
1469 align,
1470 size,
1471 max_repr_align,
1472 unadjusted_abi_align,
1473 randomization_seed: seed,
1474 })
1475 }
1476
1477 fn format_field_niches<
1478 'a,
1479 FieldIdx: Idx,
1480 VariantIdx: Idx,
1481 F: Deref<Target = &'a LayoutData<FieldIdx, VariantIdx>> + fmt::Debug,
1482 >(
1483 &self,
1484 layout: &LayoutData<FieldIdx, VariantIdx>,
1485 fields: &IndexSlice<FieldIdx, F>,
1486 ) -> String {
1487 let dl = self.cx.data_layout();
1488 let mut s = String::new();
1489 for i in layout.fields.index_by_increasing_offset() {
1490 let offset = layout.fields.offset(i);
1491 let f = &fields[FieldIdx::new(i)];
1492 write!(s, "[o{}a{}s{}", offset.bytes(), f.align.abi.bytes(), f.size.bytes()).unwrap();
1493 if let Some(n) = f.largest_niche {
1494 write!(
1495 s,
1496 " n{}b{}s{}",
1497 n.offset.bytes(),
1498 n.available(dl).ilog2(),
1499 n.value.size(dl).bytes()
1500 )
1501 .unwrap();
1502 }
1503 write!(s, "] ").unwrap();
1504 }
1505 s
1506 }
1507}