use std::fmt::{self, Write};
use std::ops::{Bound, Deref};
use std::{cmp, iter};

use rustc_index::Idx;
use tracing::debug;

use crate::{
    AbiAndPrefAlign, Align, BackendRepr, FieldsShape, HasDataLayout, IndexSlice, IndexVec, Integer,
    LayoutData, Niche, NonZeroUsize, Primitive, ReprOptions, Scalar, Size, StructKind, TagEncoding,
    Variants, WrappingRange,
};

#[cfg(feature = "nightly")]
mod ty;

#[cfg(feature = "nightly")]
pub use ty::{FIRST_VARIANT, FieldIdx, Layout, TyAbiInterface, TyAndLayout, VariantIdx};

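/// A variant is "absent" if it is uninhabited and all of its fields are 1-byte-aligned ZSTs:
/// such variants contribute nothing to the layout and are skipped unless `repr(C)` is in effect.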
fn absent<'a, FieldIdx, VariantIdx, F>(fields: &IndexSlice<FieldIdx, F>) -> bool
where
    FieldIdx: Idx,
    VariantIdx: Idx,
    F: Deref<Target = &'a LayoutData<FieldIdx, VariantIdx>> + fmt::Debug,
{
    let uninhabited = fields.iter().any(|f| f.is_uninhabited());
    let is_1zst = fields.iter().all(|f| f.is_1zst());
    uninhabited && is_1zst
}

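/// Controls which end of a struct the field-ordering logic pushes the largest niche towards.
/// `univariant` computes a start-biased layout first and only switches to an end-biased one
/// when that leaves more unused space in front of the niche.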
enum NicheBias {
    Start,
    End,
}

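/// The reasons layout calculation can fail; `F` carries the layout of the offending field in
/// the unsized case.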
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum LayoutCalculatorError<F> {
    /// An unsized type was found where a sized type was expected.
    UnexpectedUnsized(F),

    /// The computed size overflowed the target's object size limit.
    SizeOverflow,

    /// A union with no fields has no layout.
    EmptyUnion,

    /// The type has an invalid `repr` (e.g. a packed enum).
    ReprConflict,
}

impl<F> LayoutCalculatorError<F> {
    pub fn without_payload(&self) -> LayoutCalculatorError<()> {
        match self {
            LayoutCalculatorError::UnexpectedUnsized(_) => {
                LayoutCalculatorError::UnexpectedUnsized(())
            }
            LayoutCalculatorError::SizeOverflow => LayoutCalculatorError::SizeOverflow,
            LayoutCalculatorError::EmptyUnion => LayoutCalculatorError::EmptyUnion,
            LayoutCalculatorError::ReprConflict => LayoutCalculatorError::ReprConflict,
        }
    }

    /// Writes an untranslated, plain-English description of the error, for use as a fallback
    /// diagnostic.
    pub fn fallback_fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(match self {
            LayoutCalculatorError::UnexpectedUnsized(_) => {
                "an unsized type was found where a sized type was expected"
            }
            LayoutCalculatorError::SizeOverflow => "size overflow",
            LayoutCalculatorError::EmptyUnion => "type is a union with no fields",
            LayoutCalculatorError::ReprConflict => "type has an invalid repr",
        })
    }
}

type LayoutCalculatorResult<FieldIdx, VariantIdx, F> =
    Result<LayoutData<FieldIdx, VariantIdx>, LayoutCalculatorError<F>>;

#[derive(Clone, Copy, Debug)]
pub struct LayoutCalculator<Cx> {
    pub cx: Cx,
}

impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
    pub fn new(cx: Cx) -> Self {
        Self { cx }
    }

    pub fn scalar_pair<FieldIdx: Idx, VariantIdx: Idx>(
        &self,
        a: Scalar,
        b: Scalar,
    ) -> LayoutData<FieldIdx, VariantIdx> {
        let dl = self.cx.data_layout();
        let b_align = b.align(dl);
        let align = a.align(dl).max(b_align).max(dl.aggregate_align);
        let b_offset = a.size(dl).align_to(b_align.abi);
        let size = (b_offset + b.size(dl)).align_to(align.abi);

        let largest_niche = Niche::from_scalar(dl, b_offset, b)
            .into_iter()
            .chain(Niche::from_scalar(dl, Size::ZERO, a))
            .max_by_key(|niche| niche.available(dl));

        let combined_seed = a.size(&self.cx).bytes().wrapping_add(b.size(&self.cx).bytes());

        LayoutData {
            variants: Variants::Single { index: VariantIdx::new(0) },
            fields: FieldsShape::Arbitrary {
                offsets: [Size::ZERO, b_offset].into(),
                memory_index: [0, 1].into(),
            },
            backend_repr: BackendRepr::ScalarPair(a, b),
            largest_niche,
            align,
            size,
            max_repr_align: None,
            unadjusted_abi_align: align.abi,
            randomization_seed: combined_seed,
        }
    }

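    /// Lays out the fields of a single variant. The layout is first computed with the largest
    /// niche biased towards the start of the struct; if that still leaves space both before and
    /// after the niche, an end-biased layout is also computed and preferred when it ends up with
    /// more unused space in front of the niche than either side of the original placement.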
    pub fn univariant<
        'a,
        FieldIdx: Idx,
        VariantIdx: Idx,
        F: Deref<Target = &'a LayoutData<FieldIdx, VariantIdx>> + fmt::Debug + Copy,
    >(
        &self,
        fields: &IndexSlice<FieldIdx, F>,
        repr: &ReprOptions,
        kind: StructKind,
    ) -> LayoutCalculatorResult<FieldIdx, VariantIdx, F> {
        let dl = self.cx.data_layout();
        let layout = self.univariant_biased(fields, repr, kind, NicheBias::Start);
        if let Ok(layout) = &layout {
            if !matches!(kind, StructKind::MaybeUnsized) {
                if let Some(niche) = layout.largest_niche {
                    let head_space = niche.offset.bytes();
                    let niche_len = niche.value.size(dl).bytes();
                    let tail_space = layout.size.bytes() - head_space - niche_len;

                    if fields.len() > 1 && head_space != 0 && tail_space > 0 {
                        let alt_layout = self
                            .univariant_biased(fields, repr, kind, NicheBias::End)
                            .expect("alt layout should always work");
                        let alt_niche = alt_layout
                            .largest_niche
                            .expect("alt layout should have a niche like the regular one");
                        let alt_head_space = alt_niche.offset.bytes();
                        let alt_niche_len = alt_niche.value.size(dl).bytes();
                        let alt_tail_space =
                            alt_layout.size.bytes() - alt_head_space - alt_niche_len;

                        debug_assert_eq!(layout.size.bytes(), alt_layout.size.bytes());

                        let prefer_alt_layout =
                            alt_head_space > head_space && alt_head_space > tail_space;

                        debug!(
                            "sz: {}, default_niche_at: {}+{}, default_tail_space: {}, alt_niche_at/head_space: {}+{}, alt_tail: {}, num_fields: {}, better: {}\n\
                            layout: {}\n\
                            alt_layout: {}\n",
                            layout.size.bytes(),
                            head_space,
                            niche_len,
                            tail_space,
                            alt_head_space,
                            alt_niche_len,
                            alt_tail_space,
                            layout.fields.count(),
                            prefer_alt_layout,
                            self.format_field_niches(layout, fields),
                            self.format_field_niches(&alt_layout, fields),
                        );

                        if prefer_alt_layout {
                            return Ok(alt_layout);
                        }
                    }
                }
            }
        }
        layout
    }

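    /// Layout of the never type `!`: zero-sized, uninhabited, and byte-aligned. Also used for
    /// enums with no present variants.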
    pub fn layout_of_never_type<FieldIdx: Idx, VariantIdx: Idx>(
        &self,
    ) -> LayoutData<FieldIdx, VariantIdx> {
        let dl = self.cx.data_layout();
        LayoutData {
            variants: Variants::Empty,
            fields: FieldsShape::Primitive,
            backend_repr: BackendRepr::Uninhabited,
            largest_niche: None,
            align: dl.i8_align,
            size: Size::ZERO,
            max_repr_align: None,
            unadjusted_abi_align: dl.i8_align.abi,
            randomization_seed: 0,
        }
    }

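    /// Computes the layout of a struct or enum. Structs, and enums with at most one present
    /// variant whose layout may be optimized, are laid out as plain structs; all other enums go
    /// through the tagged/niche-filling enum path.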
    pub fn layout_of_struct_or_enum<
        'a,
        FieldIdx: Idx,
        VariantIdx: Idx,
        F: Deref<Target = &'a LayoutData<FieldIdx, VariantIdx>> + fmt::Debug + Copy,
    >(
        &self,
        repr: &ReprOptions,
        variants: &IndexSlice<VariantIdx, IndexVec<FieldIdx, F>>,
        is_enum: bool,
        is_unsafe_cell: bool,
        scalar_valid_range: (Bound<u128>, Bound<u128>),
        discr_range_of_repr: impl Fn(i128, i128) -> (Integer, bool),
        discriminants: impl Iterator<Item = (VariantIdx, i128)>,
        dont_niche_optimize_enum: bool,
        always_sized: bool,
    ) -> LayoutCalculatorResult<FieldIdx, VariantIdx, F> {
        let (present_first, present_second) = {
            let mut present_variants = variants
                .iter_enumerated()
                .filter_map(|(i, v)| if !repr.c() && absent(v) { None } else { Some(i) });
            (present_variants.next(), present_variants.next())
        };
        let present_first = match present_first {
            Some(present_first) => present_first,
            // An enum with no present variants is uninhabited.
            None if is_enum => {
                return Ok(self.layout_of_never_type());
            }
            // For a struct, still compute a layout so that field offsets are available.
            None => VariantIdx::new(0),
        };

        if !is_enum || (present_second.is_none() && !repr.inhibit_enum_layout_opt()) {
            // Structs, and enums with a single present variant whose layout we may optimize,
            // are laid out like plain structs.
            self.layout_of_struct(
                repr,
                variants,
                is_enum,
                is_unsafe_cell,
                scalar_valid_range,
                always_sized,
                present_first,
            )
        } else {
            // Only enums that actually need a discriminant reach this point.
            assert!(is_enum);
            self.layout_of_enum(
                repr,
                variants,
                discr_range_of_repr,
                discriminants,
                dont_niche_optimize_enum,
            )
        }
    }

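    /// Computes the layout of a union. The size is the largest field size rounded up to the
    /// union's alignment, and a scalar-like backend representation is only kept when every
    /// non-ZST field agrees on it.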
    pub fn layout_of_union<
        'a,
        FieldIdx: Idx,
        VariantIdx: Idx,
        F: Deref<Target = &'a LayoutData<FieldIdx, VariantIdx>> + fmt::Debug + Copy,
    >(
        &self,
        repr: &ReprOptions,
        variants: &IndexSlice<VariantIdx, IndexVec<FieldIdx, F>>,
    ) -> LayoutCalculatorResult<FieldIdx, VariantIdx, F> {
        let dl = self.cx.data_layout();
        let mut align = if repr.pack.is_some() { dl.i8_align } else { dl.aggregate_align };
        let mut max_repr_align = repr.align;

        // If all non-ZST fields share an ABI, we may forward it for the union as a whole.
        struct AbiMismatch;
        let mut common_non_zst_abi_and_align = if repr.inhibits_union_abi_opt() {
            // The repr rules this optimization out up front.
            Err(AbiMismatch)
        } else {
            Ok(None)
        };

        let mut size = Size::ZERO;
        let only_variant_idx = VariantIdx::new(0);
        let only_variant = &variants[only_variant_idx];
        for field in only_variant {
            if field.is_unsized() {
                return Err(LayoutCalculatorError::UnexpectedUnsized(*field));
            }

            align = align.max(field.align);
            max_repr_align = max_repr_align.max(field.max_repr_align);
            size = cmp::max(size, field.size);

            if field.is_zst() {
                // Nothing more to do for ZST fields.
                continue;
            }

            if let Ok(common) = common_non_zst_abi_and_align {
                // The union form of the field ABI drops validity-range information.
                let field_abi = field.backend_repr.to_union();

                if let Some((common_abi, common_align)) = common {
                    if common_abi != field_abi {
                        // Fields disagree on their ABI: fall back to `Memory`.
                        common_non_zst_abi_and_align = Err(AbiMismatch);
                    } else {
                        // Fields with the same non-`Memory` ABI should also have the same
                        // alignment.
                        if !matches!(common_abi, BackendRepr::Memory { .. }) {
                            assert_eq!(
                                common_align, field.align.abi,
                                "non-Aggregate field with matching ABI but differing alignment"
                            );
                        }
                    }
                } else {
                    // First non-ZST field: record its ABI and alignment.
                    common_non_zst_abi_and_align = Ok(Some((field_abi, field.align.abi)));
                }
            }
        }

        if let Some(pack) = repr.pack {
            align = align.min(AbiAndPrefAlign::new(pack));
        }
        // The unadjusted ABI alignment reflects `repr(packed)` but not `repr(align)`.
        let unadjusted_abi_align = align.abi;
        if let Some(repr_align) = repr.align {
            align = align.max(AbiAndPrefAlign::new(repr_align));
        }
        let align = align;

        let abi = match common_non_zst_abi_and_align {
            Err(AbiMismatch) | Ok(None) => BackendRepr::Memory { sized: true },
            Ok(Some((abi, _))) => {
                if abi.inherent_align(dl).map(|a| a.abi) != Some(align.abi) {
                    // The union's alignment no longer matches the shared ABI (e.g. because of
                    // `repr(packed)` or `repr(align)`): fall back to `Memory`.
                    BackendRepr::Memory { sized: true }
                } else {
                    abi
                }
            }
        };

        let Some(union_field_count) = NonZeroUsize::new(only_variant.len()) else {
            return Err(LayoutCalculatorError::EmptyUnion);
        };

        let combined_seed = only_variant
            .iter()
            .map(|v| v.randomization_seed)
            .fold(repr.field_shuffle_seed, |acc, seed| acc.wrapping_add(seed));

        Ok(LayoutData {
            variants: Variants::Single { index: only_variant_idx },
            fields: FieldsShape::Union(union_field_count),
            backend_repr: abi,
            largest_niche: None,
            align,
            size: size.align_to(align.abi),
            max_repr_align,
            unadjusted_abi_align,
            randomization_seed: combined_seed,
        })
    }

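    /// Computes the layout of a struct, or of an enum being laid out like a struct (a single
    /// present variant). Also applies `layout_scalar_valid_range` restrictions and hides niches
    /// inside `UnsafeCell`.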
    fn layout_of_struct<
        'a,
        FieldIdx: Idx,
        VariantIdx: Idx,
        F: Deref<Target = &'a LayoutData<FieldIdx, VariantIdx>> + fmt::Debug + Copy,
    >(
        &self,
        repr: &ReprOptions,
        variants: &IndexSlice<VariantIdx, IndexVec<FieldIdx, F>>,
        is_enum: bool,
        is_unsafe_cell: bool,
        scalar_valid_range: (Bound<u128>, Bound<u128>),
        always_sized: bool,
        present_first: VariantIdx,
    ) -> LayoutCalculatorResult<FieldIdx, VariantIdx, F> {
        let dl = self.cx.data_layout();
        let v = present_first;
        let kind = if is_enum || variants[v].is_empty() || always_sized {
            StructKind::AlwaysSized
        } else {
            StructKind::MaybeUnsized
        };

        let mut st = self.univariant(&variants[v], repr, kind)?;
        st.variants = Variants::Single { index: v };

        if is_unsafe_cell {
            // The contents of an `UnsafeCell` may be mutated to any bit pattern, so all niches
            // are hidden.
            let hide_niches = |scalar: &mut _| match scalar {
                Scalar::Initialized { value, valid_range } => {
                    *valid_range = WrappingRange::full(value.size(dl))
                }
                // Union scalars have no niches to hide.
                Scalar::Union { .. } => {}
            };
            match &mut st.backend_repr {
                BackendRepr::Uninhabited => {}
                BackendRepr::Scalar(scalar) => hide_niches(scalar),
                BackendRepr::ScalarPair(a, b) => {
                    hide_niches(a);
                    hide_niches(b);
                }
                BackendRepr::Vector { element, count: _ } => hide_niches(element),
                BackendRepr::Memory { sized: _ } => {}
            }
            st.largest_niche = None;
            return Ok(st);
        }

        let (start, end) = scalar_valid_range;
        match st.backend_repr {
            BackendRepr::Scalar(ref mut scalar) | BackendRepr::ScalarPair(ref mut scalar, _) => {
                // Apply any attribute-specified validity range, checking only that the bounds
                // fit in the scalar.
                let max_value = scalar.size(dl).unsigned_int_max();
                if let Bound::Included(start) = start {
                    assert!(start <= max_value, "{start} > {max_value}");
                    scalar.valid_range_mut().start = start;
                }
                if let Bound::Included(end) = end {
                    assert!(end <= max_value, "{end} > {max_value}");
                    scalar.valid_range_mut().end = end;
                }

                // Update `largest_niche` if the restricted range introduced a larger niche.
                let niche = Niche::from_scalar(dl, Size::ZERO, *scalar);
                if let Some(niche) = niche {
                    match st.largest_niche {
                        Some(largest_niche) => {
                            // Replace the existing niche even if they're equal, because this one
                            // is at offset zero.
                            if largest_niche.available(dl) <= niche.available(dl) {
                                st.largest_niche = Some(niche);
                            }
                        }
                        None => st.largest_niche = Some(niche),
                    }
                }
            }
            _ => assert!(
                start == Bound::Unbounded && end == Bound::Unbounded,
                "nonscalar layout for layout_scalar_valid_range type: {st:#?}",
            ),
        }

        Ok(st)
    }

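    /// Computes the layout of a multi-variant enum. Both a niche-filling layout (when possible)
    /// and a tagged layout are computed, and the smaller one (breaking ties by niche size) wins.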
    fn layout_of_enum<
        'a,
        FieldIdx: Idx,
        VariantIdx: Idx,
        F: Deref<Target = &'a LayoutData<FieldIdx, VariantIdx>> + fmt::Debug + Copy,
    >(
        &self,
        repr: &ReprOptions,
        variants: &IndexSlice<VariantIdx, IndexVec<FieldIdx, F>>,
        discr_range_of_repr: impl Fn(i128, i128) -> (Integer, bool),
        discriminants: impl Iterator<Item = (VariantIdx, i128)>,
        dont_niche_optimize_enum: bool,
    ) -> LayoutCalculatorResult<FieldIdx, VariantIdx, F> {
        // Holds a candidate layout together with its per-variant layouts until we have decided
        // between the tagged and the niche-filling encoding.
        struct TmpLayout<FieldIdx: Idx, VariantIdx: Idx> {
            layout: LayoutData<FieldIdx, VariantIdx>,
            variants: IndexVec<VariantIdx, LayoutData<FieldIdx, VariantIdx>>,
        }

        let dl = self.cx.data_layout();
        // `repr(packed)` is not allowed on enums.
        if repr.packed() {
            return Err(LayoutCalculatorError::ReprConflict);
        }

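        // Attempts a niche-filling layout: the discriminant of all but the largest variant is
        // encoded in an unused value range (niche) of the largest variant, so no separate tag
        // field is needed. Returns `None` when such a layout is impossible or not desired.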
        let calculate_niche_filling_layout = || -> Option<TmpLayout<FieldIdx, VariantIdx>> {
            if dont_niche_optimize_enum {
                return None;
            }

            if variants.len() < 2 {
                return None;
            }

            let mut align = dl.aggregate_align;
            let mut max_repr_align = repr.align;
            let mut unadjusted_abi_align = align.abi;

            let mut variant_layouts = variants
                .iter_enumerated()
                .map(|(j, v)| {
                    let mut st = self.univariant(v, repr, StructKind::AlwaysSized).ok()?;
                    st.variants = Variants::Single { index: j };

                    align = align.max(st.align);
                    max_repr_align = max_repr_align.max(st.max_repr_align);
                    unadjusted_abi_align = unadjusted_abi_align.max(st.unadjusted_abi_align);

                    Some(st)
                })
                .collect::<Option<IndexVec<VariantIdx, _>>>()?;

            let largest_variant_index = variant_layouts
                .iter_enumerated()
                .max_by_key(|(_i, layout)| layout.size.bytes())
                .map(|(i, _layout)| i)?;

            let all_indices = variants.indices();
            let needs_disc =
                |index: VariantIdx| index != largest_variant_index && !absent(&variants[index]);
            let niche_variants = all_indices.clone().find(|v| needs_disc(*v)).unwrap()
                ..=all_indices.rev().find(|v| needs_disc(*v)).unwrap();

            let count =
                (niche_variants.end().index() as u128 - niche_variants.start().index() as u128) + 1;

            let niche = variant_layouts[largest_variant_index].largest_niche?;
            let (niche_start, niche_scalar) = niche.reserve(dl, count)?;
            let niche_offset = niche.offset;
            let niche_size = niche.value.size(dl);
            let size = variant_layouts[largest_variant_index].size.align_to(align.abi);

            let all_variants_fit = variant_layouts.iter_enumerated_mut().all(|(i, layout)| {
                if i == largest_variant_index {
                    return true;
                }

                layout.largest_niche = None;

                if layout.size <= niche_offset {
                    // This variant fits entirely before the niche.
                    return true;
                }

                // Otherwise, check whether it fits after the niche.
                let this_align = layout.align.abi;
                let this_offset = (niche_offset + niche_size).align_to(this_align);

                if this_offset + layout.size > size {
                    return false;
                }

                // It fits: shift all field offsets past the niche.
                match layout.fields {
                    FieldsShape::Arbitrary { ref mut offsets, .. } => {
                        for offset in offsets.iter_mut() {
                            *offset += this_offset;
                        }
                    }
                    FieldsShape::Primitive | FieldsShape::Array { .. } | FieldsShape::Union(..) => {
                        panic!("Layout of fields should be Arbitrary for variants")
                    }
                }

                // The fields no longer start at offset 0, so a scalar(-pair) ABI no longer
                // applies.
                if !layout.is_uninhabited() {
                    layout.backend_repr = BackendRepr::Memory { sized: true };
                }
                layout.size += this_offset;

                true
            });

            if !all_variants_fit {
                return None;
            }

            let largest_niche = Niche::from_scalar(dl, niche_offset, niche_scalar);

            let others_zst = variant_layouts
                .iter_enumerated()
                .all(|(i, layout)| i == largest_variant_index || layout.size == Size::ZERO);
            let same_size = size == variant_layouts[largest_variant_index].size;
            let same_align = align == variant_layouts[largest_variant_index].align;

            let abi = if variant_layouts.iter().all(|v| v.is_uninhabited()) {
                BackendRepr::Uninhabited
            } else if same_size && same_align && others_zst {
                match variant_layouts[largest_variant_index].backend_repr {
                    BackendRepr::Scalar(_) => BackendRepr::Scalar(niche_scalar),
                    BackendRepr::ScalarPair(first, second) => {
                        // Only the niche half is known to be initialized, so the other half
                        // becomes a union scalar.
                        if niche_offset == Size::ZERO {
                            BackendRepr::ScalarPair(niche_scalar, second.to_union())
                        } else {
                            BackendRepr::ScalarPair(first.to_union(), niche_scalar)
                        }
                    }
                    _ => BackendRepr::Memory { sized: true },
                }
            } else {
                BackendRepr::Memory { sized: true }
            };

            let combined_seed = variant_layouts
                .iter()
                .map(|v| v.randomization_seed)
                .fold(repr.field_shuffle_seed, |acc, seed| acc.wrapping_add(seed));

            let layout = LayoutData {
                variants: Variants::Multiple {
                    tag: niche_scalar,
                    tag_encoding: TagEncoding::Niche {
                        untagged_variant: largest_variant_index,
                        niche_variants,
                        niche_start,
                    },
                    tag_field: 0,
                    variants: IndexVec::new(),
                },
                fields: FieldsShape::Arbitrary {
                    offsets: [niche_offset].into(),
                    memory_index: [0].into(),
                },
                backend_repr: abi,
                largest_niche,
                size,
                align,
                max_repr_align,
                unadjusted_abi_align,
                randomization_seed: combined_seed,
            };

            Some(TmpLayout { layout, variants: variant_layouts })
        };

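        // Compute the niche-filling candidate, then the tagged candidate, where an explicit
        // discriminant ("tag") is stored before the fields of each variant.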
        let niche_filling_layout = calculate_niche_filling_layout();

        let (mut min, mut max) = (i128::MAX, i128::MIN);
        let discr_type = repr.discr_type();
        let bits = Integer::from_attr(dl, discr_type).size().bits();
        for (i, mut val) in discriminants {
            if !repr.c() && variants[i].iter().any(|f| f.is_uninhabited()) {
                continue;
            }
            if discr_type.is_signed() {
                // Sign-extend the raw `bits`-wide value to an i128.
                val = (val << (128 - bits)) >> (128 - bits);
            }
            if val < min {
                min = val;
            }
            if val > max {
                max = val;
            }
        }
        // There may be no inhabited variants, so pretend there is at least one.
        if (min, max) == (i128::MAX, i128::MIN) {
            min = 0;
            max = 0;
        }
        assert!(min <= max, "discriminant range is {min}...{max}");
        let (min_ity, signed) = discr_range_of_repr(min, max);

        let mut align = dl.aggregate_align;
        let mut max_repr_align = repr.align;
        let mut unadjusted_abi_align = align.abi;

        let mut size = Size::ZERO;

        // We're interested in the smallest alignment, so start large.
        let mut start_align = Align::from_bytes(256).unwrap();
        assert_eq!(Integer::for_align(dl, start_align), None);

        // A `repr(C)` enum is laid out as a tag followed by a union of the variants, so the
        // prefix must be aligned for every field of every variant.
        let mut prefix_align = min_ity.align(dl).abi;
        if repr.c() {
            for fields in variants {
                for field in fields {
                    prefix_align = prefix_align.max(field.align.abi);
                }
            }
        }

        // Lay out each variant as a struct prefixed by the discriminant.
        let mut layout_variants = variants
            .iter_enumerated()
            .map(|(i, field_layouts)| {
                let mut st = self.univariant(
                    field_layouts,
                    repr,
                    StructKind::Prefixed(min_ity.size(), prefix_align),
                )?;
                st.variants = Variants::Single { index: i };
                // Find the first field that cannot be moved to make room for a larger
                // discriminant.
                for field_idx in st.fields.index_by_increasing_offset() {
                    let field = &field_layouts[FieldIdx::new(field_idx)];
                    if !field.is_1zst() {
                        start_align = start_align.min(field.align.abi);
                        break;
                    }
                }
                size = cmp::max(size, st.size);
                align = align.max(st.align);
                max_repr_align = max_repr_align.max(st.max_repr_align);
                unadjusted_abi_align = unadjusted_abi_align.max(st.unadjusted_abi_align);
                Ok(st)
            })
            .collect::<Result<IndexVec<VariantIdx, _>, _>>()?;

        size = size.align_to(align.abi);

        if size.bytes() >= dl.obj_size_bound() {
            return Err(LayoutCalculatorError::SizeOverflow);
        }

        // A discriminant type larger than the one chosen by typeck would be a layout bug.
        let typeck_ity = Integer::from_attr(dl, repr.discr_type());
        if typeck_ity < min_ity {
            panic!(
                "layout decided on a larger discriminant type ({min_ity:?}) than typeck ({typeck_ity:?})"
            );
        }

        // Unless the repr demands a specific integer, consider growing the discriminant up to
        // the alignment of the first non-1-ZST field.
        let mut ity = if repr.c() || repr.int.is_some() {
            min_ity
        } else {
            Integer::for_align(dl, start_align).unwrap_or(min_ity)
        };

        if ity <= min_ity {
            // Growing would not change anything; keep the minimal discriminant type.
            ity = min_ity;
        } else {
            // Patch up the variants' first field offsets to make room for the larger tag.
            let old_ity_size = min_ity.size();
            let new_ity_size = ity.size();
            for variant in &mut layout_variants {
                match variant.fields {
                    FieldsShape::Arbitrary { ref mut offsets, .. } => {
                        for i in offsets {
                            if *i <= old_ity_size {
                                assert_eq!(*i, old_ity_size);
                                *i = new_ity_size;
                            }
                        }
                        // We might be making the struct larger.
                        if variant.size <= old_ity_size {
                            variant.size = new_ity_size;
                        }
                    }
                    FieldsShape::Primitive | FieldsShape::Array { .. } | FieldsShape::Union(..) => {
                        panic!("encountered a non-arbitrary layout during enum layout")
                    }
                }
            }
        }

        let tag_mask = ity.size().unsigned_int_max();
        let tag = Scalar::Initialized {
            value: Primitive::Int(ity, signed),
            valid_range: WrappingRange {
                start: (min as u128 & tag_mask),
                end: (max as u128 & tag_mask),
            },
        };
        let mut abi = BackendRepr::Memory { sized: true };

        if layout_variants.iter().all(|v| v.is_uninhabited()) {
            abi = BackendRepr::Uninhabited;
        } else if tag.size(dl) == size {
            // The enum is entirely its own tag: use a scalar layout.
            abi = BackendRepr::Scalar(tag);
        } else {
            // Try to use a `ScalarPair` of the tag and a primitive shared by all variants.
            let mut common_prim = None;
            let mut common_prim_initialized_in_all_variants = true;
            for (field_layouts, layout_variant) in iter::zip(variants, &layout_variants) {
                let FieldsShape::Arbitrary { ref offsets, .. } = layout_variant.fields else {
                    panic!("encountered a non-arbitrary layout during enum layout");
                };
                // Skip ZST fields; the `align == pair.align` check below catches over-aligned
                // ZSTs.
                let mut fields = iter::zip(field_layouts, offsets).filter(|p| !p.0.is_zst());
                let (field, offset) = match (fields.next(), fields.next()) {
                    (None, None) => {
                        common_prim_initialized_in_all_variants = false;
                        continue;
                    }
                    (Some(pair), None) => pair,
                    _ => {
                        common_prim = None;
                        break;
                    }
                };
                let prim = match field.backend_repr {
                    BackendRepr::Scalar(scalar) => {
                        common_prim_initialized_in_all_variants &=
                            matches!(scalar, Scalar::Initialized { .. });
                        scalar.primitive()
                    }
                    _ => {
                        common_prim = None;
                        break;
                    }
                };
                if let Some((old_prim, common_offset)) = common_prim {
                    if offset != common_offset {
                        common_prim = None;
                        break;
                    }
                    let new_prim = match (old_prim, prim) {
                        // Allow identical primitives.
                        (x, y) if x == y => x,
                        // Allow integers of the same size with differing signedness.
                        (p @ Primitive::Int(x, _), Primitive::Int(y, _)) if x == y => p,
                        // Allow integers mixed with pointers of the same size and alignment,
                        // keeping the pointer so values do not round-trip through
                        // ptrtoint/inttoptr.
                        (p @ Primitive::Pointer(_), i @ Primitive::Int(..))
                        | (i @ Primitive::Int(..), p @ Primitive::Pointer(_))
                            if p.size(dl) == i.size(dl) && p.align(dl) == i.align(dl) =>
                        {
                            p
                        }
                        _ => {
                            common_prim = None;
                            break;
                        }
                    };
                    // The primitive may have been upgraded here, e.g. from int to pointer.
                    common_prim = Some((new_prim, common_offset));
                } else {
                    common_prim = Some((prim, offset));
                }
            }
            if let Some((prim, offset)) = common_prim {
                let prim_scalar = if common_prim_initialized_in_all_variants {
                    let size = prim.size(dl);
                    assert!(size.bits() <= 128);
                    Scalar::Initialized { value: prim, valid_range: WrappingRange::full(size) }
                } else {
                    // Not every variant initializes this primitive, so it has to be a union
                    // scalar.
                    Scalar::Union { value: prim }
                };
                let pair = self.scalar_pair::<FieldIdx, VariantIdx>(tag, prim_scalar);
                let pair_offsets = match pair.fields {
                    FieldsShape::Arbitrary { ref offsets, ref memory_index } => {
                        assert_eq!(memory_index.raw, [0, 1]);
                        offsets
                    }
                    _ => panic!("encountered a non-arbitrary layout during enum layout"),
                };
                // Only use the pair ABI when it matches the layout already computed above.
                if pair_offsets[FieldIdx::new(0)] == Size::ZERO
                    && pair_offsets[FieldIdx::new(1)] == *offset
                    && align == pair.align
                    && size == pair.size
                {
                    abi = pair.backend_repr;
                }
            }
        }

        // If a by-value ABI was chosen, propagate it to inhabited variants with fields so that
        // downcasting (a no-op at runtime) does not change the ABI.
        if matches!(abi, BackendRepr::Scalar(..) | BackendRepr::ScalarPair(..)) {
            for variant in &mut layout_variants {
                if variant.fields.count() > 0
                    && matches!(variant.backend_repr, BackendRepr::Memory { .. })
                {
                    variant.backend_repr = abi;
                    // The size and alignment must also grow so the whole value fits.
                    variant.size = cmp::max(variant.size, size);
                    variant.align.abi = cmp::max(variant.align.abi, align.abi);
                }
            }
        }

        let largest_niche = Niche::from_scalar(dl, Size::ZERO, tag);

        let combined_seed = layout_variants
            .iter()
            .map(|v| v.randomization_seed)
            .fold(repr.field_shuffle_seed, |acc, seed| acc.wrapping_add(seed));

        let tagged_layout = LayoutData {
            variants: Variants::Multiple {
                tag,
                tag_encoding: TagEncoding::Direct,
                tag_field: 0,
                variants: IndexVec::new(),
            },
            fields: FieldsShape::Arbitrary {
                offsets: [Size::ZERO].into(),
                memory_index: [0].into(),
            },
            largest_niche,
            backend_repr: abi,
            align,
            size,
            max_repr_align,
            unadjusted_abi_align,
            randomization_seed: combined_seed,
        };

        let tagged_layout = TmpLayout { layout: tagged_layout, variants: layout_variants };

        let mut best_layout = match (tagged_layout, niche_filling_layout) {
            (tl, Some(nl)) => {
                // Pick the smaller layout; on a tie, pick the one with the larger niche;
                // otherwise keep the tagged layout, which has simpler codegen.
                use cmp::Ordering::*;
                let niche_size = |tmp_l: &TmpLayout<FieldIdx, VariantIdx>| {
                    tmp_l.layout.largest_niche.map_or(0, |n| n.available(dl))
                };
                match (tl.layout.size.cmp(&nl.layout.size), niche_size(&tl).cmp(&niche_size(&nl))) {
                    (Greater, _) => nl,
                    (Equal, Less) => nl,
                    _ => tl,
                }
            }
            (tl, None) => tl,
        };

        // Store the per-variant layouts in the chosen layout.
        best_layout.layout.variants = match best_layout.layout.variants {
            Variants::Multiple { tag, tag_encoding, tag_field, .. } => {
                Variants::Multiple { tag, tag_encoding, tag_field, variants: best_layout.variants }
            }
            Variants::Single { .. } | Variants::Empty => {
                panic!("encountered a single-variant or empty enum during multi-variant layout")
            }
        };
        Ok(best_layout.layout)
    }

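    /// Lays out the fields of a single variant with the given niche bias. Unless field
    /// reordering is inhibited by the `repr`, fields are sorted (or, with the `randomize`
    /// feature, shuffled) before offsets are assigned.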
    fn univariant_biased<
        'a,
        FieldIdx: Idx,
        VariantIdx: Idx,
        F: Deref<Target = &'a LayoutData<FieldIdx, VariantIdx>> + fmt::Debug + Copy,
    >(
        &self,
        fields: &IndexSlice<FieldIdx, F>,
        repr: &ReprOptions,
        kind: StructKind,
        niche_bias: NicheBias,
    ) -> LayoutCalculatorResult<FieldIdx, VariantIdx, F> {
        let dl = self.cx.data_layout();
        let pack = repr.pack;
        let mut align = if pack.is_some() { dl.i8_align } else { dl.aggregate_align };
        let mut max_repr_align = repr.align;
        let mut inverse_memory_index: IndexVec<u32, FieldIdx> = fields.indices().collect();
        let optimize_field_order = !repr.inhibit_struct_field_reordering();
        let end = if let StructKind::MaybeUnsized = kind { fields.len() - 1 } else { fields.len() };
        let optimizing = &mut inverse_memory_index.raw[..end];
        let fields_excluding_tail = &fields.raw[..end];
        // An unsized tail field does not contribute to the seed below.
        let field_seed = fields_excluding_tail
            .iter()
            .fold(0u64, |acc, f| acc.wrapping_add(f.randomization_seed));

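        // Field ordering: either shuffle the fields (when layout randomization is requested and
        // the `randomize` feature is enabled) or sort them by alignment group, size, and niche
        // placement so that padding is minimized and the largest niche ends up where the
        // requested bias wants it.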
        if optimize_field_order && fields.len() > 1 {
            if repr.can_randomize_type_layout() && cfg!(feature = "randomize") {
                #[cfg(feature = "randomize")]
                {
                    use rand::SeedableRng;
                    use rand::seq::SliceRandom;
                    // Seed the RNG from the per-field seeds plus the type's shuffle seed, so the
                    // resulting order is deterministic for a given type.
                    let mut rng = rand_xoshiro::Xoshiro128StarStar::seed_from_u64(
                        field_seed.wrapping_add(repr.field_shuffle_seed),
                    );

                    optimizing.shuffle(&mut rng);
                }
            } else {
                let max_field_align =
                    fields_excluding_tail.iter().map(|f| f.align.abi.bytes()).max().unwrap_or(1);
                let largest_niche_size = fields_excluding_tail
                    .iter()
                    .filter_map(|f| f.largest_niche)
                    .map(|n| n.available(dl))
                    .max()
                    .unwrap_or(0);

                // Sort key that groups fields by their (packed) alignment, or by a size-derived
                // pseudo-alignment when the struct is not packed.
                let alignment_group_key = |layout: &F| {
                    if let Some(pack) = pack {
                        // Under `repr(packed)` the effective alignment is capped by the pack
                        // value.
                        layout.align.abi.min(pack).bytes()
                    } else {
                        let align = layout.align.abi.bytes();
                        let size = layout.size.bytes();
                        let niche_size = layout.largest_niche.map(|n| n.available(dl)).unwrap_or(0);
                        // Use the larger of size and alignment as a pseudo-alignment, measured in
                        // trailing zero bits.
                        let size_as_align = align.max(size).trailing_zeros();
                        let size_as_align = if largest_niche_size > 0 {
                            match niche_bias {
                                // Cap the pseudo-alignment at the largest real field alignment.
                                NicheBias::Start => {
                                    max_field_align.trailing_zeros().min(size_as_align)
                                }
                                // The field carrying the largest niche keeps its real alignment.
                                NicheBias::End if niche_size == largest_niche_size => {
                                    align.trailing_zeros()
                                }
                                NicheBias::End => size_as_align,
                            }
                        } else {
                            size_as_align
                        };
                        size_as_align as u64
                    }
                };

                match kind {
                    StructKind::AlwaysSized | StructKind::MaybeUnsized => {
                        optimizing.sort_by_key(|&x| {
                            let f = &fields[x];
                            let field_size = f.size.bytes();
                            let niche_size = f.largest_niche.map_or(0, |n| n.available(dl));
                            let niche_size_key = match niche_bias {
                                // Large niches first.
                                NicheBias::Start => !niche_size,
                                // Large niches last.
                                NicheBias::End => niche_size,
                            };
                            let inner_niche_offset_key = match niche_bias {
                                NicheBias::Start => f.largest_niche.map_or(0, |n| n.offset.bytes()),
                                NicheBias::End => f.largest_niche.map_or(0, |n| {
                                    !(field_size - n.value.size(dl).bytes() - n.offset.bytes())
                                }),
                            };

                            (
                                // Place the largest alignment groups first,
                                cmp::Reverse(alignment_group_key(f)),
                                // then order niches within a group according to the bias,
                                niche_size_key,
                                // and finally prefer niches that sit closer to the biased end of
                                // their field.
                                inner_niche_offset_key,
                            )
                        });
                    }

                    StructKind::Prefixed(..) => {
                        // For prefixed layouts (e.g. enum variants following a tag), sort by
                        // ascending alignment group and then by niche size.
                        optimizing.sort_by_key(|&x| {
                            let f = &fields[x];
                            let niche_size = f.largest_niche.map_or(0, |n| n.available(dl));
                            (alignment_group_key(f), niche_size)
                        });
                    }
                }
            }
        }
        // `inverse_memory_index` now holds the field indices in memory order; assign offsets in
        // that order, tracking alignment, the largest niche, and any trailing unsized field.
        let mut unsized_field = None::<&F>;
        let mut offsets = IndexVec::from_elem(Size::ZERO, fields);
        let mut offset = Size::ZERO;
        let mut largest_niche = None;
        let mut largest_niche_available = 0;
        if let StructKind::Prefixed(prefix_size, prefix_align) = kind {
            let prefix_align =
                if let Some(pack) = pack { prefix_align.min(pack) } else { prefix_align };
            align = align.max(AbiAndPrefAlign::new(prefix_align));
            offset = prefix_size.align_to(prefix_align);
        }
        for &i in &inverse_memory_index {
            let field = &fields[i];
            if let Some(unsized_field) = unsized_field {
                return Err(LayoutCalculatorError::UnexpectedUnsized(*unsized_field));
            }

            if field.is_unsized() {
                if let StructKind::MaybeUnsized = kind {
                    unsized_field = Some(field);
                } else {
                    return Err(LayoutCalculatorError::UnexpectedUnsized(*field));
                }
            }

            let field_align = if let Some(pack) = pack {
                field.align.min(AbiAndPrefAlign::new(pack))
            } else {
                field.align
            };
            offset = offset.align_to(field_align.abi);
            align = align.max(field_align);
            max_repr_align = max_repr_align.max(field.max_repr_align);

            debug!("univariant offset: {:?} field: {:#?}", offset, field);
            offsets[i] = offset;

            if let Some(mut niche) = field.largest_niche {
                let available = niche.available(dl);
                // With an end bias, a later field with an equally large niche replaces the
                // earlier one, so the winning niche is as close to the end as possible.
                let prefer_new_niche = match niche_bias {
                    NicheBias::Start => available > largest_niche_available,
                    NicheBias::End => available >= largest_niche_available,
                };
                if prefer_new_niche {
                    largest_niche_available = available;
                    niche.offset += offset;
                    largest_niche = Some(niche);
                }
            }

            offset =
                offset.checked_add(field.size, dl).ok_or(LayoutCalculatorError::SizeOverflow)?;
        }

        // The unadjusted ABI alignment reflects `repr(packed)` but not `repr(align)`.
        let unadjusted_abi_align = align.abi;
        if let Some(repr_align) = repr.align {
            align = align.max(AbiAndPrefAlign::new(repr_align));
        }
        let align = align;

        debug!("univariant min_size: {:?}", offset);
        let min_size = offset;
        // Invert `inverse_memory_index` (memory order -> field index) to obtain `memory_index`
        // (field index -> memory order). If no reordering happened, it is the identity.
        let memory_index = if optimize_field_order {
            inverse_memory_index.invert_bijective_mapping()
        } else {
            debug_assert!(inverse_memory_index.iter().copied().eq(fields.indices()));
            inverse_memory_index.into_iter().map(|it| it.index() as u32).collect()
        };
        let size = min_size.align_to(align.abi);
        if size.bytes() >= dl.obj_size_bound() {
            return Err(LayoutCalculatorError::SizeOverflow);
        }
        let mut layout_of_single_non_zst_field = None;
        let sized = unsized_field.is_none();
        let mut abi = BackendRepr::Memory { sized };

        let optimize_abi = !repr.inhibit_newtype_abi_optimization();

        // Try to forward a scalar or scalar-pair ABI from the non-ZST fields.
        if sized && size.bytes() > 0 {
            // All ZST fields are skipped here; the alignment checks below catch over-aligned
            // ZSTs.
            let mut non_zst_fields = fields.iter_enumerated().filter(|&(_, f)| !f.is_zst());

            match (non_zst_fields.next(), non_zst_fields.next(), non_zst_fields.next()) {
                // Exactly one non-ZST field: forward its ABI if it fills the whole struct.
                (Some((i, field)), None, None) => {
                    layout_of_single_non_zst_field = Some(field);

                    if offsets[i].bytes() == 0 && align.abi == field.align.abi && size == field.size
                    {
                        match field.backend_repr {
                            BackendRepr::Scalar(_) | BackendRepr::Vector { .. } if optimize_abi => {
                                abi = field.backend_repr;
                            }
                            BackendRepr::ScalarPair(..) => {
                                abi = field.backend_repr;
                            }
                            _ => {}
                        }
                    }
                }

                // Exactly two non-ZST fields, both scalars: try to use a `ScalarPair`.
                (Some((i, a)), Some((j, b)), None) => {
                    match (a.backend_repr, b.backend_repr) {
                        (BackendRepr::Scalar(a), BackendRepr::Scalar(b)) => {
                            // Order the pair by memory offset, not source order.
                            let ((i, a), (j, b)) = if offsets[i] < offsets[j] {
                                ((i, a), (j, b))
                            } else {
                                ((j, b), (i, a))
                            };
                            let pair = self.scalar_pair::<FieldIdx, VariantIdx>(a, b);
                            let pair_offsets = match pair.fields {
                                FieldsShape::Arbitrary { ref offsets, ref memory_index } => {
                                    assert_eq!(memory_index.raw, [0, 1]);
                                    offsets
                                }
                                FieldsShape::Primitive
                                | FieldsShape::Array { .. }
                                | FieldsShape::Union(..) => {
                                    panic!("encountered a non-arbitrary layout during enum layout")
                                }
                            };
                            // Only use the pair ABI when it matches the layout already computed.
                            if offsets[i] == pair_offsets[FieldIdx::new(0)]
                                && offsets[j] == pair_offsets[FieldIdx::new(1)]
                                && align == pair.align
                                && size == pair.size
                            {
                                abi = pair.backend_repr;
                            }
                        }
                        _ => {}
                    }
                }

                _ => {}
            }
        }
        if fields.iter().any(|f| f.is_uninhabited()) {
            abi = BackendRepr::Uninhabited;
        }

        let unadjusted_abi_align = if repr.transparent() {
            match layout_of_single_non_zst_field {
                Some(l) => l.unadjusted_abi_align,
                None => {
                    // `repr(transparent)` with all-ZST fields.
                    align.abi
                }
            }
        } else {
            unadjusted_abi_align
        };

        let seed = field_seed.wrapping_add(repr.field_shuffle_seed);

        Ok(LayoutData {
            variants: Variants::Single { index: VariantIdx::new(0) },
            fields: FieldsShape::Arbitrary { offsets, memory_index },
            backend_repr: abi,
            largest_niche,
            align,
            size,
            max_repr_align,
            unadjusted_abi_align,
            randomization_seed: seed,
        })
    }

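    /// Renders a compact, per-field summary of offsets, alignments, sizes, and niches, used only
    /// in the `debug!` output of `univariant`.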
    fn format_field_niches<
        'a,
        FieldIdx: Idx,
        VariantIdx: Idx,
        F: Deref<Target = &'a LayoutData<FieldIdx, VariantIdx>> + fmt::Debug,
    >(
        &self,
        layout: &LayoutData<FieldIdx, VariantIdx>,
        fields: &IndexSlice<FieldIdx, F>,
    ) -> String {
        let dl = self.cx.data_layout();
        let mut s = String::new();
        for i in layout.fields.index_by_increasing_offset() {
            let offset = layout.fields.offset(i);
            let f = &fields[FieldIdx::new(i)];
            write!(s, "[o{}a{}s{}", offset.bytes(), f.align.abi.bytes(), f.size.bytes()).unwrap();
            if let Some(n) = f.largest_niche {
                write!(
                    s,
                    " n{}b{}s{}",
                    n.offset.bytes(),
                    n.available(dl).ilog2(),
                    n.value.size(dl).bytes()
                )
                .unwrap();
            }
            write!(s, "] ").unwrap();
        }
        s
    }
}