1use std::fmt::{self, Write};
2use std::ops::{Bound, Deref};
3use std::{cmp, iter};
4
5use rustc_hashes::Hash64;
6use rustc_index::Idx;
7use tracing::debug;
8
9use crate::{
10 AbiAndPrefAlign, Align, BackendRepr, FieldsShape, HasDataLayout, IndexSlice, IndexVec, Integer,
11 LayoutData, Niche, NonZeroUsize, Primitive, ReprOptions, Scalar, Size, StructKind, TagEncoding,
12 Variants, WrappingRange,
13};
14
15#[cfg(feature = "nightly")]
16mod ty;
17
18#[cfg(feature = "nightly")]
19pub use ty::{FIRST_VARIANT, FieldIdx, Layout, TyAbiInterface, TyAndLayout, VariantIdx};
20
21fn absent<'a, FieldIdx, VariantIdx, F>(fields: &IndexSlice<FieldIdx, F>) -> bool
27where
28 FieldIdx: Idx,
29 VariantIdx: Idx,
30 F: Deref<Target = &'a LayoutData<FieldIdx, VariantIdx>> + fmt::Debug,
31{
32 let uninhabited = fields.iter().any(|f| f.is_uninhabited());
33 let is_1zst = fields.iter().all(|f| f.is_1zst());
36 uninhabited && is_1zst
37}
38
/// Which end of a struct's layout the field-ordering heuristics should try to
/// push the largest niche towards (see `univariant_biased`).
enum NicheBias {
    Start,
    End,
}
44
/// Errors that can occur while computing a layout.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum LayoutCalculatorError<F> {
    /// An unsized field was found where a sized one was expected; the payload
    /// is the offending field.
    UnexpectedUnsized(F),

    /// The computed size exceeded the target's object-size bound (or an
    /// offset addition overflowed).
    SizeOverflow,

    /// A union with no fields was encountered.
    EmptyUnion,

    /// The type's `repr` attributes conflict with its layout requirements
    /// (e.g. a packed enum).
    ReprConflict,
}
64
65impl<F> LayoutCalculatorError<F> {
66 pub fn without_payload(&self) -> LayoutCalculatorError<()> {
67 match self {
68 LayoutCalculatorError::UnexpectedUnsized(_) => {
69 LayoutCalculatorError::UnexpectedUnsized(())
70 }
71 LayoutCalculatorError::SizeOverflow => LayoutCalculatorError::SizeOverflow,
72 LayoutCalculatorError::EmptyUnion => LayoutCalculatorError::EmptyUnion,
73 LayoutCalculatorError::ReprConflict => LayoutCalculatorError::ReprConflict,
74 }
75 }
76
77 pub fn fallback_fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
81 f.write_str(match self {
82 LayoutCalculatorError::UnexpectedUnsized(_) => {
83 "an unsized type was found where a sized type was expected"
84 }
85 LayoutCalculatorError::SizeOverflow => "size overflow",
86 LayoutCalculatorError::EmptyUnion => "type is a union with no fields",
87 LayoutCalculatorError::ReprConflict => "type has an invalid repr",
88 })
89 }
90}
91
/// Shorthand for the result type produced by `LayoutCalculator` methods.
type LayoutCalculatorResult<FieldIdx, VariantIdx, F> =
    Result<LayoutData<FieldIdx, VariantIdx>, LayoutCalculatorError<F>>;
94
/// Computes the layout of aggregates (structs, enums, unions) from the
/// layouts of their fields, given a context `Cx` that provides the target's
/// data layout.
#[derive(Clone, Copy, Debug)]
pub struct LayoutCalculator<Cx> {
    pub cx: Cx,
}
99
100impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
    /// Creates a layout calculator wrapping the given layout context.
    pub fn new(cx: Cx) -> Self {
        Self { cx }
    }
104
105 pub fn scalar_pair<FieldIdx: Idx, VariantIdx: Idx>(
106 &self,
107 a: Scalar,
108 b: Scalar,
109 ) -> LayoutData<FieldIdx, VariantIdx> {
110 let dl = self.cx.data_layout();
111 let b_align = b.align(dl);
112 let align = a.align(dl).max(b_align).max(dl.aggregate_align);
113 let b_offset = a.size(dl).align_to(b_align.abi);
114 let size = (b_offset + b.size(dl)).align_to(align.abi);
115
116 let largest_niche = Niche::from_scalar(dl, b_offset, b)
119 .into_iter()
120 .chain(Niche::from_scalar(dl, Size::ZERO, a))
121 .max_by_key(|niche| niche.available(dl));
122
123 let combined_seed = a.size(&self.cx).bytes().wrapping_add(b.size(&self.cx).bytes());
124
125 LayoutData {
126 variants: Variants::Single { index: VariantIdx::new(0) },
127 fields: FieldsShape::Arbitrary {
128 offsets: [Size::ZERO, b_offset].into(),
129 memory_index: [0, 1].into(),
130 },
131 backend_repr: BackendRepr::ScalarPair(a, b),
132 largest_niche,
133 align,
134 size,
135 max_repr_align: None,
136 unadjusted_abi_align: align.abi,
137 randomization_seed: Hash64::new(combined_seed),
138 }
139 }
140
    /// Computes the layout of a struct-like aggregate (a single variant).
    ///
    /// The layout is first computed with the niche biased towards the start.
    /// If that leaves unused space on *both* sides of the niche, an end-biased
    /// alternative is computed and preferred when it yields strictly more head
    /// space — presumably so enclosing niche-optimized enums can reuse the
    /// leading bytes for other variants' fields (TODO confirm against callers).
    pub fn univariant<
        'a,
        FieldIdx: Idx,
        VariantIdx: Idx,
        F: Deref<Target = &'a LayoutData<FieldIdx, VariantIdx>> + fmt::Debug + Copy,
    >(
        &self,
        fields: &IndexSlice<FieldIdx, F>,
        repr: &ReprOptions,
        kind: StructKind,
    ) -> LayoutCalculatorResult<FieldIdx, VariantIdx, F> {
        let dl = self.cx.data_layout();
        let layout = self.univariant_biased(fields, repr, kind, NicheBias::Start);
        if let Ok(layout) = &layout {
            // Only sized layouts are candidates for the alternative bias.
            if !matches!(kind, StructKind::MaybeUnsized) {
                if let Some(niche) = layout.largest_niche {
                    // Bytes before / inside / after the niche in the default layout.
                    let head_space = niche.offset.bytes();
                    let niche_len = niche.value.size(dl).bytes();
                    let tail_space = layout.size.bytes() - head_space - niche_len;

                    // Retry only when the niche sits strictly inside the layout
                    // and there is more than one field to reorder.
                    if fields.len() > 1 && head_space != 0 && tail_space > 0 {
                        let alt_layout = self
                            .univariant_biased(fields, repr, kind, NicheBias::End)
                            .expect("alt layout should always work");
                        let alt_niche = alt_layout
                            .largest_niche
                            .expect("alt layout should have a niche like the regular one");
                        let alt_head_space = alt_niche.offset.bytes();
                        let alt_niche_len = alt_niche.value.size(dl).bytes();
                        let alt_tail_space =
                            alt_layout.size.bytes() - alt_head_space - alt_niche_len;

                        // Reordering fields must never change the total size.
                        debug_assert_eq!(layout.size.bytes(), alt_layout.size.bytes());

                        // Switch only if the end-biased layout beats both the
                        // default's head space and its tail space.
                        let prefer_alt_layout =
                            alt_head_space > head_space && alt_head_space > tail_space;

                        debug!(
                            "sz: {}, default_niche_at: {}+{}, default_tail_space: {}, alt_niche_at/head_space: {}+{}, alt_tail: {}, num_fields: {}, better: {}\n\
                            layout: {}\n\
                            alt_layout: {}\n",
                            layout.size.bytes(),
                            head_space,
                            niche_len,
                            tail_space,
                            alt_head_space,
                            alt_niche_len,
                            alt_tail_space,
                            layout.fields.count(),
                            prefer_alt_layout,
                            self.format_field_niches(layout, fields),
                            self.format_field_niches(&alt_layout, fields),
                        );

                        if prefer_alt_layout {
                            return Ok(alt_layout);
                        }
                    }
                }
            }
        }
        layout
    }
215
216 pub fn layout_of_never_type<FieldIdx: Idx, VariantIdx: Idx>(
217 &self,
218 ) -> LayoutData<FieldIdx, VariantIdx> {
219 let dl = self.cx.data_layout();
220 LayoutData {
222 variants: Variants::Empty,
223 fields: FieldsShape::Primitive,
224 backend_repr: BackendRepr::Uninhabited,
225 largest_niche: None,
226 align: dl.i8_align,
227 size: Size::ZERO,
228 max_repr_align: None,
229 unadjusted_abi_align: dl.i8_align.abi,
230 randomization_seed: Hash64::ZERO,
231 }
232 }
233
    /// Computes the layout of a struct or an enum from its variants' field
    /// layouts, dispatching to `layout_of_struct` when only one variant is
    /// "present" (and layout optimizations are allowed), or to
    /// `layout_of_enum` for genuinely multi-variant enums.
    pub fn layout_of_struct_or_enum<
        'a,
        FieldIdx: Idx,
        VariantIdx: Idx,
        F: Deref<Target = &'a LayoutData<FieldIdx, VariantIdx>> + fmt::Debug + Copy,
    >(
        &self,
        repr: &ReprOptions,
        variants: &IndexSlice<VariantIdx, IndexVec<FieldIdx, F>>,
        is_enum: bool,
        is_unsafe_cell: bool,
        scalar_valid_range: (Bound<u128>, Bound<u128>),
        discr_range_of_repr: impl Fn(i128, i128) -> (Integer, bool),
        discriminants: impl Iterator<Item = (VariantIdx, i128)>,
        dont_niche_optimize_enum: bool,
        always_sized: bool,
    ) -> LayoutCalculatorResult<FieldIdx, VariantIdx, F> {
        // Find the first two variants that participate in layout. "Absent"
        // variants (uninhabited, all-1-ZST fields) are skipped — except under
        // repr(C), which keeps every variant.
        let (present_first, present_second) = {
            let mut present_variants = variants
                .iter_enumerated()
                .filter_map(|(i, v)| if !repr.c() && absent(v) { None } else { Some(i) });
            (present_variants.next(), present_variants.next())
        };
        let present_first = match present_first {
            Some(present_first) => present_first,
            // An enum with no present variants lays out like `!`.
            None if is_enum => {
                return Ok(self.layout_of_never_type());
            }
            // A struct/union always has its (only) variant, even if absent.
            None => VariantIdx::new(0),
        };

        // A single-present-variant enum can use the struct path, unless its
        // repr attributes forbid layout optimization.
        if !is_enum ||
            (present_second.is_none() && !repr.inhibit_enum_layout_opt())
        {
            self.layout_of_struct(
                repr,
                variants,
                is_enum,
                is_unsafe_cell,
                scalar_valid_range,
                always_sized,
                present_first,
            )
        } else {
            assert!(is_enum);
            self.layout_of_enum(
                repr,
                variants,
                discr_range_of_repr,
                discriminants,
                dont_niche_optimize_enum,
            )
        }
    }
296
    /// Computes the layout of a union from the field layouts of its single
    /// "variant": size is the max of the fields' sizes, alignment the max of
    /// their alignments (subject to `repr(packed)`/`repr(align)`), and the ABI
    /// is the fields' common non-memory ABI if they all agree on one.
    pub fn layout_of_union<
        'a,
        FieldIdx: Idx,
        VariantIdx: Idx,
        F: Deref<Target = &'a LayoutData<FieldIdx, VariantIdx>> + fmt::Debug + Copy,
    >(
        &self,
        repr: &ReprOptions,
        variants: &IndexSlice<VariantIdx, IndexVec<FieldIdx, F>>,
    ) -> LayoutCalculatorResult<FieldIdx, VariantIdx, F> {
        let dl = self.cx.data_layout();
        // `repr(packed)` starts the alignment at 1 byte instead of the
        // aggregate default; the cap itself is applied after the field scan.
        let mut align = if repr.pack.is_some() { dl.i8_align } else { dl.aggregate_align };
        let mut max_repr_align = repr.align;

        // Marker value: the fields do not share a usable common ABI.
        struct AbiMismatch;
        let mut common_non_zst_abi_and_align = if repr.inhibits_union_abi_opt() {
            Err(AbiMismatch)
        } else {
            Ok(None)
        };

        let mut size = Size::ZERO;
        let only_variant_idx = VariantIdx::new(0);
        let only_variant = &variants[only_variant_idx];
        for field in only_variant {
            if field.is_unsized() {
                return Err(LayoutCalculatorError::UnexpectedUnsized(*field));
            }

            align = align.max(field.align);
            max_repr_align = max_repr_align.max(field.max_repr_align);
            size = cmp::max(size, field.size);

            // ZST fields contribute size/alignment above but cannot veto the
            // common ABI.
            if field.is_zst() {
                continue;
            }

            if let Ok(common) = common_non_zst_abi_and_align {
                // Strip validity-range info: a union imposes no valid range
                // on its bytes.
                let field_abi = field.backend_repr.to_union();

                if let Some((common_abi, common_align)) = common {
                    if common_abi != field_abi {
                        // Conflicting field ABIs force a memory ABI.
                        common_non_zst_abi_and_align = Err(AbiMismatch);
                    } else {
                        // Matching non-memory ABIs must also agree on alignment.
                        if !matches!(common_abi, BackendRepr::Memory { .. }) {
                            assert_eq!(
                                common_align, field.align.abi,
                                "non-Aggregate field with matching ABI but differing alignment"
                            );
                        }
                    }
                } else {
                    // First non-ZST field establishes the candidate ABI.
                    common_non_zst_abi_and_align = Ok(Some((field_abi, field.align.abi)));
                }
            }
        }

        if let Some(pack) = repr.pack {
            align = align.min(AbiAndPrefAlign::new(pack));
        }
        // The "unadjusted" ABI alignment deliberately excludes `repr(align)`.
        let unadjusted_abi_align = align.abi;
        if let Some(repr_align) = repr.align {
            align = align.max(AbiAndPrefAlign::new(repr_align));
        }
        let align = align;

        let abi = match common_non_zst_abi_and_align {
            Err(AbiMismatch) | Ok(None) => BackendRepr::Memory { sized: true },
            Ok(Some((abi, _))) => {
                // If repr attributes changed the union's alignment away from
                // the ABI's inherent one, the scalar/vector ABI would be wrong.
                if abi.inherent_align(dl).map(|a| a.abi) != Some(align.abi) {
                    BackendRepr::Memory { sized: true }
                } else {
                    abi
                }
            }
        };

        let Some(union_field_count) = NonZeroUsize::new(only_variant.len()) else {
            return Err(LayoutCalculatorError::EmptyUnion);
        };

        // Fold all field seeds into the repr's shuffle seed so layout
        // randomization stays deterministic per type.
        let combined_seed = only_variant
            .iter()
            .map(|v| v.randomization_seed)
            .fold(repr.field_shuffle_seed, |acc, seed| acc.wrapping_add(seed));

        Ok(LayoutData {
            variants: Variants::Single { index: only_variant_idx },
            fields: FieldsShape::Union(union_field_count),
            backend_repr: abi,
            largest_niche: None,
            align,
            size: size.align_to(align.abi),
            max_repr_align,
            unadjusted_abi_align,
            randomization_seed: combined_seed,
        })
    }
410
    /// Computes the layout of a struct — or of an enum whose single present
    /// variant (`present_first`) lets it lay out like one — including
    /// `UnsafeCell` niche hiding and `layout_scalar_valid_range` restrictions.
    fn layout_of_struct<
        'a,
        FieldIdx: Idx,
        VariantIdx: Idx,
        F: Deref<Target = &'a LayoutData<FieldIdx, VariantIdx>> + fmt::Debug + Copy,
    >(
        &self,
        repr: &ReprOptions,
        variants: &IndexSlice<VariantIdx, IndexVec<FieldIdx, F>>,
        is_enum: bool,
        is_unsafe_cell: bool,
        scalar_valid_range: (Bound<u128>, Bound<u128>),
        always_sized: bool,
        present_first: VariantIdx,
    ) -> LayoutCalculatorResult<FieldIdx, VariantIdx, F> {
        let dl = self.cx.data_layout();
        let v = present_first;
        // Enums and field-less variants are always sized; otherwise the last
        // field may be unsized unless the caller guarantees sizedness.
        let kind = if is_enum || variants[v].is_empty() || always_sized {
            StructKind::AlwaysSized
        } else {
            StructKind::MaybeUnsized
        };

        let mut st = self.univariant(&variants[v], repr, kind)?;
        st.variants = Variants::Single { index: v };

        if is_unsafe_cell {
            // `UnsafeCell` hides all niches: widen every scalar's valid range
            // to the full range of its primitive, and report no largest niche.
            let hide_niches = |scalar: &mut _| match scalar {
                Scalar::Initialized { value, valid_range } => {
                    *valid_range = WrappingRange::full(value.size(dl))
                }
                Scalar::Union { .. } => {}
            };
            match &mut st.backend_repr {
                BackendRepr::Uninhabited => {}
                BackendRepr::Scalar(scalar) => hide_niches(scalar),
                BackendRepr::ScalarPair(a, b) => {
                    hide_niches(a);
                    hide_niches(b);
                }
                BackendRepr::Vector { element, count: _ } => hide_niches(element),
                BackendRepr::Memory { sized: _ } => {}
            }
            st.largest_niche = None;
            return Ok(st);
        }

        // Apply the caller-provided valid-range restriction (from the
        // `layout_scalar_valid_range` attributes) to the leading scalar.
        let (start, end) = scalar_valid_range;
        match st.backend_repr {
            BackendRepr::Scalar(ref mut scalar) | BackendRepr::ScalarPair(ref mut scalar, _) => {
                let max_value = scalar.size(dl).unsigned_int_max();
                if let Bound::Included(start) = start {
                    assert!(start <= max_value, "{start} > {max_value}");
                    scalar.valid_range_mut().start = start;
                }
                if let Bound::Included(end) = end {
                    assert!(end <= max_value, "{end} > {max_value}");
                    scalar.valid_range_mut().end = end;
                }

                // The restricted range may create a niche at offset 0 that is
                // at least as large as the one found during field placement;
                // prefer it (note `<=`: ties go to the restricted scalar).
                let niche = Niche::from_scalar(dl, Size::ZERO, *scalar);
                if let Some(niche) = niche {
                    match st.largest_niche {
                        Some(largest_niche) => {
                            if largest_niche.available(dl) <= niche.available(dl) {
                                st.largest_niche = Some(niche);
                            }
                        }
                        None => st.largest_niche = Some(niche),
                    }
                }
            }
            // A valid-range restriction on a non-scalar layout is a caller bug.
            _ => assert!(
                start == Bound::Unbounded && end == Bound::Unbounded,
                "nonscalar layout for layout_scalar_valid_range type: {st:#?}",
            ),
        }

        Ok(st)
    }
511
    /// Computes the layout of a multi-variant enum: builds both a tagged
    /// ("direct" discriminant) candidate and, where possible, a niche-filling
    /// candidate, then keeps whichever is smaller (ties broken by the larger
    /// remaining niche).
    fn layout_of_enum<
        'a,
        FieldIdx: Idx,
        VariantIdx: Idx,
        F: Deref<Target = &'a LayoutData<FieldIdx, VariantIdx>> + fmt::Debug + Copy,
    >(
        &self,
        repr: &ReprOptions,
        variants: &IndexSlice<VariantIdx, IndexVec<FieldIdx, F>>,
        discr_range_of_repr: impl Fn(i128, i128) -> (Integer, bool),
        discriminants: impl Iterator<Item = (VariantIdx, i128)>,
        dont_niche_optimize_enum: bool,
    ) -> LayoutCalculatorResult<FieldIdx, VariantIdx, F> {
        // A candidate enum layout together with its per-variant layouts; the
        // variants are spliced into the winning layout at the very end.
        struct TmpLayout<FieldIdx: Idx, VariantIdx: Idx> {
            layout: LayoutData<FieldIdx, VariantIdx>,
            variants: IndexVec<VariantIdx, LayoutData<FieldIdx, VariantIdx>>,
        }

        let dl = self.cx.data_layout();
        // `repr(packed)` is not permitted on enums.
        if repr.packed() {
            return Err(LayoutCalculatorError::ReprConflict);
        }

        // Try to encode the discriminant inside a niche of the largest
        // variant instead of a separate tag field. Returns `None` whenever
        // that is impossible or disallowed.
        let calculate_niche_filling_layout = || -> Option<TmpLayout<FieldIdx, VariantIdx>> {
            if dont_niche_optimize_enum {
                return None;
            }

            if variants.len() < 2 {
                return None;
            }

            let mut align = dl.aggregate_align;
            let mut max_repr_align = repr.align;
            let mut unadjusted_abi_align = align.abi;

            let mut variant_layouts = variants
                .iter_enumerated()
                .map(|(j, v)| {
                    let mut st = self.univariant(v, repr, StructKind::AlwaysSized).ok()?;
                    st.variants = Variants::Single { index: j };

                    align = align.max(st.align);
                    max_repr_align = max_repr_align.max(st.max_repr_align);
                    unadjusted_abi_align = unadjusted_abi_align.max(st.unadjusted_abi_align);

                    Some(st)
                })
                .collect::<Option<IndexVec<VariantIdx, _>>>()?;

            // The largest variant hosts the niche; all others must fit
            // around it.
            let largest_variant_index = variant_layouts
                .iter_enumerated()
                .max_by_key(|(_i, layout)| layout.size.bytes())
                .map(|(i, _layout)| i)?;

            // Only variants other than the largest (and non-absent ones) need
            // a niche value; the range below spans exactly those.
            let all_indices = variants.indices();
            let needs_disc =
                |index: VariantIdx| index != largest_variant_index && !absent(&variants[index]);
            let niche_variants = all_indices.clone().find(|v| needs_disc(*v)).unwrap()
                ..=all_indices.rev().find(|v| needs_disc(*v)).unwrap();

            let count =
                (niche_variants.end().index() as u128 - niche_variants.start().index() as u128) + 1;

            // Reserve `count` values inside the largest variant's niche.
            let niche = variant_layouts[largest_variant_index].largest_niche?;
            let (niche_start, niche_scalar) = niche.reserve(dl, count)?;
            let niche_offset = niche.offset;
            let niche_size = niche.value.size(dl);
            let size = variant_layouts[largest_variant_index].size.align_to(align.abi);

            // Shift every other variant's fields past the niche and check it
            // still fits in the enum's size.
            let all_variants_fit = variant_layouts.iter_enumerated_mut().all(|(i, layout)| {
                if i == largest_variant_index {
                    return true;
                }

                layout.largest_niche = None;

                // Variant ends before the niche begins: no shifting needed.
                if layout.size <= niche_offset {
                    return true;
                }

                let this_align = layout.align.abi;
                let this_offset = (niche_offset + niche_size).align_to(this_align);

                if this_offset + layout.size > size {
                    return false;
                }

                match layout.fields {
                    FieldsShape::Arbitrary { ref mut offsets, .. } => {
                        for offset in offsets.iter_mut() {
                            *offset += this_offset;
                        }
                    }
                    FieldsShape::Primitive | FieldsShape::Array { .. } | FieldsShape::Union(..) => {
                        panic!("Layout of fields should be Arbitrary for variants")
                    }
                }

                // The shift invalidates any scalar ABI the variant had.
                if !layout.is_uninhabited() {
                    layout.backend_repr = BackendRepr::Memory { sized: true };
                }
                layout.size += this_offset;

                true
            });

            if !all_variants_fit {
                return None;
            }

            let largest_niche = Niche::from_scalar(dl, niche_offset, niche_scalar);

            let others_zst = variant_layouts
                .iter_enumerated()
                .all(|(i, layout)| i == largest_variant_index || layout.size == Size::ZERO);
            let same_size = size == variant_layouts[largest_variant_index].size;
            let same_align = align == variant_layouts[largest_variant_index].align;

            // The enum can inherit the largest variant's scalar ABI only when
            // every other variant is a ZST and nothing changed size/alignment.
            let abi = if variant_layouts.iter().all(|v| v.is_uninhabited()) {
                BackendRepr::Uninhabited
            } else if same_size && same_align && others_zst {
                match variant_layouts[largest_variant_index].backend_repr {
                    BackendRepr::Scalar(_) => BackendRepr::Scalar(niche_scalar),
                    BackendRepr::ScalarPair(first, second) => {
                        // Replace whichever half hosts the niche; the other
                        // half loses its validity info (`to_union`).
                        if niche_offset == Size::ZERO {
                            BackendRepr::ScalarPair(niche_scalar, second.to_union())
                        } else {
                            BackendRepr::ScalarPair(first.to_union(), niche_scalar)
                        }
                    }
                    _ => BackendRepr::Memory { sized: true },
                }
            } else {
                BackendRepr::Memory { sized: true }
            };

            let combined_seed = variant_layouts
                .iter()
                .map(|v| v.randomization_seed)
                .fold(repr.field_shuffle_seed, |acc, seed| acc.wrapping_add(seed));

            let layout = LayoutData {
                variants: Variants::Multiple {
                    tag: niche_scalar,
                    tag_encoding: TagEncoding::Niche {
                        untagged_variant: largest_variant_index,
                        niche_variants,
                        niche_start,
                    },
                    tag_field: 0,
                    variants: IndexVec::new(),
                },
                fields: FieldsShape::Arbitrary {
                    offsets: [niche_offset].into(),
                    memory_index: [0].into(),
                },
                backend_repr: abi,
                largest_niche,
                size,
                align,
                max_repr_align,
                unadjusted_abi_align,
                randomization_seed: combined_seed,
            };

            Some(TmpLayout { layout, variants: variant_layouts })
        };

        let niche_filling_layout = calculate_niche_filling_layout();

        // --- Tagged (direct-discriminant) layout ---
        // Find the smallest/largest used discriminant values.
        let (mut min, mut max) = (i128::MAX, i128::MIN);
        let discr_type = repr.discr_type();
        let bits = Integer::from_attr(dl, discr_type).size().bits();
        for (i, mut val) in discriminants {
            // Variants with an uninhabited field need no discriminant value
            // (except under repr(C)).
            if !repr.c() && variants[i].iter().any(|f| f.is_uninhabited()) {
                continue;
            }
            if discr_type.is_signed() {
                // Sign-extend the value from the declared discriminant width.
                val = (val << (128 - bits)) >> (128 - bits);
            }
            if val < min {
                min = val;
            }
            if val > max {
                max = val;
            }
        }
        // No usable discriminants at all: pretend the range is just 0.
        if (min, max) == (i128::MAX, i128::MIN) {
            min = 0;
            max = 0;
        }
        assert!(min <= max, "discriminant range is {min}...{max}");
        // Smallest integer that can represent the discriminant range under
        // this repr. (Split from the following statement for readability.)
        let (min_ity, signed) = discr_range_of_repr(min, max);

        let mut align = dl.aggregate_align;
        let mut max_repr_align = repr.align;
        let mut unadjusted_abi_align = align.abi;

        let mut size = Size::ZERO;

        // `start_align` tracks the alignment of the first non-1-ZST field
        // across all variants; 256 is deliberately larger than any integer
        // alignment, as the assert below checks.
        let mut start_align = Align::from_bytes(256).unwrap();
        assert_eq!(Integer::for_align(dl, start_align), None);

        // repr(C): the tag prefix must be aligned for every field so the
        // variants share a common prefix layout.
        let mut prefix_align = min_ity.align(dl).abi;
        if repr.c() {
            for fields in variants {
                for field in fields {
                    prefix_align = prefix_align.max(field.align.abi);
                }
            }
        }

        // Lay out each variant with room for the tag reserved at the front.
        let mut layout_variants = variants
            .iter_enumerated()
            .map(|(i, field_layouts)| {
                let mut st = self.univariant(
                    field_layouts,
                    repr,
                    StructKind::Prefixed(min_ity.size(), prefix_align),
                )?;
                st.variants = Variants::Single { index: i };
                // Record the alignment of the first non-1-ZST field (in
                // memory order) — the tag may later grow to fill up to it.
                for field_idx in st.fields.index_by_increasing_offset() {
                    let field = &field_layouts[FieldIdx::new(field_idx)];
                    if !field.is_1zst() {
                        start_align = start_align.min(field.align.abi);
                        break;
                    }
                }
                size = cmp::max(size, st.size);
                align = align.max(st.align);
                max_repr_align = max_repr_align.max(st.max_repr_align);
                unadjusted_abi_align = unadjusted_abi_align.max(st.unadjusted_abi_align);
                Ok(st)
            })
            .collect::<Result<IndexVec<VariantIdx, _>, _>>()?;

        size = size.align_to(align.abi);

        if size.bytes() >= dl.obj_size_bound() {
            return Err(LayoutCalculatorError::SizeOverflow);
        }

        // Sanity check: we must not pick a wider tag than typeck assumed.
        let typeck_ity = Integer::from_attr(dl, repr.discr_type());
        if typeck_ity < min_ity {
            panic!(
                "layout decided on a larger discriminant type ({min_ity:?}) than typeck ({typeck_ity:?})"
            );
        }

        // Without an explicit repr, the tag may be widened to the first
        // field's alignment to avoid padding after it.
        let mut ity = if repr.c() || repr.int.is_some() {
            min_ity
        } else {
            Integer::for_align(dl, start_align).unwrap_or(min_ity)
        };

        if ity <= min_ity {
            ity = min_ity;
        } else {
            // The tag grew: push all field offsets (which sat exactly at the
            // old tag size) out to the new tag size.
            let old_ity_size = min_ity.size();
            let new_ity_size = ity.size();
            for variant in &mut layout_variants {
                match variant.fields {
                    FieldsShape::Arbitrary { ref mut offsets, .. } => {
                        for i in offsets {
                            if *i <= old_ity_size {
                                assert_eq!(*i, old_ity_size);
                                *i = new_ity_size;
                            }
                        }
                        if variant.size <= old_ity_size {
                            variant.size = new_ity_size;
                        }
                    }
                    FieldsShape::Primitive | FieldsShape::Array { .. } | FieldsShape::Union(..) => {
                        panic!("encountered a non-arbitrary layout during enum layout")
                    }
                }
            }
        }

        // The tag scalar: valid exactly over the (wrapped) discriminant range.
        let tag_mask = ity.size().unsigned_int_max();
        let tag = Scalar::Initialized {
            value: Primitive::Int(ity, signed),
            valid_range: WrappingRange {
                start: (min as u128 & tag_mask),
                end: (max as u128 & tag_mask),
            },
        };
        let mut abi = BackendRepr::Memory { sized: true };

        if layout_variants.iter().all(|v| v.is_uninhabited()) {
            abi = BackendRepr::Uninhabited;
        } else if tag.size(dl) == size {
            // The enum is nothing but its tag.
            abi = BackendRepr::Scalar(tag);
        } else {
            // Try to give the enum a ScalarPair ABI of (tag, common payload):
            // each variant must have at most one non-ZST field, all of them
            // scalar, at the same offset, with compatible primitives.
            let mut common_prim = None;
            let mut common_prim_initialized_in_all_variants = true;
            for (field_layouts, layout_variant) in iter::zip(variants, &layout_variants) {
                let FieldsShape::Arbitrary { ref offsets, .. } = layout_variant.fields else {
                    panic!("encountered a non-arbitrary layout during enum layout");
                };
                let mut fields = iter::zip(field_layouts, offsets).filter(|p| !p.0.is_zst());
                let (field, offset) = match (fields.next(), fields.next()) {
                    (None, None) => {
                        // Field-less variant: the payload half is uninitialized.
                        common_prim_initialized_in_all_variants = false;
                        continue;
                    }
                    (Some(pair), None) => pair,
                    _ => {
                        common_prim = None;
                        break;
                    }
                };
                let prim = match field.backend_repr {
                    BackendRepr::Scalar(scalar) => {
                        common_prim_initialized_in_all_variants &=
                            matches!(scalar, Scalar::Initialized { .. });
                        scalar.primitive()
                    }
                    _ => {
                        common_prim = None;
                        break;
                    }
                };
                if let Some((old_prim, common_offset)) = common_prim {
                    if offset != common_offset {
                        common_prim = None;
                        break;
                    }
                    // Unify primitives: identical ones, same-width integers,
                    // or a pointer with a same-size/same-align integer
                    // (keeping the pointer).
                    let new_prim = match (old_prim, prim) {
                        (x, y) if x == y => x,
                        (p @ Primitive::Int(x, _), Primitive::Int(y, _)) if x == y => p,
                        (p @ Primitive::Pointer(_), i @ Primitive::Int(..))
                        | (i @ Primitive::Int(..), p @ Primitive::Pointer(_))
                            if p.size(dl) == i.size(dl) && p.align(dl) == i.align(dl) =>
                        {
                            p
                        }
                        _ => {
                            common_prim = None;
                            break;
                        }
                    };
                    common_prim = Some((new_prim, common_offset));
                } else {
                    common_prim = Some((prim, offset));
                }
            }
            if let Some((prim, offset)) = common_prim {
                let prim_scalar = if common_prim_initialized_in_all_variants {
                    let size = prim.size(dl);
                    assert!(size.bits() <= 128);
                    Scalar::Initialized { value: prim, valid_range: WrappingRange::full(size) }
                } else {
                    // Some variant leaves the payload uninitialized.
                    Scalar::Union { value: prim }
                };
                let pair = self.scalar_pair::<FieldIdx, VariantIdx>(tag, prim_scalar);
                let pair_offsets = match pair.fields {
                    FieldsShape::Arbitrary { ref offsets, ref memory_index } => {
                        assert_eq!(memory_index.raw, [0, 1]);
                        offsets
                    }
                    _ => panic!("encountered a non-arbitrary layout during enum layout"),
                };
                // Only adopt the pair ABI if it reproduces the layout we
                // already committed to.
                if pair_offsets[FieldIdx::new(0)] == Size::ZERO
                    && pair_offsets[FieldIdx::new(1)] == *offset
                    && align == pair.align
                    && size == pair.size
                {
                    abi = pair.backend_repr;
                }
            }
        }

        // Propagate a scalar(-pair) enum ABI into memory-ABI variants so
        // per-variant and whole-enum codegen agree.
        if matches!(abi, BackendRepr::Scalar(..) | BackendRepr::ScalarPair(..)) {
            for variant in &mut layout_variants {
                if variant.fields.count() > 0
                    && matches!(variant.backend_repr, BackendRepr::Memory { .. })
                {
                    variant.backend_repr = abi;
                    variant.size = cmp::max(variant.size, size);
                    variant.align.abi = cmp::max(variant.align.abi, align.abi);
                }
            }
        }

        let largest_niche = Niche::from_scalar(dl, Size::ZERO, tag);

        let combined_seed = layout_variants
            .iter()
            .map(|v| v.randomization_seed)
            .fold(repr.field_shuffle_seed, |acc, seed| acc.wrapping_add(seed));

        let tagged_layout = LayoutData {
            variants: Variants::Multiple {
                tag,
                tag_encoding: TagEncoding::Direct,
                tag_field: 0,
                variants: IndexVec::new(),
            },
            fields: FieldsShape::Arbitrary {
                offsets: [Size::ZERO].into(),
                memory_index: [0].into(),
            },
            largest_niche,
            backend_repr: abi,
            align,
            size,
            max_repr_align,
            unadjusted_abi_align,
            randomization_seed: combined_seed,
        };

        let tagged_layout = TmpLayout { layout: tagged_layout, variants: layout_variants };

        // Pick the smaller candidate; on equal size, the one with the larger
        // remaining niche.
        let mut best_layout = match (tagged_layout, niche_filling_layout) {
            (tl, Some(nl)) => {
                use cmp::Ordering::*;
                let niche_size = |tmp_l: &TmpLayout<FieldIdx, VariantIdx>| {
                    tmp_l.layout.largest_niche.map_or(0, |n| n.available(dl))
                };
                match (tl.layout.size.cmp(&nl.layout.size), niche_size(&tl).cmp(&niche_size(&nl))) {
                    (Greater, _) => nl,
                    (Equal, Less) => nl,
                    _ => tl,
                }
            }
            (tl, None) => tl,
        };

        // Splice the winner's per-variant layouts into its `Variants`.
        best_layout.layout.variants = match best_layout.layout.variants {
            Variants::Multiple { tag, tag_encoding, tag_field, .. } => {
                Variants::Multiple { tag, tag_encoding, tag_field, variants: best_layout.variants }
            }
            Variants::Single { .. } | Variants::Empty => {
                panic!("encountered a single-variant or empty enum during multi-variant layout")
            }
        };
        Ok(best_layout.layout)
    }
1037
    /// Lays out a single variant's fields with the requested niche bias:
    /// optionally reorders fields (randomized or size/alignment-sorted),
    /// assigns offsets, tracks the largest niche, and computes the ABI.
    fn univariant_biased<
        'a,
        FieldIdx: Idx,
        VariantIdx: Idx,
        F: Deref<Target = &'a LayoutData<FieldIdx, VariantIdx>> + fmt::Debug + Copy,
    >(
        &self,
        fields: &IndexSlice<FieldIdx, F>,
        repr: &ReprOptions,
        kind: StructKind,
        niche_bias: NicheBias,
    ) -> LayoutCalculatorResult<FieldIdx, VariantIdx, F> {
        let dl = self.cx.data_layout();
        let pack = repr.pack;
        let mut align = if pack.is_some() { dl.i8_align } else { dl.aggregate_align };
        let mut max_repr_align = repr.align;
        // `inverse_memory_index[m] = i` maps memory order `m` to source-order
        // field index `i`; starts as the identity and may be permuted below.
        let mut inverse_memory_index: IndexVec<u32, FieldIdx> = fields.indices().collect();
        let optimize_field_order = !repr.inhibit_struct_field_reordering();
        // A MaybeUnsized struct's (possibly unsized) tail must stay last.
        let end = if let StructKind::MaybeUnsized = kind { fields.len() - 1 } else { fields.len() };
        let optimizing = &mut inverse_memory_index.raw[..end];
        let fields_excluding_tail = &fields.raw[..end];
        // Combine field seeds so layout randomization is deterministic per type.
        let field_seed = fields_excluding_tail
            .iter()
            .fold(Hash64::ZERO, |acc, f| acc.wrapping_add(f.randomization_seed));

        if optimize_field_order && fields.len() > 1 {
            // With the `randomize` feature, eligible types get a seeded
            // shuffle instead of the deterministic sort.
            if repr.can_randomize_type_layout() && cfg!(feature = "randomize") {
                #[cfg(feature = "randomize")]
                {
                    use rand::SeedableRng;
                    use rand::seq::SliceRandom;
                    let mut rng = rand_xoshiro::Xoshiro128StarStar::seed_from_u64(
                        field_seed.wrapping_add(repr.field_shuffle_seed).as_u64(),
                    );

                    optimizing.shuffle(&mut rng);
                }
            } else {
                let max_field_align =
                    fields_excluding_tail.iter().map(|f| f.align.abi.bytes()).max().unwrap_or(1);
                let largest_niche_size = fields_excluding_tail
                    .iter()
                    .filter_map(|f| f.largest_niche)
                    .map(|n| n.available(dl))
                    .max()
                    .unwrap_or(0);

                // Groups fields by "effective alignment" (as a power-of-two
                // exponent); fields in the same group pack without padding.
                let alignment_group_key = |layout: &F| {
                    if let Some(pack) = pack {
                        // Under packed repr, only the capped alignment matters.
                        layout.align.abi.min(pack).bytes()
                    } else {
                        let align = layout.align.abi.bytes();
                        let size = layout.size.bytes();
                        let niche_size = layout.largest_niche.map(|n| n.available(dl)).unwrap_or(0);
                        // Treat a field's size as an alignment bound so
                        // oddly-sized fields don't force padding.
                        let size_as_align = align.max(size).trailing_zeros();
                        let size_as_align = if largest_niche_size > 0 {
                            match niche_bias {
                                // Bias the best-niche field towards the start
                                // by flattening the grouping.
                                NicheBias::Start => {
                                    max_field_align.trailing_zeros().min(size_as_align)
                                }
                                // Bias the best-niche field towards the end by
                                // grouping it purely by alignment.
                                NicheBias::End if niche_size == largest_niche_size => {
                                    align.trailing_zeros()
                                }
                                NicheBias::End => size_as_align,
                            }
                        } else {
                            size_as_align
                        };
                        size_as_align as u64
                    }
                };

                match kind {
                    StructKind::AlwaysSized | StructKind::MaybeUnsized => {
                        optimizing.sort_by_key(|&x| {
                            let f = &fields[x];
                            let field_size = f.size.bytes();
                            let niche_size = f.largest_niche.map_or(0, |n| n.available(dl));
                            // `!` inverts the ordering: Start-bias sorts
                            // larger niches first, End-bias sorts them last.
                            let niche_size_key = match niche_bias {
                                NicheBias::Start => !niche_size,
                                NicheBias::End => niche_size,
                            };
                            // Within a field, prefer the niche closest to the
                            // biased end of the field.
                            let inner_niche_offset_key = match niche_bias {
                                NicheBias::Start => f.largest_niche.map_or(0, |n| n.offset.bytes()),
                                NicheBias::End => f.largest_niche.map_or(0, |n| {
                                    !(field_size - n.value.size(dl).bytes() - n.offset.bytes())
                                }),
                            };

                            (
                                cmp::Reverse(alignment_group_key(f)),
                                niche_size_key,
                                inner_niche_offset_key,
                            )
                        });
                    }

                    StructKind::Prefixed(..) => {
                        // Prefixed (enum-variant) layouts sort ascending by
                        // alignment group so the tag prefix stays tight.
                        optimizing.sort_by_key(|&x| {
                            let f = &fields[x];
                            let niche_size = f.largest_niche.map_or(0, |n| n.available(dl));
                            (alignment_group_key(f), niche_size)
                        });
                    }
                }

            }
        }
        // --- Offset assignment (in memory order) ---
        let mut unsized_field = None::<&F>;
        let mut offsets = IndexVec::from_elem(Size::ZERO, fields);
        let mut offset = Size::ZERO;
        let mut largest_niche = None;
        let mut largest_niche_available = 0;
        // A Prefixed layout starts after the (aligned) tag prefix.
        if let StructKind::Prefixed(prefix_size, prefix_align) = kind {
            let prefix_align =
                if let Some(pack) = pack { prefix_align.min(pack) } else { prefix_align };
            align = align.max(AbiAndPrefAlign::new(prefix_align));
            offset = prefix_size.align_to(prefix_align);
        }
        for &i in &inverse_memory_index {
            let field = &fields[i];
            // An unsized field is only allowed in the very last position.
            if let Some(unsized_field) = unsized_field {
                return Err(LayoutCalculatorError::UnexpectedUnsized(*unsized_field));
            }

            if field.is_unsized() {
                if let StructKind::MaybeUnsized = kind {
                    unsized_field = Some(field);
                } else {
                    return Err(LayoutCalculatorError::UnexpectedUnsized(*field));
                }
            }

            let field_align = if let Some(pack) = pack {
                field.align.min(AbiAndPrefAlign::new(pack))
            } else {
                field.align
            };
            offset = offset.align_to(field_align.abi);
            align = align.max(field_align);
            max_repr_align = max_repr_align.max(field.max_repr_align);

            debug!("univariant offset: {:?} field: {:#?}", offset, field);
            offsets[i] = offset;

            if let Some(mut niche) = field.largest_niche {
                let available = niche.available(dl);
                // On ties, Start-bias keeps the earlier niche, End-bias the
                // later one.
                let prefer_new_niche = match niche_bias {
                    NicheBias::Start => available > largest_niche_available,
                    NicheBias::End => available >= largest_niche_available,
                };
                if prefer_new_niche {
                    largest_niche_available = available;
                    // Translate the niche from field-relative to struct-relative.
                    niche.offset += offset;
                    largest_niche = Some(niche);
                }
            }

            offset =
                offset.checked_add(field.size, dl).ok_or(LayoutCalculatorError::SizeOverflow)?;
        }

        // "Unadjusted" ABI alignment excludes `repr(align)`.
        let unadjusted_abi_align = align.abi;
        if let Some(repr_align) = repr.align {
            align = align.max(AbiAndPrefAlign::new(repr_align));
        }
        let align = align;

        debug!("univariant min_size: {:?}", offset);
        let min_size = offset;
        // Convert memory order -> source order into `memory_index`.
        let memory_index = if optimize_field_order {
            inverse_memory_index.invert_bijective_mapping()
        } else {
            debug_assert!(inverse_memory_index.iter().copied().eq(fields.indices()));
            inverse_memory_index.into_iter().map(|it| it.index() as u32).collect()
        };
        let size = min_size.align_to(align.abi);
        if size.bytes() >= dl.obj_size_bound() {
            return Err(LayoutCalculatorError::SizeOverflow);
        }
        let mut layout_of_single_non_zst_field = None;
        let sized = unsized_field.is_none();
        let mut abi = BackendRepr::Memory { sized };

        let optimize_abi = !repr.inhibit_newtype_abi_optimization();

        // --- ABI upgrade: newtype-style and two-scalar structs ---
        if sized && size.bytes() > 0 {
            let mut non_zst_fields = fields.iter_enumerated().filter(|&(_, f)| !f.is_zst());

            match (non_zst_fields.next(), non_zst_fields.next(), non_zst_fields.next()) {
                // Exactly one non-ZST field: inherit its ABI if the struct is
                // layout-identical to it.
                (Some((i, field)), None, None) => {
                    layout_of_single_non_zst_field = Some(field);

                    if offsets[i].bytes() == 0 && align.abi == field.align.abi && size == field.size
                    {
                        match field.backend_repr {
                            BackendRepr::Scalar(_) | BackendRepr::Vector { .. } if optimize_abi => {
                                abi = field.backend_repr;
                            }
                            BackendRepr::ScalarPair(..) => {
                                abi = field.backend_repr;
                            }
                            _ => {}
                        }
                    }
                }

                // Exactly two non-ZST scalar fields: try a ScalarPair ABI.
                (Some((i, a)), Some((j, b)), None) => {
                    match (a.backend_repr, b.backend_repr) {
                        (BackendRepr::Scalar(a), BackendRepr::Scalar(b)) => {
                            // Order the pair by memory offset, not source index.
                            let ((i, a), (j, b)) = if offsets[i] < offsets[j] {
                                ((i, a), (j, b))
                            } else {
                                ((j, b), (i, a))
                            };
                            let pair = self.scalar_pair::<FieldIdx, VariantIdx>(a, b);
                            let pair_offsets = match pair.fields {
                                FieldsShape::Arbitrary { ref offsets, ref memory_index } => {
                                    assert_eq!(memory_index.raw, [0, 1]);
                                    offsets
                                }
                                FieldsShape::Primitive
                                | FieldsShape::Array { .. }
                                | FieldsShape::Union(..) => {
                                    panic!("encountered a non-arbitrary layout during enum layout")
                                }
                            };
                            // Only adopt the pair ABI if it matches the layout
                            // we already computed exactly.
                            if offsets[i] == pair_offsets[FieldIdx::new(0)]
                                && offsets[j] == pair_offsets[FieldIdx::new(1)]
                                && align == pair.align
                                && size == pair.size
                            {
                                abi = pair.backend_repr;
                            }
                        }
                        _ => {}
                    }
                }

                _ => {}
            }
        }
        if fields.iter().any(|f| f.is_uninhabited()) {
            abi = BackendRepr::Uninhabited;
        }

        // repr(transparent) types delegate their unadjusted ABI alignment to
        // their single non-ZST field, if any.
        let unadjusted_abi_align = if repr.transparent() {
            match layout_of_single_non_zst_field {
                Some(l) => l.unadjusted_abi_align,
                None => {
                    align.abi
                }
            }
        } else {
            unadjusted_abi_align
        };

        let seed = field_seed.wrapping_add(repr.field_shuffle_seed);

        Ok(LayoutData {
            variants: Variants::Single { index: VariantIdx::new(0) },
            fields: FieldsShape::Arbitrary { offsets, memory_index },
            backend_repr: abi,
            largest_niche,
            align,
            size,
            max_repr_align,
            unadjusted_abi_align,
            randomization_seed: seed,
        })
    }
1388
1389 fn format_field_niches<
1390 'a,
1391 FieldIdx: Idx,
1392 VariantIdx: Idx,
1393 F: Deref<Target = &'a LayoutData<FieldIdx, VariantIdx>> + fmt::Debug,
1394 >(
1395 &self,
1396 layout: &LayoutData<FieldIdx, VariantIdx>,
1397 fields: &IndexSlice<FieldIdx, F>,
1398 ) -> String {
1399 let dl = self.cx.data_layout();
1400 let mut s = String::new();
1401 for i in layout.fields.index_by_increasing_offset() {
1402 let offset = layout.fields.offset(i);
1403 let f = &fields[FieldIdx::new(i)];
1404 write!(s, "[o{}a{}s{}", offset.bytes(), f.align.abi.bytes(), f.size.bytes()).unwrap();
1405 if let Some(n) = f.largest_niche {
1406 write!(
1407 s,
1408 " n{}b{}s{}",
1409 n.offset.bytes(),
1410 n.available(dl).ilog2(),
1411 n.value.size(dl).bytes()
1412 )
1413 .unwrap();
1414 }
1415 write!(s, "] ").unwrap();
1416 }
1417 s
1418 }
1419}