use std::fmt::Write;

use rustc_abi::Primitive::{Float, Int, Pointer};
use rustc_abi::{Align, BackendRepr, FieldsShape, Scalar, Size, Variants};
use rustc_codegen_ssa::traits::*;
use rustc_middle::bug;
use rustc_middle::ty::layout::{LayoutOf, TyAndLayout};
use rustc_middle::ty::print::{with_no_trimmed_paths, with_no_visible_paths};
use rustc_middle::ty::{self, CoroutineArgsExt, Ty, TypeVisitableExt};
use rustc_span::{DUMMY_SP, Span};
use tracing::debug;

use crate::common::*;
use crate::llvm::Type;

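/// Computes the LLVM type for a layout not handled by the scalar fast path in
/// `llvm_type`: vectors are lowered directly, while `ScalarPair` and `Memory`
/// layouts become LLVM struct/array types. When a named struct with
/// `FieldsShape::Arbitrary` fields is created, its body is not filled in here;
/// `defer` is set instead so the caller can populate it after the named type
/// exists (which also handles self-referential types).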
fn uncached_llvm_type<'a, 'tcx>(
    cx: &CodegenCx<'a, 'tcx>,
    layout: TyAndLayout<'tcx>,
    defer: &mut Option<(&'a Type, TyAndLayout<'tcx>)>,
) -> &'a Type {
    match layout.backend_repr {
        BackendRepr::Scalar(_) => bug!("handled elsewhere"),
        BackendRepr::SimdVector { element, count } => {
            let element = layout.scalar_llvm_type_at(cx, element);
            return cx.type_vector(element, count);
        }
        BackendRepr::ScalableVector { ref element, count } => {
            let element = if element.is_bool() {
                cx.type_i1()
            } else {
                layout.scalar_llvm_type_at(cx, *element)
            };

            return cx.type_scalable_vector(element, count);
        }
        BackendRepr::Memory { .. } | BackendRepr::ScalarPair(..) => {}
    }

    let name = match layout.ty.kind() {
        ty::Adt(..)
        | ty::Closure(..)
        | ty::CoroutineClosure(..)
        | ty::Foreign(..)
        | ty::Coroutine(..)
        | ty::Str
            if !cx.sess().fewer_names() =>
        {
            let mut name = with_no_visible_paths!(with_no_trimmed_paths!(layout.ty.to_string()));
            if let (&ty::Adt(def, _), &Variants::Single { index }) =
                (layout.ty.kind(), &layout.variants)
            {
                if def.is_enum() {
                    write!(&mut name, "::{}", def.variant(index).name).unwrap();
                }
            }
            if let (&ty::Coroutine(_, _), &Variants::Single { index }) =
                (layout.ty.kind(), &layout.variants)
            {
                write!(&mut name, "::{}", ty::CoroutineArgs::variant_name(index)).unwrap();
            }
            Some(name)
        }
        _ => None,
    };

    match layout.fields {
        FieldsShape::Primitive | FieldsShape::Union(_) => {
            let fill = cx.type_padding_filler(layout.size, layout.align.abi);
            let packed = false;
            match name {
                None => cx.type_struct(&[fill], packed),
                Some(ref name) => {
                    let llty = cx.type_named_struct(name);
                    cx.set_struct_body(llty, &[fill], packed);
                    llty
                }
            }
        }
        FieldsShape::Array { count, .. } => cx.type_array(layout.field(cx, 0).llvm_type(cx), count),
        FieldsShape::Arbitrary { .. } => match name {
            None => {
                let (llfields, packed) = struct_llfields(cx, layout);
                cx.type_struct(&llfields, packed)
            }
            Some(ref name) => {
                let llty = cx.type_named_struct(name);
                *defer = Some((llty, layout));
                llty
            }
        },
    }
}

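/// Returns the LLVM field types for a layout with arbitrarily placed fields,
/// inserting padding fillers so every field lands at its target offset, plus a
/// flag saying whether the resulting struct must be packed (i.e. some field's
/// natural alignment cannot be honored at its offset).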
fn struct_llfields<'a, 'tcx>(
    cx: &CodegenCx<'a, 'tcx>,
    layout: TyAndLayout<'tcx>,
) -> (Vec<&'a Type>, bool) {
    debug!("struct_llfields: {:#?}", layout);
    let field_count = layout.fields.count();

    let mut packed = false;
    let mut offset = Size::ZERO;
    let mut prev_effective_align = layout.align.abi;
    let mut result: Vec<_> = Vec::with_capacity(1 + field_count * 2);
    for i in layout.fields.index_by_increasing_offset() {
        let target_offset = layout.fields.offset(i as usize);
        let field = layout.field(cx, i);
        let effective_field_align =
            layout.align.abi.min(field.align.abi).restrict_for_offset(target_offset);
        packed |= effective_field_align < field.align.abi;

        debug!(
            "struct_llfields: {}: {:?} offset: {:?} target_offset: {:?} \
             effective_field_align: {}",
            i,
            field,
            offset,
            target_offset,
            effective_field_align.bytes()
        );
        assert!(target_offset >= offset);
        let padding = target_offset - offset;
        if padding != Size::ZERO {
            let padding_align = prev_effective_align.min(effective_field_align);
            assert_eq!(offset.align_to(padding_align) + padding, target_offset);
            result.push(cx.type_padding_filler(padding, padding_align));
            debug!(" padding before: {:?}", padding);
        }
        result.push(field.llvm_type(cx));
        offset = target_offset + field.size;
        prev_effective_align = effective_field_align;
    }
    if layout.is_sized() && field_count > 0 {
        if offset > layout.size {
            bug!("layout: {:#?} stride: {:?} offset: {:?}", layout, layout.size, offset);
        }
        let padding = layout.size - offset;
        if padding != Size::ZERO {
            let padding_align = prev_effective_align;
            assert_eq!(offset.align_to(padding_align) + padding, layout.size);
            debug!(
                "struct_llfields: pad_bytes: {:?} offset: {:?} stride: {:?}",
                padding, offset, layout.size
            );
            result.push(cx.type_padding_filler(padding, padding_align));
        }
    } else {
        debug!("struct_llfields: offset: {:?} stride: {:?}", offset, layout.size);
    }
    (result, packed)
}

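// Convenience helpers on `CodegenCx` for querying the size and alignment of a
// type via its computed layout.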
impl<'a, 'tcx> CodegenCx<'a, 'tcx> {
    pub(crate) fn align_of(&self, ty: Ty<'tcx>) -> Align {
        self.layout_of(ty).align.abi
    }

    pub(crate) fn size_of(&self, ty: Ty<'tcx>) -> Size {
        self.layout_of(ty).size
    }

    pub(crate) fn size_and_align_of(&self, ty: Ty<'tcx>) -> (Size, Align) {
        self.spanned_size_and_align_of(ty, DUMMY_SP)
    }

    pub(crate) fn spanned_size_and_align_of(&self, ty: Ty<'tcx>, span: Span) -> (Size, Align) {
        let layout = self.spanned_layout_of(ty, span);
        (layout.size, layout.align.abi)
    }
}

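/// Extension trait mapping a `TyAndLayout` to the LLVM types used in codegen:
/// the in-memory representation (`llvm_type`), the immediate representation
/// (`immediate_llvm_type`), and the components of scalars and scalar pairs.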
pub(crate) trait LayoutLlvmExt<'tcx> {
    fn is_llvm_immediate(&self) -> bool;
    fn is_llvm_scalar_pair(&self) -> bool;
    fn llvm_type<'a>(&self, cx: &CodegenCx<'a, 'tcx>) -> &'a Type;
    fn immediate_llvm_type<'a>(&self, cx: &CodegenCx<'a, 'tcx>) -> &'a Type;
    fn scalar_llvm_type_at<'a>(&self, cx: &CodegenCx<'a, 'tcx>, scalar: Scalar) -> &'a Type;
    fn scalar_pair_element_llvm_type<'a>(
        &self,
        cx: &CodegenCx<'a, 'tcx>,
        index: usize,
        immediate: bool,
    ) -> &'a Type;
}

impl<'tcx> LayoutLlvmExt<'tcx> for TyAndLayout<'tcx> {
    fn is_llvm_immediate(&self) -> bool {
        match self.backend_repr {
            BackendRepr::Scalar(_)
            | BackendRepr::SimdVector { .. }
            | BackendRepr::ScalableVector { .. } => true,
            BackendRepr::ScalarPair(..) | BackendRepr::Memory { .. } => false,
        }
    }

    fn is_llvm_scalar_pair(&self) -> bool {
        match self.backend_repr {
            BackendRepr::ScalarPair(..) => true,
            BackendRepr::Scalar(_)
            | BackendRepr::SimdVector { .. }
            | BackendRepr::ScalableVector { .. }
            | BackendRepr::Memory { .. } => false,
        }
    }

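    /// Gets the LLVM type that this layout is lowered to in memory.
    /// Scalars use a dedicated cache keyed by type; everything else is cached
    /// in `type_lowering`, keyed by `(type, variant index)`. Regions are
    /// erased first so that types differing only in lifetimes share one LLVM
    /// type. For named aggregates, the empty named struct is inserted into the
    /// cache before its body is filled in (via `defer` from
    /// `uncached_llvm_type`), which lets recursive types refer to themselves.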
    fn llvm_type<'a>(&self, cx: &CodegenCx<'a, 'tcx>) -> &'a Type {
        if let BackendRepr::Scalar(scalar) = self.backend_repr {
            if let Some(&llty) = cx.scalar_lltypes.borrow().get(&self.ty) {
                return llty;
            }
            let llty = self.scalar_llvm_type_at(cx, scalar);
            cx.scalar_lltypes.borrow_mut().insert(self.ty, llty);
            return llty;
        }

        let variant_index = match self.variants {
            Variants::Single { index } => Some(index),
            _ => None,
        };
        if let Some(&llty) = cx.type_lowering.borrow().get(&(self.ty, variant_index)) {
            return llty;
        }

        debug!("llvm_type({:#?})", self);

        assert!(!self.ty.has_escaping_bound_vars(), "{:?} has escaping bound vars", self.ty);

        let normal_ty = cx.tcx.erase_and_anonymize_regions(self.ty);

        let mut defer = None;
        let llty = if self.ty != normal_ty {
            let mut layout = cx.layout_of(normal_ty);
            if let Some(v) = variant_index {
                layout = layout.for_variant(cx, v);
            }
            layout.llvm_type(cx)
        } else {
            uncached_llvm_type(cx, *self, &mut defer)
        };
        debug!("--> mapped {:#?} to llty={:?}", self, llty);

        cx.type_lowering.borrow_mut().insert((self.ty, variant_index), llty);

        if let Some((llty, layout)) = defer {
            let (llfields, packed) = struct_llfields(cx, layout);
            cx.set_struct_body(llty, &llfields, packed);
        }
        llty
    }

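    /// The LLVM type used when a value of this layout is an SSA immediate:
    /// `bool` becomes `i1` (rather than the in-memory `i8`), and scalar pairs
    /// become an anonymous two-field struct of their immediate element types.
    /// All other layouts use `llvm_type` unchanged.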
    fn immediate_llvm_type<'a>(&self, cx: &CodegenCx<'a, 'tcx>) -> &'a Type {
        match self.backend_repr {
            BackendRepr::Scalar(scalar) => {
                if scalar.is_bool() {
                    return cx.type_i1();
                }
            }
            BackendRepr::ScalarPair(..) => {
                return cx.type_struct(
                    &[
                        self.scalar_pair_element_llvm_type(cx, 0, true),
                        self.scalar_pair_element_llvm_type(cx, 1, true),
                    ],
                    false,
                );
            }
            _ => {}
        };
        self.llvm_type(cx)
    }

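    /// Lowers a single ABI scalar to its LLVM type: an integer, a float, or a
    /// pointer in the scalar's address space.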
    fn scalar_llvm_type_at<'a>(&self, cx: &CodegenCx<'a, 'tcx>, scalar: Scalar) -> &'a Type {
        match scalar.primitive() {
            Int(i, _) => cx.type_from_integer(i),
            Float(f) => cx.type_from_float(f),
            Pointer(address_space) => cx.type_ptr_ext(address_space),
        }
    }

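    /// The LLVM type of one element (`index` 0 or 1) of a `ScalarPair` layout.
    /// With `immediate = true`, a `bool` element is lowered to `i1`; this is
    /// what `immediate_llvm_type` uses to build the pair struct.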
    fn scalar_pair_element_llvm_type<'a>(
        &self,
        cx: &CodegenCx<'a, 'tcx>,
        index: usize,
        immediate: bool,
    ) -> &'a Type {
        let BackendRepr::ScalarPair(a, b) = self.backend_repr else {
            bug!("TyAndLayout::scalar_pair_element_llty({:?}): not applicable", self);
        };
        let scalar = [a, b][index];

        if immediate && scalar.is_bool() {
            return cx.type_i1();
        }

        self.scalar_llvm_type_at(cx, scalar)
    }
}