use rustc_abi::{
    Align, BackendRepr, FieldIdx, FieldsShape, Size, TagEncoding, VariantIdx, Variants,
};
use rustc_middle::mir::PlaceTy;
use rustc_middle::mir::interpret::Scalar;
use rustc_middle::ty::layout::{HasTyCtxt, HasTypingEnv, LayoutOf, TyAndLayout};
use rustc_middle::ty::{self, Ty};
use rustc_middle::{bug, mir};
use tracing::{debug, instrument};

use super::operand::OperandValue;
use super::{FunctionCx, LocalRef};
use crate::common::IntPredicate;
use crate::size_of_val;
use crate::traits::*;

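/// The location and extra runtime properties of a place.
///
/// As a location in memory, this has no specific type. To load or store it
/// with a typed operation, attach a layout via [`Self::with_type`].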
#[derive(Copy, Clone, Debug)]
pub struct PlaceValue<V> {
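    /// A pointer to the contents of the place.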
    pub llval: V,

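    /// The extra pointer metadata (slice length or vtable) if the place is
    /// unsized, or `None` if it is statically sized.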
    pub llextra: Option<V>,

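    /// The alignment we know for this place, which can be smaller than the
    /// type's ABI alignment (e.g. for fields of packed structs).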
    pub align: Align,
}

impl<V: CodegenObject> PlaceValue<V> {
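    /// Constructs a `PlaceValue` for a statically sized location, which
    /// therefore carries no pointer metadata.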
    pub fn new_sized(llval: V, align: Align) -> PlaceValue<V> {
        PlaceValue { llval, llextra: None, align }
    }

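    /// Allocates a stack slot of the given size and alignment in the current
    /// function and returns a `PlaceValue` pointing at it.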
    pub fn alloca<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        size: Size,
        align: Align,
    ) -> PlaceValue<V> {
        let llval = bx.alloca(size, align);
        PlaceValue::new_sized(llval, align)
    }

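    /// Creates a [`PlaceRef`] to this location with the given type.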
    pub fn with_type<'tcx>(self, layout: TyAndLayout<'tcx>) -> PlaceRef<'tcx, V> {
        assert!(
            layout.is_unsized() || layout.is_uninhabited() || self.llextra.is_none(),
            "Had pointer metadata {:?} for sized type {layout:?}",
            self.llextra,
        );
        PlaceRef { val: self, layout }
    }

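    /// Gets the pointer to this place as an [`OperandValue::Immediate`], or,
    /// if the place is unsized, as an [`OperandValue::Pair`] of the pointer
    /// and its metadata.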
    pub fn address(self) -> OperandValue<V> {
        if let Some(llextra) = self.llextra {
            OperandValue::Pair(self.llval, llextra)
        } else {
            OperandValue::Immediate(self.llval)
        }
    }
}

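/// A [`PlaceValue`] together with the type (and layout) of the value stored
/// in it.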
#[derive(Copy, Clone, Debug)]
pub struct PlaceRef<'tcx, V> {
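    /// The location and extra runtime properties of the place.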
    pub val: PlaceValue<V>,

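    /// The monomorphized type of this place, including variant information.
    ///
    /// Note that `val.align`, not `layout.align`, is the alignment that can
    /// actually be relied on for loads and stores.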
    pub layout: TyAndLayout<'tcx>,
}

impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
    pub fn new_sized(llval: V, layout: TyAndLayout<'tcx>) -> PlaceRef<'tcx, V> {
        PlaceRef::new_sized_aligned(llval, layout, layout.align.abi)
    }

    pub fn new_sized_aligned(
        llval: V,
        layout: TyAndLayout<'tcx>,
        align: Align,
    ) -> PlaceRef<'tcx, V> {
        assert!(layout.is_sized());
        PlaceValue::new_sized(llval, align).with_type(layout)
    }

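    /// Allocates a stack slot matching `layout` and returns a `PlaceRef` to
    /// it, using a scalable-vector alloca for runtime-sized layouts.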
    pub fn alloca<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        layout: TyAndLayout<'tcx>,
    ) -> Self {
        if layout.is_runtime_sized() {
            Self::alloca_runtime_sized(bx, layout)
        } else {
            Self::alloca_size(bx, layout.size, layout)
        }
    }

    pub fn alloca_size<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        size: Size,
        layout: TyAndLayout<'tcx>,
    ) -> Self {
        assert!(layout.is_sized(), "tried to statically allocate unsized place");
        PlaceValue::alloca(bx, size, layout.align.abi).with_type(layout)
    }

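    /// Returns a place for an indirect reference to an unsized place: a thin
    /// stack slot that holds a fat pointer to the actual data.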
    pub fn alloca_unsized_indirect<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        layout: TyAndLayout<'tcx>,
    ) -> Self {
        assert!(layout.is_unsized(), "tried to allocate indirect place for sized values");
        let ptr_ty = Ty::new_mut_ptr(bx.cx().tcx(), layout.ty);
        let ptr_layout = bx.cx().layout_of(ptr_ty);
        Self::alloca(bx, ptr_layout)
    }

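    /// Returns the length of this array or slice place: the constant count
    /// for arrays, or the pointer metadata for slices (whose static `count`
    /// is zero).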
    pub fn len<Cx: ConstCodegenMethods<Value = V>>(&self, cx: &Cx) -> V {
        if let FieldsShape::Array { count, .. } = self.layout.fields {
            if self.layout.is_unsized() {
                assert_eq!(count, 0);
                self.val.llextra.unwrap()
            } else {
                cx.const_usize(count)
            }
        } else {
            bug!("unexpected layout `{:#?}` in PlaceRef::len", self.layout)
        }
    }

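    /// Allocates a stack slot for a scalable vector, whose size is only known
    /// at runtime as a multiple of the hardware vector length.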
    fn alloca_runtime_sized<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        layout: TyAndLayout<'tcx>,
    ) -> Self {
        let (element_count, ty) = layout.ty.scalable_vector_element_count_and_type(bx.tcx());
        PlaceValue::new_sized(
            bx.scalable_alloca(element_count as u64, layout.align.abi, ty),
            layout.align.abi,
        )
        .with_type(layout)
    }
}

impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
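    /// Projects to the field at index `ix`, offsetting the pointer and, for
    /// unsized fields, rounding the offset up to the field's dynamic
    /// alignment at runtime.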
    pub fn project_field<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        bx: &mut Bx,
        ix: usize,
    ) -> Self {
        let field = self.layout.field(bx.cx(), ix);
        let offset = self.layout.fields.offset(ix);
        let effective_field_align = self.val.align.restrict_for_offset(offset);

        let mut simple = || {
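            // When the field is at a statically known offset, we just offset
            // the base pointer; metadata is carried over only if the field
            // type itself is unsized.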
            let llval = if offset.bytes() == 0 {
                self.val.llval
            } else {
                bx.inbounds_ptradd(self.val.llval, bx.const_usize(offset.bytes()))
            };
            let val = PlaceValue {
                llval,
                llextra: if bx.cx().tcx().type_has_metadata(field.ty, bx.cx().typing_env()) {
                    self.val.llextra
                } else {
                    None
                },
                align: effective_field_align,
            };
            val.with_type(field)
        };

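        // Simple cases, which don't need DST adjustment:
        //   * known alignment - sized types, `[T]`, `str`
        //   * offset 0 -- rounding up to alignment cannot change the offset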
        match field.ty.kind() {
            _ if field.is_sized() => return simple(),
            ty::Slice(..) | ty::Str => return simple(),
            _ if offset.bytes() == 0 => return simple(),
            _ => {}
        }

        let meta = self.val.llextra;

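        // The field is unsized, so its runtime offset depends on the dynamic
        // alignment of its type: round the statically known part of the
        // offset up to that alignment before adding it to the base pointer.
        //
        // To demonstrate:
        //
        //     struct Foo<T: ?Sized> {
        //         x: u16,
        //         y: T,
        //     }
        //
        // If `T` turns out to have alignment 4 at runtime, `y` lives at
        // offset 4; if it only has alignment 2, `y` lives at offset 2.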
        let unaligned_offset = bx.cx().const_usize(offset.bytes());

        let (_, mut unsized_align) = size_of_val::size_and_align_of_dst(bx, field.ty, meta);

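        // For `repr(packed)` types the field alignment is capped at the
        // packed value, so cap the dynamic alignment accordingly.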
        if let ty::Adt(def, _) = self.layout.ty.kind()
            && let Some(packed) = def.repr().pack
        {
            let packed = bx.const_usize(packed.bytes());
            let cmp = bx.icmp(IntPredicate::IntULT, unsized_align, packed);
            unsized_align = bx.select(cmp, unsized_align, packed);
        }

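        // Bump the unaligned offset up to the appropriate alignment.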
        let offset = round_up_const_value_to_alignment(bx, unaligned_offset, unsized_align);

        debug!("struct_field_ptr: DST field offset: {:?}", offset);

        let ptr = bx.inbounds_ptradd(self.val.llval, offset);
        let val =
            PlaceValue { llval: ptr, llextra: self.val.llextra, align: effective_field_align };
        val.with_type(field)
    }

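    /// Sets the discriminant of this place to the one for `variant_index`,
    /// storing a tag value if the layout requires one.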
    pub fn codegen_set_discr<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        &self,
        bx: &mut Bx,
        variant_index: VariantIdx,
    ) {
        match codegen_tag_value(bx.cx(), variant_index, self.layout) {
            Err(UninhabitedVariantError) => {
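                // The variant is uninhabited, so setting its discriminant is
                // unreachable; emit a well-defined abort rather than
                // immediate UB.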
                bx.abort();
            }
            Ok(Some((tag_field, imm))) => {
                let tag_place = self.project_field(bx, tag_field.as_usize());
                OperandValue::Immediate(imm).store(bx, tag_place);
            }
            Ok(None) => {}
        }
    }

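    /// Projects to the element at `llindex` in this array or slice place.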
    pub fn project_index<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        &self,
        bx: &mut Bx,
        llindex: V,
    ) -> Self {
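        // Statically compute the offset if we can, otherwise just use the
        // element size, as this will yield the lowest alignment.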
        let layout = self.layout.field(bx, 0);
        let offset = if let Some(llindex) = bx.const_to_opt_uint(llindex) {
            layout.size.checked_mul(llindex, bx).unwrap_or(layout.size)
        } else {
            layout.size
        };

        let llval = bx.inbounds_nuw_gep(bx.cx().backend_type(layout), self.val.llval, &[llindex]);
        let align = self.val.align.restrict_for_offset(offset);
        PlaceValue::new_sized(llval, align).with_type(layout)
    }

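    /// Restricts this place's layout to the given enum variant, without
    /// moving the pointer.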
    pub fn project_downcast<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        &self,
        bx: &mut Bx,
        variant_index: VariantIdx,
    ) -> Self {
        let mut downcast = *self;
        downcast.layout = self.layout.for_variant(bx.cx(), variant_index);
        downcast
    }

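    /// Reinterprets this place at type `ty`, keeping the same location and
    /// alignment (used when unwrapping unsafe binders).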
    pub fn project_type<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        &self,
        bx: &mut Bx,
        ty: Ty<'tcx>,
    ) -> Self {
        let mut downcast = *self;
        downcast.layout = bx.cx().layout_of(ty);
        downcast
    }

    pub fn storage_live<Bx: BuilderMethods<'a, 'tcx, Value = V>>(&self, bx: &mut Bx) {
        bx.lifetime_start(self.val.llval, self.layout.size);
    }

    pub fn storage_dead<Bx: BuilderMethods<'a, 'tcx, Value = V>>(&self, bx: &mut Bx) {
        bx.lifetime_end(self.val.llval, self.layout.size);
    }
}

impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
    #[instrument(level = "trace", skip(self, bx))]
    pub fn codegen_place(
        &mut self,
        bx: &mut Bx,
        place_ref: mir::PlaceRef<'tcx>,
    ) -> PlaceRef<'tcx, Bx::Value> {
        let cx = self.cx;
        let tcx = self.cx.tcx();

        let mut base = 0;
        let mut cg_base = match self.locals[place_ref.local] {
            LocalRef::Place(place) => place,
            LocalRef::UnsizedPlace(place) => bx.load_operand(place).deref(cx),
            LocalRef::Operand(..) => {
                if place_ref.is_indirect_first_projection() {
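                    // The local is an SSA operand holding a pointer: consume
                    // it with no projections applied, deref it, and skip the
                    // leading `Deref` in the projection loop below.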
                    base = 1;
                    let cg_base = self.codegen_consume(
                        bx,
                        mir::PlaceRef { projection: &place_ref.projection[..0], ..place_ref },
                    );
                    cg_base.deref(bx.cx())
                } else {
                    bug!("using operand local {:?} as place", place_ref);
                }
            }
            LocalRef::PendingOperand => {
                bug!("using still-pending operand local {:?} as place", place_ref);
            }
        };
        for elem in place_ref.projection[base..].iter() {
            cg_base = match *elem {
                mir::ProjectionElem::Deref => bx.load_operand(cg_base).deref(bx.cx()),
                mir::ProjectionElem::Field(ref field, _) => {
                    assert!(
                        !cg_base.layout.ty.is_any_ptr(),
                        "Bad PlaceRef: destructuring pointers should use cast/PtrMetadata, \
                         but tried to access field {field:?} of pointer {cg_base:?}",
                    );
                    cg_base.project_field(bx, field.index())
                }
                mir::ProjectionElem::OpaqueCast(ty) => {
                    bug!("encountered OpaqueCast({ty}) in codegen")
                }
                mir::ProjectionElem::UnwrapUnsafeBinder(ty) => {
                    cg_base.project_type(bx, self.monomorphize(ty))
                }
                mir::ProjectionElem::Index(index) => {
                    let index = &mir::Operand::Copy(mir::Place::from(index));
                    let index = self.codegen_operand(bx, index);
                    let llindex = index.immediate();
                    cg_base.project_index(bx, llindex)
                }
                mir::ProjectionElem::ConstantIndex { offset, from_end: false, min_length: _ } => {
                    let lloffset = bx.cx().const_usize(offset);
                    cg_base.project_index(bx, lloffset)
                }
                mir::ProjectionElem::ConstantIndex { offset, from_end: true, min_length: _ } => {
                    let lloffset = bx.cx().const_usize(offset);
                    let lllen = cg_base.len(bx.cx());
                    let llindex = bx.sub(lllen, lloffset);
                    cg_base.project_index(bx, llindex)
                }
                mir::ProjectionElem::Subslice { from, to, from_end } => {
                    let mut subslice = cg_base.project_index(bx, bx.cx().const_usize(from));
                    let projected_ty =
                        PlaceTy::from_ty(cg_base.layout.ty).projection_ty(tcx, *elem).ty;
                    subslice.layout = bx.cx().layout_of(self.monomorphize(projected_ty));

                    if subslice.layout.is_unsized() {
                        assert!(from_end, "slice subslices should be `from_end`");
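                        // The new slice length is the original length minus
                        // the elements trimmed from the front (`from`) and
                        // the back (`to`).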
                        subslice.val.llextra = Some(
                            bx.sub(cg_base.val.llextra.unwrap(), bx.cx().const_usize(from + to)),
                        );
                    }

                    subslice
                }
                mir::ProjectionElem::Downcast(_, v) => cg_base.project_downcast(bx, v),
            };
        }
        debug!("codegen_place(place={:?}) => {:?}", place_ref, cg_base);
        cg_base
    }

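    /// Returns the monomorphized type of the given MIR place in this
    /// function.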
    pub fn monomorphized_place_ty(&self, place_ref: mir::PlaceRef<'tcx>) -> Ty<'tcx> {
        let tcx = self.cx.tcx();
        let place_ty = place_ref.ty(self.mir, tcx);
        self.monomorphize(place_ty.ty)
    }
}

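/// Emits instructions that round `value` up to the next multiple of `align`,
/// which is assumed to be a power of two.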
fn round_up_const_value_to_alignment<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    bx: &mut Bx,
    value: Bx::Value,
    align: Bx::Value,
) -> Bx::Value {
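    // In pseudo code this computes:
    //
    //     if value & (align - 1) == 0 {
    //         value
    //     } else {
    //         (value & !(align - 1)) + align
    //     }
    //
    // The usual branch-free form is `(value + align - 1) & !(align - 1)`, but
    // the form emitted below,
    //
    //     value + (-value & (align - 1))
    //
    // computes the same result for any power-of-two `align` while mentioning
    // `align` only once, which gives the backend a better chance to fold the
    // expression when one of the operands is a compile-time constant.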
    let one = bx.const_usize(1);
    let align_minus_1 = bx.sub(align, one);
    let neg_value = bx.neg(value);
    let offset = bx.and(neg_value, align_minus_1);
    bx.add(value, offset)
}

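/// Computes the value that needs to be stored, and the field to store it in,
/// to set a place's discriminant to `variant_index`.
///
/// Returns `Err(UninhabitedVariantError)` if the variant is uninhabited, and
/// `Ok(None)` if no store is needed (single-variant layouts and the untagged
/// variant of a niche encoding). Note that for niche encodings the stored
/// value is in general not the discriminant itself.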
pub(super) fn codegen_tag_value<'tcx, V>(
    cx: &impl CodegenMethods<'tcx, Value = V>,
    variant_index: VariantIdx,
    layout: TyAndLayout<'tcx>,
) -> Result<Option<(FieldIdx, V)>, UninhabitedVariantError> {
    if layout.for_variant(cx, variant_index).is_uninhabited() {
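        // An uninhabited variant can never be constructed, so signal the
        // caller to emit an abort instead of a tag store.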
        return Err(UninhabitedVariantError);
    }

    Ok(match layout.variants {
        Variants::Empty => unreachable!("we already handled uninhabited types"),
        Variants::Single { index } => {
            assert_eq!(index, variant_index);
            None
        }

        Variants::Multiple { tag_encoding: TagEncoding::Direct, tag_field, .. } => {
            let discr = layout.ty.discriminant_for_variant(cx.tcx(), variant_index);
            let to = discr.unwrap().val;
            let tag_layout = layout.field(cx, tag_field.as_usize());
            let tag_llty = cx.immediate_backend_type(tag_layout);
            let imm = cx.const_uint_big(tag_llty, to);
            Some((tag_field, imm))
        }
        Variants::Multiple {
            tag_encoding: TagEncoding::Niche { untagged_variant, ref niche_variants, niche_start },
            tag_field,
            ..
        } => {
            if variant_index != untagged_variant {
                let niche_layout = layout.field(cx, tag_field.as_usize());
                let niche_llty = cx.immediate_backend_type(niche_layout);
                let BackendRepr::Scalar(scalar) = niche_layout.backend_repr else {
                    bug!("expected a scalar placeref for the niche");
                };
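                // The stored tag for a niche-encoded variant is its index
                // within the niche range, offset by `niche_start` and
                // truncated to the width of the tag field.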
                let niche_value = variant_index.as_u32() - niche_variants.start().as_u32();
                let niche_value = (niche_value as u128).wrapping_add(niche_start);
                let niche_value = niche_value & niche_layout.size.unsigned_int_max();

                let niche_llval = cx.scalar_to_backend(
                    Scalar::from_uint(niche_value, niche_layout.size),
                    scalar,
                    niche_llty,
                );
                Some((tag_field, niche_llval))
            } else {
                None
            }
        }
    })
}

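/// Marker error returned by [`codegen_tag_value`] when the requested variant
/// is uninhabited, so no tag value exists for it.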
#[derive(Debug)]
pub(super) struct UninhabitedVariantError;