use rustc_abi::Primitive::{Int, Pointer};
use rustc_abi::{Align, BackendRepr, FieldsShape, Size, TagEncoding, VariantIdx, Variants};
use rustc_middle::mir::interpret::Scalar;
use rustc_middle::mir::tcx::PlaceTy;
use rustc_middle::ty::layout::{HasTyCtxt, LayoutOf, TyAndLayout};
use rustc_middle::ty::{self, Ty};
use rustc_middle::{bug, mir};
use tracing::{debug, instrument};

use super::operand::OperandValue;
use super::{FunctionCx, LocalRef};
use crate::common::IntPredicate;
use crate::size_of_val;
use crate::traits::*;

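/// The location and extra runtime properties of the place.
///
/// Typically found in a [`PlaceRef`] or an [`OperandValue::Ref`].
///
/// As a location in memory, this has no specific type. If you want to
/// load or store it using a typed operation, use [`PlaceValue::with_type`].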
#[derive(Copy, Clone, Debug)]
pub struct PlaceValue<V> {
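    /// A pointer to the contents of the place.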
    pub llval: V,

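    /// This place's extra data if it is unsized, or `None` if sized.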
    pub llextra: Option<V>,

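    /// The alignment we know for this place.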
    pub align: Align,
}

impl<V: CodegenObject> PlaceValue<V> {
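    /// Constructor for the ordinary case of `Sized` types.
    ///
    /// Sets `llextra` to `None`.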
    pub fn new_sized(llval: V, align: Align) -> PlaceValue<V> {
        PlaceValue { llval, llextra: None, align }
    }

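    /// Allocates a stack slot in the function for a value
    /// of the specified size and alignment.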
    pub fn alloca<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        size: Size,
        align: Align,
    ) -> PlaceValue<V> {
        let llval = bx.alloca(size, align);
        PlaceValue::new_sized(llval, align)
    }

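    /// Creates a `PlaceRef` to this location with the given type.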
    pub fn with_type<'tcx>(self, layout: TyAndLayout<'tcx>) -> PlaceRef<'tcx, V> {
        assert!(
            layout.is_unsized() || layout.is_uninhabited() || self.llextra.is_none(),
            "Had pointer metadata {:?} for sized type {layout:?}",
            self.llextra,
        );
        PlaceRef { val: self, layout }
    }

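    /// Gets the pointer to this place as an [`OperandValue::Immediate`]
    /// or, for places needing metadata, an [`OperandValue::Pair`].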
    pub fn address(self) -> OperandValue<V> {
        if let Some(llextra) = self.llextra {
            OperandValue::Pair(self.llval, llextra)
        } else {
            OperandValue::Immediate(self.llval)
        }
    }
}

#[derive(Copy, Clone, Debug)]
pub struct PlaceRef<'tcx, V> {
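    /// The location and extra runtime properties of the place.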
    pub val: PlaceValue<V>,

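    /// The monomorphized type of this place, including variant information.
    ///
    /// You probably shouldn't use the alignment from this layout;
    /// rather you should use the `.val.align` of the actual place,
    /// which might be different from the type's normal alignment.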
    pub layout: TyAndLayout<'tcx>,
}

impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
    pub fn new_sized(llval: V, layout: TyAndLayout<'tcx>) -> PlaceRef<'tcx, V> {
        PlaceRef::new_sized_aligned(llval, layout, layout.align.abi)
    }

    pub fn new_sized_aligned(
        llval: V,
        layout: TyAndLayout<'tcx>,
        align: Align,
    ) -> PlaceRef<'tcx, V> {
        assert!(layout.is_sized());
        PlaceValue::new_sized(llval, align).with_type(layout)
    }

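    /// Allocates a stack slot in the function matching this place's layout
    /// (its size and alignment).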
    pub fn alloca<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        layout: TyAndLayout<'tcx>,
    ) -> Self {
        Self::alloca_size(bx, layout.size, layout)
    }

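    /// Like [`Self::alloca`], but with an explicitly-provided size,
    /// which need not equal `layout.size`.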
    pub fn alloca_size<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        size: Size,
        layout: TyAndLayout<'tcx>,
    ) -> Self {
        assert!(layout.is_sized(), "tried to statically allocate unsized place");
        PlaceValue::alloca(bx, size, layout.align.abi).with_type(layout)
    }

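    /// Returns a place for an indirect reference to an unsized place:
    /// a stack slot holding a pointer to the unsized value.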
    pub fn alloca_unsized_indirect<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        layout: TyAndLayout<'tcx>,
    ) -> Self {
        assert!(layout.is_unsized(), "tried to allocate indirect place for sized values");
        let ptr_ty = Ty::new_mut_ptr(bx.cx().tcx(), layout.ty);
        let ptr_layout = bx.cx().layout_of(ptr_ty);
        Self::alloca(bx, ptr_layout)
    }

    pub fn len<Cx: ConstCodegenMethods<'tcx, Value = V>>(&self, cx: &Cx) -> V {
        if let FieldsShape::Array { count, .. } = self.layout.fields {
            if self.layout.is_unsized() {
                assert_eq!(count, 0);
                self.val.llextra.unwrap()
            } else {
                cx.const_usize(count)
            }
        } else {
            bug!("unexpected layout `{:#?}` in PlaceRef::len", self.layout)
        }
    }
}

impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
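    /// Projects to the field at index `ix`, accounting for the dynamic
    /// alignment of an unsized field's data where necessary.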
    pub fn project_field<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        bx: &mut Bx,
        ix: usize,
    ) -> Self {
        let field = self.layout.field(bx.cx(), ix);
        let offset = self.layout.fields.offset(ix);
        let effective_field_align = self.val.align.restrict_for_offset(offset);

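        // The simple case: offset the pointer by the statically-known field
        // offset, which needs no adjustment for dynamic alignment.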
        let mut simple = || {
            let llval = if offset.bytes() == 0 {
                self.val.llval
            } else {
                bx.inbounds_ptradd(self.val.llval, bx.const_usize(offset.bytes()))
            };
            let val = PlaceValue {
                llval,
                llextra: if bx.cx().type_has_metadata(field.ty) { self.val.llextra } else { None },
                align: effective_field_align,
            };
            val.with_type(field)
        };

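        // Simple cases, which don't need DST adjustment:
        //   * known alignment - sized types, `[T]`, `str`
        //   * offset 0 -- rounding up to alignment cannot change the offset
        // Note that looking at `field.align` would be incorrect, since that is
        // not necessarily equal to the dynamic alignment of the type.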
        match field.ty.kind() {
            _ if field.is_sized() => return simple(),
            ty::Slice(..) | ty::Str => return simple(),
            _ if offset.bytes() == 0 => return simple(),
            _ => {}
        }

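        // We need to compute the field's offset manually, because its final,
        // aligned offset depends on the dynamic alignment of the unsized
        // field, which is only known at runtime.
        //
        // To demonstrate:
        //
        //     struct Foo<T: ?Sized> {
        //         x: u16,
        //         y: T
        //     }
        //
        // The type `Foo<Foo<dyn Trait>>` is represented in LLVM as
        // `{ u16, { u16, u8 } }`, meaning that statically the `y` field has
        // only 16-bit alignment, while the value it holds may need more.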
        let meta = self.val.llextra;

        let unaligned_offset = bx.cx().const_usize(offset.bytes());

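        // Get the dynamic alignment of the field.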
        let (_, mut unsized_align) = size_of_val::size_and_align_of_dst(bx, field.ty, meta);

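        // For packed types, we need to cap the alignment.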
        if let ty::Adt(def, _) = self.layout.ty.kind()
            && let Some(packed) = def.repr().pack
        {
            let packed = bx.const_usize(packed.bytes());
            let cmp = bx.icmp(IntPredicate::IntULT, unsized_align, packed);
            unsized_align = bx.select(cmp, unsized_align, packed)
        }

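        // Bump the unaligned offset up to the appropriate alignment.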
        let offset = round_up_const_value_to_alignment(bx, unaligned_offset, unsized_align);

        debug!("struct_field_ptr: DST field offset: {:?}", offset);

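        // Adjust the pointer by the dynamically-computed offset.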
        let ptr = bx.inbounds_ptradd(self.val.llval, offset);
        let val =
            PlaceValue { llval: ptr, llextra: self.val.llextra, align: effective_field_align };
        val.with_type(field)
    }

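    /// Obtains the actual discriminant of a value.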
    #[instrument(level = "trace", skip(bx))]
    pub fn codegen_get_discr<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        bx: &mut Bx,
        cast_to: Ty<'tcx>,
    ) -> V {
        let dl = &bx.tcx().data_layout;
        let cast_to_layout = bx.cx().layout_of(cast_to);
        let cast_to = bx.cx().immediate_backend_type(cast_to_layout);
        if self.layout.is_uninhabited() {
            return bx.cx().const_poison(cast_to);
        }
        let (tag_scalar, tag_encoding, tag_field) = match self.layout.variants {
            Variants::Empty => unreachable!("we already handled uninhabited types"),
            Variants::Single { index } => {
                let discr_val = self
                    .layout
                    .ty
                    .discriminant_for_variant(bx.cx().tcx(), index)
                    .map_or(index.as_u32() as u128, |discr| discr.val);
                return bx.cx().const_uint_big(cast_to, discr_val);
            }
            Variants::Multiple { tag, ref tag_encoding, tag_field, .. } => {
                (tag, tag_encoding, tag_field)
            }
        };

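        // Read the tag/niche-encoded discriminant from memory.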
        let tag = self.project_field(bx, tag_field);
        let tag_op = bx.load_operand(tag);
        let tag_imm = tag_op.immediate();

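        // Decode the discriminant (specifically if it's niche-encoded).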
        match *tag_encoding {
            TagEncoding::Direct => {
                let signed = match tag_scalar.primitive() {
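                    // We use `i1` for bytes that are always `0` or `1`,
                    // e.g., `#[repr(i8)] enum E { A, B }`, but we can't
                    // let LLVM interpret the `i1` as signed, because
                    // then `i1 1` (i.e., `E::B`) is effectively `i8 -1`.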
                    Int(_, signed) => !tag_scalar.is_bool() && signed,
                    _ => false,
                };
                bx.intcast(tag_imm, cast_to, signed)
            }
            TagEncoding::Niche { untagged_variant, ref niche_variants, niche_start } => {
                let (tag, tag_llty) = match tag_scalar.primitive() {
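                    // The niche arithmetic below is integer arithmetic, so if
                    // the tag is a pointer, read it as a pointer-sized integer.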
                    Pointer(_) => {
                        let t = bx.type_from_integer(dl.ptr_sized_integer());
                        let tag = bx.ptrtoint(tag_imm, t);
                        (tag, t)
                    }
                    _ => (tag_imm, bx.cx().immediate_backend_type(tag_op.layout)),
                };

                let relative_max = niche_variants.end().as_u32() - niche_variants.start().as_u32();

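                // We have a subrange `niche_start..=niche_end` inside `range`.
                // If the value of the tag is inside this subrange, it's a
                // "niche value", an increment of the discriminant. Otherwise it
                // indicates the untagged variant.
                // A general algorithm to extract the discriminant from the tag is:
                //
                //     relative_tag = tag - niche_start
                //     is_niche = relative_tag <= (ule) relative_max
                //     discr = if is_niche {
                //         cast(relative_tag) + niche_variants.start()
                //     } else {
                //         untagged_variant
                //     }
                //
                // However, we will likely be able to emit simpler code.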
                let (is_niche, tagged_discr, delta) = if relative_max == 0 {
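                    // Best case scenario: only one tagged variant. This will
                    // likely become just a comparison and a jump.
                    // The algorithm is:
                    //
                    //     is_niche = tag == niche_start
                    //     discr = if is_niche {
                    //         niche_start
                    //     } else {
                    //         untagged_variant
                    //     }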
                    let niche_start = bx.cx().const_uint_big(tag_llty, niche_start);
                    let is_niche = bx.icmp(IntPredicate::IntEQ, tag, niche_start);
                    let tagged_discr =
                        bx.cx().const_uint(cast_to, niche_variants.start().as_u32() as u64);
                    (is_niche, tagged_discr, 0)
                } else {
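                    // The special case doesn't apply, so we'll have to go with
                    // the general algorithm.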
                    let relative_discr = bx.sub(tag, bx.cx().const_uint_big(tag_llty, niche_start));
                    let cast_tag = bx.intcast(relative_discr, cast_to, false);
                    let is_niche = bx.icmp(
                        IntPredicate::IntULE,
                        relative_discr,
                        bx.cx().const_uint(tag_llty, relative_max as u64),
                    );
                    (is_niche, cast_tag, niche_variants.start().as_u32() as u128)
                };

                let tagged_discr = if delta == 0 {
                    tagged_discr
                } else {
                    bx.add(tagged_discr, bx.cx().const_uint_big(cast_to, delta))
                };

                let discr = bx.select(
                    is_niche,
                    tagged_discr,
                    bx.cx().const_uint(cast_to, untagged_variant.as_u32() as u64),
                );

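                // In principle we could insert assumes on the possible range of `discr`, but
                // currently in LLVM this isn't worth it because the original `tag` will
                // have the correct range information anyway.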
                discr
            }
        }
    }

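    /// Sets the discriminant for a new value of the given case of the given
    /// representation.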
    pub fn codegen_set_discr<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        &self,
        bx: &mut Bx,
        variant_index: VariantIdx,
    ) {
        if self.layout.for_variant(bx.cx(), variant_index).is_uninhabited() {
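            // We play it safe by using a well-defined `abort`, but we could go for immediate UB
            // if that turns out to be helpful.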
            bx.abort();
            return;
        }
        match self.layout.variants {
            Variants::Empty => unreachable!("we already handled uninhabited types"),
            Variants::Single { index } => assert_eq!(index, variant_index),

            Variants::Multiple { tag_encoding: TagEncoding::Direct, tag_field, .. } => {
                let ptr = self.project_field(bx, tag_field);
                let to =
                    self.layout.ty.discriminant_for_variant(bx.tcx(), variant_index).unwrap().val;
                bx.store_to_place(
                    bx.cx().const_uint_big(bx.cx().backend_type(ptr.layout), to),
                    ptr.val,
                );
            }
            Variants::Multiple {
                tag_encoding:
                    TagEncoding::Niche { untagged_variant, ref niche_variants, niche_start },
                tag_field,
                ..
            } => {
                if variant_index != untagged_variant {
                    let niche = self.project_field(bx, tag_field);
                    let niche_llty = bx.cx().immediate_backend_type(niche.layout);
                    let BackendRepr::Scalar(scalar) = niche.layout.backend_repr else {
                        bug!("expected a scalar placeref for the niche");
                    };
                    let niche_value = variant_index.as_u32() - niche_variants.start().as_u32();
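                    // The niche arithmetic wraps, and the result is then
                    // truncated to the size of the tag.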
                    let niche_value = (niche_value as u128).wrapping_add(niche_start);
                    let niche_value = niche_value & niche.layout.size.unsigned_int_max();

                    let niche_llval = bx.cx().scalar_to_backend(
                        Scalar::from_uint(niche_value, niche.layout.size),
                        scalar,
                        niche_llty,
                    );
                    OperandValue::Immediate(niche_llval).store(bx, niche);
                }
            }
        }
    }

    pub fn project_index<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        &self,
        bx: &mut Bx,
        llindex: V,
    ) -> Self {
        let layout = self.layout.field(bx, 0);
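        // Statically compute the offset if we can, otherwise just use the element size,
        // as this will yield the lowest alignment.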
        let offset = if let Some(llindex) = bx.const_to_opt_uint(llindex) {
            layout.size.checked_mul(llindex, bx).unwrap_or(layout.size)
        } else {
            layout.size
        };

        let llval = bx.inbounds_gep(bx.cx().backend_type(layout), self.val.llval, &[llindex]);
        let align = self.val.align.restrict_for_offset(offset);
        PlaceValue::new_sized(llval, align).with_type(layout)
    }

    pub fn project_downcast<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        &self,
        bx: &mut Bx,
        variant_index: VariantIdx,
    ) -> Self {
        let mut downcast = *self;
        downcast.layout = self.layout.for_variant(bx.cx(), variant_index);
        downcast
    }

    pub fn project_type<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        &self,
        bx: &mut Bx,
        ty: Ty<'tcx>,
    ) -> Self {
        let mut downcast = *self;
        downcast.layout = bx.cx().layout_of(ty);
        downcast
    }

    pub fn storage_live<Bx: BuilderMethods<'a, 'tcx, Value = V>>(&self, bx: &mut Bx) {
        bx.lifetime_start(self.val.llval, self.layout.size);
    }

    pub fn storage_dead<Bx: BuilderMethods<'a, 'tcx, Value = V>>(&self, bx: &mut Bx) {
        bx.lifetime_end(self.val.llval, self.layout.size);
    }
}

impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
    #[instrument(level = "trace", skip(self, bx))]
    pub fn codegen_place(
        &mut self,
        bx: &mut Bx,
        place_ref: mir::PlaceRef<'tcx>,
    ) -> PlaceRef<'tcx, Bx::Value> {
        let cx = self.cx;
        let tcx = self.cx.tcx();

        let mut base = 0;
        let mut cg_base = match self.locals[place_ref.local] {
            LocalRef::Place(place) => place,
            LocalRef::UnsizedPlace(place) => bx.load_operand(place).deref(cx),
            LocalRef::Operand(..) => {
                if place_ref.is_indirect_first_projection() {
                    base = 1;
                    let cg_base = self.codegen_consume(
                        bx,
                        mir::PlaceRef { projection: &place_ref.projection[..0], ..place_ref },
                    );
                    cg_base.deref(bx.cx())
                } else {
                    bug!("using operand local {:?} as place", place_ref);
                }
            }
            LocalRef::PendingOperand => {
                bug!("using still-pending operand local {:?} as place", place_ref);
            }
        };
        for elem in place_ref.projection[base..].iter() {
            cg_base = match *elem {
                mir::ProjectionElem::Deref => bx.load_operand(cg_base).deref(bx.cx()),
                mir::ProjectionElem::Field(ref field, _) => {
                    assert!(
                        !cg_base.layout.ty.is_any_ptr(),
                        "Bad PlaceRef: destructing pointers should use cast/PtrMetadata, \
                         but tried to access field {field:?} of pointer {cg_base:?}",
                    );
                    cg_base.project_field(bx, field.index())
                }
                mir::ProjectionElem::OpaqueCast(ty) => {
                    bug!("encountered OpaqueCast({ty}) in codegen")
                }
                mir::ProjectionElem::Subtype(ty) => cg_base.project_type(bx, self.monomorphize(ty)),
                mir::ProjectionElem::UnwrapUnsafeBinder(ty) => {
                    cg_base.project_type(bx, self.monomorphize(ty))
                }
                mir::ProjectionElem::Index(index) => {
                    let index = &mir::Operand::Copy(mir::Place::from(index));
                    let index = self.codegen_operand(bx, index);
                    let llindex = index.immediate();
                    cg_base.project_index(bx, llindex)
                }
                mir::ProjectionElem::ConstantIndex { offset, from_end: false, min_length: _ } => {
                    let lloffset = bx.cx().const_usize(offset);
                    cg_base.project_index(bx, lloffset)
                }
                mir::ProjectionElem::ConstantIndex { offset, from_end: true, min_length: _ } => {
                    let lloffset = bx.cx().const_usize(offset);
                    let lllen = cg_base.len(bx.cx());
                    let llindex = bx.sub(lllen, lloffset);
                    cg_base.project_index(bx, llindex)
                }
                mir::ProjectionElem::Subslice { from, to, from_end } => {
                    let mut subslice = cg_base.project_index(bx, bx.cx().const_usize(from));
                    let projected_ty =
                        PlaceTy::from_ty(cg_base.layout.ty).projection_ty(tcx, *elem).ty;
                    subslice.layout = bx.cx().layout_of(self.monomorphize(projected_ty));

                    if subslice.layout.is_unsized() {
                        assert!(from_end, "slice subslices should be `from_end`");
                        subslice.val.llextra = Some(
                            bx.sub(cg_base.val.llextra.unwrap(), bx.cx().const_usize(from + to)),
                        );
                    }

                    subslice
                }
                mir::ProjectionElem::Downcast(_, v) => cg_base.project_downcast(bx, v),
            };
        }
        debug!("codegen_place(place={:?}) => {:?}", place_ref, cg_base);
        cg_base
    }

    pub fn monomorphized_place_ty(&self, place_ref: mir::PlaceRef<'tcx>) -> Ty<'tcx> {
        let tcx = self.cx.tcx();
        let place_ty = place_ref.ty(self.mir, tcx);
        self.monomorphize(place_ty.ty)
    }
}

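/// Rounds `value` up to the next multiple of `align`, both given as runtime
/// values, with `align` required to be a power of two.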
fn round_up_const_value_to_alignment<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    bx: &mut Bx,
    value: Bx::Value,
    align: Bx::Value,
) -> Bx::Value {
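    // In pseudo code:
    //
    //     if value & (align - 1) == 0 {
    //         value
    //     } else {
    //         (value & !(align - 1)) + align
    //     }
    //
    // This is usually computed without branching as
    //
    //     (value + align - 1) & !(align - 1)
    //
    // but here we instead emit
    //
    //     value + (-value & (align - 1))
    //
    // For a power-of-two `align`, `-value & (align - 1)` is exactly the
    // amount missing to reach the next multiple of `align` (and zero when
    // `value` is already aligned), and this form tends to fold away better
    // when `value` is a known constant.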
    let one = bx.const_usize(1);
    let align_minus_1 = bx.sub(align, one);
    let neg_value = bx.neg(value);
    let offset = bx.and(neg_value, align_minus_1);
    bx.add(value, offset)
}