use rustc_abi::{Align, BackendRepr, FieldsShape, Size, TagEncoding, VariantIdx, Variants};
use rustc_middle::mir::PlaceTy;
use rustc_middle::mir::interpret::Scalar;
use rustc_middle::ty::layout::{HasTyCtxt, HasTypingEnv, LayoutOf, TyAndLayout};
use rustc_middle::ty::{self, Ty};
use rustc_middle::{bug, mir};
use tracing::{debug, instrument};

use super::operand::OperandValue;
use super::{FunctionCx, LocalRef};
use crate::common::IntPredicate;
use crate::size_of_val;
use crate::traits::*;

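/// The location and extra runtime properties of the place.
///
/// Typically found in a [`PlaceRef`] or an [`OperandValue::Ref`].
///
/// As a location in memory, this has no specific type. If you want to
/// load or store it using a typed operation, use [`Self::with_type`].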
#[derive(Copy, Clone, Debug)]
pub struct PlaceValue<V> {
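    /// A pointer to the contents of the place.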
    pub llval: V,

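    /// This place's extra data if it is unsized, or `None` if null.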
    pub llextra: Option<V>,

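    /// The alignment we know for this place.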
    pub align: Align,
}

impl<V: CodegenObject> PlaceValue<V> {
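    /// Constructor for the trivial, fully-sized case.
    ///
    /// Sized values have no pointer metadata, so `llextra` is `None`.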
    pub fn new_sized(llval: V, align: Align) -> PlaceValue<V> {
        PlaceValue { llval, llextra: None, align }
    }

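    /// Allocates a stack slot in the function for a value
    /// of the given size and alignment.
    ///
    /// The allocation itself is untyped.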
    pub fn alloca<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        size: Size,
        align: Align,
    ) -> PlaceValue<V> {
        let llval = bx.alloca(size, align);
        PlaceValue::new_sized(llval, align)
    }

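    /// Creates a `PlaceRef` to this location with the given type.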
    pub fn with_type<'tcx>(self, layout: TyAndLayout<'tcx>) -> PlaceRef<'tcx, V> {
        assert!(
            layout.is_unsized() || layout.is_uninhabited() || self.llextra.is_none(),
            "Had pointer metadata {:?} for sized type {layout:?}",
            self.llextra,
        );
        PlaceRef { val: self, layout }
    }

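    /// Gets the pointer to this place as an [`OperandValue::Immediate`]
    /// or, for those needing metadata, an [`OperandValue::Pair`].
    ///
    /// This is the inverse of [`OperandValue::deref`].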
    pub fn address(self) -> OperandValue<V> {
        if let Some(llextra) = self.llextra {
            OperandValue::Pair(self.llval, llextra)
        } else {
            OperandValue::Immediate(self.llval)
        }
    }
}

#[derive(Copy, Clone, Debug)]
pub struct PlaceRef<'tcx, V> {
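    /// The location and extra runtime properties of the place.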
    pub val: PlaceValue<V>,

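    /// The monomorphized type of this place, including variant information.
    ///
    /// You probably shouldn't use the alignment of this layout;
    /// rather you should use the `.val.align` of the actual place,
    /// which might be different from the type's normal alignment.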
    pub layout: TyAndLayout<'tcx>,
}

impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
    pub fn new_sized(llval: V, layout: TyAndLayout<'tcx>) -> PlaceRef<'tcx, V> {
        PlaceRef::new_sized_aligned(llval, layout, layout.align.abi)
    }

    pub fn new_sized_aligned(
        llval: V,
        layout: TyAndLayout<'tcx>,
        align: Align,
    ) -> PlaceRef<'tcx, V> {
        assert!(layout.is_sized());
        PlaceValue::new_sized(llval, align).with_type(layout)
    }

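    /// Allocates a stack slot for a sized value of the given layout,
    /// using the layout's size and ABI alignment.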
    pub fn alloca<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        layout: TyAndLayout<'tcx>,
    ) -> Self {
        Self::alloca_size(bx, layout.size, layout)
    }

    pub fn alloca_size<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        size: Size,
        layout: TyAndLayout<'tcx>,
    ) -> Self {
        assert!(layout.is_sized(), "tried to statically allocate unsized place");
        PlaceValue::alloca(bx, size, layout.align.abi).with_type(layout)
    }

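    /// Returns a place for an indirect reference to an unsized place.
    ///
    /// Since unsized data cannot be stack-allocated directly, this allocates a
    /// slot for a pointer to it; the returned place has that pointer's layout.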
    pub fn alloca_unsized_indirect<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        layout: TyAndLayout<'tcx>,
    ) -> Self {
        assert!(layout.is_unsized(), "tried to allocate indirect place for sized values");
        let ptr_ty = Ty::new_mut_ptr(bx.cx().tcx(), layout.ty);
        let ptr_layout = bx.cx().layout_of(ptr_ty);
        Self::alloca(bx, ptr_layout)
    }

    pub fn len<Cx: ConstCodegenMethods<Value = V>>(&self, cx: &Cx) -> V {
        if let FieldsShape::Array { count, .. } = self.layout.fields {
            if self.layout.is_unsized() {
                assert_eq!(count, 0);
                self.val.llextra.unwrap()
            } else {
                cx.const_usize(count)
            }
        } else {
            bug!("unexpected layout `{:#?}` in PlaceRef::len", self.layout)
        }
    }
}

impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
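    /// Access a field, at a point when the value's case is known.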
    pub fn project_field<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        bx: &mut Bx,
        ix: usize,
    ) -> Self {
        let field = self.layout.field(bx.cx(), ix);
        let offset = self.layout.fields.offset(ix);
        let effective_field_align = self.val.align.restrict_for_offset(offset);

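        // Fast path: the field's offset is statically known, so projecting is just a
        // constant pointer offset, forwarding the metadata only for unsized fields.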
        let mut simple = || {
            let llval = if offset.bytes() == 0 {
                self.val.llval
            } else {
                bx.inbounds_ptradd(self.val.llval, bx.const_usize(offset.bytes()))
            };
            let val = PlaceValue {
                llval,
                llextra: if bx.cx().tcx().type_has_metadata(field.ty, bx.cx().typing_env()) {
                    self.val.llextra
                } else {
                    None
                },
                align: effective_field_align,
            };
            val.with_type(field)
        };

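        // Simple cases, which don't need DST adjustment:
        //   * known alignment - sized types, `[T]`, `str`
        //   * offset 0 -- rounding up to alignment cannot change the offset
        // Note that looking at `field.align` is incorrect since that is not necessarily equal
        // to the dynamic alignment of the type.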
        match field.ty.kind() {
            _ if field.is_sized() => return simple(),
            ty::Slice(..) | ty::Str => return simple(),
            _ if offset.bytes() == 0 => return simple(),
            _ => {}
        }

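        // The offset recorded in the layout is only a lower bound: the field must also be
        // aligned to its *dynamic* alignment, which is only known at runtime through the
        // pointer metadata. So take the static offset and round it up to that alignment.
        //
        // To demonstrate:
        //
        //     struct Foo<T: ?Sized> {
        //         x: u16,
        //         y: T,
        //     }
        //
        // In `Foo<Foo<dyn Trait>>` the static layout only sees the sized prefix of `y`,
        // but the concrete type erased behind `dyn Trait` may demand a larger alignment,
        // pushing `y` to a higher offset than the static layout records.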
        let meta = self.val.llextra;

        let unaligned_offset = bx.cx().const_usize(offset.bytes());

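        // Compute the field's dynamic alignment from the pointer metadata; the dynamic
        // size is not needed here.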
        let (_, mut unsized_align) = size_of_val::size_and_align_of_dst(bx, field.ty, meta);

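        // For packed types, we need to cap the alignment.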
        if let ty::Adt(def, _) = self.layout.ty.kind()
            && let Some(packed) = def.repr().pack
        {
            let packed = bx.const_usize(packed.bytes());
            let cmp = bx.icmp(IntPredicate::IntULT, unsized_align, packed);
            unsized_align = bx.select(cmp, unsized_align, packed);
        }

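        // Bump the unaligned offset up to the appropriate alignment.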
        let offset = round_up_const_value_to_alignment(bx, unaligned_offset, unsized_align);

        debug!("struct_field_ptr: DST field offset: {:?}", offset);

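        // Adjust the pointer by the now correctly aligned offset.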
        let ptr = bx.inbounds_ptradd(self.val.llval, offset);
        let val =
            PlaceValue { llval: ptr, llextra: self.val.llextra, align: effective_field_align };
        val.with_type(field)
    }

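    /// Sets the discriminant for a new value of the given case of the given
    /// representation.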
    pub fn codegen_set_discr<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        &self,
        bx: &mut Bx,
        variant_index: VariantIdx,
    ) {
        if self.layout.for_variant(bx.cx(), variant_index).is_uninhabited() {
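            // We play it safe by using a well-defined `abort`, but we could go for immediate UB
            // if that turns out to be helpful.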
            bx.abort();
            return;
        }
        match self.layout.variants {
            Variants::Empty => unreachable!("we already handled uninhabited types"),
            Variants::Single { index } => assert_eq!(index, variant_index),

            Variants::Multiple { tag_encoding: TagEncoding::Direct, tag_field, .. } => {
                let ptr = self.project_field(bx, tag_field);
                let to =
                    self.layout.ty.discriminant_for_variant(bx.tcx(), variant_index).unwrap().val;
                bx.store_to_place(
                    bx.cx().const_uint_big(bx.cx().backend_type(ptr.layout), to),
                    ptr.val,
                );
            }
            Variants::Multiple {
                tag_encoding:
                    TagEncoding::Niche { untagged_variant, ref niche_variants, niche_start },
                tag_field,
                ..
            } => {
                if variant_index != untagged_variant {
                    let niche = self.project_field(bx, tag_field);
                    let niche_llty = bx.cx().immediate_backend_type(niche.layout);
                    let BackendRepr::Scalar(scalar) = niche.layout.backend_repr else {
                        bug!("expected a scalar placeref for the niche");
                    };
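                    // We want `niche_value.wrapping_add(niche_start)`, wrapping at the width
                    // of the niche's type. The easiest way is to do the arithmetic on `u128`
                    // and then mask off any bits that came from using too wide a type.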
                    let niche_value = variant_index.as_u32() - niche_variants.start().as_u32();
                    let niche_value = (niche_value as u128).wrapping_add(niche_start);
                    let niche_value = niche_value & niche.layout.size.unsigned_int_max();

                    let niche_llval = bx.cx().scalar_to_backend(
                        Scalar::from_uint(niche_value, niche.layout.size),
                        scalar,
                        niche_llty,
                    );
                    OperandValue::Immediate(niche_llval).store(bx, niche);
                }
            }
        }
    }

    pub fn project_index<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        &self,
        bx: &mut Bx,
        llindex: V,
    ) -> Self {
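        // Statically compute the offset if we can, otherwise just use the element size,
        // as this will yield the lowest alignment.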
        let layout = self.layout.field(bx, 0);
        let offset = if let Some(llindex) = bx.const_to_opt_uint(llindex) {
            layout.size.checked_mul(llindex, bx).unwrap_or(layout.size)
        } else {
            layout.size
        };

        let llval = bx.inbounds_nuw_gep(bx.cx().backend_type(layout), self.val.llval, &[llindex]);
        let align = self.val.align.restrict_for_offset(offset);
        PlaceValue::new_sized(llval, align).with_type(layout)
    }

    pub fn project_downcast<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        &self,
        bx: &mut Bx,
        variant_index: VariantIdx,
    ) -> Self {
        let mut downcast = *self;
        downcast.layout = self.layout.for_variant(bx.cx(), variant_index);
        downcast
    }

    pub fn project_type<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        &self,
        bx: &mut Bx,
        ty: Ty<'tcx>,
    ) -> Self {
        let mut downcast = *self;
        downcast.layout = bx.cx().layout_of(ty);
        downcast
    }

    pub fn storage_live<Bx: BuilderMethods<'a, 'tcx, Value = V>>(&self, bx: &mut Bx) {
        bx.lifetime_start(self.val.llval, self.layout.size);
    }

    pub fn storage_dead<Bx: BuilderMethods<'a, 'tcx, Value = V>>(&self, bx: &mut Bx) {
        bx.lifetime_end(self.val.llval, self.layout.size);
    }
}

impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
    #[instrument(level = "trace", skip(self, bx))]
    pub fn codegen_place(
        &mut self,
        bx: &mut Bx,
        place_ref: mir::PlaceRef<'tcx>,
    ) -> PlaceRef<'tcx, Bx::Value> {
        let cx = self.cx;
        let tcx = self.cx.tcx();

        let mut base = 0;
        let mut cg_base = match self.locals[place_ref.local] {
            LocalRef::Place(place) => place,
            LocalRef::UnsizedPlace(place) => bx.load_operand(place).deref(cx),
            LocalRef::Operand(..) => {
                if place_ref.is_indirect_first_projection() {
                    base = 1;
                    let cg_base = self.codegen_consume(
                        bx,
                        mir::PlaceRef { projection: &place_ref.projection[..0], ..place_ref },
                    );
                    cg_base.deref(bx.cx())
                } else {
                    bug!("using operand local {:?} as place", place_ref);
                }
            }
            LocalRef::PendingOperand => {
                bug!("using still-pending operand local {:?} as place", place_ref);
            }
        };
        for elem in place_ref.projection[base..].iter() {
            cg_base = match *elem {
                mir::ProjectionElem::Deref => bx.load_operand(cg_base).deref(bx.cx()),
                mir::ProjectionElem::Field(ref field, _) => {
                    assert!(
                        !cg_base.layout.ty.is_any_ptr(),
376 "Bad PlaceRef: destructing pointers should use cast/PtrMetadata, \
377 but tried to access field {field:?} of pointer {cg_base:?}",
                    );
                    cg_base.project_field(bx, field.index())
                }
                mir::ProjectionElem::OpaqueCast(ty) => {
                    bug!("encountered OpaqueCast({ty}) in codegen")
                }
                mir::ProjectionElem::Subtype(ty) => cg_base.project_type(bx, self.monomorphize(ty)),
                mir::ProjectionElem::UnwrapUnsafeBinder(ty) => {
                    cg_base.project_type(bx, self.monomorphize(ty))
                }
                mir::ProjectionElem::Index(index) => {
                    let index = &mir::Operand::Copy(mir::Place::from(index));
                    let index = self.codegen_operand(bx, index);
                    let llindex = index.immediate();
                    cg_base.project_index(bx, llindex)
                }
                mir::ProjectionElem::ConstantIndex { offset, from_end: false, min_length: _ } => {
                    let lloffset = bx.cx().const_usize(offset);
                    cg_base.project_index(bx, lloffset)
                }
                mir::ProjectionElem::ConstantIndex { offset, from_end: true, min_length: _ } => {
                    let lloffset = bx.cx().const_usize(offset);
                    let lllen = cg_base.len(bx.cx());
                    let llindex = bx.sub(lllen, lloffset);
                    cg_base.project_index(bx, llindex)
                }
                mir::ProjectionElem::Subslice { from, to, from_end } => {
                    let mut subslice = cg_base.project_index(bx, bx.cx().const_usize(from));
                    let projected_ty =
                        PlaceTy::from_ty(cg_base.layout.ty).projection_ty(tcx, *elem).ty;
                    subslice.layout = bx.cx().layout_of(self.monomorphize(projected_ty));

                    if subslice.layout.is_unsized() {
                        assert!(from_end, "slice subslices should be `from_end`");
                        subslice.val.llextra = Some(
                            bx.sub(cg_base.val.llextra.unwrap(), bx.cx().const_usize(from + to)),
                        );
                    }

                    subslice
                }
                mir::ProjectionElem::Downcast(_, v) => cg_base.project_downcast(bx, v),
            };
        }
        debug!("codegen_place(place={:?}) => {:?}", place_ref, cg_base);
        cg_base
    }

    pub fn monomorphized_place_ty(&self, place_ref: mir::PlaceRef<'tcx>) -> Ty<'tcx> {
        let tcx = self.cx.tcx();
        let place_ty = place_ref.ty(self.mir, tcx);
        self.monomorphize(place_ty.ty)
    }
}

fn round_up_const_value_to_alignment<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    bx: &mut Bx,
    value: Bx::Value,
    align: Bx::Value,
) -> Bx::Value {
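    // In pseudo code:
    //
    //     if value & (align - 1) == 0 {
    //         value
    //     } else {
    //         (value & !(align - 1)) + align
    //     }
    //
    // Usually this is written without branches as
    //
    //     (value + align - 1) & !(align - 1)
    //
    // But that formula cannot take advantage of a constant `value`: the variant used
    // below,
    //
    //     value + (-value & (align - 1))
    //
    // is equivalent for power-of-two alignments and lets a constant `value` (such as
    // `0`) fold away even when `align` is not known at compile time.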
    let one = bx.const_usize(1);
    let align_minus_1 = bx.sub(align, one);
    let neg_value = bx.neg(value);
    let offset = bx.and(neg_value, align_minus_1);
    bx.add(value, offset)
}