use std::fmt;

use itertools::Either;
use rustc_abi as abi;
use rustc_abi::{
    Align, BackendRepr, FIRST_VARIANT, FieldIdx, Primitive, Size, TagEncoding, VariantIdx, Variants,
};
use rustc_middle::mir::interpret::{Pointer, Scalar, alloc_range};
use rustc_middle::mir::{self, ConstValue};
use rustc_middle::ty::Ty;
use rustc_middle::ty::layout::{LayoutOf, TyAndLayout};
use rustc_middle::{bug, span_bug};
use rustc_session::config::OptLevel;
use tracing::{debug, instrument};

use super::place::{PlaceRef, PlaceValue};
use super::rvalue::transmute_scalar;
use super::{FunctionCx, LocalRef};
use crate::MemFlags;
use crate::common::IntPredicate;
use crate::traits::*;

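/// How an operand value is represented for the backend.
///
/// - `Ref`: the value lives in memory, described by a [`PlaceValue`] (pointer,
///   optional unsized metadata, and alignment).
/// - `Immediate`: a single backend value (scalar or SIMD vector).
/// - `Pair`: two backend values, for `ScalarPair` layouts such as wide pointers.
/// - `ZeroSized`: no data at all; the type is a ZST.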
#[derive(Copy, Clone, Debug)]
pub enum OperandValue<V> {
    Ref(PlaceValue<V>),
    Immediate(V),
    Pair(V, V),
    ZeroSized,
}

impl<V: CodegenObject> OperandValue<V> {
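    /// Treats this operand as a (possibly wide) pointer, returning the data
    /// pointer and the optional metadata; panics on `Ref` and `ZeroSized`.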
    pub(crate) fn pointer_parts(self) -> (V, Option<V>) {
        match self {
            OperandValue::Immediate(llptr) => (llptr, None),
            OperandValue::Pair(llptr, llextra) => (llptr, Some(llextra)),
            _ => bug!("OperandValue cannot be a pointer: {self:?}"),
        }
    }

    pub(crate) fn deref(self, align: Align) -> PlaceValue<V> {
        let (llval, llextra) = self.pointer_parts();
        PlaceValue { llval, llextra, align }
    }

    pub(crate) fn is_expected_variant_for_type<'tcx, Cx: LayoutTypeCodegenMethods<'tcx>>(
        &self,
        cx: &Cx,
        ty: TyAndLayout<'tcx>,
    ) -> bool {
        match self {
            OperandValue::ZeroSized => ty.is_zst(),
            OperandValue::Immediate(_) => cx.is_backend_immediate(ty),
            OperandValue::Pair(_, _) => cx.is_backend_scalar_pair(ty),
            OperandValue::Ref(_) => cx.is_backend_ref(ty),
        }
    }
}

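/// An [`OperandValue`] paired with the layout of the Rust type it represents,
/// so the backend value and its type information travel together.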
#[derive(Copy, Clone)]
pub struct OperandRef<'tcx, V> {
    pub val: OperandValue<V>,

    pub layout: TyAndLayout<'tcx>,
}

impl<V: CodegenObject> fmt::Debug for OperandRef<'_, V> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "OperandRef({:?} @ {:?})", self.val, self.layout)
    }
}

impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
    pub fn zero_sized(layout: TyAndLayout<'tcx>) -> OperandRef<'tcx, V> {
        assert!(layout.is_zst());
        OperandRef { val: OperandValue::ZeroSized, layout }
    }

    pub(crate) fn from_const<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        val: mir::ConstValue,
        ty: Ty<'tcx>,
    ) -> Self {
        let layout = bx.layout_of(ty);

        let val = match val {
            ConstValue::Scalar(x) => {
                let BackendRepr::Scalar(scalar) = layout.backend_repr else {
                    bug!("from_const: invalid ByVal layout: {:#?}", layout);
                };
                let llval = bx.scalar_to_backend(x, scalar, bx.immediate_backend_type(layout));
                OperandValue::Immediate(llval)
            }
            ConstValue::ZeroSized => return OperandRef::zero_sized(layout),
            ConstValue::Slice { alloc_id, meta } => {
                let BackendRepr::ScalarPair(a_scalar, _) = layout.backend_repr else {
                    bug!("from_const: invalid ScalarPair layout: {:#?}", layout);
                };
                let a = Scalar::from_pointer(Pointer::new(alloc_id.into(), Size::ZERO), &bx.tcx());
                let a_llval = bx.scalar_to_backend(
                    a,
                    a_scalar,
                    bx.scalar_pair_element_backend_type(layout, 0, true),
                );
                let b_llval = bx.const_usize(meta);
                OperandValue::Pair(a_llval, b_llval)
            }
            ConstValue::Indirect { alloc_id, offset } => {
                let alloc = bx.tcx().global_alloc(alloc_id).unwrap_memory();
                return Self::from_const_alloc(bx, layout, alloc, offset);
            }
        };

        OperandRef { val, layout }
    }

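    /// Builds an operand from a constant allocation: reads the value directly
    /// when the layout is an initialized scalar or scalar pair, and otherwise
    /// materializes the allocation as a global and loads from it.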
    fn from_const_alloc<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        layout: TyAndLayout<'tcx>,
        alloc: rustc_middle::mir::interpret::ConstAllocation<'tcx>,
        offset: Size,
    ) -> Self {
        let alloc_align = alloc.inner().align;
        assert!(alloc_align >= layout.align.abi, "{alloc_align:?} < {:?}", layout.align.abi);

        let read_scalar = |start, size, s: abi::Scalar, ty| {
            match alloc.0.read_scalar(
                bx,
                alloc_range(start, size),
                matches!(s.primitive(), abi::Primitive::Pointer(_)),
            ) {
                Ok(val) => bx.scalar_to_backend(val, s, ty),
                Err(_) => bx.const_poison(ty),
            }
        };

        match layout.backend_repr {
            BackendRepr::Scalar(s @ abi::Scalar::Initialized { .. }) => {
                let size = s.size(bx);
                assert_eq!(size, layout.size, "abi::Scalar size does not match layout size");
                let val = read_scalar(offset, size, s, bx.immediate_backend_type(layout));
                OperandRef { val: OperandValue::Immediate(val), layout }
            }
            BackendRepr::ScalarPair(
                a @ abi::Scalar::Initialized { .. },
                b @ abi::Scalar::Initialized { .. },
            ) => {
                let (a_size, b_size) = (a.size(bx), b.size(bx));
                let b_offset = (offset + a_size).align_to(b.align(bx).abi);
                assert!(b_offset.bytes() > 0);
                let a_val = read_scalar(
                    offset,
                    a_size,
                    a,
                    bx.scalar_pair_element_backend_type(layout, 0, true),
                );
                let b_val = read_scalar(
                    b_offset,
                    b_size,
                    b,
                    bx.scalar_pair_element_backend_type(layout, 1, true),
                );
                OperandRef { val: OperandValue::Pair(a_val, b_val), layout }
            }
            _ if layout.is_zst() => OperandRef::zero_sized(layout),
            _ => {
                let init = bx.const_data_from_alloc(alloc);
                let base_addr = bx.static_addr_of(init, alloc_align, None);

                let llval = bx.const_ptr_byte_offset(base_addr, offset);
                bx.load_operand(PlaceRef::new_sized(llval, layout))
            }
        }
    }

    pub fn immediate(self) -> V {
        match self.val {
            OperandValue::Immediate(s) => s,
            _ => bug!("not immediate: {:?}", self),
        }
    }

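    /// Treats this pointer-typed operand as the place it points to, using the
    /// pointee's layout for type and alignment; panics for `Box`, which is
    /// handled separately in codegen.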
    pub fn deref<Cx: CodegenMethods<'tcx>>(self, cx: &Cx) -> PlaceRef<'tcx, V> {
        if self.layout.ty.is_box() {
            bug!("dereferencing {:?} in codegen", self.layout.ty);
        }

        let projected_ty = self
            .layout
            .ty
            .builtin_deref(true)
            .unwrap_or_else(|| bug!("deref of non-pointer {:?}", self));

        let layout = cx.layout_of(projected_ty);
        self.val.deref(layout.align.abi).with_type(layout)
    }

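    /// If this operand is a `Pair`, packs the two halves into a single
    /// aggregate backend value; otherwise returns the plain immediate.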
    pub fn immediate_or_packed_pair<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        bx: &mut Bx,
    ) -> V {
        if let OperandValue::Pair(a, b) = self.val {
            let llty = bx.cx().immediate_backend_type(self.layout);
            debug!("Operand::immediate_or_packed_pair: packing {:?} into {:?}", self, llty);
            let mut llpair = bx.cx().const_poison(llty);
            llpair = bx.insert_value(llpair, a, 0);
            llpair = bx.insert_value(llpair, b, 1);
            llpair
        } else {
            self.immediate()
        }
    }

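    /// The inverse of [`Self::immediate_or_packed_pair`]: unpacks an aggregate
    /// backend value into a `Pair` when the layout is a `ScalarPair`, and
    /// wraps it as an `Immediate` otherwise.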
    pub fn from_immediate_or_packed_pair<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        llval: V,
        layout: TyAndLayout<'tcx>,
    ) -> Self {
        let val = if let BackendRepr::ScalarPair(..) = layout.backend_repr {
            debug!("Operand::from_immediate_or_packed_pair: unpacking {:?} @ {:?}", llval, layout);

            let a_llval = bx.extract_value(llval, 0);
            let b_llval = bx.extract_value(llval, 1);
            OperandValue::Pair(a_llval, b_llval)
        } else {
            OperandValue::Immediate(llval)
        };
        OperandRef { val, layout }
    }

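    /// Projects field `i` out of an operand that is not in memory: ZST fields
    /// become `ZeroSized`, a field covering the whole operand is transmuted,
    /// and a field of a `ScalarPair` is picked out of the pair by offset.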
    pub(crate) fn extract_field<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        &self,
        fx: &mut FunctionCx<'a, 'tcx, Bx>,
        bx: &mut Bx,
        i: usize,
    ) -> Self {
        let field = self.layout.field(bx.cx(), i);
        let offset = self.layout.fields.offset(i);

        if !bx.is_backend_ref(self.layout) && bx.is_backend_ref(field) {
            span_bug!(
                fx.mir.span,
                "Non-ref type {self:?} cannot project to ref field type {field:?}",
            );
        }

        let val = if field.is_zst() {
            OperandValue::ZeroSized
        } else if field.size == self.layout.size {
            assert_eq!(offset.bytes(), 0);
            fx.codegen_transmute_operand(bx, *self, field)
        } else {
            let (in_scalar, imm) = match (self.val, self.layout.backend_repr) {
                (OperandValue::Pair(a_llval, b_llval), BackendRepr::ScalarPair(a, b)) => {
                    if offset.bytes() == 0 {
                        assert_eq!(field.size, a.size(bx.cx()));
                        (Some(a), a_llval)
                    } else {
                        assert_eq!(offset, a.size(bx.cx()).align_to(b.align(bx.cx()).abi));
                        assert_eq!(field.size, b.size(bx.cx()));
                        (Some(b), b_llval)
                    }
                }

                _ => {
                    span_bug!(fx.mir.span, "OperandRef::extract_field({:?}): not applicable", self)
                }
            };
            OperandValue::Immediate(match field.backend_repr {
                BackendRepr::SimdVector { .. } => imm,
                BackendRepr::Scalar(out_scalar) => {
                    let Some(in_scalar) = in_scalar else {
                        span_bug!(
                            fx.mir.span,
                            "OperandRef::extract_field({:?}): missing input scalar for output scalar",
                            self
                        )
                    };
                    if in_scalar != out_scalar {
                        let backend = bx.from_immediate(imm);
                        bx.to_immediate_scalar(backend, out_scalar)
                    } else {
                        imm
                    }
                }
                BackendRepr::ScalarPair(_, _) | BackendRepr::Memory { .. } => bug!(),
            })
        };

        OperandRef { val, layout: field }
    }

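    /// Reads the discriminant of an enum operand and casts it to `cast_to`,
    /// covering uninhabited layouts, single-variant layouts, directly encoded
    /// tags, and niche-encoded tags.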
    #[instrument(level = "trace", skip(fx, bx))]
    pub fn codegen_get_discr<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        fx: &mut FunctionCx<'a, 'tcx, Bx>,
        bx: &mut Bx,
        cast_to: Ty<'tcx>,
    ) -> V {
        let dl = &bx.tcx().data_layout;
        let cast_to_layout = bx.cx().layout_of(cast_to);
        let cast_to = bx.cx().immediate_backend_type(cast_to_layout);

        if self.layout.is_uninhabited() {
            return bx.cx().const_poison(cast_to);
        }

        let (tag_scalar, tag_encoding, tag_field) = match self.layout.variants {
            Variants::Empty => unreachable!("we already handled uninhabited types"),
            Variants::Single { index } => {
                let discr_val =
                    if let Some(discr) = self.layout.ty.discriminant_for_variant(bx.tcx(), index) {
                        discr.val
                    } else {
                        assert_eq!(index, FIRST_VARIANT);
                        0
                    };
                return bx.cx().const_uint_big(cast_to, discr_val);
            }
            Variants::Multiple { tag, ref tag_encoding, tag_field, .. } => {
                (tag, tag_encoding, tag_field)
            }
        };

        let tag_op = match self.val {
            OperandValue::ZeroSized => bug!(),
            OperandValue::Immediate(_) | OperandValue::Pair(_, _) => {
                self.extract_field(fx, bx, tag_field.as_usize())
            }
            OperandValue::Ref(place) => {
                let tag = place.with_type(self.layout).project_field(bx, tag_field.as_usize());
                bx.load_operand(tag)
            }
        };
        let tag_imm = tag_op.immediate();

        match *tag_encoding {
            TagEncoding::Direct => {
                let signed = match tag_scalar.primitive() {
                    Primitive::Int(_, signed) => !tag_scalar.is_bool() && signed,
                    _ => false,
                };
                bx.intcast(tag_imm, cast_to, signed)
            }
            TagEncoding::Niche { untagged_variant, ref niche_variants, niche_start } => {
                let (tag, tag_llty) = match tag_scalar.primitive() {
                    Primitive::Pointer(_) => {
                        let t = bx.type_from_integer(dl.ptr_sized_integer());
                        let tag = bx.ptrtoint(tag_imm, t);
                        (tag, t)
                    }
                    _ => (tag_imm, bx.cx().immediate_backend_type(tag_op.layout)),
                };

                let relative_max = niche_variants.end().as_u32() - niche_variants.start().as_u32();
                let niche_start_const = bx.cx().const_uint_big(tag_llty, niche_start);

                let (is_niche, tagged_discr, delta) = if relative_max == 0 {
                    let is_niche = bx.icmp(IntPredicate::IntEQ, tag, niche_start_const);
                    let tagged_discr =
                        bx.cx().const_uint(cast_to, niche_variants.start().as_u32() as u64);
                    (is_niche, tagged_discr, 0)
                } else {
                    let tag_range = tag_scalar.valid_range(&dl);
                    let tag_size = tag_scalar.size(&dl);
                    let niche_end = u128::from(relative_max).wrapping_add(niche_start);
                    let niche_end = tag_size.truncate(niche_end);

                    let relative_discr = bx.sub(tag, niche_start_const);
                    let cast_tag = bx.intcast(relative_discr, cast_to, false);
                    let is_niche = if tag_range.no_unsigned_wraparound(tag_size) == Ok(true) {
                        if niche_start == tag_range.start {
                            let niche_end_const = bx.cx().const_uint_big(tag_llty, niche_end);
                            bx.icmp(IntPredicate::IntULE, tag, niche_end_const)
                        } else {
                            assert_eq!(niche_end, tag_range.end);
                            bx.icmp(IntPredicate::IntUGE, tag, niche_start_const)
                        }
                    } else if tag_range.no_signed_wraparound(tag_size) == Ok(true) {
                        if niche_start == tag_range.start {
                            let niche_end_const = bx.cx().const_uint_big(tag_llty, niche_end);
                            bx.icmp(IntPredicate::IntSLE, tag, niche_end_const)
                        } else {
                            assert_eq!(niche_end, tag_range.end);
                            bx.icmp(IntPredicate::IntSGE, tag, niche_start_const)
                        }
                    } else {
                        bx.icmp(
                            IntPredicate::IntULE,
                            relative_discr,
                            bx.cx().const_uint(tag_llty, relative_max as u64),
                        )
                    };

                    (is_niche, cast_tag, niche_variants.start().as_u32() as u128)
                };

                let tagged_discr = if delta == 0 {
                    tagged_discr
                } else {
                    bx.add(tagged_discr, bx.cx().const_uint_big(cast_to, delta))
                };

                let untagged_variant_const =
                    bx.cx().const_uint(cast_to, u64::from(untagged_variant.as_u32()));

                if niche_variants.contains(&untagged_variant)
                    && bx.cx().sess().opts.optimize != OptLevel::No
                {
                    let ne = bx.icmp(IntPredicate::IntNE, tagged_discr, untagged_variant_const);
                    bx.assume(ne);
                }

                let discr = bx.select(is_niche, tagged_discr, untagged_variant_const);

                discr
            }
        }
    }
}

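/// Partially built counterpart of [`OperandValue`]: each slot holds either an
/// already-produced backend value (`Left`) or the description of the value
/// that still needs to be filled in (`Right`).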
#[derive(Debug, Copy, Clone)]
enum OperandValueBuilder<V> {
    ZeroSized,
    Immediate(Either<V, abi::Scalar>),
    Pair(Either<V, abi::Scalar>, Either<V, abi::Scalar>),
    Vector(Either<V, ()>),
}

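/// Builder for assembling an [`OperandRef`] field by field, typically used
/// when an aggregate value can be built directly as an SSA value instead of
/// going through memory.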
#[derive(Debug, Copy, Clone)]
pub(super) struct OperandRefBuilder<'tcx, V> {
    val: OperandValueBuilder<V>,
    layout: TyAndLayout<'tcx>,
}

impl<'a, 'tcx, V: CodegenObject> OperandRefBuilder<'tcx, V> {
    pub(super) fn new(layout: TyAndLayout<'tcx>) -> Self {
        let val = match layout.backend_repr {
            BackendRepr::Memory { .. } if layout.is_zst() => OperandValueBuilder::ZeroSized,
            BackendRepr::Scalar(s) => OperandValueBuilder::Immediate(Either::Right(s)),
            BackendRepr::ScalarPair(a, b) => {
                OperandValueBuilder::Pair(Either::Right(a), Either::Right(b))
            }
            BackendRepr::SimdVector { .. } => OperandValueBuilder::Vector(Either::Right(())),
            BackendRepr::Memory { .. } => {
                bug!("Cannot use non-ZST Memory-ABI type in operand builder: {layout:?}");
            }
        };
        OperandRefBuilder { val, layout }
    }

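    /// Stores `field_operand` into the slot for `variant`/`field`,
    /// transmuting each scalar to the target scalar representation as needed.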
    pub(super) fn insert_field<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        &mut self,
        bx: &mut Bx,
        variant: VariantIdx,
        field: FieldIdx,
        field_operand: OperandRef<'tcx, V>,
    ) {
        if let OperandValue::ZeroSized = field_operand.val {
            return;
        }

        let is_zero_offset = if let abi::FieldsShape::Primitive = self.layout.fields {
            assert!(!self.layout.is_zst());
            assert_eq!(variant, FIRST_VARIANT);
            assert_eq!(field, FieldIdx::ZERO);
            true
        } else {
            let variant_layout = self.layout.for_variant(bx.cx(), variant);
            let field_offset = variant_layout.fields.offset(field.as_usize());
            field_offset == Size::ZERO
        };

        let mut update = |tgt: &mut Either<V, abi::Scalar>, src, from_scalar| {
            let to_scalar = tgt.unwrap_right();
            let imm = transmute_scalar(bx, src, from_scalar, to_scalar);
            *tgt = Either::Left(imm);
        };

        match (field_operand.val, field_operand.layout.backend_repr) {
            (OperandValue::ZeroSized, _) => unreachable!("Handled above"),
            (OperandValue::Immediate(v), BackendRepr::Scalar(from_scalar)) => match &mut self.val {
                OperandValueBuilder::Immediate(val @ Either::Right(_)) if is_zero_offset => {
                    update(val, v, from_scalar);
                }
                OperandValueBuilder::Pair(fst @ Either::Right(_), _) if is_zero_offset => {
                    update(fst, v, from_scalar);
                }
                OperandValueBuilder::Pair(_, snd @ Either::Right(_)) if !is_zero_offset => {
                    update(snd, v, from_scalar);
                }
                _ => {
                    bug!("Tried to insert {field_operand:?} into {variant:?}.{field:?} of {self:?}")
                }
            },
            (OperandValue::Immediate(v), BackendRepr::SimdVector { .. }) => match &mut self.val {
                OperandValueBuilder::Vector(val @ Either::Right(())) if is_zero_offset => {
                    *val = Either::Left(v);
                }
                _ => {
                    bug!("Tried to insert {field_operand:?} into {variant:?}.{field:?} of {self:?}")
                }
            },
            (OperandValue::Pair(a, b), BackendRepr::ScalarPair(from_sa, from_sb)) => {
                match &mut self.val {
                    OperandValueBuilder::Pair(fst @ Either::Right(_), snd @ Either::Right(_)) => {
                        update(fst, a, from_sa);
                        update(snd, b, from_sb);
                    }
                    _ => bug!(
                        "Tried to insert {field_operand:?} into {variant:?}.{field:?} of {self:?}"
                    ),
                }
            }
            (OperandValue::Ref(place), BackendRepr::Memory { .. }) => match &mut self.val {
                OperandValueBuilder::Vector(val @ Either::Right(())) => {
                    let ibty = bx.cx().immediate_backend_type(self.layout);
                    let simd = bx.load_from_place(ibty, place);
                    *val = Either::Left(simd);
                }
                _ => {
                    bug!("Tried to insert {field_operand:?} into {variant:?}.{field:?} of {self:?}")
                }
            },
            _ => bug!("Operand cannot be used with `insert_field`: {field_operand:?}"),
        }
    }

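    /// Stores an already-converted immediate into the slot whose offset
    /// matches field `f`, without any scalar conversion.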
    pub(super) fn insert_imm(&mut self, f: FieldIdx, imm: V) {
        let field_offset = self.layout.fields.offset(f.as_usize());
        let is_zero_offset = field_offset == Size::ZERO;
        match &mut self.val {
            OperandValueBuilder::Immediate(val @ Either::Right(_)) if is_zero_offset => {
                *val = Either::Left(imm);
            }
            OperandValueBuilder::Pair(fst @ Either::Right(_), _) if is_zero_offset => {
                *fst = Either::Left(imm);
            }
            OperandValueBuilder::Pair(_, snd @ Either::Right(_)) if !is_zero_offset => {
                *snd = Either::Left(imm);
            }
            _ => bug!("Tried to insert {imm:?} into field {f:?} of {self:?}"),
        }
    }

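    /// Finishes the builder, producing an [`OperandRef`]. Slots that were
    /// never filled are only accepted when uninitialized bytes are valid for
    /// the scalar, in which case they become `undef`.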
    pub(super) fn build(&self, cx: &impl CodegenMethods<'tcx, Value = V>) -> OperandRef<'tcx, V> {
        let OperandRefBuilder { val, layout } = *self;

        let unwrap = |r: Either<V, abi::Scalar>| match r {
            Either::Left(v) => v,
            Either::Right(s) if s.is_uninit_valid() => {
                let bty = cx.type_from_scalar(s);
                cx.const_undef(bty)
            }
            Either::Right(_) => bug!("OperandRef::build called while fields are missing {self:?}"),
        };

        let val = match val {
            OperandValueBuilder::ZeroSized => OperandValue::ZeroSized,
            OperandValueBuilder::Immediate(v) => OperandValue::Immediate(unwrap(v)),
            OperandValueBuilder::Pair(a, b) => OperandValue::Pair(unwrap(a), unwrap(b)),
            OperandValueBuilder::Vector(v) => match v {
                Either::Left(v) => OperandValue::Immediate(v),
                Either::Right(())
                    if let BackendRepr::SimdVector { element, .. } = layout.backend_repr
                        && element.is_uninit_valid() =>
                {
                    let bty = cx.immediate_backend_type(layout);
                    OperandValue::Immediate(cx.const_undef(bty))
                }
                Either::Right(()) => {
                    bug!("OperandRef::build called while fields are missing {self:?}")
                }
            },
        };
        OperandRef { val, layout }
    }
}

impl<'a, 'tcx, V: CodegenObject> OperandValue<V> {
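    /// Returns an `OperandValue` of the shape required by `layout` whose
    /// components are all backend `poison` constants.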
    pub fn poison<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        layout: TyAndLayout<'tcx>,
    ) -> OperandValue<V> {
        assert!(layout.is_sized());
        if layout.is_zst() {
            OperandValue::ZeroSized
        } else if bx.cx().is_backend_immediate(layout) {
            let ibty = bx.cx().immediate_backend_type(layout);
            OperandValue::Immediate(bx.const_poison(ibty))
        } else if bx.cx().is_backend_scalar_pair(layout) {
            let ibty0 = bx.cx().scalar_pair_element_backend_type(layout, 0, true);
            let ibty1 = bx.cx().scalar_pair_element_backend_type(layout, 1, true);
            OperandValue::Pair(bx.const_poison(ibty0), bx.const_poison(ibty1))
        } else {
            let ptr = bx.cx().type_ptr();
            OperandValue::Ref(PlaceValue::new_sized(bx.const_poison(ptr), layout.align.abi))
        }
    }

    pub fn store<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        bx: &mut Bx,
        dest: PlaceRef<'tcx, V>,
    ) {
        self.store_with_flags(bx, dest, MemFlags::empty());
    }

    pub fn volatile_store<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        bx: &mut Bx,
        dest: PlaceRef<'tcx, V>,
    ) {
        self.store_with_flags(bx, dest, MemFlags::VOLATILE);
    }

    pub fn unaligned_volatile_store<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        bx: &mut Bx,
        dest: PlaceRef<'tcx, V>,
    ) {
        self.store_with_flags(bx, dest, MemFlags::VOLATILE | MemFlags::UNALIGNED);
    }

    pub fn nontemporal_store<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        bx: &mut Bx,
        dest: PlaceRef<'tcx, V>,
    ) {
        self.store_with_flags(bx, dest, MemFlags::NONTEMPORAL);
    }

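    /// Writes this operand into the sized destination place, applying `flags`
    /// to every store it emits: zero-sized values emit no stores, and `Pair`
    /// values are stored as two scalars at their respective offsets.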
    pub(crate) fn store_with_flags<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        bx: &mut Bx,
        dest: PlaceRef<'tcx, V>,
        flags: MemFlags,
    ) {
        debug!("OperandRef::store: operand={:?}, dest={:?}", self, dest);
        match self {
            OperandValue::ZeroSized => {}
            OperandValue::Ref(val) => {
                assert!(dest.layout.is_sized(), "cannot directly store unsized values");
                if val.llextra.is_some() {
                    bug!("cannot directly store unsized values");
                }
                bx.typed_place_copy_with_flags(dest.val, val, dest.layout, flags);
            }
            OperandValue::Immediate(s) => {
                let val = bx.from_immediate(s);
                bx.store_with_flags(val, dest.val.llval, dest.val.align, flags);
            }
            OperandValue::Pair(a, b) => {
                let BackendRepr::ScalarPair(a_scalar, b_scalar) = dest.layout.backend_repr else {
                    bug!("store_with_flags: invalid ScalarPair layout: {:#?}", dest.layout);
                };
                let b_offset = a_scalar.size(bx).align_to(b_scalar.align(bx).abi);

                let val = bx.from_immediate(a);
                let align = dest.val.align;
                bx.store_with_flags(val, dest.val.llval, align, flags);

                let llptr = bx.inbounds_ptradd(dest.val.llval, bx.const_usize(b_offset.bytes()));
                let val = bx.from_immediate(b);
                let align = dest.val.align.restrict_for_offset(b_offset);
                bx.store_with_flags(val, llptr, align, flags);
            }
        }
    }
}

impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
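    /// Attempts to consume `place_ref` directly as an SSA operand, without
    /// going through memory; returns `None` when the local lives in memory or
    /// the projection cannot be applied to an operand.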
    fn maybe_codegen_consume_direct(
        &mut self,
        bx: &mut Bx,
        place_ref: mir::PlaceRef<'tcx>,
    ) -> Option<OperandRef<'tcx, Bx::Value>> {
        debug!("maybe_codegen_consume_direct(place_ref={:?})", place_ref);

        match self.locals[place_ref.local] {
            LocalRef::Operand(mut o) => {
                for elem in place_ref.projection {
                    match *elem {
                        mir::ProjectionElem::Field(f, _) => {
                            assert!(
                                !o.layout.ty.is_any_ptr(),
                                "Bad PlaceRef: destructuring pointers should use cast/PtrMetadata, \
                                 but tried to access field {f:?} of pointer {o:?}",
                            );
                            o = o.extract_field(self, bx, f.index());
                        }
                        mir::PlaceElem::Downcast(_, vidx) => {
                            debug_assert_eq!(
                                o.layout.variants,
                                abi::Variants::Single { index: vidx },
                            );
                            let layout = o.layout.for_variant(bx.cx(), vidx);
                            o = OperandRef { val: o.val, layout }
                        }
                        mir::PlaceElem::Subtype(subtype_ty) => {
                            let subtype_ty = self.monomorphize(subtype_ty);
                            let layout = self.cx.layout_of(subtype_ty);
                            o = OperandRef { val: o.val, layout }
                        }
                        _ => return None,
                    }
                }

                Some(o)
            }
            LocalRef::PendingOperand => {
                bug!("use of {:?} before def", place_ref);
            }
            LocalRef::Place(..) | LocalRef::UnsizedPlace(..) => None,
        }
    }

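    /// Produces an operand for `place_ref`, preferring the direct SSA path
    /// and otherwise codegenning the place and loading from it.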
    pub fn codegen_consume(
        &mut self,
        bx: &mut Bx,
        place_ref: mir::PlaceRef<'tcx>,
    ) -> OperandRef<'tcx, Bx::Value> {
        debug!("codegen_consume(place_ref={:?})", place_ref);

        let ty = self.monomorphized_place_ty(place_ref);
        let layout = bx.cx().layout_of(ty);

        if layout.is_zst() {
            return OperandRef::zero_sized(layout);
        }

        if let Some(o) = self.maybe_codegen_consume_direct(bx, place_ref) {
            return o;
        }

        let place = self.codegen_place(bx, place_ref);
        bx.load_operand(place)
    }

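    /// Codegens a MIR operand: `Copy`/`Move` consume the place, while
    /// constants are evaluated, with a fast path for SIMD vector constants.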
    pub fn codegen_operand(
        &mut self,
        bx: &mut Bx,
        operand: &mir::Operand<'tcx>,
    ) -> OperandRef<'tcx, Bx::Value> {
        debug!("codegen_operand(operand={:?})", operand);

        match *operand {
            mir::Operand::Copy(ref place) | mir::Operand::Move(ref place) => {
                self.codegen_consume(bx, place.as_ref())
            }

            mir::Operand::Constant(ref constant) => {
                let constant_ty = self.monomorphize(constant.ty());
                if constant_ty.is_simd() {
                    let layout = bx.layout_of(constant_ty);
                    if let BackendRepr::SimdVector { .. } = layout.backend_repr {
                        let (llval, ty) = self.immediate_const_vector(bx, constant);
                        return OperandRef {
                            val: OperandValue::Immediate(llval),
                            layout: bx.layout_of(ty),
                        };
                    }
                }
                self.eval_mir_constant_to_operand(bx, constant)
            }
        }
    }
}