use std::fmt;

use arrayvec::ArrayVec;
use either::Either;
use rustc_abi as abi;
use rustc_abi::{Align, BackendRepr, FIRST_VARIANT, Primitive, Size, TagEncoding, Variants};
use rustc_middle::mir::interpret::{Pointer, Scalar, alloc_range};
use rustc_middle::mir::{self, ConstValue};
use rustc_middle::ty::Ty;
use rustc_middle::ty::layout::{LayoutOf, TyAndLayout};
use rustc_middle::{bug, span_bug};
use tracing::{debug, instrument};

use super::place::{PlaceRef, PlaceValue};
use super::{FunctionCx, LocalRef};
use crate::common::IntPredicate;
use crate::traits::*;
use crate::{MemFlags, size_of_val};

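/// The representation of a single Rust value during codegen.
///
/// Which variant is used for a given type is dictated by the backend's
/// layout queries; see [`OperandValue::is_expected_variant_for_type`].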
#[derive(Copy, Clone, Debug)]
pub enum OperandValue<V> {
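    /// The value is in memory behind a pointer, carried along with any
    /// unsized metadata and the alignment of the allocation.
    ///
    /// This is the variant used for types where `is_backend_ref` holds.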
    Ref(PlaceValue<V>),
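    /// A single backend immediate value, for types where
    /// `is_backend_immediate` holds.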
    Immediate(V),
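    /// A pair of backend immediate values, for types where
    /// `is_backend_scalar_pair` holds.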
    Pair(V, V),
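    /// No value at all, for zero-sized types.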
    ZeroSized,
}

impl<V: CodegenObject> OperandValue<V> {
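    /// If this is `ZeroSized`, `Immediate`, or `Pair`, returns the (up to
    /// two) immediate values in order; if it is `Ref`, returns the place.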
    #[inline]
    pub(crate) fn immediates_or_place(self) -> Either<ArrayVec<V, 2>, PlaceValue<V>> {
        match self {
            OperandValue::ZeroSized => Either::Left(ArrayVec::new()),
            OperandValue::Immediate(a) => Either::Left(ArrayVec::from_iter([a])),
            OperandValue::Pair(a, b) => Either::Left([a, b].into()),
            OperandValue::Ref(p) => Either::Right(p),
        }
    }

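    /// Rebuilds a `ZeroSized`, `Immediate`, or `Pair` from the immediates
    /// collected by `immediates_or_place`.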
    #[inline]
    pub(crate) fn from_immediates(immediates: ArrayVec<V, 2>) -> Self {
        let mut it = immediates.into_iter();
        let Some(a) = it.next() else {
            return OperandValue::ZeroSized;
        };
        let Some(b) = it.next() else {
            return OperandValue::Immediate(a);
        };
        OperandValue::Pair(a, b)
    }

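    /// Treats this value as a pointer, returning the data pointer and the
    /// optional metadata (vtable or length). Panics on `Ref` and `ZeroSized`.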
    pub(crate) fn pointer_parts(self) -> (V, Option<V>) {
        match self {
            OperandValue::Immediate(llptr) => (llptr, None),
            OperandValue::Pair(llptr, llextra) => (llptr, Some(llextra)),
            _ => bug!("OperandValue cannot be a pointer: {self:?}"),
        }
    }

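    /// Treats this value as a pointer and turns it into the memory it points
    /// to, as a `PlaceValue` with the given alignment.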
    pub(crate) fn deref(self, align: Align) -> PlaceValue<V> {
        let (llval, llextra) = self.pointer_parts();
        PlaceValue { llval, llextra, align }
    }

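    /// Checks whether this is the variant the backend expects for values of
    /// type `ty`, mirroring the `is_backend_*` layout queries.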
    pub(crate) fn is_expected_variant_for_type<'tcx, Cx: LayoutTypeCodegenMethods<'tcx>>(
        &self,
        cx: &Cx,
        ty: TyAndLayout<'tcx>,
    ) -> bool {
        match self {
            OperandValue::ZeroSized => ty.is_zst(),
            OperandValue::Immediate(_) => cx.is_backend_immediate(ty),
            OperandValue::Pair(_, _) => cx.is_backend_scalar_pair(ty),
            OperandValue::Ref(_) => cx.is_backend_ref(ty),
        }
    }
}

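/// An `OperandValue` paired with the layout of the Rust type it represents;
/// this is what most codegen paths pass around for by-value data.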
#[derive(Copy, Clone)]
pub struct OperandRef<'tcx, V> {
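    /// The backend representation of the value.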
    pub val: OperandValue<V>,

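    /// The layout of the value, based on its Rust type.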
    pub layout: TyAndLayout<'tcx>,
}

impl<V: CodegenObject> fmt::Debug for OperandRef<'_, V> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "OperandRef({:?} @ {:?})", self.val, self.layout)
    }
}

impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
    pub fn zero_sized(layout: TyAndLayout<'tcx>) -> OperandRef<'tcx, V> {
        assert!(layout.is_zst());
        OperandRef { val: OperandValue::ZeroSized, layout }
    }

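    /// Builds an operand from an evaluated MIR constant: scalars become
    /// immediates, slices become pairs, and indirect constants are read back
    /// out of their allocation.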
    pub(crate) fn from_const<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        val: mir::ConstValue<'tcx>,
        ty: Ty<'tcx>,
    ) -> Self {
        let layout = bx.layout_of(ty);

        let val = match val {
            ConstValue::Scalar(x) => {
                let BackendRepr::Scalar(scalar) = layout.backend_repr else {
                    bug!("from_const: invalid ByVal layout: {:#?}", layout);
                };
                let llval = bx.scalar_to_backend(x, scalar, bx.immediate_backend_type(layout));
                OperandValue::Immediate(llval)
            }
            ConstValue::ZeroSized => return OperandRef::zero_sized(layout),
            ConstValue::Slice { data, meta } => {
                let BackendRepr::ScalarPair(a_scalar, _) = layout.backend_repr else {
                    bug!("from_const: invalid ScalarPair layout: {:#?}", layout);
                };
                let a = Scalar::from_pointer(
                    Pointer::new(bx.tcx().reserve_and_set_memory_alloc(data).into(), Size::ZERO),
                    &bx.tcx(),
                );
                let a_llval = bx.scalar_to_backend(
                    a,
                    a_scalar,
                    bx.scalar_pair_element_backend_type(layout, 0, true),
                );
                let b_llval = bx.const_usize(meta);
                OperandValue::Pair(a_llval, b_llval)
            }
            ConstValue::Indirect { alloc_id, offset } => {
                let alloc = bx.tcx().global_alloc(alloc_id).unwrap_memory();
                return Self::from_const_alloc(bx, layout, alloc, offset);
            }
        };

        OperandRef { val, layout }
    }

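    /// Reads an operand from a constant allocation at the given offset.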
    fn from_const_alloc<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        layout: TyAndLayout<'tcx>,
        alloc: rustc_middle::mir::interpret::ConstAllocation<'tcx>,
        offset: Size,
    ) -> Self {
        let alloc_align = alloc.inner().align;
        assert!(alloc_align >= layout.align.abi);

        let read_scalar = |start, size, s: abi::Scalar, ty| {
            match alloc.0.read_scalar(
                bx,
                alloc_range(start, size),
                matches!(s.primitive(), abi::Primitive::Pointer(_)),
            ) {
                Ok(val) => bx.scalar_to_backend(val, s, ty),
                Err(_) => bx.const_poison(ty),
            }
        };

        match layout.backend_repr {
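            // Only `Scalar::Initialized` layouts can be turned into
            // immediates here: a type like `MaybeUninit<u64>` is `Scalar` as
            // far as layout is concerned, yet may contain uninitialized
            // bytes, so it has to take the memory path below.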
            BackendRepr::Scalar(s @ abi::Scalar::Initialized { .. }) => {
                let size = s.size(bx);
                assert_eq!(size, layout.size, "abi::Scalar size does not match layout size");
                let val = read_scalar(offset, size, s, bx.immediate_backend_type(layout));
                OperandRef { val: OperandValue::Immediate(val), layout }
            }
            BackendRepr::ScalarPair(
                a @ abi::Scalar::Initialized { .. },
                b @ abi::Scalar::Initialized { .. },
            ) => {
                let (a_size, b_size) = (a.size(bx), b.size(bx));
                let b_offset = (offset + a_size).align_to(b.align(bx).abi);
                assert!(b_offset.bytes() > 0);
                let a_val = read_scalar(
                    offset,
                    a_size,
                    a,
                    bx.scalar_pair_element_backend_type(layout, 0, true),
                );
                let b_val = read_scalar(
                    b_offset,
                    b_size,
                    b,
                    bx.scalar_pair_element_backend_type(layout, 1, true),
                );
                OperandRef { val: OperandValue::Pair(a_val, b_val), layout }
            }
            _ if layout.is_zst() => OperandRef::zero_sized(layout),
            _ => {
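                // Neither a scalar nor a scalar pair: emit the allocation as
                // backend data and load the operand from it.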
                let init = bx.const_data_from_alloc(alloc);
                let base_addr = bx.static_addr_of(init, alloc_align, None);

                let llval = bx.const_ptr_byte_offset(base_addr, offset);
                bx.load_operand(PlaceRef::new_sized(llval, layout))
            }
        }
    }

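    /// Asserts that this operand is a single immediate and returns it.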
    pub fn immediate(self) -> V {
        match self.val {
            OperandValue::Immediate(s) => s,
            _ => bug!("not immediate: {:?}", self),
        }
    }

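    /// Dereferences this pointer operand, producing the place it points to.
    ///
    /// `Box` is rejected here: the MIR `Derefer` pass is expected to have
    /// lowered all `Box` dereferences before codegen.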
    pub fn deref<Cx: CodegenMethods<'tcx>>(self, cx: &Cx) -> PlaceRef<'tcx, V> {
        if self.layout.ty.is_box() {
            bug!("dereferencing {:?} in codegen", self.layout.ty);
        }

        let projected_ty = self
            .layout
            .ty
            .builtin_deref(true)
            .unwrap_or_else(|| bug!("deref of non-pointer {:?}", self));

        let layout = cx.layout_of(projected_ty);
        self.val.deref(layout.align.abi).with_type(layout)
    }

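    /// If this operand is a `Pair`, packs the two immediates into a single
    /// first-class aggregate value; otherwise returns the lone immediate.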
    pub fn immediate_or_packed_pair<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        bx: &mut Bx,
    ) -> V {
        if let OperandValue::Pair(a, b) = self.val {
            let llty = bx.cx().immediate_backend_type(self.layout);
            debug!("Operand::immediate_or_packed_pair: packing {:?} into {:?}", self, llty);
            let mut llpair = bx.cx().const_poison(llty);
            llpair = bx.insert_value(llpair, a, 0);
            llpair = bx.insert_value(llpair, b, 1);
            llpair
        } else {
            self.immediate()
        }
    }

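    /// If the type is a scalar pair, unpacks `llval` into a `Pair`;
    /// otherwise wraps it as an `Immediate`. The inverse of
    /// `immediate_or_packed_pair`.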
    pub fn from_immediate_or_packed_pair<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        llval: V,
        layout: TyAndLayout<'tcx>,
    ) -> Self {
        let val = if let BackendRepr::ScalarPair(..) = layout.backend_repr {
            debug!("Operand::from_immediate_or_packed_pair: unpacking {:?} @ {:?}", llval, layout);

            let a_llval = bx.extract_value(llval, 0);
            let b_llval = bx.extract_value(llval, 1);
            OperandValue::Pair(a_llval, b_llval)
        } else {
            OperandValue::Immediate(llval)
        };
        OperandRef { val, layout }
    }

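    /// Projects out field `i` of this operand, keeping the result as an
    /// operand rather than writing it back to memory.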
    pub(crate) fn extract_field<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        &self,
        fx: &mut FunctionCx<'a, 'tcx, Bx>,
        bx: &mut Bx,
        i: usize,
    ) -> Self {
        let field = self.layout.field(bx.cx(), i);
        let offset = self.layout.fields.offset(i);

        if !bx.is_backend_ref(self.layout) && bx.is_backend_ref(field) {
            if let BackendRepr::SimdVector { count, .. } = self.layout.backend_repr
                && let BackendRepr::Memory { sized: true } = field.backend_repr
                && count.is_power_of_two()
            {
                assert_eq!(field.size, self.layout.size);
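                // Spill the vector to a stack slot so that the memory-typed
                // field can be reloaded from it.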
                let place = PlaceRef::alloca(bx, field);
                self.val.store(bx, place.val.with_type(self.layout));
                return bx.load_operand(place);
            } else {
                bug!("Non-ref type {self:?} cannot project to ref field type {field:?}");
            }
        }

        let val = if field.is_zst() {
            OperandValue::ZeroSized
        } else if field.size == self.layout.size {
            assert_eq!(offset.bytes(), 0);
            fx.codegen_transmute_operand(bx, *self, field).unwrap_or_else(|| {
                bug!(
                    "Expected `codegen_transmute_operand` to handle equal-size \
                     field {i:?} projection from {self:?} to {field:?}"
                )
            })
        } else {
            let (in_scalar, imm) = match (self.val, self.layout.backend_repr) {
                (OperandValue::Pair(a_llval, b_llval), BackendRepr::ScalarPair(a, b)) => {
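                    // Select the component of the pair that the field's
                    // offset points at.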
                    if offset.bytes() == 0 {
                        assert_eq!(field.size, a.size(bx.cx()));
                        (Some(a), a_llval)
                    } else {
                        assert_eq!(offset, a.size(bx.cx()).align_to(b.align(bx.cx()).abi));
                        assert_eq!(field.size, b.size(bx.cx()));
                        (Some(b), b_llval)
                    }
                }

                _ => {
                    span_bug!(fx.mir.span, "OperandRef::extract_field({:?}): not applicable", self)
                }
            };
            OperandValue::Immediate(match field.backend_repr {
                BackendRepr::SimdVector { .. } => imm,
                BackendRepr::Scalar(out_scalar) => {
                    let Some(in_scalar) = in_scalar else {
                        span_bug!(
                            fx.mir.span,
                            "OperandRef::extract_field({:?}): missing input scalar for output scalar",
                            self
                        )
                    };
                    if in_scalar != out_scalar {
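                        // The two immediate representations differ (e.g. `i1`
                        // vs `i8` for `bool`), so round-trip through the
                        // in-memory backend type.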
                        let backend = bx.from_immediate(imm);
                        bx.to_immediate_scalar(backend, out_scalar)
                    } else {
                        imm
                    }
                }
                BackendRepr::ScalarPair(_, _) | BackendRepr::Memory { .. } => bug!(),
            })
        };

        OperandRef { val, layout: field }
    }

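    /// Obtains the discriminant of the enum value in `self`, cast to
    /// `cast_to`'s backend representation.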
    #[instrument(level = "trace", skip(fx, bx))]
    pub fn codegen_get_discr<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        fx: &mut FunctionCx<'a, 'tcx, Bx>,
        bx: &mut Bx,
        cast_to: Ty<'tcx>,
    ) -> V {
        let dl = &bx.tcx().data_layout;
        let cast_to_layout = bx.cx().layout_of(cast_to);
        let cast_to = bx.cx().immediate_backend_type(cast_to_layout);

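        // An uninhabited value can never actually be read, so poison will do;
        // this also disposes of `Variants::Empty` before the match below.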
        if self.layout.is_uninhabited() {
            return bx.cx().const_poison(cast_to);
        }

        let (tag_scalar, tag_encoding, tag_field) = match self.layout.variants {
            Variants::Empty => unreachable!("we already handled uninhabited types"),
            Variants::Single { index } => {
                let discr_val =
                    if let Some(discr) = self.layout.ty.discriminant_for_variant(bx.tcx(), index) {
                        discr.val
                    } else {
                        assert_eq!(index, FIRST_VARIANT);
                        0
                    };
                return bx.cx().const_uint_big(cast_to, discr_val);
            }
            Variants::Multiple { tag, ref tag_encoding, tag_field, .. } => {
                (tag, tag_encoding, tag_field)
            }
        };

        let tag_op = match self.val {
            OperandValue::ZeroSized => bug!(),
            OperandValue::Immediate(_) | OperandValue::Pair(_, _) => {
                self.extract_field(fx, bx, tag_field)
            }
            OperandValue::Ref(place) => {
                let tag = place.with_type(self.layout).project_field(bx, tag_field);
                bx.load_operand(tag)
            }
        };
        let tag_imm = tag_op.immediate();

        match *tag_encoding {
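            // A direct tag stores the discriminant itself; it only needs a
            // cast to the target type.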
            TagEncoding::Direct => {
                let signed = match tag_scalar.primitive() {
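                    // `bool` immediates are `i1`, which must not be treated
                    // as signed: `i1 1` would sign-extend to `-1`.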
                    Primitive::Int(_, signed) => !tag_scalar.is_bool() && signed,
                    _ => false,
                };
                bx.intcast(tag_imm, cast_to, signed)
            }
            TagEncoding::Niche { untagged_variant, ref niche_variants, niche_start } => {
                let (tag, tag_llty) = match tag_scalar.primitive() {
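                    // A pointer-valued tag must be widened to a pointer-sized
                    // integer before doing arithmetic on it.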
                    Primitive::Pointer(_) => {
                        let t = bx.type_from_integer(dl.ptr_sized_integer());
                        let tag = bx.ptrtoint(tag_imm, t);
                        (tag, t)
                    }
                    _ => (tag_imm, bx.cx().immediate_backend_type(tag_op.layout)),
                };

                let relative_max = niche_variants.end().as_u32() - niche_variants.start().as_u32();

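                // A value is a niche variant iff `tag - niche_start` is
                // unsigned-less-or-equal to `relative_max` (the subtraction
                // wraps, which is what makes a single comparison suffice);
                // its discriminant is then that relative value plus
                // `niche_variants.start()`. With a single niche variant the
                // whole test collapses to one equality check.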
                let (is_niche, tagged_discr, delta) = if relative_max == 0 {
                    let niche_start = bx.cx().const_uint_big(tag_llty, niche_start);
                    let is_niche = bx.icmp(IntPredicate::IntEQ, tag, niche_start);
                    let tagged_discr =
                        bx.cx().const_uint(cast_to, niche_variants.start().as_u32() as u64);
                    (is_niche, tagged_discr, 0)
                } else {
                    let relative_discr = bx.sub(tag, bx.cx().const_uint_big(tag_llty, niche_start));
                    let cast_tag = bx.intcast(relative_discr, cast_to, false);
                    let is_niche = bx.icmp(
                        IntPredicate::IntULE,
                        relative_discr,
                        bx.cx().const_uint(tag_llty, relative_max as u64),
                    );
                    (is_niche, cast_tag, niche_variants.start().as_u32() as u128)
                };

                let tagged_discr = if delta == 0 {
                    tagged_discr
                } else {
                    bx.add(tagged_discr, bx.cx().const_uint_big(cast_to, delta))
                };

                let discr = bx.select(
                    is_niche,
                    tagged_discr,
                    bx.cx().const_uint(cast_to, untagged_variant.as_u32() as u64),
                );

                discr
            }
        }
    }
}

impl<'a, 'tcx, V: CodegenObject> OperandValue<V> {
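    /// Returns an `OperandValue` filled with poison for the given layout,
    /// usable as a placeholder for values that must never be read.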
    pub fn poison<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        layout: TyAndLayout<'tcx>,
    ) -> OperandValue<V> {
        assert!(layout.is_sized());
        if layout.is_zst() {
            OperandValue::ZeroSized
        } else if bx.cx().is_backend_immediate(layout) {
            let ibty = bx.cx().immediate_backend_type(layout);
            OperandValue::Immediate(bx.const_poison(ibty))
        } else if bx.cx().is_backend_scalar_pair(layout) {
            let ibty0 = bx.cx().scalar_pair_element_backend_type(layout, 0, true);
            let ibty1 = bx.cx().scalar_pair_element_backend_type(layout, 1, true);
            OperandValue::Pair(bx.const_poison(ibty0), bx.const_poison(ibty1))
        } else {
            let ptr = bx.cx().type_ptr();
            OperandValue::Ref(PlaceValue::new_sized(bx.const_poison(ptr), layout.align.abi))
        }
    }

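    /// Stores this operand into `dest` with no special memory flags.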
    pub fn store<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        bx: &mut Bx,
        dest: PlaceRef<'tcx, V>,
    ) {
        self.store_with_flags(bx, dest, MemFlags::empty());
    }

    pub fn volatile_store<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        bx: &mut Bx,
        dest: PlaceRef<'tcx, V>,
    ) {
        self.store_with_flags(bx, dest, MemFlags::VOLATILE);
    }

    pub fn unaligned_volatile_store<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        bx: &mut Bx,
        dest: PlaceRef<'tcx, V>,
    ) {
        self.store_with_flags(bx, dest, MemFlags::VOLATILE | MemFlags::UNALIGNED);
    }

    pub fn nontemporal_store<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        bx: &mut Bx,
        dest: PlaceRef<'tcx, V>,
    ) {
        self.store_with_flags(bx, dest, MemFlags::NONTEMPORAL);
    }

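    /// Stores this operand into `dest`, applying `flags` to every store
    /// emitted.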
    pub(crate) fn store_with_flags<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        bx: &mut Bx,
        dest: PlaceRef<'tcx, V>,
        flags: MemFlags,
    ) {
        debug!("OperandRef::store: operand={:?}, dest={:?}", self, dest);
        match self {
            OperandValue::ZeroSized => {
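                // A zero-sized value carries no data, so there is nothing to
                // store.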
            }
            OperandValue::Ref(val) => {
                assert!(dest.layout.is_sized(), "cannot directly store unsized values");
                if val.llextra.is_some() {
                    bug!("cannot directly store unsized values");
                }
                bx.typed_place_copy_with_flags(dest.val, val, dest.layout, flags);
            }
            OperandValue::Immediate(s) => {
                let val = bx.from_immediate(s);
                bx.store_with_flags(val, dest.val.llval, dest.val.align, flags);
            }
            OperandValue::Pair(a, b) => {
                let BackendRepr::ScalarPair(a_scalar, b_scalar) = dest.layout.backend_repr else {
                    bug!("store_with_flags: invalid ScalarPair layout: {:#?}", dest.layout);
                };
                let b_offset = a_scalar.size(bx).align_to(b_scalar.align(bx).abi);

                let val = bx.from_immediate(a);
                let align = dest.val.align;
                bx.store_with_flags(val, dest.val.llval, align, flags);

                let llptr = bx.inbounds_ptradd(dest.val.llval, bx.const_usize(b_offset.bytes()));
                let val = bx.from_immediate(b);
                let align = dest.val.align.restrict_for_offset(b_offset);
                bx.store_with_flags(val, llptr, align, flags);
            }
        }
    }

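    /// Stores an unsized value by copying its payload into a fresh stack
    /// allocation and writing the resulting fat pointer (data pointer plus
    /// metadata) into `indirect_dest`.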
    pub fn store_unsized<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        bx: &mut Bx,
        indirect_dest: PlaceRef<'tcx, V>,
    ) {
        debug!("OperandRef::store_unsized: operand={:?}, indirect_dest={:?}", self, indirect_dest);
        let unsized_ty = indirect_dest
            .layout
            .ty
            .builtin_deref(true)
            .unwrap_or_else(|| bug!("indirect_dest has non-pointer type: {:?}", indirect_dest));

        let OperandValue::Ref(PlaceValue { llval: llptr, llextra: Some(llextra), .. }) = self
        else {
            bug!("store_unsized called with a sized value (or with an extern type)")
        };

        let (size, align) = size_of_val::size_and_align_of_dst(bx, unsized_ty, Some(llextra));
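        // `dynamic_alloca` only supports a statically known alignment, so
        // over-allocate by `align - 1` bytes at alignment 1 and round the
        // resulting address up to the runtime alignment by hand.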
        let one = bx.const_usize(1);
        let align_minus_1 = bx.sub(align, one);
        let size_extra = bx.add(size, align_minus_1);
        let min_align = Align::ONE;
        let alloca = bx.dynamic_alloca(size_extra, min_align);
        let address = bx.ptrtoint(alloca, bx.type_isize());
        let neg_address = bx.neg(address);
        let offset = bx.and(neg_address, align_minus_1);
        let dst = bx.inbounds_ptradd(alloca, offset);
        bx.memcpy(dst, min_align, llptr, min_align, size, MemFlags::empty());

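        // Write the fat pointer into the destination.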
        let indirect_operand = OperandValue::Pair(dst, llextra);
        indirect_operand.store(bx, indirect_dest);
    }
}

impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
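    /// Tries to consume `place_ref` as an operand that is already available
    /// directly, returning `None` when it has to be loaded from memory.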
    fn maybe_codegen_consume_direct(
        &mut self,
        bx: &mut Bx,
        place_ref: mir::PlaceRef<'tcx>,
    ) -> Option<OperandRef<'tcx, Bx::Value>> {
        debug!("maybe_codegen_consume_direct(place_ref={:?})", place_ref);

        match self.locals[place_ref.local] {
            LocalRef::Operand(mut o) => {
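                // Moves out of scalar and scalar-pair fields can be handled
                // without touching memory.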
                for elem in place_ref.projection.iter() {
                    match elem {
                        mir::ProjectionElem::Field(f, _) => {
                            assert!(
                                !o.layout.ty.is_any_ptr(),
                                "Bad PlaceRef: destructing pointers should use cast/PtrMetadata, \
                                 but tried to access field {f:?} of pointer {o:?}",
                            );
                            o = o.extract_field(self, bx, f.index());
                        }
                        mir::ProjectionElem::Index(_)
                        | mir::ProjectionElem::ConstantIndex { .. } => {
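                            // Indexing into a ZST element needs no actual
                            // memory access; any other indexing must go
                            // through memory.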
                            let elem = o.layout.field(bx.cx(), 0);
                            if elem.is_zst() {
                                o = OperandRef::zero_sized(elem);
                            } else {
                                return None;
                            }
                        }
                        _ => return None,
                    }
                }

                Some(o)
            }
            LocalRef::PendingOperand => {
                bug!("use of {:?} before def", place_ref);
            }
            LocalRef::Place(..) | LocalRef::UnsizedPlace(..) => {
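                // The local lives in memory, so the caller has to go through
                // its place instead.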
                None
            }
        }
    }

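    /// Produces the operand for consuming (copying or moving out of)
    /// `place_ref`, loading from memory only when necessary.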
    pub fn codegen_consume(
        &mut self,
        bx: &mut Bx,
        place_ref: mir::PlaceRef<'tcx>,
    ) -> OperandRef<'tcx, Bx::Value> {
        debug!("codegen_consume(place_ref={:?})", place_ref);

        let ty = self.monomorphized_place_ty(place_ref);
        let layout = bx.cx().layout_of(ty);

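        // ZSTs don't require any actual memory access.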
        if layout.is_zst() {
            return OperandRef::zero_sized(layout);
        }

        if let Some(o) = self.maybe_codegen_consume_direct(bx, place_ref) {
            return o;
        }

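        // The operand is not directly available, so fall back to
        // materializing the place and loading from it.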
        let place = self.codegen_place(bx, place_ref);
        bx.load_operand(place)
    }

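    /// Produces the operand for a MIR `Operand`, either by consuming a place
    /// or by evaluating a constant.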
    pub fn codegen_operand(
        &mut self,
        bx: &mut Bx,
        operand: &mir::Operand<'tcx>,
    ) -> OperandRef<'tcx, Bx::Value> {
        debug!("codegen_operand(operand={:?})", operand);

        match *operand {
            mir::Operand::Copy(ref place) | mir::Operand::Move(ref place) => {
                self.codegen_consume(bx, place.as_ref())
            }

            mir::Operand::Constant(ref constant) => {
                let constant_ty = self.monomorphize(constant.ty());
                if constant_ty.is_simd() {
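                    // SIMD vector constants should be passed as immediates
                    // where the ABI actually uses a vector representation
                    // (packed SIMD types, for instance, do not).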
                    let layout = bx.layout_of(constant_ty);
                    if let BackendRepr::SimdVector { .. } = layout.backend_repr {
                        let (llval, ty) = self.immediate_const_vector(bx, constant);
                        return OperandRef {
                            val: OperandValue::Immediate(llval),
                            layout: bx.layout_of(ty),
                        };
                    }
                }
                self.eval_mir_constant_to_operand(bx, constant)
            }
        }
    }
}