use std::assert_matches::assert_matches;

use rustc_abi::Integer;
use rustc_apfloat::ieee::{Double, Half, Quad, Single};
use rustc_apfloat::{Float, FloatConvert};
use rustc_middle::mir::CastKind;
use rustc_middle::mir::interpret::{InterpResult, PointerArithmetic, Scalar};
use rustc_middle::ty::adjustment::PointerCoercion;
use rustc_middle::ty::layout::{IntegerExt, LayoutOf, TyAndLayout};
use rustc_middle::ty::{self, FloatTy, Ty};
use rustc_middle::{bug, span_bug};
use rustc_type_ir::TyKind::*;
use tracing::trace;

use super::util::ensure_monomorphic_enough;
use super::{
    FnVal, ImmTy, Immediate, InterpCx, Machine, OpTy, PlaceTy, err_inval, interp_ok, throw_ub,
    throw_ub_custom,
};
use crate::fluent_generated as fluent;

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
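    /// Performs the cast operation `cast_kind` from `src` to `cast_ty`, writing the result to
    /// `dest`.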
    pub fn cast(
        &mut self,
        src: &OpTy<'tcx, M::Provenance>,
        cast_kind: CastKind,
        cast_ty: Ty<'tcx>,
        dest: &PlaceTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx> {
        // `cast_ty` is usually, but not always, the same as `dest.layout.ty`.
        let cast_layout =
            if cast_ty == dest.layout.ty { dest.layout } else { self.layout_of(cast_ty)? };
        match cast_kind {
            CastKind::PointerCoercion(PointerCoercion::Unsize, _) => {
                self.unsize_into(src, cast_layout, dest)?;
            }

            CastKind::PointerExposeProvenance => {
                let src = self.read_immediate(src)?;
                let res = self.pointer_expose_provenance_cast(&src, cast_layout)?;
                self.write_immediate(*res, dest)?;
            }

            CastKind::PointerWithExposedProvenance => {
                let src = self.read_immediate(src)?;
                let res = self.pointer_with_exposed_provenance_cast(&src, cast_layout)?;
                self.write_immediate(*res, dest)?;
            }

            CastKind::IntToInt | CastKind::IntToFloat => {
                let src = self.read_immediate(src)?;
                let res = self.int_to_int_or_float(&src, cast_layout)?;
                self.write_immediate(*res, dest)?;
            }

            CastKind::FloatToFloat | CastKind::FloatToInt => {
                let src = self.read_immediate(src)?;
                let res = self.float_to_float_or_int(&src, cast_layout)?;
                self.write_immediate(*res, dest)?;
            }

            CastKind::FnPtrToPtr | CastKind::PtrToPtr => {
                let src = self.read_immediate(src)?;
                let res = self.ptr_to_ptr(&src, cast_layout)?;
                self.write_immediate(*res, dest)?;
            }

            CastKind::PointerCoercion(
                PointerCoercion::MutToConstPointer | PointerCoercion::ArrayToPointer,
                _,
            ) => {
                bug!("{cast_kind:?} casts are for borrowck only, not runtime MIR");
            }

            CastKind::PointerCoercion(PointerCoercion::ReifyFnPointer, _) => {
                ensure_monomorphic_enough(*self.tcx, src.layout.ty)?;

                match *src.layout.ty.kind() {
                    ty::FnDef(def_id, args) => {
                        let instance = ty::Instance::resolve_for_fn_ptr(
                            *self.tcx,
                            self.typing_env,
                            def_id,
                            args,
                        )
                        .ok_or_else(|| err_inval!(TooGeneric))?;

                        let fn_ptr = self.fn_ptr(FnVal::Instance(instance));
                        self.write_pointer(fn_ptr, dest)?;
                    }
                    _ => span_bug!(self.cur_span(), "reify fn pointer on {}", src.layout.ty),
                }
            }

            CastKind::PointerCoercion(PointerCoercion::UnsafeFnPointer, _) => {
                let src = self.read_immediate(src)?;
                match cast_ty.kind() {
                    ty::FnPtr(..) => {
                        // No change to the value, only to the type.
                        self.write_immediate(*src, dest)?;
                    }
                    _ => span_bug!(self.cur_span(), "fn to unsafe fn cast on {}", cast_ty),
                }
            }

            CastKind::PointerCoercion(PointerCoercion::ClosureFnPointer(_), _) => {
                ensure_monomorphic_enough(*self.tcx, src.layout.ty)?;

                match *src.layout.ty.kind() {
                    ty::Closure(def_id, args) => {
                        let instance = ty::Instance::resolve_closure(
                            *self.tcx,
                            def_id,
                            args,
                            ty::ClosureKind::FnOnce,
                        );
                        let fn_ptr = self.fn_ptr(FnVal::Instance(instance));
                        self.write_pointer(fn_ptr, dest)?;
                    }
                    _ => span_bug!(self.cur_span(), "closure fn pointer on {}", src.layout.ty),
                }
            }

            CastKind::PointerCoercion(PointerCoercion::DynStar, _) => {
                if let ty::Dynamic(data, _, ty::DynStar) = cast_ty.kind() {
                    // Build the `dyn*` value as a (data, vtable) pair.
                    let vtable = self.get_vtable_ptr(src.layout.ty, data)?;
                    let vtable = Scalar::from_maybe_pointer(vtable, self);
                    let data = self.read_immediate(src)?.to_scalar();
                    // The data part of a `dyn*` must be pointer-like; this errors otherwise.
                    let _assert_pointer_like = data.to_pointer(self)?;
                    let val = Immediate::ScalarPair(data, vtable);
                    self.write_immediate(val, dest)?;
                } else {
                    bug!()
                }
            }

            CastKind::Transmute => {
                assert!(src.layout.is_sized());
                assert!(dest.layout.is_sized());
                assert_eq!(cast_ty, dest.layout.ty);
                if src.layout.size != dest.layout.size {
                    throw_ub_custom!(
                        fluent::const_eval_invalid_transmute,
                        src_bytes = src.layout.size.bytes(),
                        dest_bytes = dest.layout.size.bytes(),
                        src = src.layout.ty,
                        dest = dest.layout.ty,
                    );
                }

                self.copy_op_allow_transmute(src, dest)?;
            }
        }
        interp_ok(())
    }

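    /// Handles `IntToInt` and `IntToFloat` casts.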
    pub fn int_to_int_or_float(
        &self,
        src: &ImmTy<'tcx, M::Provenance>,
        cast_to: TyAndLayout<'tcx>,
    ) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> {
        assert!(src.layout.ty.is_integral() || src.layout.ty.is_char() || src.layout.ty.is_bool());
        assert!(cast_to.ty.is_floating_point() || cast_to.ty.is_integral() || cast_to.ty.is_char());

        interp_ok(ImmTy::from_scalar(
            self.cast_from_int_like(src.to_scalar(), src.layout, cast_to.ty)?,
            cast_to,
        ))
    }

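    /// Handles `FloatToFloat` and `FloatToInt` casts.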
    pub fn float_to_float_or_int(
        &self,
        src: &ImmTy<'tcx, M::Provenance>,
        cast_to: TyAndLayout<'tcx>,
    ) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> {
        let Float(fty) = src.layout.ty.kind() else {
            bug!("FloatToFloat/FloatToInt cast: source type {} is not a float type", src.layout.ty)
        };
        let val = match fty {
            FloatTy::F16 => self.cast_from_float(src.to_scalar().to_f16()?, cast_to.ty),
            FloatTy::F32 => self.cast_from_float(src.to_scalar().to_f32()?, cast_to.ty),
            FloatTy::F64 => self.cast_from_float(src.to_scalar().to_f64()?, cast_to.ty),
            FloatTy::F128 => self.cast_from_float(src.to_scalar().to_f128()?, cast_to.ty),
        };
        interp_ok(ImmTy::from_scalar(val, cast_to))
    }

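    /// Handles `FnPtrToPtr` and `PtrToPtr` casts.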
    pub fn ptr_to_ptr(
        &self,
        src: &ImmTy<'tcx, M::Provenance>,
        cast_to: TyAndLayout<'tcx>,
    ) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> {
        assert!(src.layout.ty.is_any_ptr());
        assert!(cast_to.ty.is_raw_ptr());
        if cast_to.size == src.layout.size {
            // Same-size cast: only the pointer's type changes, the value stays as-is.
            return interp_ok(ImmTy::from_immediate(**src, cast_to));
        } else {
            // Casting the metadata away from a fat pointer.
            assert_eq!(src.layout.size, 2 * self.pointer_size());
            assert_eq!(cast_to.size, self.pointer_size());
            assert!(src.layout.ty.is_raw_ptr());
            return match **src {
                Immediate::ScalarPair(data, _) => interp_ok(ImmTy::from_scalar(data, cast_to)),
                Immediate::Scalar(..) => span_bug!(
                    self.cur_span(),
                    "{:?} input to a fat-to-thin cast ({} -> {})",
                    *src,
                    src.layout.ty,
                    cast_to.ty
                ),
                Immediate::Uninit => throw_ub!(InvalidUninitBytes(None)),
            };
        }
    }

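    /// Handles `PointerExposeProvenance` casts: exposes the pointer's provenance (if any) and
    /// returns its address as an integer.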
    pub fn pointer_expose_provenance_cast(
        &mut self,
        src: &ImmTy<'tcx, M::Provenance>,
        cast_to: TyAndLayout<'tcx>,
    ) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> {
        assert_matches!(src.layout.ty.kind(), ty::RawPtr(_, _) | ty::FnPtr(..));
        assert!(cast_to.ty.is_integral());

        let scalar = src.to_scalar();
        let ptr = scalar.to_pointer(self)?;
        match ptr.into_pointer_or_addr() {
            Ok(ptr) => M::expose_provenance(self, ptr.provenance)?,
            Err(_) => {} // There is no provenance to expose, so there is nothing to do.
        };
        interp_ok(ImmTy::from_scalar(
            self.cast_from_int_like(scalar, src.layout, cast_to.ty)?,
            cast_to,
        ))
    }

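    /// Handles `PointerWithExposedProvenance` casts: turns an integer address into a pointer,
    /// letting the machine decide which provenance, if any, to attach.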
    pub fn pointer_with_exposed_provenance_cast(
        &self,
        src: &ImmTy<'tcx, M::Provenance>,
        cast_to: TyAndLayout<'tcx>,
    ) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> {
        assert!(src.layout.ty.is_integral());
        assert_matches!(cast_to.ty.kind(), ty::RawPtr(_, _));

        // First cast to `usize`.
        let scalar = src.to_scalar();
        let addr = self.cast_from_int_like(scalar, src.layout, self.tcx.types.usize)?;
        let addr = addr.to_target_usize(self)?;

        // Then turn the address into a pointer.
        let ptr = M::ptr_from_addr_cast(self, addr)?;
        interp_ok(ImmTy::from_scalar(Scalar::from_maybe_pointer(ptr, self), cast_to))
    }

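    /// Low-level cast helper function. This works directly on scalars and can take "int-like"
    /// input (integers, bool, char, raw and fn pointers) and cast it to integer, float, or char
    /// types.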
    fn cast_from_int_like(
        &self,
        scalar: Scalar<M::Provenance>, // input value (there is no ScalarTy, so we pass data and layout separately)
        src_layout: TyAndLayout<'tcx>,
        cast_ty: Ty<'tcx>,
    ) -> InterpResult<'tcx, Scalar<M::Provenance>> {
        // Make sure `v` is sign-extended *if* the source type is signed.
        let signed = src_layout.backend_repr.is_signed(); // Also asserts that the ABI is scalar.
        let v = match src_layout.ty.kind() {
            Uint(_) | RawPtr(..) | FnPtr(..) => scalar.to_uint(src_layout.size)?,
            Int(_) => scalar.to_int(src_layout.size)? as u128, // `to_int` sign-extends; we cast back to `i128` below if the sign matters
            Bool => scalar.to_bool()?.into(),
            Char => scalar.to_char()?.into(),
            _ => span_bug!(self.cur_span(), "invalid int-like cast from {}", src_layout.ty),
        };

        interp_ok(match *cast_ty.kind() {
            // int -> int
            Int(_) | Uint(_) => {
                let size = match *cast_ty.kind() {
                    Int(t) => Integer::from_int_ty(self, t).size(),
                    Uint(t) => Integer::from_uint_ty(self, t).size(),
                    _ => bug!(),
                };
                let v = size.truncate(v);
                Scalar::from_uint(v, size)
            }

            // signed int -> float
            Float(fty) if signed => {
                let v = v as i128;
                match fty {
                    FloatTy::F16 => Scalar::from_f16(Half::from_i128(v).value),
                    FloatTy::F32 => Scalar::from_f32(Single::from_i128(v).value),
                    FloatTy::F64 => Scalar::from_f64(Double::from_i128(v).value),
                    FloatTy::F128 => Scalar::from_f128(Quad::from_i128(v).value),
                }
            }
            // unsigned int -> float
            Float(fty) => match fty {
                FloatTy::F16 => Scalar::from_f16(Half::from_u128(v).value),
                FloatTy::F32 => Scalar::from_f32(Single::from_u128(v).value),
                FloatTy::F64 => Scalar::from_f64(Double::from_u128(v).value),
                FloatTy::F128 => Scalar::from_f128(Quad::from_u128(v).value),
            },

            // u8 -> char
            Char => Scalar::from_u32(u8::try_from(v).unwrap().into()),

            // Casts to bool are not permitted by rustc, no need to handle them here.
            _ => span_bug!(self.cur_span(), "invalid int to {} cast", cast_ty),
        })
    }

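    /// Low-level cast helper function. Converts an apfloat `f` into int or float types.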
    fn cast_from_float<F>(&self, f: F, dest_ty: Ty<'tcx>) -> Scalar<M::Provenance>
    where
        F: Float
            + Into<Scalar<M::Provenance>>
            + FloatConvert<Half>
            + FloatConvert<Single>
            + FloatConvert<Double>
            + FloatConvert<Quad>,
    {
        match *dest_ty.kind() {
            // float -> uint
            Uint(t) => {
                let size = Integer::from_uint_ty(self, t).size();
                // `to_u128` is a saturating cast, which is what we want here.
                let v = f.to_u128(size.bits_usize()).value;
                // This should already fit the bit width.
                Scalar::from_uint(v, size)
            }
            // float -> int
            Int(t) => {
                let size = Integer::from_int_ty(self, t).size();
                // `to_i128` is a saturating cast, which is what we want here.
                let v = f.to_i128(size.bits_usize()).value;
                Scalar::from_int(v, size)
            }
            // float -> float
            Float(fty) => match fty {
                FloatTy::F16 => {
                    Scalar::from_f16(self.adjust_nan(f.convert(&mut false).value, &[f]))
                }
                FloatTy::F32 => {
                    Scalar::from_f32(self.adjust_nan(f.convert(&mut false).value, &[f]))
                }
                FloatTy::F64 => {
                    Scalar::from_f64(self.adjust_nan(f.convert(&mut false).value, &[f]))
                }
                FloatTy::F128 => {
                    Scalar::from_f128(self.adjust_nan(f.convert(&mut false).value, &[f]))
                }
            },
            _ => span_bug!(self.cur_span(), "invalid float to {} cast", dest_ty),
        }
    }

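    /// `src` is a pointer to a value of type `source_ty`; writes to `dest` a (possibly fat)
    /// pointer to the same data with pointee type `cast_ty` (array to slice, trait-object upcast,
    /// or sized type to `dyn Trait`).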
    fn unsize_into_ptr(
        &mut self,
        src: &OpTy<'tcx, M::Provenance>,
        dest: &PlaceTy<'tcx, M::Provenance>,
        // The pointee types.
        source_ty: Ty<'tcx>,
        cast_ty: Ty<'tcx>,
    ) -> InterpResult<'tcx> {
        // Find the unsized tails of both pointee types, peeling off matching struct wrappers in
        // lockstep.
        let (src_pointee_ty, dest_pointee_ty) =
            self.tcx.struct_lockstep_tails_for_codegen(source_ty, cast_ty, self.typing_env);

        match (src_pointee_ty.kind(), dest_pointee_ty.kind()) {
            (&ty::Array(_, length), &ty::Slice(_)) => {
                let ptr = self.read_pointer(src)?;
                let val = Immediate::new_slice(
                    ptr,
                    length
                        .try_to_target_usize(*self.tcx)
                        .expect("expected monomorphic const in const eval"),
                    self,
                );
                self.write_immediate(val, dest)
            }
            (ty::Dynamic(data_a, _, ty::Dyn), ty::Dynamic(data_b, _, ty::Dyn)) => {
                let val = self.read_immediate(src)?;
                // If the trait objects are identical, no need to adjust the vtable.
                if data_a == data_b {
                    return self.write_immediate(*val, dest);
                }
                // Trait upcast: the vtable pointer needs to change.
                let (old_data, old_vptr) = val.to_scalar_pair();
                let old_data = old_data.to_pointer(self)?;
                let old_vptr = old_vptr.to_pointer(self)?;
                let ty = self.get_ptr_vtable_ty(old_vptr, Some(data_a))?;

                // Sanity-check the vtable relationship between the two traits.
                let vptr_entry_idx =
                    self.tcx.supertrait_vtable_slot((src_pointee_ty, dest_pointee_ty));
                let vtable_entries = self.vtable_entries(data_a.principal(), ty);
                if let Some(entry_idx) = vptr_entry_idx {
                    // The source vtable stores a pointer to the supertrait vtable in this slot;
                    // make sure it is for the trait we are casting to.
                    let Some(&ty::VtblEntry::TraitVPtr(upcast_trait_ref)) =
                        vtable_entries.get(entry_idx)
                    else {
                        span_bug!(
                            self.cur_span(),
                            "invalid vtable entry index in {} -> {} upcast",
                            src_pointee_ty,
                            dest_pointee_ty
                        );
                    };
                    let erased_trait_ref =
                        ty::ExistentialTraitRef::erase_self_ty(*self.tcx, upcast_trait_ref);
                    assert!(data_b.principal().is_some_and(|b| self.eq_in_param_env(
                        erased_trait_ref,
                        self.tcx.instantiate_bound_regions_with_erased(b)
                    )));
                } else {
                    // No separate vtable slot: the target trait's vtable must be a prefix of the
                    // source trait's vtable, which we double-check here.
                    let vtable_entries_b = self.vtable_entries(data_b.principal(), ty);
                    assert!(&vtable_entries[..vtable_entries_b.len()] == vtable_entries_b);
                };

                // Get the destination trait vtable and return that.
                let new_vptr = self.get_vtable_ptr(ty, data_b)?;
                self.write_immediate(Immediate::new_dyn_trait(old_data, new_vptr, self), dest)
            }
            (_, &ty::Dynamic(data, _, ty::Dyn)) => {
                // Initial cast from sized to `dyn Trait`.
                let vtable = self.get_vtable_ptr(src_pointee_ty, data)?;
                let ptr = self.read_pointer(src)?;
                let val = Immediate::new_dyn_trait(ptr, vtable, &*self.tcx);
                self.write_immediate(val, dest)
            }
            _ => {
                // Do not ICE if we are not monomorphic enough.
                ensure_monomorphic_enough(*self.tcx, src.layout.ty)?;
                ensure_monomorphic_enough(*self.tcx, cast_ty)?;

                span_bug!(
                    self.cur_span(),
                    "invalid pointer unsizing {} -> {}",
                    src.layout.ty,
                    cast_ty
                )
            }
        }
    }

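    /// Performs an unsizing coercion of `src` into `dest`, either directly on a pointer/reference
    /// or field-by-field for ADTs like `Arc<T>` -> `Arc<dyn Trait>`.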
    pub fn unsize_into(
        &mut self,
        src: &OpTy<'tcx, M::Provenance>,
        cast_ty: TyAndLayout<'tcx>,
        dest: &PlaceTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx> {
        trace!("Unsizing {:?} of type {} into {}", *src, src.layout.ty, cast_ty.ty);
        match (src.layout.ty.kind(), cast_ty.ty.kind()) {
            (&ty::Ref(_, s, _), &ty::Ref(_, c, _) | &ty::RawPtr(c, _))
            | (&ty::RawPtr(s, _), &ty::RawPtr(c, _)) => self.unsize_into_ptr(src, dest, s, c),
            (&ty::Adt(def_a, _), &ty::Adt(def_b, _)) => {
                assert_eq!(def_a, def_b); // implies same number of fields

                // Unsizing a generic struct with pointer fields, like `Arc<T>` -> `Arc<dyn Trait>`:
                // at most one field needs an unsizing cast; all other fields must keep their type
                // or be 1-ZSTs.
                let mut found_cast_field = false;
                for i in 0..src.layout.fields.count() {
                    let cast_ty_field = cast_ty.field(self, i);
                    let src_field = self.project_field(src, i)?;
                    let dst_field = self.project_field(dest, i)?;
                    if src_field.layout.is_1zst() && cast_ty_field.is_1zst() {
                        // Skip 1-ZST fields.
                    } else if src_field.layout.ty == cast_ty_field.ty {
                        self.copy_op(&src_field, &dst_field)?;
                    } else {
                        if found_cast_field {
                            span_bug!(self.cur_span(), "unsize_into: more than one field to cast");
                        }
                        found_cast_field = true;
                        self.unsize_into(&src_field, cast_ty_field, &dst_field)?;
                    }
                }
                interp_ok(())
            }
            _ => {
                // Do not ICE if we are not monomorphic enough.
                ensure_monomorphic_enough(*self.tcx, src.layout.ty)?;
                ensure_monomorphic_enough(*self.tcx, cast_ty.ty)?;

                span_bug!(
                    self.cur_span(),
                    "unsize_into: invalid conversion: {:?} -> {:?}",
                    src.layout,
                    dest.layout
                )
            }
        }
    }
}