use std::assert_matches::assert_matches;

use rustc_abi::{FieldIdx, Integer};
use rustc_apfloat::ieee::{Double, Half, Quad, Single};
use rustc_apfloat::{Float, FloatConvert};
use rustc_middle::mir::CastKind;
use rustc_middle::mir::interpret::{InterpResult, PointerArithmetic, Scalar};
use rustc_middle::ty::adjustment::PointerCoercion;
use rustc_middle::ty::layout::{IntegerExt, TyAndLayout};
use rustc_middle::ty::{self, FloatTy, Ty};
use rustc_middle::{bug, span_bug};
use tracing::trace;

use super::util::ensure_monomorphic_enough;
use super::{
    FnVal, ImmTy, Immediate, InterpCx, Machine, OpTy, PlaceTy, err_inval, interp_ok, throw_ub,
    throw_ub_custom,
};
use crate::interpret::Writeable;
use crate::{enter_trace_span, fluent_generated as fluent};

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
    pub fn cast(
        &mut self,
        src: &OpTy<'tcx, M::Provenance>,
        cast_kind: CastKind,
        cast_ty: Ty<'tcx>,
        dest: &PlaceTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx> {
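        // `cast_ty` will often be the same as `dest.layout.ty`; in that case, reuse the layout
        // we already have instead of computing it again.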
        let cast_layout =
            if cast_ty == dest.layout.ty { dest.layout } else { self.layout_of(cast_ty)? };
        match cast_kind {
            CastKind::PointerCoercion(PointerCoercion::Unsize, _) => {
                self.unsize_into(src, cast_layout, dest)?;
            }

            CastKind::PointerExposeProvenance => {
                let src = self.read_immediate(src)?;
                let res = self.pointer_expose_provenance_cast(&src, cast_layout)?;
                self.write_immediate(*res, dest)?;
            }

            CastKind::PointerWithExposedProvenance => {
                let src = self.read_immediate(src)?;
                let res = self.pointer_with_exposed_provenance_cast(&src, cast_layout)?;
                self.write_immediate(*res, dest)?;
            }

            CastKind::IntToInt | CastKind::IntToFloat => {
                let src = self.read_immediate(src)?;
                let res = self.int_to_int_or_float(&src, cast_layout)?;
                self.write_immediate(*res, dest)?;
            }

            CastKind::FloatToFloat | CastKind::FloatToInt => {
                let src = self.read_immediate(src)?;
                let res = self.float_to_float_or_int(&src, cast_layout)?;
                self.write_immediate(*res, dest)?;
            }

            CastKind::FnPtrToPtr | CastKind::PtrToPtr => {
                let src = self.read_immediate(src)?;
                let res = self.ptr_to_ptr(&src, cast_layout)?;
                self.write_immediate(*res, dest)?;
            }

            CastKind::PointerCoercion(
                PointerCoercion::MutToConstPointer | PointerCoercion::ArrayToPointer,
                _,
            ) => {
                bug!("{cast_kind:?} casts are for borrowck only, not runtime MIR");
            }

            CastKind::PointerCoercion(PointerCoercion::ReifyFnPointer(_), _) => {
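                // Reifying a fn item to a fn pointer needs a fully monomorphic `FnDef`;
                // otherwise this bails out with a `TooGeneric` error.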
                ensure_monomorphic_enough(*self.tcx, src.layout.ty)?;

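                // Only the *type* of the source operand matters here, not its value.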
                match *src.layout.ty.kind() {
                    ty::FnDef(def_id, args) => {
                        let instance = {
                            let _trace =
                                enter_trace_span!(M, resolve::resolve_for_fn_ptr, ?def_id);
                            ty::Instance::resolve_for_fn_ptr(
                                *self.tcx,
                                self.typing_env,
                                def_id,
                                args,
                            )
                            .ok_or_else(|| err_inval!(TooGeneric))?
                        };

                        let fn_ptr = self.fn_ptr(FnVal::Instance(instance));
                        self.write_pointer(fn_ptr, dest)?;
                    }
                    _ => span_bug!(self.cur_span(), "reify fn pointer on {}", src.layout.ty),
                }
            }

            CastKind::PointerCoercion(PointerCoercion::UnsafeFnPointer, _) => {
                let src = self.read_immediate(src)?;
                match cast_ty.kind() {
                    ty::FnPtr(..) => {
                        // No change to the value, only to its type.
                        self.write_immediate(*src, dest)?;
                    }
                    _ => span_bug!(self.cur_span(), "fn to unsafe fn cast on {}", cast_ty),
                }
            }

            CastKind::PointerCoercion(PointerCoercion::ClosureFnPointer(_), _) => {
                ensure_monomorphic_enough(*self.tcx, src.layout.ty)?;

                match *src.layout.ty.kind() {
                    ty::Closure(def_id, args) => {
                        let instance = {
                            let _trace = enter_trace_span!(M, resolve::resolve_closure, ?def_id);
                            ty::Instance::resolve_closure(
                                *self.tcx,
                                def_id,
                                args,
                                ty::ClosureKind::FnOnce,
                            )
                        };
                        let fn_ptr = self.fn_ptr(FnVal::Instance(instance));
                        self.write_pointer(fn_ptr, dest)?;
                    }
                    _ => span_bug!(self.cur_span(), "closure fn pointer on {}", src.layout.ty),
                }
            }

            CastKind::Transmute | CastKind::Subtype => {
                assert!(src.layout.is_sized());
                assert!(dest.layout.is_sized());
                assert_eq!(cast_ty, dest.layout.ty);
                if src.layout.size != dest.layout.size {
                    throw_ub_custom!(
                        fluent::const_eval_invalid_transmute,
                        src_bytes = src.layout.size.bytes(),
                        dest_bytes = dest.layout.size.bytes(),
                        src = src.layout.ty,
                        dest = dest.layout.ty,
                    );
                }

                self.copy_op_allow_transmute(src, dest)?;
            }
        }
        interp_ok(())
    }

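    /// Handles `IntToInt` and `IntToFloat` casts.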
    pub fn int_to_int_or_float(
        &self,
        src: &ImmTy<'tcx, M::Provenance>,
        cast_to: TyAndLayout<'tcx>,
    ) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> {
        assert!(src.layout.ty.is_integral() || src.layout.ty.is_char() || src.layout.ty.is_bool());
        assert!(cast_to.ty.is_floating_point() || cast_to.ty.is_integral() || cast_to.ty.is_char());

        interp_ok(ImmTy::from_scalar(
            self.cast_from_int_like(src.to_scalar(), src.layout, cast_to.ty)?,
            cast_to,
        ))
    }

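    /// Handles `FloatToFloat` and `FloatToInt` casts.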
    pub fn float_to_float_or_int(
        &self,
        src: &ImmTy<'tcx, M::Provenance>,
        cast_to: TyAndLayout<'tcx>,
    ) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> {
        let ty::Float(fty) = src.layout.ty.kind() else {
            bug!("FloatToFloat/FloatToInt cast: source type {} is not a float type", src.layout.ty)
        };
        let val = match fty {
            FloatTy::F16 => self.cast_from_float(src.to_scalar().to_f16()?, cast_to.ty),
            FloatTy::F32 => self.cast_from_float(src.to_scalar().to_f32()?, cast_to.ty),
            FloatTy::F64 => self.cast_from_float(src.to_scalar().to_f64()?, cast_to.ty),
            FloatTy::F128 => self.cast_from_float(src.to_scalar().to_f128()?, cast_to.ty),
        };
        interp_ok(ImmTy::from_scalar(val, cast_to))
    }

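    /// Handles `FnPtrToPtr` and `PtrToPtr` casts.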
    pub fn ptr_to_ptr(
        &self,
        src: &ImmTy<'tcx, M::Provenance>,
        cast_to: TyAndLayout<'tcx>,
    ) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> {
        assert!(src.layout.ty.is_any_ptr());
        assert!(cast_to.ty.is_raw_ptr());
        if cast_to.size == src.layout.size {
            // Thin-to-thin or wide-to-wide cast: only the pointee type changes, the value is
            // unaffected.
            return interp_ok(ImmTy::from_immediate(**src, cast_to));
        } else {
            // Casting a wide pointer to a thin pointer: drop the metadata.
            assert_eq!(src.layout.size, 2 * self.pointer_size());
            assert_eq!(cast_to.size, self.pointer_size());
            assert!(src.layout.ty.is_raw_ptr());
            return match **src {
                Immediate::ScalarPair(data, _) => interp_ok(ImmTy::from_scalar(data, cast_to)),
                Immediate::Scalar(..) => span_bug!(
                    self.cur_span(),
                    "{:?} input to a fat-to-thin cast ({} -> {})",
                    *src,
                    src.layout.ty,
                    cast_to.ty
                ),
                Immediate::Uninit => throw_ub!(InvalidUninitBytes(None)),
            };
        }
    }

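    /// Handles `PointerExposeProvenance` casts: the pointer's provenance is exposed to the
    /// machine and its address is returned as an integer.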
    pub fn pointer_expose_provenance_cast(
        &mut self,
        src: &ImmTy<'tcx, M::Provenance>,
        cast_to: TyAndLayout<'tcx>,
    ) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> {
        assert_matches!(src.layout.ty.kind(), ty::RawPtr(_, _) | ty::FnPtr(..));
        assert!(cast_to.ty.is_integral());

        let scalar = src.to_scalar();
        let ptr = scalar.to_pointer(self)?;
        match ptr.into_pointer_or_addr() {
            Ok(ptr) => M::expose_provenance(self, ptr.provenance)?,
            Err(_) => {} // Nothing to expose for a pointer without provenance.
        };
        interp_ok(ImmTy::from_scalar(
            self.cast_from_int_like(scalar, src.layout, cast_to.ty)?,
            cast_to,
        ))
    }

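    /// Handles `PointerWithExposedProvenance` casts: turn an integer back into a pointer;
    /// the machine decides what provenance, if any, the resulting pointer carries.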
    pub fn pointer_with_exposed_provenance_cast(
        &self,
        src: &ImmTy<'tcx, M::Provenance>,
        cast_to: TyAndLayout<'tcx>,
    ) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> {
        assert!(src.layout.ty.is_integral());
        assert_matches!(cast_to.ty.kind(), ty::RawPtr(_, _));

        // First cast the integer to a target-sized address.
        let scalar = src.to_scalar();
        let addr = self.cast_from_int_like(scalar, src.layout, self.tcx.types.usize)?;
        let addr = addr.to_target_usize(self)?;

        // Then let the machine turn that address into a pointer.
        let ptr = M::ptr_from_addr_cast(self, addr)?;
        interp_ok(ImmTy::from_scalar(Scalar::from_maybe_pointer(ptr, self), cast_to))
    }

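    /// Low-level cast helper that works directly on scalars and accepts any "int-like" input:
    /// integers, raw and fn pointers, `bool`, and `char`.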
    fn cast_from_int_like(
        &self,
        scalar: Scalar<M::Provenance>, // input value; `src_layout` gives its type and layout
        src_layout: TyAndLayout<'tcx>,
        cast_ty: Ty<'tcx>,
    ) -> InterpResult<'tcx, Scalar<M::Provenance>> {
        // Remember whether the source type is signed; that matters for the int-to-float
        // conversion below.
        let signed = src_layout.backend_repr.is_signed();

        let v = match src_layout.ty.kind() {
            ty::Uint(_) | ty::RawPtr(..) | ty::FnPtr(..) => scalar.to_uint(src_layout.size)?,
            ty::Int(_) => scalar.to_int(src_layout.size)? as u128, // sign-extension is wanted here
            ty::Bool => scalar.to_bool()?.into(),
            ty::Char => scalar.to_char()?.into(),
            _ => span_bug!(self.cur_span(), "invalid int-like cast from {}", src_layout.ty),
        };

        interp_ok(match *cast_ty.kind() {
            // int -> int
            ty::Int(_) | ty::Uint(_) => {
                let size = match *cast_ty.kind() {
                    ty::Int(t) => Integer::from_int_ty(self, t).size(),
                    ty::Uint(t) => Integer::from_uint_ty(self, t).size(),
                    _ => bug!(),
                };
                let v = size.truncate(v);
                Scalar::from_uint(v, size)
            }

            // signed int -> float
            ty::Float(fty) if signed => {
                let v = v as i128;
                match fty {
                    FloatTy::F16 => Scalar::from_f16(Half::from_i128(v).value),
                    FloatTy::F32 => Scalar::from_f32(Single::from_i128(v).value),
                    FloatTy::F64 => Scalar::from_f64(Double::from_i128(v).value),
                    FloatTy::F128 => Scalar::from_f128(Quad::from_i128(v).value),
                }
            }
            // unsigned int -> float
            ty::Float(fty) => match fty {
                FloatTy::F16 => Scalar::from_f16(Half::from_u128(v).value),
                FloatTy::F32 => Scalar::from_f32(Single::from_u128(v).value),
                FloatTy::F64 => Scalar::from_f64(Double::from_u128(v).value),
                FloatTy::F128 => Scalar::from_f128(Quad::from_u128(v).value),
            },

            // u8 -> char
            ty::Char => Scalar::from_u32(u8::try_from(v).unwrap().into()),

            // Casts to `bool` are not permitted by rustc, no need to handle them here.
            _ => span_bug!(self.cur_span(), "invalid int to {} cast", cast_ty),
        })
    }

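    /// Low-level cast helper: converts an apfloat `f` into an int or float type.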
    fn cast_from_float<F>(&self, f: F, dest_ty: Ty<'tcx>) -> Scalar<M::Provenance>
    where
        F: Float
            + Into<Scalar<M::Provenance>>
            + FloatConvert<Half>
            + FloatConvert<Single>
            + FloatConvert<Double>
            + FloatConvert<Quad>,
    {
        match *dest_ty.kind() {
            // float -> uint
            ty::Uint(t) => {
                let size = Integer::from_uint_ty(self, t).size();
                let v = f.to_u128(size.bits_usize()).value;
                Scalar::from_uint(v, size)
            }
            // float -> int
            ty::Int(t) => {
                let size = Integer::from_int_ty(self, t).size();
                let v = f.to_i128(size.bits_usize()).value;
                Scalar::from_int(v, size)
            }
            // float -> float, going through the machine's `adjust_nan` hook for NaN results
            ty::Float(fty) => match fty {
                FloatTy::F16 => {
                    Scalar::from_f16(self.adjust_nan(f.convert(&mut false).value, &[f]))
                }
                FloatTy::F32 => {
                    Scalar::from_f32(self.adjust_nan(f.convert(&mut false).value, &[f]))
                }
                FloatTy::F64 => {
                    Scalar::from_f64(self.adjust_nan(f.convert(&mut false).value, &[f]))
                }
                FloatTy::F128 => {
                    Scalar::from_f128(self.adjust_nan(f.convert(&mut false).value, &[f]))
                }
            },
            _ => span_bug!(self.cur_span(), "invalid float to {} cast", dest_ty),
        }
    }

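    /// `src` is a *pointer to* a `source_ty`; write a pointer to the same data, but at type
    /// `cast_ty`, into `dest`.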
    fn unsize_into_ptr(
        &mut self,
        src: &OpTy<'tcx, M::Provenance>,
        dest: &impl Writeable<'tcx, M::Provenance>,
        // The pointee types
        source_ty: Ty<'tcx>,
        cast_ty: Ty<'tcx>,
    ) -> InterpResult<'tcx> {
        // E.g. `&Struct` -> `&dyn Trait`: find the tail types that actually change.
        let (src_pointee_ty, dest_pointee_ty) =
            self.tcx.struct_lockstep_tails_for_codegen(source_ty, cast_ty, self.typing_env);

        match (src_pointee_ty.kind(), dest_pointee_ty.kind()) {
            (&ty::Array(_, length), &ty::Slice(_)) => {
                let ptr = self.read_pointer(src)?;
                let val = Immediate::new_slice(
                    ptr,
                    length
                        .try_to_target_usize(*self.tcx)
                        .expect("expected monomorphic const in const eval"),
                    self,
                );
                self.write_immediate(val, dest)
            }
            (ty::Dynamic(data_a, _), ty::Dynamic(data_b, _)) => {
                let val = self.read_immediate(src)?;
                // Trivial case: same trait object type, the value is unchanged.
                if data_a == data_b {
                    return self.write_immediate(*val, dest);
                }
                // Take apart the old pointer, and find the dynamic type.
                let (old_data, old_vptr) = val.to_scalar_pair();
                let old_data = old_data.to_pointer(self)?;
                let old_vptr = old_vptr.to_pointer(self)?;
                let ty = self.get_ptr_vtable_ty(old_vptr, Some(data_a))?;

                // Sanity-check the vtables involved in this upcast.
                let vptr_entry_idx =
                    self.tcx.supertrait_vtable_slot((src_pointee_ty, dest_pointee_ty));
                let vtable_entries = self.vtable_entries(data_a.principal(), ty);
                if let Some(entry_idx) = vptr_entry_idx {
                    // The upcast goes through a dedicated supertrait vtable slot; make sure that
                    // slot indeed refers to the destination trait.
                    let Some(&ty::VtblEntry::TraitVPtr(upcast_trait_ref)) =
                        vtable_entries.get(entry_idx)
                    else {
                        span_bug!(
                            self.cur_span(),
                            "invalid vtable entry index in {} -> {} upcast",
                            src_pointee_ty,
                            dest_pointee_ty
                        );
                    };
                    let erased_trait_ref =
                        ty::ExistentialTraitRef::erase_self_ty(*self.tcx, upcast_trait_ref);
                    assert_eq!(
                        data_b.principal().map(|b| {
                            self.tcx.normalize_erasing_late_bound_regions(self.typing_env, b)
                        }),
                        Some(erased_trait_ref),
                    );
                } else {
                    // No dedicated slot: the destination vtable must be a prefix of the source
                    // vtable, so double-check that.
                    let vtable_entries_b = self.vtable_entries(data_b.principal(), ty);
                    assert!(&vtable_entries[..vtable_entries_b.len()] == vtable_entries_b);
                };

                // Get the destination trait vtable and return that.
                let new_vptr = self.get_vtable_ptr(ty, data_b)?;
                self.write_immediate(Immediate::new_dyn_trait(old_data, new_vptr, self), dest)
            }
            (_, &ty::Dynamic(data, _)) => {
                // Casting to a trait object: build the vtable for the source pointee type.
                let vtable = self.get_vtable_ptr(src_pointee_ty, data)?;
                let ptr = self.read_pointer(src)?;
                let val = Immediate::new_dyn_trait(ptr, vtable, &*self.tcx);
                self.write_immediate(val, dest)
            }
            _ => {
                // Prefer a `TooGeneric` error over an ICE if the types are not monomorphic enough.
                ensure_monomorphic_enough(*self.tcx, src.layout.ty)?;
                ensure_monomorphic_enough(*self.tcx, cast_ty)?;

                span_bug!(
                    self.cur_span(),
                    "invalid pointer unsizing {} -> {}",
                    src.layout.ty,
                    cast_ty
                )
            }
        }
    }

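    /// Performs an unsizing coercion of `src` into `dest`, whose type and layout are given by
    /// `cast_ty`.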
    pub fn unsize_into(
        &mut self,
        src: &OpTy<'tcx, M::Provenance>,
        cast_ty: TyAndLayout<'tcx>,
        dest: &impl Writeable<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx> {
        trace!("Unsizing {:?} of type {} into {}", *src, src.layout.ty, cast_ty.ty);
        match (src.layout.ty.kind(), cast_ty.ty.kind()) {
            // For pattern types, unsize the base value stored in field 0.
            (&ty::Pat(_, s_pat), &ty::Pat(cast_ty, c_pat)) if s_pat == c_pat => {
                let src = self.project_field(src, FieldIdx::ZERO)?;
                let dest = self.project_field(dest, FieldIdx::ZERO)?;
                let cast_ty = self.layout_of(cast_ty)?;
                self.unsize_into(&src, cast_ty, &dest)
            }
            (&ty::Ref(_, s, _), &ty::Ref(_, c, _) | &ty::RawPtr(c, _))
            | (&ty::RawPtr(s, _), &ty::RawPtr(c, _)) => self.unsize_into_ptr(src, dest, s, c),
            (&ty::Adt(def_a, _), &ty::Adt(def_b, _)) => {
                assert_eq!(def_a, def_b); // implies the same number of fields

                // Unsizing a struct around a pointer field, e.g. `Arc<T>` -> `Arc<dyn Trait>`:
                // copy all fields unchanged except for the single field that actually needs
                // unsizing; 1-ZST fields are skipped.
                let mut found_cast_field = false;
                for i in 0..src.layout.fields.count() {
                    let cast_ty_field = cast_ty.field(self, i);
                    let i = FieldIdx::from_usize(i);
                    let src_field = self.project_field(src, i)?;
                    let dst_field = self.project_field(dest, i)?;
                    if src_field.layout.is_1zst() && cast_ty_field.is_1zst() {
                        // Skip 1-ZST fields.
                    } else if src_field.layout.ty == cast_ty_field.ty {
                        self.copy_op(&src_field, &dst_field)?;
                    } else {
                        if found_cast_field {
                            span_bug!(self.cur_span(), "unsize_into: more than one field to cast");
                        }
                        found_cast_field = true;
                        self.unsize_into(&src_field, cast_ty_field, &dst_field)?;
                    }
                }
                interp_ok(())
            }
            _ => {
                // Prefer a `TooGeneric` error over an ICE if the types are not monomorphic enough.
                ensure_monomorphic_enough(*self.tcx, src.layout.ty)?;
                ensure_monomorphic_enough(*self.tcx, cast_ty.ty)?;

                span_bug!(
                    self.cur_span(),
                    "unsize_into: invalid conversion: {:?} -> {:?}",
                    src.layout,
                    dest.layout()
                )
            }
        }
    }
}