use rustc_abi::{FieldIdx, Integer};
use rustc_apfloat::ieee::{Double, Half, Quad, Single};
use rustc_apfloat::{Float, FloatConvert};
use rustc_data_structures::assert_matches;
use rustc_middle::mir::CastKind;
use rustc_middle::mir::interpret::{InterpResult, PointerArithmetic, Scalar};
use rustc_middle::ty::adjustment::PointerCoercion;
use rustc_middle::ty::layout::{IntegerExt, TyAndLayout};
use rustc_middle::ty::{self, FloatTy, Ty};
use rustc_middle::{bug, span_bug};
use tracing::trace;

use super::util::ensure_monomorphic_enough;
use super::{
    FnVal, ImmTy, Immediate, InterpCx, Machine, OpTy, PlaceTy, err_inval, interp_ok, throw_ub,
    throw_ub_custom,
};
use crate::interpret::Writeable;
use crate::{enter_trace_span, fluent_generated as fluent};

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
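    /// Performs the cast operation `cast_kind` on `src`, casting its value to `cast_ty`, and
    /// writes the result into `dest`.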
    pub fn cast(
        &mut self,
        src: &OpTy<'tcx, M::Provenance>,
        cast_kind: CastKind,
        cast_ty: Ty<'tcx>,
        dest: &PlaceTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx> {
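        // If `cast_ty` already matches the destination type, reuse `dest`'s layout rather than
        // running another layout query.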
        let cast_layout =
            if cast_ty == dest.layout.ty { dest.layout } else { self.layout_of(cast_ty)? };
        match cast_kind {
            CastKind::PointerCoercion(PointerCoercion::Unsize, _) => {
                self.unsize_into(src, cast_layout, dest)?;
            }

            CastKind::PointerExposeProvenance => {
                let src = self.read_immediate(src)?;
                let res = self.pointer_expose_provenance_cast(&src, cast_layout)?;
                self.write_immediate(*res, dest)?;
            }

            CastKind::PointerWithExposedProvenance => {
                let src = self.read_immediate(src)?;
                let res = self.pointer_with_exposed_provenance_cast(&src, cast_layout)?;
                self.write_immediate(*res, dest)?;
            }

            CastKind::IntToInt | CastKind::IntToFloat => {
                let src = self.read_immediate(src)?;
                let res = self.int_to_int_or_float(&src, cast_layout)?;
                self.write_immediate(*res, dest)?;
            }

            CastKind::FloatToFloat | CastKind::FloatToInt => {
                let src = self.read_immediate(src)?;
                let res = self.float_to_float_or_int(&src, cast_layout)?;
                self.write_immediate(*res, dest)?;
            }

            CastKind::FnPtrToPtr | CastKind::PtrToPtr => {
                let src = self.read_immediate(src)?;
                let res = self.ptr_to_ptr(&src, cast_layout)?;
                self.write_immediate(*res, dest)?;
            }

            CastKind::PointerCoercion(
                PointerCoercion::MutToConstPointer | PointerCoercion::ArrayToPointer,
                _,
            ) => {
                bug!("{cast_kind:?} casts are for borrowck only, not runtime MIR");
            }

            CastKind::PointerCoercion(PointerCoercion::ReifyFnPointer(_), _) => {
                ensure_monomorphic_enough(*self.tcx, src.layout.ty)?;

                match *src.layout.ty.kind() {
                    ty::FnDef(def_id, args) => {
                        let instance = {
                            let _trace = enter_trace_span!(M, resolve::resolve_for_fn_ptr, ?def_id);
                            ty::Instance::resolve_for_fn_ptr(
                                *self.tcx,
                                self.typing_env,
                                def_id,
                                args,
                            )
                            .ok_or_else(|| err_inval!(TooGeneric))?
                        };

                        let fn_ptr = self.fn_ptr(FnVal::Instance(instance));
                        self.write_pointer(fn_ptr, dest)?;
                    }
                    _ => span_bug!(self.cur_span(), "reify fn pointer on {}", src.layout.ty),
                }
            }

            CastKind::PointerCoercion(PointerCoercion::UnsafeFnPointer, _) => {
                let src = self.read_immediate(src)?;
                match cast_ty.kind() {
                    ty::FnPtr(..) => {
                        self.write_immediate(*src, dest)?;
                    }
                    _ => span_bug!(self.cur_span(), "fn to unsafe fn cast on {}", cast_ty),
                }
            }

            CastKind::PointerCoercion(PointerCoercion::ClosureFnPointer(_), _) => {
                ensure_monomorphic_enough(*self.tcx, src.layout.ty)?;

                match *src.layout.ty.kind() {
                    ty::Closure(def_id, args) => {
                        let instance = {
                            let _trace = enter_trace_span!(M, resolve::resolve_closure, ?def_id);
                            ty::Instance::resolve_closure(
                                *self.tcx,
                                def_id,
                                args,
                                ty::ClosureKind::FnOnce,
                            )
                        };
                        let fn_ptr = self.fn_ptr(FnVal::Instance(instance));
                        self.write_pointer(fn_ptr, dest)?;
                    }
                    _ => span_bug!(self.cur_span(), "closure fn pointer on {}", src.layout.ty),
                }
            }

            CastKind::Transmute | CastKind::Subtype => {
                assert!(src.layout.is_sized());
                assert!(dest.layout.is_sized());
                assert_eq!(cast_ty, dest.layout.ty);
                if src.layout.size != dest.layout.size {
                    throw_ub_custom!(
                        fluent::const_eval_invalid_transmute,
                        src_bytes = src.layout.size.bytes(),
                        dest_bytes = dest.layout.size.bytes(),
                        src = src.layout.ty,
                        dest = dest.layout.ty,
                    );
                }

                self.copy_op_allow_transmute(src, dest)?;
            }
        }
        interp_ok(())
    }

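    /// Handles `IntToInt` and `IntToFloat` casts: the source must be an integer, `char`, or
    /// `bool` value.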
    pub fn int_to_int_or_float(
        &self,
        src: &ImmTy<'tcx, M::Provenance>,
        cast_to: TyAndLayout<'tcx>,
    ) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> {
        assert!(src.layout.ty.is_integral() || src.layout.ty.is_char() || src.layout.ty.is_bool());
        assert!(cast_to.ty.is_floating_point() || cast_to.ty.is_integral() || cast_to.ty.is_char());

        interp_ok(ImmTy::from_scalar(
            self.cast_from_int_like(src.to_scalar(), src.layout, cast_to.ty)?,
            cast_to,
        ))
    }

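    /// Handles `FloatToFloat` and `FloatToInt` casts: the source must be a floating-point value.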
    pub fn float_to_float_or_int(
        &self,
        src: &ImmTy<'tcx, M::Provenance>,
        cast_to: TyAndLayout<'tcx>,
    ) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> {
        let ty::Float(fty) = src.layout.ty.kind() else {
            bug!("FloatToFloat/FloatToInt cast: source type {} is not a float type", src.layout.ty)
        };
        let val = match fty {
            FloatTy::F16 => self.cast_from_float(src.to_scalar().to_f16()?, cast_to.ty),
            FloatTy::F32 => self.cast_from_float(src.to_scalar().to_f32()?, cast_to.ty),
            FloatTy::F64 => self.cast_from_float(src.to_scalar().to_f64()?, cast_to.ty),
            FloatTy::F128 => self.cast_from_float(src.to_scalar().to_f128()?, cast_to.ty),
        };
        interp_ok(ImmTy::from_scalar(val, cast_to))
    }

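    /// Handles `FnPtrToPtr` and `PtrToPtr` casts, including wide-to-thin pointer casts that drop
    /// the metadata.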
    pub fn ptr_to_ptr(
        &self,
        src: &ImmTy<'tcx, M::Provenance>,
        cast_to: TyAndLayout<'tcx>,
    ) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> {
        assert!(src.layout.ty.is_any_ptr());
        assert!(cast_to.ty.is_raw_ptr());
        if cast_to.size == src.layout.size {
            // Thin-to-thin or wide-to-wide pointer cast: the value is unchanged, only the type.
            return interp_ok(ImmTy::from_immediate(**src, cast_to));
        } else {
            // Casting the metadata away from a wide pointer.
            assert_eq!(src.layout.size, 2 * self.pointer_size());
            assert_eq!(cast_to.size, self.pointer_size());
            assert!(src.layout.ty.is_raw_ptr());
            return match **src {
                Immediate::ScalarPair(data, _) => interp_ok(ImmTy::from_scalar(data, cast_to)),
                Immediate::Scalar(..) => span_bug!(
                    self.cur_span(),
                    "{:?} input to a fat-to-thin cast ({} -> {})",
                    *src,
                    src.layout.ty,
                    cast_to.ty
                ),
                Immediate::Uninit => throw_ub!(InvalidUninitBytes(None)),
            };
        }
    }

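    /// Handles `PointerExposeProvenance` casts: exposes the pointer's provenance (if any) and
    /// casts its address to the target integer type.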
    pub fn pointer_expose_provenance_cast(
        &mut self,
        src: &ImmTy<'tcx, M::Provenance>,
        cast_to: TyAndLayout<'tcx>,
    ) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> {
        assert_matches!(src.layout.ty.kind(), ty::RawPtr(_, _) | ty::FnPtr(..));
        assert!(cast_to.ty.is_integral());

        let scalar = src.to_scalar();
        let ptr = scalar.to_pointer(self)?;
        match ptr.into_pointer_or_addr() {
            Ok(ptr) => M::expose_provenance(self, ptr.provenance)?,
            // A pointer without provenance is just an address; there is nothing to expose.
            Err(_) => {}
        };
        interp_ok(ImmTy::from_scalar(
            self.cast_from_int_like(scalar, src.layout, cast_to.ty)?,
            cast_to,
        ))
    }

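    /// Handles `PointerWithExposedProvenance` casts: turns an integer into a pointer, letting the
    /// machine (`ptr_from_addr_cast`) decide which provenance, if any, the result gets.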
    pub fn pointer_with_exposed_provenance_cast(
        &self,
        src: &ImmTy<'tcx, M::Provenance>,
        cast_to: TyAndLayout<'tcx>,
    ) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> {
        assert!(src.layout.ty.is_integral());
        assert_matches!(cast_to.ty.kind(), ty::RawPtr(_, _));

        let scalar = src.to_scalar();
        let addr = self.cast_from_int_like(scalar, src.layout, self.tcx.types.usize)?;
        let addr = addr.to_target_usize(self)?;

        let ptr = M::ptr_from_addr_cast(self, addr)?;
        interp_ok(ImmTy::from_scalar(Scalar::from_maybe_pointer(ptr, self), cast_to))
    }

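    /// Low-level cast helper: casts a scalar with an int-like layout (integer, `bool`, `char`,
    /// or thin pointer) to an integer, float, or `char` type.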
    fn cast_from_int_like(
        &self,
        scalar: Scalar<M::Provenance>,
        src_layout: TyAndLayout<'tcx>,
        cast_ty: Ty<'tcx>,
    ) -> InterpResult<'tcx, Scalar<M::Provenance>> {
        // Whether the value must be treated as signed depends on the *source* type.
        let signed = src_layout.backend_repr.is_signed();

        let v = match src_layout.ty.kind() {
            ty::Uint(_) | ty::RawPtr(..) | ty::FnPtr(..) => scalar.to_uint(src_layout.size)?,
            ty::Int(_) => scalar.to_int(src_layout.size)? as u128,
            ty::Bool => scalar.to_bool()?.into(),
            ty::Char => scalar.to_char()?.into(),
            _ => span_bug!(self.cur_span(), "invalid int-like cast from {}", src_layout.ty),
        };

        interp_ok(match *cast_ty.kind() {
            ty::Int(_) | ty::Uint(_) => {
                let size = match *cast_ty.kind() {
                    ty::Int(t) => Integer::from_int_ty(self, t).size(),
                    ty::Uint(t) => Integer::from_uint_ty(self, t).size(),
                    _ => bug!(),
                };
                // Truncate to the width of the target type.
                let v = size.truncate(v);
                Scalar::from_uint(v, size)
            }

            ty::Float(fty) if signed => {
                let v = v as i128;
                match fty {
                    FloatTy::F16 => Scalar::from_f16(Half::from_i128(v).value),
                    FloatTy::F32 => Scalar::from_f32(Single::from_i128(v).value),
                    FloatTy::F64 => Scalar::from_f64(Double::from_i128(v).value),
                    FloatTy::F128 => Scalar::from_f128(Quad::from_i128(v).value),
                }
            }
            ty::Float(fty) => match fty {
                FloatTy::F16 => Scalar::from_f16(Half::from_u128(v).value),
                FloatTy::F32 => Scalar::from_f32(Single::from_u128(v).value),
                FloatTy::F64 => Scalar::from_f64(Double::from_u128(v).value),
                FloatTy::F128 => Scalar::from_f128(Quad::from_u128(v).value),
            },

            // Only `u8` can be cast to `char`, so the value is guaranteed to fit.
            ty::Char => Scalar::from_u32(u8::try_from(v).unwrap().into()),

            _ => span_bug!(self.cur_span(), "invalid int to {} cast", cast_ty),
        })
    }

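    /// Low-level cast helper: casts the float `f` to the integer or float type `dest_ty`.
    /// Float-to-int casts saturate; float-to-float casts apply the machine's NaN adjustment.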
    fn cast_from_float<F>(&self, f: F, dest_ty: Ty<'tcx>) -> Scalar<M::Provenance>
    where
        F: Float
            + Into<Scalar<M::Provenance>>
            + FloatConvert<Half>
            + FloatConvert<Single>
            + FloatConvert<Double>
            + FloatConvert<Quad>,
    {
        match *dest_ty.kind() {
            ty::Uint(t) => {
                let size = Integer::from_uint_ty(self, t).size();
                // Float-to-int casts in Rust saturate at the bounds of the target type;
                // `to_u128` provides that saturating behavior.
                let v = f.to_u128(size.bits_usize()).value;
                Scalar::from_uint(v, size)
            }
            ty::Int(t) => {
                let size = Integer::from_int_ty(self, t).size();
                // Same as above, but saturating at the signed bounds via `to_i128`.
                let v = f.to_i128(size.bits_usize()).value;
                Scalar::from_int(v, size)
            }
            // Float-to-float casts go through `adjust_nan` so the machine can pick the NaN
            // bit pattern.
            ty::Float(fty) => match fty {
                FloatTy::F16 => {
                    Scalar::from_f16(self.adjust_nan(f.convert(&mut false).value, &[f]))
                }
                FloatTy::F32 => {
                    Scalar::from_f32(self.adjust_nan(f.convert(&mut false).value, &[f]))
                }
                FloatTy::F64 => {
                    Scalar::from_f64(self.adjust_nan(f.convert(&mut false).value, &[f]))
                }
                FloatTy::F128 => {
                    Scalar::from_f128(self.adjust_nan(f.convert(&mut false).value, &[f]))
                }
            },
            _ => span_bug!(self.cur_span(), "invalid float to {} cast", dest_ty),
        }
    }

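    /// Unsizes the pointer value in `src` (with pointee type `source_ty`) into `dest` (with
    /// pointee type `cast_ty`): array-to-slice, trait-object upcasts, and sized-to-`dyn` casts.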
    fn unsize_into_ptr(
        &mut self,
        src: &OpTy<'tcx, M::Provenance>,
        dest: &impl Writeable<'tcx, M::Provenance>,
        source_ty: Ty<'tcx>,
        cast_ty: Ty<'tcx>,
    ) -> InterpResult<'tcx> {
        let (src_pointee_ty, dest_pointee_ty) =
            self.tcx.struct_lockstep_tails_for_codegen(source_ty, cast_ty, self.typing_env);

        match (src_pointee_ty.kind(), dest_pointee_ty.kind()) {
            (&ty::Array(_, length), &ty::Slice(_)) => {
                let ptr = self.read_pointer(src)?;
                let val = Immediate::new_slice(
                    ptr,
                    length
                        .try_to_target_usize(*self.tcx)
                        .expect("expected monomorphic const in const eval"),
                    self,
                );
                self.write_immediate(val, dest)
            }
            (ty::Dynamic(data_a, _), ty::Dynamic(data_b, _)) => {
                let val = self.read_immediate(src)?;
                if data_a == data_b {
                    // The trait objects are identical, so this cast is a no-op.
                    return self.write_immediate(*val, dest);
                }
                let (old_data, old_vptr) = val.to_scalar_pair();
                let old_data = old_data.to_pointer(self)?;
                let old_vptr = old_vptr.to_pointer(self)?;
                let ty = self.get_ptr_vtable_ty(old_vptr, Some(data_a))?;

                let vptr_entry_idx =
                    self.tcx.supertrait_vtable_slot((src_pointee_ty, dest_pointee_ty));
                let vtable_entries = self.vtable_entries(data_a.principal(), ty);
                // Sanity-check the computed upcast slot against the source vtable entries.
                if let Some(entry_idx) = vptr_entry_idx {
                    let Some(&ty::VtblEntry::TraitVPtr(upcast_trait_ref)) =
                        vtable_entries.get(entry_idx)
                    else {
                        span_bug!(
                            self.cur_span(),
                            "invalid vtable entry index in {} -> {} upcast",
                            src_pointee_ty,
                            dest_pointee_ty
                        );
                    };
                    let erased_trait_ref =
                        ty::ExistentialTraitRef::erase_self_ty(*self.tcx, upcast_trait_ref);
                    assert_eq!(
                        data_b.principal().map(|b| {
                            self.tcx.normalize_erasing_late_bound_regions(self.typing_env, b)
                        }),
                        Some(erased_trait_ref),
                    );
                } else {
                    // Without a separate `TraitVPtr` slot, the destination vtable must be
                    // a prefix of the source vtable.
                    let vtable_entries_b = self.vtable_entries(data_b.principal(), ty);
                    assert!(&vtable_entries[..vtable_entries_b.len()] == vtable_entries_b);
                };

                let new_vptr = self.get_vtable_ptr(ty, data_b)?;
                self.write_immediate(Immediate::new_dyn_trait(old_data, new_vptr, self), dest)
            }
            (_, &ty::Dynamic(data, _)) => {
                let vtable = self.get_vtable_ptr(src_pointee_ty, data)?;
                let ptr = self.read_pointer(src)?;
                let val = Immediate::new_dyn_trait(ptr, vtable, &*self.tcx);
                self.write_immediate(val, dest)
            }
            _ => {
                ensure_monomorphic_enough(*self.tcx, src.layout.ty)?;
                ensure_monomorphic_enough(*self.tcx, cast_ty)?;

                span_bug!(
                    self.cur_span(),
                    "invalid pointer unsizing {} -> {}",
                    src.layout.ty,
                    cast_ty
                )
            }
        }
    }

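    /// Performs an unsizing coercion of `src` into `dest`, recursing through pattern types and
    /// wrapper ADTs to find the single pointer field that actually changes its type.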
    pub fn unsize_into(
        &mut self,
        src: &OpTy<'tcx, M::Provenance>,
        cast_ty: TyAndLayout<'tcx>,
        dest: &impl Writeable<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx> {
        trace!("Unsizing {:?} of type {} into {}", *src, src.layout.ty, cast_ty.ty);
        match (src.layout.ty.kind(), cast_ty.ty.kind()) {
            (&ty::Pat(_, s_pat), &ty::Pat(cast_ty, c_pat)) if s_pat == c_pat => {
                let src = self.project_field(src, FieldIdx::ZERO)?;
                let dest = self.project_field(dest, FieldIdx::ZERO)?;
                let cast_ty = self.layout_of(cast_ty)?;
                self.unsize_into(&src, cast_ty, &dest)
            }
            (&ty::Ref(_, s, _), &ty::Ref(_, c, _) | &ty::RawPtr(c, _))
            | (&ty::RawPtr(s, _), &ty::RawPtr(c, _)) => self.unsize_into_ptr(src, dest, s, c),
            (&ty::Adt(def_a, _), &ty::Adt(def_b, _)) => {
                assert_eq!(def_a, def_b);

                // At most one field may change its type during unsizing; all other fields must
                // either be 1-ZSTs or keep their type and get copied over unchanged.
                let mut found_cast_field = false;
                for i in 0..src.layout.fields.count() {
                    let cast_ty_field = cast_ty.field(self, i);
                    let i = FieldIdx::from_usize(i);
                    let src_field = self.project_field(src, i)?;
                    let dst_field = self.project_field(dest, i)?;
                    if src_field.layout.is_1zst() && cast_ty_field.is_1zst() {
                        // Skip 1-ZST fields.
                    } else if src_field.layout.ty == cast_ty_field.ty {
                        self.copy_op(&src_field, &dst_field)?;
                    } else {
                        if found_cast_field {
                            span_bug!(self.cur_span(), "unsize_into: more than one field to cast");
                        }
                        found_cast_field = true;
                        self.unsize_into(&src_field, cast_ty_field, &dst_field)?;
                    }
                }
                interp_ok(())
            }
            _ => {
                ensure_monomorphic_enough(*self.tcx, src.layout.ty)?;
                ensure_monomorphic_enough(*self.tcx, cast_ty.ty)?;

                span_bug!(
                    self.cur_span(),
                    "unsize_into: invalid conversion: {:?} -> {:?}",
                    src.layout,
                    dest.layout()
                )
            }
        }
    }
}