use either::{Either, Left, Right};
use rustc_abi as abi;
use rustc_abi::{BackendRepr, HasDataLayout, Size};
use rustc_data_structures::assert_matches;
use rustc_hir::def::Namespace;
use rustc_middle::mir::interpret::ScalarSizeMismatch;
use rustc_middle::ty::layout::{HasTyCtxt, HasTypingEnv, TyAndLayout};
use rustc_middle::ty::print::{FmtPrinter, PrettyPrinter};
use rustc_middle::ty::{ConstInt, ScalarInt, Ty, TyCtxt};
use rustc_middle::{bug, mir, span_bug, ty};
use rustc_span::DUMMY_SP;
use tracing::field::Empty;
use tracing::trace;

use super::{
    CtfeProvenance, Frame, InterpCx, InterpResult, MPlaceTy, Machine, MemPlace, MemPlaceMeta,
    OffsetMode, PlaceTy, Pointer, Projectable, Provenance, Scalar, alloc_range, err_ub,
    from_known_layout, interp_ok, mir_assign_valid_types, throw_ub,
};
use crate::enter_trace_span;

25#[derive(#[automatically_derived]
impl<Prov: ::core::marker::Copy + Provenance> ::core::marker::Copy for
Immediate<Prov> {
}Copy, #[automatically_derived]
impl<Prov: ::core::clone::Clone + Provenance> ::core::clone::Clone for
Immediate<Prov> {
#[inline]
fn clone(&self) -> Immediate<Prov> {
match self {
Immediate::Scalar(__self_0) =>
Immediate::Scalar(::core::clone::Clone::clone(__self_0)),
Immediate::ScalarPair(__self_0, __self_1) =>
Immediate::ScalarPair(::core::clone::Clone::clone(__self_0),
::core::clone::Clone::clone(__self_1)),
Immediate::Uninit => Immediate::Uninit,
}
}
}Clone, #[automatically_derived]
impl<Prov: ::core::fmt::Debug + Provenance> ::core::fmt::Debug for
Immediate<Prov> {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
match self {
Immediate::Scalar(__self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f, "Scalar",
&__self_0),
Immediate::ScalarPair(__self_0, __self_1) =>
::core::fmt::Formatter::debug_tuple_field2_finish(f,
"ScalarPair", __self_0, &__self_1),
Immediate::Uninit =>
::core::fmt::Formatter::write_str(f, "Uninit"),
}
}
}Debug)]
33pub enum Immediate<Prov: Provenance = CtfeProvenance> {
34 Scalar(Scalar<Prov>),
36 ScalarPair(Scalar<Prov>, Scalar<Prov>),
39 Uninit,
41}
42
43impl<Prov: Provenance> From<Scalar<Prov>> for Immediate<Prov> {
44 #[inline(always)]
45 fn from(val: Scalar<Prov>) -> Self {
46 Immediate::Scalar(val)
47 }
48}
49
50impl<Prov: Provenance> Immediate<Prov> {
51 pub fn new_pointer_with_meta(
52 ptr: Pointer<Option<Prov>>,
53 meta: MemPlaceMeta<Prov>,
54 cx: &impl HasDataLayout,
55 ) -> Self {
56 let ptr = Scalar::from_maybe_pointer(ptr, cx);
57 match meta {
58 MemPlaceMeta::None => Immediate::from(ptr),
59 MemPlaceMeta::Meta(meta) => Immediate::ScalarPair(ptr, meta),
60 }
61 }
62
63 pub fn new_slice(ptr: Pointer<Option<Prov>>, len: u64, cx: &impl HasDataLayout) -> Self {
64 Immediate::ScalarPair(
65 Scalar::from_maybe_pointer(ptr, cx),
66 Scalar::from_target_usize(len, cx),
67 )
68 }
69
70 pub fn new_dyn_trait(
71 val: Pointer<Option<Prov>>,
72 vtable: Pointer<Option<Prov>>,
73 cx: &impl HasDataLayout,
74 ) -> Self {
75 Immediate::ScalarPair(
76 Scalar::from_maybe_pointer(val, cx),
77 Scalar::from_maybe_pointer(vtable, cx),
78 )
79 }
80
81 #[inline]
82 #[cfg_attr(debug_assertions, track_caller)] pub fn to_scalar(self) -> Scalar<Prov> {
84 match self {
85 Immediate::Scalar(val) => val,
86 Immediate::ScalarPair(..) => ::rustc_middle::util::bug::bug_fmt(format_args!("Got a scalar pair where a scalar was expected"))bug!("Got a scalar pair where a scalar was expected"),
87 Immediate::Uninit => ::rustc_middle::util::bug::bug_fmt(format_args!("Got uninit where a scalar was expected"))bug!("Got uninit where a scalar was expected"),
88 }
89 }
90
91 #[inline]
92 #[cfg_attr(debug_assertions, track_caller)] pub fn to_scalar_int(self) -> ScalarInt {
94 self.to_scalar().try_to_scalar_int().unwrap()
95 }
96
97 #[inline]
98 #[cfg_attr(debug_assertions, track_caller)] pub fn to_scalar_pair(self) -> (Scalar<Prov>, Scalar<Prov>) {
100 match self {
101 Immediate::ScalarPair(val1, val2) => (val1, val2),
102 Immediate::Scalar(..) => ::rustc_middle::util::bug::bug_fmt(format_args!("Got a scalar where a scalar pair was expected"))bug!("Got a scalar where a scalar pair was expected"),
103 Immediate::Uninit => ::rustc_middle::util::bug::bug_fmt(format_args!("Got uninit where a scalar pair was expected"))bug!("Got uninit where a scalar pair was expected"),
104 }
105 }
106
107 #[inline]
109 #[cfg_attr(debug_assertions, track_caller)] pub fn to_scalar_and_meta(self) -> (Scalar<Prov>, MemPlaceMeta<Prov>) {
111 match self {
112 Immediate::ScalarPair(val1, val2) => (val1, MemPlaceMeta::Meta(val2)),
113 Immediate::Scalar(val) => (val, MemPlaceMeta::None),
114 Immediate::Uninit => ::rustc_middle::util::bug::bug_fmt(format_args!("Got uninit where a scalar or scalar pair was expected"))bug!("Got uninit where a scalar or scalar pair was expected"),
115 }
116 }
117
118 pub fn assert_matches_abi(self, abi: BackendRepr, msg: &str, cx: &impl HasDataLayout) {
120 match (self, abi) {
121 (Immediate::Scalar(scalar), BackendRepr::Scalar(s)) => {
122 match (&scalar.size(), &s.size(cx)) {
(left_val, right_val) => {
if !(*left_val == *right_val) {
let kind = ::core::panicking::AssertKind::Eq;
::core::panicking::assert_failed(kind, &*left_val, &*right_val,
::core::option::Option::Some(format_args!("{0}: scalar value has wrong size",
msg)));
}
}
};assert_eq!(scalar.size(), s.size(cx), "{msg}: scalar value has wrong size");
123 if !#[allow(non_exhaustive_omitted_patterns)] match s.primitive() {
abi::Primitive::Pointer(..) => true,
_ => false,
}matches!(s.primitive(), abi::Primitive::Pointer(..)) {
124 if !#[allow(non_exhaustive_omitted_patterns)] match scalar {
Scalar::Int(..) => true,
_ => false,
} {
{
::core::panicking::panic_fmt(format_args!("{0}: scalar value should be an integer, but has provenance",
msg));
}
};assert!(
126 matches!(scalar, Scalar::Int(..)),
127 "{msg}: scalar value should be an integer, but has provenance"
128 );
129 }
130 }
131 (Immediate::ScalarPair(a_val, b_val), BackendRepr::ScalarPair(a, b)) => {
132 match (&a_val.size(), &a.size(cx)) {
(left_val, right_val) => {
if !(*left_val == *right_val) {
let kind = ::core::panicking::AssertKind::Eq;
::core::panicking::assert_failed(kind, &*left_val, &*right_val,
::core::option::Option::Some(format_args!("{0}: first component of scalar pair has wrong size",
msg)));
}
}
};assert_eq!(
133 a_val.size(),
134 a.size(cx),
135 "{msg}: first component of scalar pair has wrong size"
136 );
137 if !#[allow(non_exhaustive_omitted_patterns)] match a.primitive() {
abi::Primitive::Pointer(..) => true,
_ => false,
}matches!(a.primitive(), abi::Primitive::Pointer(..)) {
138 if !#[allow(non_exhaustive_omitted_patterns)] match a_val {
Scalar::Int(..) => true,
_ => false,
} {
{
::core::panicking::panic_fmt(format_args!("{0}: first component of scalar pair should be an integer, but has provenance",
msg));
}
};assert!(
139 matches!(a_val, Scalar::Int(..)),
140 "{msg}: first component of scalar pair should be an integer, but has provenance"
141 );
142 }
143 match (&b_val.size(), &b.size(cx)) {
(left_val, right_val) => {
if !(*left_val == *right_val) {
let kind = ::core::panicking::AssertKind::Eq;
::core::panicking::assert_failed(kind, &*left_val, &*right_val,
::core::option::Option::Some(format_args!("{0}: second component of scalar pair has wrong size",
msg)));
}
}
};assert_eq!(
144 b_val.size(),
145 b.size(cx),
146 "{msg}: second component of scalar pair has wrong size"
147 );
148 if !#[allow(non_exhaustive_omitted_patterns)] match b.primitive() {
abi::Primitive::Pointer(..) => true,
_ => false,
}matches!(b.primitive(), abi::Primitive::Pointer(..)) {
149 if !#[allow(non_exhaustive_omitted_patterns)] match b_val {
Scalar::Int(..) => true,
_ => false,
} {
{
::core::panicking::panic_fmt(format_args!("{0}: second component of scalar pair should be an integer, but has provenance",
msg));
}
};assert!(
150 matches!(b_val, Scalar::Int(..)),
151 "{msg}: second component of scalar pair should be an integer, but has provenance"
152 );
153 }
154 }
155 (Immediate::Uninit, _) => {
156 if !abi.is_sized() {
{
::core::panicking::panic_fmt(format_args!("{0}: unsized immediates are not a thing",
msg));
}
};assert!(abi.is_sized(), "{msg}: unsized immediates are not a thing");
157 }
158 _ => {
159 ::rustc_middle::util::bug::bug_fmt(format_args!("{0}: value {1:?} does not match ABI {2:?})",
msg, self, abi))bug!("{msg}: value {self:?} does not match ABI {abi:?})",)
160 }
161 }
162 }
163
164 pub fn clear_provenance<'tcx>(&mut self) -> InterpResult<'tcx> {
165 match self {
166 Immediate::Scalar(s) => {
167 s.clear_provenance()?;
168 }
169 Immediate::ScalarPair(a, b) => {
170 a.clear_provenance()?;
171 b.clear_provenance()?;
172 }
173 Immediate::Uninit => {}
174 }
175 interp_ok(())
176 }
177
178 pub fn has_provenance(&self) -> bool {
179 match self {
180 Immediate::Scalar(scalar) => #[allow(non_exhaustive_omitted_patterns)] match scalar {
Scalar::Ptr { .. } => true,
_ => false,
}matches!(scalar, Scalar::Ptr { .. }),
181 Immediate::ScalarPair(s1, s2) => {
182 #[allow(non_exhaustive_omitted_patterns)] match s1 {
Scalar::Ptr { .. } => true,
_ => false,
}matches!(s1, Scalar::Ptr { .. }) || #[allow(non_exhaustive_omitted_patterns)] match s2 {
Scalar::Ptr { .. } => true,
_ => false,
}matches!(s2, Scalar::Ptr { .. })
183 }
184 Immediate::Uninit => false,
185 }
186 }
187}
188
189#[derive(#[automatically_derived]
impl<'tcx, Prov: ::core::clone::Clone + Provenance> ::core::clone::Clone for
ImmTy<'tcx, Prov> {
#[inline]
fn clone(&self) -> ImmTy<'tcx, Prov> {
ImmTy {
imm: ::core::clone::Clone::clone(&self.imm),
layout: ::core::clone::Clone::clone(&self.layout),
}
}
}Clone)]
192pub struct ImmTy<'tcx, Prov: Provenance = CtfeProvenance> {
193 imm: Immediate<Prov>,
194 pub layout: TyAndLayout<'tcx>,
195}
196
197impl<Prov: Provenance> std::fmt::Display for ImmTy<'_, Prov> {
198 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
199 fn print_scalar<'a, 'tcx, Prov: Provenance>(
201 p: &mut FmtPrinter<'a, 'tcx>,
202 s: Scalar<Prov>,
203 ty: Ty<'tcx>,
204 ) -> Result<(), std::fmt::Error> {
205 match s {
206 Scalar::Int(int) => p.pretty_print_const_scalar_int(int, ty, true),
207 Scalar::Ptr(ptr, _sz) => {
208 p.pretty_print_const_pointer(ptr, ty)
212 }
213 }
214 }
215 ty::tls::with(|tcx| {
216 match self.imm {
217 Immediate::Scalar(s) => {
218 if let Some(ty) = tcx.lift(self.layout.ty) {
219 let s = FmtPrinter::print_string(tcx, Namespace::ValueNS, |p| {
220 print_scalar(p, s, ty)
221 })?;
222 f.write_str(&s)?;
223 return Ok(());
224 }
225 f.write_fmt(format_args!("{0:x}: {1}", s, self.layout.ty))write!(f, "{:x}: {}", s, self.layout.ty)
226 }
227 Immediate::ScalarPair(a, b) => {
228 f.write_fmt(format_args!("({0:x}, {1:x}): {2}", a, b, self.layout.ty))write!(f, "({:x}, {:x}): {}", a, b, self.layout.ty)
230 }
231 Immediate::Uninit => {
232 f.write_fmt(format_args!("uninit: {0}", self.layout.ty))write!(f, "uninit: {}", self.layout.ty)
233 }
234 }
235 })
236 }
237}
238
239impl<Prov: Provenance> std::fmt::Debug for ImmTy<'_, Prov> {
240 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
241 f.debug_struct("ImmTy")
243 .field("imm", &self.imm)
244 .field("ty", &format_args!("{0}", self.layout.ty)format_args!("{}", self.layout.ty))
245 .finish()
246 }
247}
248
249impl<'tcx, Prov: Provenance> std::ops::Deref for ImmTy<'tcx, Prov> {
250 type Target = Immediate<Prov>;
251 #[inline(always)]
252 fn deref(&self) -> &Immediate<Prov> {
253 &self.imm
254 }
255}
256
257impl<'tcx, Prov: Provenance> ImmTy<'tcx, Prov> {
258 #[inline]
259 pub fn from_scalar(val: Scalar<Prov>, layout: TyAndLayout<'tcx>) -> Self {
260 if true {
if !layout.backend_repr.is_scalar() {
{
::core::panicking::panic_fmt(format_args!("`ImmTy::from_scalar` on non-scalar layout"));
}
};
};debug_assert!(layout.backend_repr.is_scalar(), "`ImmTy::from_scalar` on non-scalar layout");
261 if true {
match (&val.size(), &layout.size) {
(left_val, right_val) => {
if !(*left_val == *right_val) {
let kind = ::core::panicking::AssertKind::Eq;
::core::panicking::assert_failed(kind, &*left_val,
&*right_val, ::core::option::Option::None);
}
}
};
};debug_assert_eq!(val.size(), layout.size);
262 ImmTy { imm: val.into(), layout }
263 }
264
265 #[inline]
266 pub fn from_scalar_pair(a: Scalar<Prov>, b: Scalar<Prov>, layout: TyAndLayout<'tcx>) -> Self {
267 if true {
if !#[allow(non_exhaustive_omitted_patterns)] match layout.backend_repr {
BackendRepr::ScalarPair(..) => true,
_ => false,
} {
{
::core::panicking::panic_fmt(format_args!("`ImmTy::from_scalar_pair` on non-scalar-pair layout"));
}
};
};debug_assert!(
268 matches!(layout.backend_repr, BackendRepr::ScalarPair(..)),
269 "`ImmTy::from_scalar_pair` on non-scalar-pair layout"
270 );
271 let imm = Immediate::ScalarPair(a, b);
272 ImmTy { imm, layout }
273 }
274
275 #[inline(always)]
276 pub fn from_immediate(imm: Immediate<Prov>, layout: TyAndLayout<'tcx>) -> Self {
277 if true {
if !match (imm, layout.backend_repr) {
(Immediate::Scalar(..), BackendRepr::Scalar(..)) => true,
(Immediate::ScalarPair(..), BackendRepr::ScalarPair(..)) =>
true,
(Immediate::Uninit, _) if layout.is_sized() => true,
_ => false,
} {
{
::core::panicking::panic_fmt(format_args!("immediate {0:?} does not fit to layout {1:?}",
imm, layout));
}
};
};debug_assert!(
279 match (imm, layout.backend_repr) {
280 (Immediate::Scalar(..), BackendRepr::Scalar(..)) => true,
281 (Immediate::ScalarPair(..), BackendRepr::ScalarPair(..)) => true,
282 (Immediate::Uninit, _) if layout.is_sized() => true,
283 _ => false,
284 },
285 "immediate {imm:?} does not fit to layout {layout:?}",
286 );
287 ImmTy { imm, layout }
288 }
289
290 #[inline]
291 pub fn uninit(layout: TyAndLayout<'tcx>) -> Self {
292 if true {
if !layout.is_sized() {
{
::core::panicking::panic_fmt(format_args!("immediates must be sized"));
}
};
};debug_assert!(layout.is_sized(), "immediates must be sized");
293 ImmTy { imm: Immediate::Uninit, layout }
294 }
295
296 #[inline]
297 pub fn from_scalar_int(s: ScalarInt, layout: TyAndLayout<'tcx>) -> Self {
298 Self::from_scalar(Scalar::from(s), layout)
299 }
300
301 #[inline]
302 pub fn from_uint(i: impl Into<u128>, layout: TyAndLayout<'tcx>) -> Self {
303 Self::from_scalar(Scalar::from_uint(i, layout.size), layout)
304 }
305
306 #[inline]
307 pub fn from_int(i: impl Into<i128>, layout: TyAndLayout<'tcx>) -> Self {
308 Self::from_scalar(Scalar::from_int(i, layout.size), layout)
309 }
310
311 #[inline]
312 pub fn from_bool(b: bool, tcx: TyCtxt<'tcx>) -> Self {
313 let layout = tcx
315 .layout_of(ty::TypingEnv::fully_monomorphized().as_query_input(tcx.types.bool))
316 .unwrap();
317 Self::from_scalar(Scalar::from_bool(b), layout)
318 }
319
320 #[inline]
321 pub fn from_ordering(c: std::cmp::Ordering, tcx: TyCtxt<'tcx>) -> Self {
322 let ty = tcx.ty_ordering_enum(DUMMY_SP);
324 let layout =
325 tcx.layout_of(ty::TypingEnv::fully_monomorphized().as_query_input(ty)).unwrap();
326 Self::from_scalar(Scalar::Int(c.into()), layout)
327 }
328
329 pub fn from_pair(a: Self, b: Self, cx: &(impl HasTypingEnv<'tcx> + HasTyCtxt<'tcx>)) -> Self {
330 let layout = cx
331 .tcx()
332 .layout_of(
333 cx.typing_env().as_query_input(Ty::new_tup(cx.tcx(), &[a.layout.ty, b.layout.ty])),
334 )
335 .unwrap();
336 Self::from_scalar_pair(a.to_scalar(), b.to_scalar(), layout)
337 }
338
339 #[inline]
342 pub fn to_scalar_int(&self) -> InterpResult<'tcx, ScalarInt> {
343 let s = self.to_scalar().to_scalar_int()?;
344 if s.size() != self.layout.size {
345 do yeet ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ScalarSizeMismatch(ScalarSizeMismatch {
target_size: self.layout.size.bytes(),
data_size: s.size().bytes(),
}));throw_ub!(ScalarSizeMismatch(ScalarSizeMismatch {
346 target_size: self.layout.size.bytes(),
347 data_size: s.size().bytes(),
348 }));
349 }
350 interp_ok(s)
351 }
352
353 #[inline]
354 pub fn to_const_int(self) -> ConstInt {
355 if !self.layout.ty.is_integral() {
::core::panicking::panic("assertion failed: self.layout.ty.is_integral()")
};assert!(self.layout.ty.is_integral());
356 let int = self.imm.to_scalar_int();
357 match (&int.size(), &self.layout.size) {
(left_val, right_val) => {
if !(*left_val == *right_val) {
let kind = ::core::panicking::AssertKind::Eq;
::core::panicking::assert_failed(kind, &*left_val, &*right_val,
::core::option::Option::None);
}
}
};assert_eq!(int.size(), self.layout.size);
358 ConstInt::new(int, self.layout.ty.is_signed(), self.layout.ty.is_ptr_sized_integral())
359 }
360
361 #[inline]
362 #[cfg_attr(debug_assertions, track_caller)] pub fn to_pair(self, cx: &(impl HasTyCtxt<'tcx> + HasTypingEnv<'tcx>)) -> (Self, Self) {
364 let layout = self.layout;
365 let (val0, val1) = self.to_scalar_pair();
366 (
367 ImmTy::from_scalar(val0, layout.field(cx, 0)),
368 ImmTy::from_scalar(val1, layout.field(cx, 1)),
369 )
370 }
371
372 fn offset_(&self, offset: Size, layout: TyAndLayout<'tcx>, cx: &impl HasDataLayout) -> Self {
376 if truecfg!(debug_assertions) {
378 self.assert_matches_abi(
379 self.layout.backend_repr,
380 "invalid input to Immediate::offset",
381 cx,
382 );
383 }
384 if !(offset + layout.size <= self.layout.size) {
{
::core::panicking::panic_fmt(format_args!("attempting to project to field at offset {0} with size {1} into immediate with layout {2:#?}",
offset.bytes(), layout.size.bytes(), self.layout));
}
};assert!(
388 offset + layout.size <= self.layout.size,
389 "attempting to project to field at offset {} with size {} into immediate with layout {:#?}",
390 offset.bytes(),
391 layout.size.bytes(),
392 self.layout,
393 );
394 let inner_val: Immediate<_> = match (**self, self.layout.backend_repr) {
397 (Immediate::Uninit, _) => Immediate::Uninit,
399 _ if layout.is_uninhabited() => Immediate::Uninit,
403 _ if layout.is_zst() => Immediate::Uninit,
406 _ if #[allow(non_exhaustive_omitted_patterns)] match layout.backend_repr {
BackendRepr::Memory { .. } => true,
_ => false,
}matches!(layout.backend_repr, BackendRepr::Memory { .. })
409 && #[allow(non_exhaustive_omitted_patterns)] match layout.variants {
abi::Variants::Single { .. } => true,
_ => false,
}matches!(layout.variants, abi::Variants::Single { .. })
410 && #[allow(non_exhaustive_omitted_patterns)] match &layout.fields {
abi::FieldsShape::Arbitrary { offsets, .. } if offsets.len() == 0 => true,
_ => false,
}matches!(&layout.fields, abi::FieldsShape::Arbitrary { offsets, .. } if offsets.len() == 0) =>
411 {
412 Immediate::Uninit
413 }
414 _ if layout.size == self.layout.size => {
416 match (&offset.bytes(), &0) {
(left_val, right_val) => {
if !(*left_val == *right_val) {
let kind = ::core::panicking::AssertKind::Eq;
::core::panicking::assert_failed(kind, &*left_val, &*right_val,
::core::option::Option::None);
}
}
};assert_eq!(offset.bytes(), 0);
417 **self
418 }
419 (Immediate::ScalarPair(a_val, b_val), BackendRepr::ScalarPair(a, b)) => {
421 Immediate::from(if offset.bytes() == 0 {
422 a_val
423 } else {
424 match (&offset, &a.size(cx).align_to(b.align(cx).abi)) {
(left_val, right_val) => {
if !(*left_val == *right_val) {
let kind = ::core::panicking::AssertKind::Eq;
::core::panicking::assert_failed(kind, &*left_val, &*right_val,
::core::option::Option::None);
}
}
};assert_eq!(offset, a.size(cx).align_to(b.align(cx).abi));
425 b_val
426 })
427 }
428 _ => ::rustc_middle::util::bug::bug_fmt(format_args!("invalid field access on immediate {0} at offset {1}, original layout {2:#?}",
self, offset.bytes(), self.layout))bug!(
430 "invalid field access on immediate {} at offset {}, original layout {:#?}",
431 self,
432 offset.bytes(),
433 self.layout
434 ),
435 };
436 inner_val.assert_matches_abi(
438 layout.backend_repr,
439 "invalid field type in Immediate::offset",
440 cx,
441 );
442
443 ImmTy::from_immediate(inner_val, layout)
444 }
445}
446
447impl<'tcx, Prov: Provenance> Projectable<'tcx, Prov> for ImmTy<'tcx, Prov> {
448 #[inline(always)]
449 fn layout(&self) -> TyAndLayout<'tcx> {
450 self.layout
451 }
452
453 #[inline(always)]
454 fn meta(&self) -> MemPlaceMeta<Prov> {
455 if true {
if !self.layout.is_sized() {
::core::panicking::panic("assertion failed: self.layout.is_sized()")
};
};debug_assert!(self.layout.is_sized()); MemPlaceMeta::None
457 }
458
459 fn offset_with_meta<M: Machine<'tcx, Provenance = Prov>>(
460 &self,
461 offset: Size,
462 _mode: OffsetMode,
463 meta: MemPlaceMeta<Prov>,
464 layout: TyAndLayout<'tcx>,
465 ecx: &InterpCx<'tcx, M>,
466 ) -> InterpResult<'tcx, Self> {
467 match meta {
MemPlaceMeta::None => {}
ref left_val => {
::core::panicking::assert_matches_failed(left_val,
"MemPlaceMeta::None", ::core::option::Option::None);
}
};assert_matches!(meta, MemPlaceMeta::None); interp_ok(self.offset_(offset, layout, ecx))
469 }
470
471 #[inline(always)]
472 fn to_op<M: Machine<'tcx, Provenance = Prov>>(
473 &self,
474 _ecx: &InterpCx<'tcx, M>,
475 ) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>> {
476 interp_ok(self.clone().into())
477 }
478}
479
480#[derive(#[automatically_derived]
impl<Prov: ::core::marker::Copy + Provenance> ::core::marker::Copy for
Operand<Prov> {
}Copy, #[automatically_derived]
impl<Prov: ::core::clone::Clone + Provenance> ::core::clone::Clone for
Operand<Prov> {
#[inline]
fn clone(&self) -> Operand<Prov> {
match self {
Operand::Immediate(__self_0) =>
Operand::Immediate(::core::clone::Clone::clone(__self_0)),
Operand::Indirect(__self_0) =>
Operand::Indirect(::core::clone::Clone::clone(__self_0)),
}
}
}Clone, #[automatically_derived]
impl<Prov: ::core::fmt::Debug + Provenance> ::core::fmt::Debug for
Operand<Prov> {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
match self {
Operand::Immediate(__self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f,
"Immediate", &__self_0),
Operand::Indirect(__self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f,
"Indirect", &__self_0),
}
}
}Debug)]
484pub(super) enum Operand<Prov: Provenance = CtfeProvenance> {
485 Immediate(Immediate<Prov>),
486 Indirect(MemPlace<Prov>),
487}
488
489#[derive(#[automatically_derived]
impl<'tcx, Prov: ::core::clone::Clone + Provenance> ::core::clone::Clone for
OpTy<'tcx, Prov> {
#[inline]
fn clone(&self) -> OpTy<'tcx, Prov> {
OpTy {
op: ::core::clone::Clone::clone(&self.op),
layout: ::core::clone::Clone::clone(&self.layout),
}
}
}Clone)]
490pub struct OpTy<'tcx, Prov: Provenance = CtfeProvenance> {
491 op: Operand<Prov>, pub layout: TyAndLayout<'tcx>,
493}
494
495impl<Prov: Provenance> std::fmt::Debug for OpTy<'_, Prov> {
496 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
497 f.debug_struct("OpTy")
499 .field("op", &self.op)
500 .field("ty", &format_args!("{0}", self.layout.ty)format_args!("{}", self.layout.ty))
501 .finish()
502 }
503}
504
505impl<'tcx, Prov: Provenance> From<ImmTy<'tcx, Prov>> for OpTy<'tcx, Prov> {
506 #[inline(always)]
507 fn from(val: ImmTy<'tcx, Prov>) -> Self {
508 OpTy { op: Operand::Immediate(val.imm), layout: val.layout }
509 }
510}
511
512impl<'tcx, Prov: Provenance> From<MPlaceTy<'tcx, Prov>> for OpTy<'tcx, Prov> {
513 #[inline(always)]
514 fn from(mplace: MPlaceTy<'tcx, Prov>) -> Self {
515 OpTy { op: Operand::Indirect(*mplace.mplace()), layout: mplace.layout }
516 }
517}
518
519impl<'tcx, Prov: Provenance> OpTy<'tcx, Prov> {
520 #[inline(always)]
521 pub(super) fn op(&self) -> &Operand<Prov> {
522 &self.op
523 }
524
525 pub fn is_immediate_uninit(&self) -> bool {
526 #[allow(non_exhaustive_omitted_patterns)] match self.op {
Operand::Immediate(Immediate::Uninit) => true,
_ => false,
}matches!(self.op, Operand::Immediate(Immediate::Uninit))
527 }
528}
529
530impl<'tcx, Prov: Provenance> Projectable<'tcx, Prov> for OpTy<'tcx, Prov> {
531 #[inline(always)]
532 fn layout(&self) -> TyAndLayout<'tcx> {
533 self.layout
534 }
535
536 #[inline]
537 fn meta(&self) -> MemPlaceMeta<Prov> {
538 match self.as_mplace_or_imm() {
539 Left(mplace) => mplace.meta(),
540 Right(_) => {
541 if true {
if !self.layout.is_sized() {
{
::core::panicking::panic_fmt(format_args!("unsized immediates are not a thing"));
}
};
};debug_assert!(self.layout.is_sized(), "unsized immediates are not a thing");
542 MemPlaceMeta::None
543 }
544 }
545 }
546
547 fn offset_with_meta<M: Machine<'tcx, Provenance = Prov>>(
548 &self,
549 offset: Size,
550 mode: OffsetMode,
551 meta: MemPlaceMeta<Prov>,
552 layout: TyAndLayout<'tcx>,
553 ecx: &InterpCx<'tcx, M>,
554 ) -> InterpResult<'tcx, Self> {
555 match self.as_mplace_or_imm() {
556 Left(mplace) => {
557 interp_ok(mplace.offset_with_meta(offset, mode, meta, layout, ecx)?.into())
558 }
559 Right(imm) => {
560 match meta {
MemPlaceMeta::None => {}
ref left_val => {
::core::panicking::assert_matches_failed(left_val,
"MemPlaceMeta::None", ::core::option::Option::None);
}
};assert_matches!(meta, MemPlaceMeta::None); interp_ok(imm.offset_(offset, layout, ecx).into())
563 }
564 }
565 }
566
567 #[inline(always)]
568 fn to_op<M: Machine<'tcx, Provenance = Prov>>(
569 &self,
570 _ecx: &InterpCx<'tcx, M>,
571 ) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>> {
572 interp_ok(self.clone())
573 }
574}
575
576impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
577 fn read_immediate_from_mplace_raw(
582 &self,
583 mplace: &MPlaceTy<'tcx, M::Provenance>,
584 ) -> InterpResult<'tcx, Option<ImmTy<'tcx, M::Provenance>>> {
585 if mplace.layout.is_unsized() {
586 return interp_ok(None);
588 }
589
590 let Some(alloc) = self.get_place_alloc(mplace)? else {
591 return interp_ok(Some(ImmTy::uninit(mplace.layout)));
593 };
594
595 interp_ok(match mplace.layout.backend_repr {
602 BackendRepr::Scalar(abi::Scalar::Initialized { value: s, .. }) => {
603 let size = s.size(self);
604 match (&size, &mplace.layout.size) {
(left_val, right_val) => {
if !(*left_val == *right_val) {
let kind = ::core::panicking::AssertKind::Eq;
::core::panicking::assert_failed(kind, &*left_val, &*right_val,
::core::option::Option::Some(format_args!("abi::Scalar size does not match layout size")));
}
}
};assert_eq!(size, mplace.layout.size, "abi::Scalar size does not match layout size");
605 let scalar = alloc.read_scalar(
606 alloc_range(Size::ZERO, size),
607 #[allow(non_exhaustive_omitted_patterns)] match s {
abi::Primitive::Pointer(_) => true,
_ => false,
}matches!(s, abi::Primitive::Pointer(_)),
608 )?;
609 Some(ImmTy::from_scalar(scalar, mplace.layout))
610 }
611 BackendRepr::ScalarPair(
612 abi::Scalar::Initialized { value: a, .. },
613 abi::Scalar::Initialized { value: b, .. },
614 ) => {
615 let (a_size, b_size) = (a.size(self), b.size(self));
619 let b_offset = a_size.align_to(b.align(self).abi);
620 if !(b_offset.bytes() > 0) {
::core::panicking::panic("assertion failed: b_offset.bytes() > 0")
};assert!(b_offset.bytes() > 0); let a_val = alloc.read_scalar(
622 alloc_range(Size::ZERO, a_size),
623 #[allow(non_exhaustive_omitted_patterns)] match a {
abi::Primitive::Pointer(_) => true,
_ => false,
}matches!(a, abi::Primitive::Pointer(_)),
624 )?;
625 let b_val = alloc.read_scalar(
626 alloc_range(b_offset, b_size),
627 #[allow(non_exhaustive_omitted_patterns)] match b {
abi::Primitive::Pointer(_) => true,
_ => false,
}matches!(b, abi::Primitive::Pointer(_)),
628 )?;
629 Some(ImmTy::from_immediate(Immediate::ScalarPair(a_val, b_val), mplace.layout))
630 }
631 _ => {
632 None
634 }
635 })
636 }
637
638 pub fn read_immediate_raw(
647 &self,
648 src: &impl Projectable<'tcx, M::Provenance>,
649 ) -> InterpResult<'tcx, Either<MPlaceTy<'tcx, M::Provenance>, ImmTy<'tcx, M::Provenance>>> {
650 interp_ok(match src.to_op(self)?.as_mplace_or_imm() {
651 Left(ref mplace) => {
652 if let Some(val) = self.read_immediate_from_mplace_raw(mplace)? {
653 Right(val)
654 } else {
655 Left(mplace.clone())
656 }
657 }
658 Right(val) => Right(val),
659 })
660 }
661
662 #[inline(always)]
666 pub fn read_immediate(
667 &self,
668 op: &impl Projectable<'tcx, M::Provenance>,
669 ) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> {
670 if !#[allow(non_exhaustive_omitted_patterns)] match op.layout().backend_repr {
BackendRepr::Scalar(abi::Scalar::Initialized { .. }) |
BackendRepr::ScalarPair(abi::Scalar::Initialized { .. },
abi::Scalar::Initialized { .. }) => true,
_ => false,
}matches!(
671 op.layout().backend_repr,
672 BackendRepr::Scalar(abi::Scalar::Initialized { .. })
673 | BackendRepr::ScalarPair(
674 abi::Scalar::Initialized { .. },
675 abi::Scalar::Initialized { .. }
676 )
677 ) {
678 ::rustc_middle::util::bug::span_bug_fmt(self.cur_span(),
format_args!("primitive read not possible for type: {0}",
op.layout().ty));span_bug!(self.cur_span(), "primitive read not possible for type: {}", op.layout().ty);
679 }
680 let imm = self.read_immediate_raw(op)?.right().unwrap();
681 if #[allow(non_exhaustive_omitted_patterns)] match *imm {
Immediate::Uninit => true,
_ => false,
}matches!(*imm, Immediate::Uninit) {
682 do yeet ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::InvalidUninitBytes(None));throw_ub!(InvalidUninitBytes(None));
683 }
684 interp_ok(imm)
685 }
686
687 pub fn read_scalar(
689 &self,
690 op: &impl Projectable<'tcx, M::Provenance>,
691 ) -> InterpResult<'tcx, Scalar<M::Provenance>> {
692 interp_ok(self.read_immediate(op)?.to_scalar())
693 }
694
695 pub fn read_pointer(
700 &self,
701 op: &impl Projectable<'tcx, M::Provenance>,
702 ) -> InterpResult<'tcx, Pointer<Option<M::Provenance>>> {
703 self.read_scalar(op)?.to_pointer(self)
704 }
705 pub fn read_target_usize(
707 &self,
708 op: &impl Projectable<'tcx, M::Provenance>,
709 ) -> InterpResult<'tcx, u64> {
710 self.read_scalar(op)?.to_target_usize(self)
711 }
712 pub fn read_target_isize(
714 &self,
715 op: &impl Projectable<'tcx, M::Provenance>,
716 ) -> InterpResult<'tcx, i64> {
717 self.read_scalar(op)?.to_target_isize(self)
718 }
719
720 pub fn read_str(&self, mplace: &MPlaceTy<'tcx, M::Provenance>) -> InterpResult<'tcx, &str> {
722 let len = mplace.len(self)?;
723 let bytes = self.read_bytes_ptr_strip_provenance(mplace.ptr(), Size::from_bytes(len))?;
724 let s = std::str::from_utf8(bytes).map_err(|err| ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::InvalidStr(err))err_ub!(InvalidStr(err)))?;
725 interp_ok(s)
726 }
727
728 pub fn local_to_op(
730 &self,
731 local: mir::Local,
732 layout: Option<TyAndLayout<'tcx>>,
733 ) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>> {
734 self.local_at_frame_to_op(self.frame(), local, layout)
735 }
736
737 pub fn local_at_frame_to_op(
743 &self,
744 frame: &Frame<'tcx, M::Provenance, M::FrameExtra>,
745 local: mir::Local,
746 layout: Option<TyAndLayout<'tcx>>,
747 ) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>> {
748 let layout = self.layout_of_local(frame, local, layout)?;
749 let op = *frame.locals[local].access()?;
750 if #[allow(non_exhaustive_omitted_patterns)] match op {
Operand::Immediate(_) => true,
_ => false,
}matches!(op, Operand::Immediate(_)) {
751 if !!layout.is_unsized() {
::core::panicking::panic("assertion failed: !layout.is_unsized()")
};assert!(!layout.is_unsized());
752 }
753 M::after_local_read(self, frame, local)?;
754 interp_ok(OpTy { op, layout })
755 }
756
757 pub fn place_to_op(
761 &self,
762 place: &PlaceTy<'tcx, M::Provenance>,
763 ) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>> {
764 match place.as_mplace_or_local() {
765 Left(mplace) => interp_ok(mplace.into()),
766 Right((local, offset, locals_addr, _)) => {
767 if true {
if !place.layout.is_sized() {
::core::panicking::panic("assertion failed: place.layout.is_sized()")
};
};debug_assert!(place.layout.is_sized()); if true {
match (&locals_addr, &self.frame().locals_addr()) {
(left_val, right_val) => {
if !(*left_val == *right_val) {
let kind = ::core::panicking::AssertKind::Eq;
::core::panicking::assert_failed(kind, &*left_val,
&*right_val, ::core::option::Option::None);
}
}
};
};debug_assert_eq!(locals_addr, self.frame().locals_addr());
769 let base = self.local_to_op(local, None)?;
770 interp_ok(match offset {
771 Some(offset) => base.offset(offset, place.layout, self)?,
772 None => {
773 if true {
match (&place.layout, &base.layout) {
(left_val, right_val) => {
if !(*left_val == *right_val) {
let kind = ::core::panicking::AssertKind::Eq;
::core::panicking::assert_failed(kind, &*left_val,
&*right_val, ::core::option::Option::None);
}
}
};
};debug_assert_eq!(place.layout, base.layout);
775 base
776 }
777 })
778 }
779 }
780 }
781
782 pub fn eval_place_to_op(
785 &self,
786 mir_place: mir::Place<'tcx>,
787 layout: Option<TyAndLayout<'tcx>>,
788 ) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>> {
789 let _trace = <M as
crate::interpret::Machine>::enter_trace_span(||
{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("step",
"rustc_const_eval::interpret::operand",
::tracing::Level::INFO,
::tracing_core::__macro_support::Option::Some("compiler/rustc_const_eval/src/interpret/operand.rs"),
::tracing_core::__macro_support::Option::Some(789u32),
::tracing_core::__macro_support::Option::Some("rustc_const_eval::interpret::operand"),
::tracing_core::field::FieldSet::new(&["step", "mir_place",
"tracing_separate_thread"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::SPAN)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let mut interest = ::tracing::subscriber::Interest::never();
if ::tracing::Level::INFO <=
::tracing::level_filters::STATIC_MAX_LEVEL &&
::tracing::Level::INFO <=
::tracing::level_filters::LevelFilter::current() &&
{ interest = __CALLSITE.interest(); !interest.is_never() }
&&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest) {
let meta = __CALLSITE.metadata();
::tracing::Span::new(meta,
&{
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = meta.fields().iter();
meta.fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&display(&"eval_place_to_op")
as &dyn Value)),
(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&debug(&mir_place)
as &dyn Value)),
(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&Empty as
&dyn Value))])
})
} else {
let span =
::tracing::__macro_support::__disabled_span(__CALLSITE.metadata());
{};
span
}
})enter_trace_span!(
790 M,
791 step::eval_place_to_op,
792 ?mir_place,
793 tracing_separate_thread = Empty
794 );
795
796 let layout = if mir_place.projection.is_empty() { layout } else { None };
799
800 let mut op = self.local_to_op(mir_place.local, layout)?;
801 for elem in mir_place.projection.iter() {
803 op = self.project(&op, elem)?
804 }
805
806 {
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_const_eval/src/interpret/operand.rs:806",
"rustc_const_eval::interpret::operand",
::tracing::Level::TRACE,
::tracing_core::__macro_support::Option::Some("compiler/rustc_const_eval/src/interpret/operand.rs"),
::tracing_core::__macro_support::Option::Some(806u32),
::tracing_core::__macro_support::Option::Some("rustc_const_eval::interpret::operand"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::TRACE <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::TRACE <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("eval_place_to_op: got {0:?}",
op) as &dyn Value))])
});
} else { ; }
};trace!("eval_place_to_op: got {:?}", op);
807 if truecfg!(debug_assertions) {
809 let normalized_place_ty = self
810 .instantiate_from_current_frame_and_normalize_erasing_regions(
811 mir_place.ty(&self.frame().body.local_decls, *self.tcx).ty,
812 )?;
813 if !mir_assign_valid_types(
814 *self.tcx,
815 self.typing_env(),
816 self.layout_of(normalized_place_ty)?,
817 op.layout,
818 ) {
819 ::rustc_middle::util::bug::span_bug_fmt(self.cur_span(),
format_args!("eval_place of a MIR place with type {0} produced an interpreter operand with type {1}",
normalized_place_ty, op.layout.ty))span_bug!(
820 self.cur_span(),
821 "eval_place of a MIR place with type {} produced an interpreter operand with type {}",
822 normalized_place_ty,
823 op.layout.ty,
824 )
825 }
826 }
827 interp_ok(op)
828 }
829
830 #[inline]
834 pub fn eval_operand(
835 &self,
836 mir_op: &mir::Operand<'tcx>,
837 layout: Option<TyAndLayout<'tcx>>,
838 ) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>> {
839 let _trace =
840 <M as
crate::interpret::Machine>::enter_trace_span(||
{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("step",
"rustc_const_eval::interpret::operand",
::tracing::Level::INFO,
::tracing_core::__macro_support::Option::Some("compiler/rustc_const_eval/src/interpret/operand.rs"),
::tracing_core::__macro_support::Option::Some(840u32),
::tracing_core::__macro_support::Option::Some("rustc_const_eval::interpret::operand"),
::tracing_core::field::FieldSet::new(&["step", "mir_op",
"tracing_separate_thread"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::SPAN)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let mut interest = ::tracing::subscriber::Interest::never();
if ::tracing::Level::INFO <=
::tracing::level_filters::STATIC_MAX_LEVEL &&
::tracing::Level::INFO <=
::tracing::level_filters::LevelFilter::current() &&
{ interest = __CALLSITE.interest(); !interest.is_never() }
&&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest) {
let meta = __CALLSITE.metadata();
::tracing::Span::new(meta,
&{
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = meta.fields().iter();
meta.fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&display(&"eval_operand")
as &dyn Value)),
(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&debug(&mir_op) as
&dyn Value)),
(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&Empty as
&dyn Value))])
})
} else {
let span =
::tracing::__macro_support::__disabled_span(__CALLSITE.metadata());
{};
span
}
})enter_trace_span!(M, step::eval_operand, ?mir_op, tracing_separate_thread = Empty);
841
842 use rustc_middle::mir::Operand::*;
843 let op = match mir_op {
844 &Copy(place) | &Move(place) => self.eval_place_to_op(place, layout)?,
846
847 &RuntimeChecks(checks) => {
848 let val = M::runtime_checks(self, checks)?;
849 ImmTy::from_bool(val, self.tcx()).into()
850 }
851
852 Constant(constant) => {
853 let c = self.instantiate_from_current_frame_and_normalize_erasing_regions(
854 constant.const_,
855 )?;
856
857 self.eval_mir_constant(&c, constant.span, layout)?
862 }
863 };
864 {
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_const_eval/src/interpret/operand.rs:864",
"rustc_const_eval::interpret::operand",
::tracing::Level::TRACE,
::tracing_core::__macro_support::Option::Some("compiler/rustc_const_eval/src/interpret/operand.rs"),
::tracing_core::__macro_support::Option::Some(864u32),
::tracing_core::__macro_support::Option::Some("rustc_const_eval::interpret::operand"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::TRACE <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::TRACE <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("{0:?}: {1:?}",
mir_op, op) as &dyn Value))])
});
} else { ; }
};trace!("{:?}: {:?}", mir_op, op);
865 interp_ok(op)
866 }
867
868 pub(crate) fn const_val_to_op(
869 &self,
870 val_val: mir::ConstValue,
871 ty: Ty<'tcx>,
872 layout: Option<TyAndLayout<'tcx>>,
873 ) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>> {
874 let adjust_scalar = |scalar| -> InterpResult<'tcx, _> {
876 interp_ok(match scalar {
877 Scalar::Ptr(ptr, size) => Scalar::Ptr(self.global_root_pointer(ptr)?, size),
878 Scalar::Int(int) => Scalar::Int(int),
879 })
880 };
881 let layout =
882 from_known_layout(self.tcx, self.typing_env(), layout, || self.layout_of(ty).into())?;
883 let imm = match val_val {
884 mir::ConstValue::Indirect { alloc_id, offset } => {
885 let ptr = self.global_root_pointer(Pointer::new(
887 CtfeProvenance::from(alloc_id).as_immutable(),
888 offset,
889 ))?;
890 return interp_ok(self.ptr_to_mplace(ptr.into(), layout).into());
891 }
892 mir::ConstValue::Scalar(x) => adjust_scalar(x)?.into(),
893 mir::ConstValue::ZeroSized => Immediate::Uninit,
894 mir::ConstValue::Slice { alloc_id, meta } => {
895 let ptr = Pointer::new(CtfeProvenance::from(alloc_id).as_immutable(), Size::ZERO);
897 Immediate::new_slice(self.global_root_pointer(ptr)?.into(), meta, self)
898 }
899 };
900 interp_ok(OpTy { op: Operand::Immediate(imm), layout })
901 }
902}
903
904#[cfg(target_pointer_width = "64")]
906mod size_asserts {
907 use rustc_data_structures::static_assert_size;
908
909 use super::*;
910 const _: [(); 64] = [(); ::std::mem::size_of::<ImmTy<'_>>()];static_assert_size!(ImmTy<'_>, 64);
912 const _: [(); 48] = [(); ::std::mem::size_of::<Immediate>()];static_assert_size!(Immediate, 48);
913 const _: [(); 72] = [(); ::std::mem::size_of::<OpTy<'_>>()];static_assert_size!(OpTy<'_>, 72);
914 const _: [(); 56] = [(); ::std::mem::size_of::<Operand>()];static_assert_size!(Operand, 56);
915 }