stable_mir/mir/body.rs

use std::io;

use serde::Serialize;

use crate::compiler_interface::with;
use crate::mir::pretty::function_body;
use crate::ty::{
    AdtDef, ClosureDef, CoroutineClosureDef, CoroutineDef, GenericArgs, MirConst, Movability,
    Region, RigidTy, Ty, TyConst, TyKind, VariantIdx,
};
use crate::{Error, Opaque, Span, Symbol};

/// The SMIR representation of a single function.
#[derive(Clone, Debug, Serialize)]
pub struct Body {
    pub blocks: Vec<BasicBlock>,

    /// Declarations of locals within the function.
    ///
    /// The first local is the return value pointer, followed by `arg_count`
    /// locals for the function arguments, followed by any user-declared
    /// variables and temporaries.
    pub(super) locals: LocalDecls,

    /// The number of arguments this function takes.
    pub(super) arg_count: usize,

    /// Debug information pertaining to user variables, including captures.
    pub var_debug_info: Vec<VarDebugInfo>,

    /// Mark an argument (which must be a tuple) as getting passed as its individual components.
    ///
    /// This is used for the "rust-call" ABI, e.g. for closures.
    pub(super) spread_arg: Option<Local>,

    /// The span that covers the entire function body.
    pub span: Span,
}

pub type BasicBlockIdx = usize;

impl Body {
    /// Constructs a `Body`.
    ///
    /// A constructor is required to build a `Body` from outside the crate
    /// because the `arg_count` and `locals` fields are private.
    pub fn new(
        blocks: Vec<BasicBlock>,
        locals: LocalDecls,
        arg_count: usize,
        var_debug_info: Vec<VarDebugInfo>,
        spread_arg: Option<Local>,
        span: Span,
    ) -> Self {
        // If locals doesn't contain enough entries, it can lead to panics in
        // `ret_local`, `arg_locals`, and `inner_locals`.
        assert!(
            locals.len() > arg_count,
            "A Body must contain at least a local for the return value and each of the function's arguments"
        );
        Self { blocks, locals, arg_count, var_debug_info, spread_arg, span }
    }

    /// Return the local that holds this function's return value.
    pub fn ret_local(&self) -> &LocalDecl {
        &self.locals[RETURN_LOCAL]
    }

    /// Locals in `self` that correspond to this function's arguments.
    pub fn arg_locals(&self) -> &[LocalDecl] {
        &self.locals[1..][..self.arg_count]
    }

    /// Inner locals for this function. These are the locals that are
    /// neither the return local nor the argument locals.
    pub fn inner_locals(&self) -> &[LocalDecl] {
        &self.locals[self.arg_count + 1..]
    }

    /// Returns a mutable reference to the local that holds this function's return value.
    pub(crate) fn ret_local_mut(&mut self) -> &mut LocalDecl {
        &mut self.locals[RETURN_LOCAL]
    }

    /// Returns a mutable slice of locals corresponding to this function's arguments.
    pub(crate) fn arg_locals_mut(&mut self) -> &mut [LocalDecl] {
        &mut self.locals[1..][..self.arg_count]
    }

    /// Returns a mutable slice of inner locals for this function.
    /// Inner locals are those that are neither the return local nor the argument locals.
    pub(crate) fn inner_locals_mut(&mut self) -> &mut [LocalDecl] {
        &mut self.locals[self.arg_count + 1..]
    }

    /// Convenience function to get all the locals in this function.
    ///
    /// Locals are typically accessed via the more specific methods `ret_local`,
    /// `arg_locals`, and `inner_locals`.
    pub fn locals(&self) -> &[LocalDecl] {
        &self.locals
    }

    /// Get the local declaration for this local.
    pub fn local_decl(&self, local: Local) -> Option<&LocalDecl> {
        self.locals.get(local)
    }

    /// Get an iterator for all local declarations.
    pub fn local_decls(&self) -> impl Iterator<Item = (Local, &LocalDecl)> {
        self.locals.iter().enumerate()
    }

    /// Emit the body using the provided name for the signature.
    pub fn dump<W: io::Write>(&self, w: &mut W, fn_name: &str) -> io::Result<()> {
        function_body(w, self, fn_name)
    }

    pub fn spread_arg(&self) -> Option<Local> {
        self.spread_arg
    }
}
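
// A minimal sketch (not part of the upstream API) of how the accessors above decompose
// `locals()`: the return local comes first, then `arg_count` argument locals, then the
// inner locals. It assumes a `Body` obtained elsewhere, e.g. through the compiler interface.
#[allow(dead_code)]
fn locals_layout(body: &Body) -> (usize, usize) {
    // locals() == [return local] ++ arg_locals() ++ inner_locals()
    debug_assert_eq!(
        body.locals().len(),
        1 + body.arg_locals().len() + body.inner_locals().len()
    );
    (body.arg_locals().len(), body.inner_locals().len())
}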

type LocalDecls = Vec<LocalDecl>;

#[derive(Clone, Debug, Eq, PartialEq, Serialize)]
pub struct LocalDecl {
    pub ty: Ty,
    pub span: Span,
    pub mutability: Mutability,
}

#[derive(Clone, PartialEq, Eq, Debug, Serialize)]
pub struct BasicBlock {
    pub statements: Vec<Statement>,
    pub terminator: Terminator,
}

#[derive(Clone, Debug, Eq, PartialEq, Serialize)]
pub struct Terminator {
    pub kind: TerminatorKind,
    pub span: Span,
}

impl Terminator {
    pub fn successors(&self) -> Successors {
        self.kind.successors()
    }
}

pub type Successors = Vec<BasicBlockIdx>;

#[derive(Clone, Debug, Eq, PartialEq, Serialize)]
pub enum TerminatorKind {
    Goto {
        target: BasicBlockIdx,
    },
    SwitchInt {
        discr: Operand,
        targets: SwitchTargets,
    },
    Resume,
    Abort,
    Return,
    Unreachable,
    Drop {
        place: Place,
        target: BasicBlockIdx,
        unwind: UnwindAction,
    },
    Call {
        func: Operand,
        args: Vec<Operand>,
        destination: Place,
        target: Option<BasicBlockIdx>,
        unwind: UnwindAction,
    },
    Assert {
        cond: Operand,
        expected: bool,
        msg: AssertMessage,
        target: BasicBlockIdx,
        unwind: UnwindAction,
    },
    InlineAsm {
        template: String,
        operands: Vec<InlineAsmOperand>,
        options: String,
        line_spans: String,
        destination: Option<BasicBlockIdx>,
        unwind: UnwindAction,
    },
}

impl TerminatorKind {
    pub fn successors(&self) -> Successors {
        use self::TerminatorKind::*;
        match *self {
            Call { target: Some(t), unwind: UnwindAction::Cleanup(u), .. }
            | Drop { target: t, unwind: UnwindAction::Cleanup(u), .. }
            | Assert { target: t, unwind: UnwindAction::Cleanup(u), .. }
            | InlineAsm { destination: Some(t), unwind: UnwindAction::Cleanup(u), .. } => {
                vec![t, u]
            }
            Goto { target: t }
            | Call { target: None, unwind: UnwindAction::Cleanup(t), .. }
            | Call { target: Some(t), unwind: _, .. }
            | Drop { target: t, unwind: _, .. }
            | Assert { target: t, unwind: _, .. }
            | InlineAsm { destination: None, unwind: UnwindAction::Cleanup(t), .. }
            | InlineAsm { destination: Some(t), unwind: _, .. } => {
                vec![t]
            }

            Return
            | Resume
            | Abort
            | Unreachable
            | Call { target: None, unwind: _, .. }
            | InlineAsm { destination: None, unwind: _, .. } => {
                vec![]
            }
            SwitchInt { ref targets, .. } => targets.all_targets(),
        }
    }

    pub fn unwind(&self) -> Option<&UnwindAction> {
        match *self {
            TerminatorKind::Goto { .. }
            | TerminatorKind::Return
            | TerminatorKind::Unreachable
            | TerminatorKind::Resume
            | TerminatorKind::Abort
            | TerminatorKind::SwitchInt { .. } => None,
            TerminatorKind::Call { ref unwind, .. }
            | TerminatorKind::Assert { ref unwind, .. }
            | TerminatorKind::Drop { ref unwind, .. }
            | TerminatorKind::InlineAsm { ref unwind, .. } => Some(unwind),
        }
    }
}
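
// A minimal sketch of `successors` and `unwind` on terminators that are cheap to build by
// hand. The module is illustrative and not part of the upstream API; block indices are
// arbitrary.
#[cfg(test)]
mod terminator_examples {
    use super::*;

    #[test]
    fn goto_and_drop_successors() {
        // A plain goto has exactly one successor and no unwind action.
        let goto = TerminatorKind::Goto { target: 7 };
        assert_eq!(goto.successors(), vec![7]);
        assert!(goto.unwind().is_none());

        // A drop with a cleanup block reports both the regular target and the cleanup block.
        let drop_term = TerminatorKind::Drop {
            place: Place::from(0),
            target: 1,
            unwind: UnwindAction::Cleanup(2),
        };
        assert_eq!(drop_term.successors(), vec![1, 2]);
        assert_eq!(drop_term.unwind(), Some(&UnwindAction::Cleanup(2)));
    }
}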

#[derive(Clone, Debug, Eq, PartialEq, Serialize)]
pub struct InlineAsmOperand {
    pub in_value: Option<Operand>,
    pub out_place: Option<Place>,
    // This field has a raw debug representation of MIR's InlineAsmOperand.
    // For now we care about place/operand + the rest in a debug format.
    pub raw_rpr: String,
}

#[derive(Copy, Clone, Debug, Eq, PartialEq, Serialize)]
pub enum UnwindAction {
    Continue,
    Unreachable,
    Terminate,
    Cleanup(BasicBlockIdx),
}

#[derive(Clone, Debug, Eq, PartialEq, Serialize)]
pub enum AssertMessage {
    BoundsCheck { len: Operand, index: Operand },
    Overflow(BinOp, Operand, Operand),
    OverflowNeg(Operand),
    DivisionByZero(Operand),
    RemainderByZero(Operand),
    ResumedAfterReturn(CoroutineKind),
    ResumedAfterPanic(CoroutineKind),
    MisalignedPointerDereference { required: Operand, found: Operand },
    NullPointerDereference,
}

impl AssertMessage {
    pub fn description(&self) -> Result<&'static str, Error> {
        match self {
            AssertMessage::Overflow(BinOp::Add, _, _) => Ok("attempt to add with overflow"),
            AssertMessage::Overflow(BinOp::Sub, _, _) => Ok("attempt to subtract with overflow"),
            AssertMessage::Overflow(BinOp::Mul, _, _) => Ok("attempt to multiply with overflow"),
            AssertMessage::Overflow(BinOp::Div, _, _) => Ok("attempt to divide with overflow"),
            AssertMessage::Overflow(BinOp::Rem, _, _) => {
                Ok("attempt to calculate the remainder with overflow")
            }
            AssertMessage::OverflowNeg(_) => Ok("attempt to negate with overflow"),
            AssertMessage::Overflow(BinOp::Shr, _, _) => Ok("attempt to shift right with overflow"),
            AssertMessage::Overflow(BinOp::Shl, _, _) => Ok("attempt to shift left with overflow"),
            AssertMessage::Overflow(op, _, _) => Err(error!("`{:?}` cannot overflow", op)),
            AssertMessage::DivisionByZero(_) => Ok("attempt to divide by zero"),
            AssertMessage::RemainderByZero(_) => {
                Ok("attempt to calculate the remainder with a divisor of zero")
            }
            AssertMessage::ResumedAfterReturn(CoroutineKind::Coroutine(_)) => {
                Ok("coroutine resumed after completion")
            }
            AssertMessage::ResumedAfterReturn(CoroutineKind::Desugared(
                CoroutineDesugaring::Async,
                _,
            )) => Ok("`async fn` resumed after completion"),
            AssertMessage::ResumedAfterReturn(CoroutineKind::Desugared(
                CoroutineDesugaring::Gen,
                _,
            )) => Ok("`gen fn` should just keep returning `None` after completion"),
            AssertMessage::ResumedAfterReturn(CoroutineKind::Desugared(
                CoroutineDesugaring::AsyncGen,
                _,
            )) => Ok("`async gen fn` resumed after completion"),
            AssertMessage::ResumedAfterPanic(CoroutineKind::Coroutine(_)) => {
                Ok("coroutine resumed after panicking")
            }
            AssertMessage::ResumedAfterPanic(CoroutineKind::Desugared(
                CoroutineDesugaring::Async,
                _,
            )) => Ok("`async fn` resumed after panicking"),
            AssertMessage::ResumedAfterPanic(CoroutineKind::Desugared(
                CoroutineDesugaring::Gen,
                _,
            )) => Ok("`gen fn` should just keep returning `None` after panicking"),
            AssertMessage::ResumedAfterPanic(CoroutineKind::Desugared(
                CoroutineDesugaring::AsyncGen,
                _,
            )) => Ok("`async gen fn` resumed after panicking"),

            AssertMessage::BoundsCheck { .. } => Ok("index out of bounds"),
            AssertMessage::MisalignedPointerDereference { .. } => {
                Ok("misaligned pointer dereference")
            }
            AssertMessage::NullPointerDereference => Ok("null pointer dereference occurred"),
        }
    }
}
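
// A minimal sketch of `description` on a few easily constructed messages; `Place::from`
// and `Operand::Copy` are only used to fill in the required operands. Illustrative, not
// part of the upstream API.
#[cfg(test)]
mod assert_message_examples {
    use super::*;

    #[test]
    fn descriptions() {
        let div = AssertMessage::DivisionByZero(Operand::Copy(Place::from(1)));
        assert!(matches!(div.description(), Ok("attempt to divide by zero")));

        let null = AssertMessage::NullPointerDereference;
        assert!(matches!(null.description(), Ok("null pointer dereference occurred")));
    }
}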

#[derive(Copy, Clone, Debug, Eq, PartialEq, Serialize)]
pub enum BinOp {
    Add,
    AddUnchecked,
    Sub,
    SubUnchecked,
    Mul,
    MulUnchecked,
    Div,
    Rem,
    BitXor,
    BitAnd,
    BitOr,
    Shl,
    ShlUnchecked,
    Shr,
    ShrUnchecked,
    Eq,
    Lt,
    Le,
    Ne,
    Ge,
    Gt,
    Cmp,
    Offset,
}

impl BinOp {
    /// Return the type of this operation for the given input Ty.
    /// This function does not perform type checking, and it currently doesn't handle SIMD.
    pub fn ty(&self, lhs_ty: Ty, rhs_ty: Ty) -> Ty {
        with(|ctx| ctx.binop_ty(*self, lhs_ty, rhs_ty))
    }
}

#[derive(Copy, Clone, Debug, Eq, PartialEq, Serialize)]
pub enum UnOp {
    Not,
    Neg,
    PtrMetadata,
}

impl UnOp {
    /// Return the type of this operation for the given input Ty.
    /// This function does not perform type checking, and it currently doesn't handle SIMD.
    pub fn ty(&self, arg_ty: Ty) -> Ty {
        with(|ctx| ctx.unop_ty(*self, arg_ty))
    }
}

#[derive(Clone, Debug, Eq, PartialEq, Serialize)]
pub enum CoroutineKind {
    Desugared(CoroutineDesugaring, CoroutineSource),
    Coroutine(Movability),
}

#[derive(Copy, Clone, Debug, Eq, PartialEq, Serialize)]
pub enum CoroutineSource {
    Block,
    Closure,
    Fn,
}

#[derive(Copy, Clone, Debug, Eq, PartialEq, Serialize)]
pub enum CoroutineDesugaring {
    Async,

    Gen,

    AsyncGen,
}

pub(crate) type LocalDefId = Opaque;
/// The rustc coverage data structures are heavily tied to internal details of the
/// coverage implementation that are likely to change, and are unlikely to be
/// useful to third-party tools for the foreseeable future.
pub(crate) type Coverage = Opaque;

/// The `FakeReadCause` describes the kind of pattern that led to a `FakeRead` statement.
#[derive(Clone, Debug, Eq, PartialEq, Serialize)]
pub enum FakeReadCause {
    ForMatchGuard,
    ForMatchedPlace(LocalDefId),
    ForGuardBinding,
    ForLet(LocalDefId),
    ForIndex,
}

/// Describes what kind of retag is to be performed
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, Serialize)]
pub enum RetagKind {
    FnEntry,
    TwoPhase,
    Raw,
    Default,
}

#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, Serialize)]
pub enum Variance {
    Covariant,
    Invariant,
    Contravariant,
    Bivariant,
}

#[derive(Clone, Debug, Eq, PartialEq, Serialize)]
pub struct CopyNonOverlapping {
    pub src: Operand,
    pub dst: Operand,
    pub count: Operand,
}

#[derive(Clone, Debug, Eq, PartialEq, Serialize)]
pub enum NonDivergingIntrinsic {
    Assume(Operand),
    CopyNonOverlapping(CopyNonOverlapping),
}

#[derive(Clone, Debug, Eq, PartialEq, Serialize)]
pub struct Statement {
    pub kind: StatementKind,
    pub span: Span,
}

#[derive(Clone, Debug, Eq, PartialEq, Serialize)]
pub enum StatementKind {
    Assign(Place, Rvalue),
    FakeRead(FakeReadCause, Place),
    SetDiscriminant { place: Place, variant_index: VariantIdx },
    Deinit(Place),
    StorageLive(Local),
    StorageDead(Local),
    Retag(RetagKind, Place),
    PlaceMention(Place),
    AscribeUserType { place: Place, projections: UserTypeProjection, variance: Variance },
    Coverage(Coverage),
    Intrinsic(NonDivergingIntrinsic),
    ConstEvalCounter,
    Nop,
}

#[derive(Clone, Debug, Eq, PartialEq, Serialize)]
pub enum Rvalue {
    /// Creates a pointer with the indicated mutability to the place.
    ///
    /// This is generated by pointer casts like `&v as *const _` or raw borrow expressions
    /// like `&raw const v` or `addr_of!(v)`.
    AddressOf(RawPtrKind, Place),

    /// Creates an aggregate value, like a tuple or struct.
    ///
    /// This is needed because dataflow analysis needs to distinguish
    /// `dest = Foo { x: ..., y: ... }` from `dest.x = ...; dest.y = ...;` in the case that `Foo`
    /// has a destructor.
    ///
    /// Disallowed after deaggregation for all aggregate kinds except `Array` and `Coroutine`. After
    /// coroutine lowering, `Coroutine` aggregate kinds are disallowed too.
    Aggregate(AggregateKind, Vec<Operand>),

    /// * `Offset` has the same semantics as `<*const T>::offset`, except that the second
    ///   parameter may be a `usize` as well.
    /// * The comparison operations accept `bool`s, `char`s, signed or unsigned integers, floats,
    ///   raw pointers, or function pointers and return a `bool`. The types of the operands must be
    ///   matching, up to the usual caveat of the lifetimes in function pointers.
    /// * Left and right shift operations accept signed or unsigned integers not necessarily of the
    ///   same type and return a value of the same type as their LHS. Like in Rust, the RHS is
    ///   truncated as needed.
    /// * The `Bit*` operations accept signed integers, unsigned integers, or bools with matching
    ///   types and return a value of that type.
    /// * The remaining operations accept signed integers, unsigned integers, or floats with
    ///   matching types and return a value of that type.
    BinaryOp(BinOp, Operand, Operand),

    /// Performs essentially all of the casts that can be performed via `as`.
    ///
    /// This allows for casts from/to a variety of types.
    Cast(CastKind, Operand, Ty),

    /// Same as `BinaryOp`, but yields `(T, bool)` with a `bool` indicating an error condition.
    ///
    /// For addition, subtraction, and multiplication on integers the error condition is set when
    /// the infinite precision result would not be equal to the actual result.
    CheckedBinaryOp(BinOp, Operand, Operand),

    /// A CopyForDeref is equivalent to a read from a place.
    /// When such a read happens, it is guaranteed that the only use of the returned value is a
    /// deref operation, immediately followed by one or more projections.
    CopyForDeref(Place),

    /// Computes the discriminant of the place, returning it as an integer.
    /// Returns zero for types without discriminant.
    ///
    /// The validity requirements for the underlying value are undecided for this rvalue, see
    /// [#91095]. Note too that the value of the discriminant is not the same thing as the
    /// variant index.
    ///
    /// [#91095]: https://github.com/rust-lang/rust/issues/91095
    Discriminant(Place),

    /// Yields the length of the place, as a `usize`.
    ///
    /// If the type of the place is an array, this is the array length. For slices (`[T]`, not
    /// `&[T]`) this accesses the place's metadata to determine the length. This rvalue is
    /// ill-formed for places of other types.
    Len(Place),

    /// Creates a reference to the place.
    Ref(Region, BorrowKind, Place),

    /// Creates an array where each element is the value of the operand.
    ///
    /// This is the cause of a bug in the case where the repetition count is zero because the value
    /// is not dropped, see [#74836].
    ///
    /// Corresponds to source code like `[x; 32]`.
    ///
    /// [#74836]: https://github.com/rust-lang/rust/issues/74836
    Repeat(Operand, TyConst),

    /// Transmutes a `*mut u8` into shallow-initialized `Box<T>`.
    ///
    /// This is different from a normal transmute because dataflow analysis will treat the box as
    /// initialized but its content as uninitialized. Like other pointer casts, this in general
    /// affects alias analysis.
    ShallowInitBox(Operand, Ty),

    /// Creates a pointer/reference to the given thread local.
    ///
    /// The yielded type is `*mut T` if the static is mutable, `*const T` if the static is
    /// extern, and `&T` otherwise.
    ///
    /// **Note:** This is a runtime operation that actually executes code and is in this sense more
    /// like a function call. Also, eliminating dead stores of this rvalue causes `fn main() {}` to
    /// SIGILL for some reason that I (JakobDegen) never got a chance to look into.
    ///
    /// **Needs clarification**: Are there weird additional semantics here related to the runtime
    /// nature of this operation?
    ThreadLocalRef(crate::CrateItem),

    /// Computes a value as described by the operation.
    NullaryOp(NullOp, Ty),

    /// Exactly like `BinaryOp`, but with fewer operands.
    ///
    /// Also does two's-complement arithmetic. Negation requires a signed integer or a float;
    /// bitwise not requires a signed integer, unsigned integer, or bool. Both operation kinds
    /// return a value with the same type as their operand.
    UnaryOp(UnOp, Operand),

    /// Yields the operand unchanged.
    Use(Operand),
}

impl Rvalue {
    pub fn ty(&self, locals: &[LocalDecl]) -> Result<Ty, Error> {
        match self {
            Rvalue::Use(operand) => operand.ty(locals),
            Rvalue::Repeat(operand, count) => {
                Ok(Ty::new_array_with_const_len(operand.ty(locals)?, count.clone()))
            }
            Rvalue::ThreadLocalRef(did) => Ok(did.ty()),
            Rvalue::Ref(reg, bk, place) => {
                let place_ty = place.ty(locals)?;
                Ok(Ty::new_ref(reg.clone(), place_ty, bk.to_mutable_lossy()))
            }
            Rvalue::AddressOf(mutability, place) => {
                let place_ty = place.ty(locals)?;
                Ok(Ty::new_ptr(place_ty, mutability.to_mutable_lossy()))
            }
            Rvalue::Len(..) => Ok(Ty::usize_ty()),
            Rvalue::Cast(.., ty) => Ok(*ty),
            Rvalue::BinaryOp(op, lhs, rhs) => {
                let lhs_ty = lhs.ty(locals)?;
                let rhs_ty = rhs.ty(locals)?;
                Ok(op.ty(lhs_ty, rhs_ty))
            }
            Rvalue::CheckedBinaryOp(op, lhs, rhs) => {
                let lhs_ty = lhs.ty(locals)?;
                let rhs_ty = rhs.ty(locals)?;
                let ty = op.ty(lhs_ty, rhs_ty);
                Ok(Ty::new_tuple(&[ty, Ty::bool_ty()]))
            }
            Rvalue::UnaryOp(op, operand) => {
                let arg_ty = operand.ty(locals)?;
                Ok(op.ty(arg_ty))
            }
            Rvalue::Discriminant(place) => {
                let place_ty = place.ty(locals)?;
                place_ty
                    .kind()
                    .discriminant_ty()
                    .ok_or_else(|| error!("Expected a `RigidTy` but found: {place_ty:?}"))
            }
            Rvalue::NullaryOp(NullOp::SizeOf | NullOp::AlignOf | NullOp::OffsetOf(..), _) => {
                Ok(Ty::usize_ty())
            }
            Rvalue::NullaryOp(NullOp::ContractChecks, _)
            | Rvalue::NullaryOp(NullOp::UbChecks, _) => Ok(Ty::bool_ty()),
            Rvalue::Aggregate(ak, ops) => match *ak {
                AggregateKind::Array(ty) => Ty::try_new_array(ty, ops.len() as u64),
                AggregateKind::Tuple => Ok(Ty::new_tuple(
                    &ops.iter().map(|op| op.ty(locals)).collect::<Result<Vec<_>, _>>()?,
                )),
                AggregateKind::Adt(def, _, ref args, _, _) => Ok(def.ty_with_args(args)),
                AggregateKind::Closure(def, ref args) => Ok(Ty::new_closure(def, args.clone())),
                AggregateKind::Coroutine(def, ref args, mov) => {
                    Ok(Ty::new_coroutine(def, args.clone(), mov))
                }
                AggregateKind::CoroutineClosure(def, ref args) => {
                    Ok(Ty::new_coroutine_closure(def, args.clone()))
                }
                AggregateKind::RawPtr(ty, mutability) => Ok(Ty::new_ptr(ty, mutability)),
            },
            Rvalue::ShallowInitBox(_, ty) => Ok(Ty::new_box(*ty)),
            Rvalue::CopyForDeref(place) => place.ty(locals),
        }
    }
}
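
// A minimal sketch of how `Rvalue::ty` is typically driven: walk a body's statements and
// type every assigned right-hand side against that same body's locals. The helper name is
// illustrative and not part of the upstream API.
#[allow(dead_code)]
fn assigned_rvalue_tys(body: &Body) -> Vec<Result<Ty, Error>> {
    body.blocks
        .iter()
        .flat_map(|bb| bb.statements.iter())
        .filter_map(|stmt| match &stmt.kind {
            StatementKind::Assign(_, rvalue) => Some(rvalue.ty(body.locals())),
            _ => None,
        })
        .collect()
}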

#[derive(Clone, Debug, Eq, PartialEq, Serialize)]
pub enum AggregateKind {
    Array(Ty),
    Tuple,
    Adt(AdtDef, VariantIdx, GenericArgs, Option<UserTypeAnnotationIndex>, Option<FieldIdx>),
    Closure(ClosureDef, GenericArgs),
    // FIXME(stable_mir): Movability here is redundant
    Coroutine(CoroutineDef, GenericArgs, Movability),
    CoroutineClosure(CoroutineClosureDef, GenericArgs),
    RawPtr(Ty, Mutability),
}

#[derive(Clone, Debug, Eq, PartialEq, Serialize)]
pub enum Operand {
    Copy(Place),
    Move(Place),
    Constant(ConstOperand),
}

#[derive(Clone, Eq, PartialEq, Serialize)]
pub struct Place {
    pub local: Local,
    /// Projection out of a place (access a field, deref a pointer, etc.).
    pub projection: Vec<ProjectionElem>,
}

impl From<Local> for Place {
    fn from(local: Local) -> Self {
        Place { local, projection: vec![] }
    }
}

#[derive(Clone, Debug, Eq, PartialEq, Serialize)]
pub struct ConstOperand {
    pub span: Span,
    pub user_ty: Option<UserTypeAnnotationIndex>,
    pub const_: MirConst,
}

/// Debug information pertaining to a user variable.
#[derive(Clone, Debug, Eq, PartialEq, Serialize)]
pub struct VarDebugInfo {
    /// The variable name.
    pub name: Symbol,

    /// Source info of the user variable, including the scope
    /// within which the variable is visible (to debuginfo).
    pub source_info: SourceInfo,

    /// The user variable's data is split across several fragments,
    /// each described by a `VarDebugInfoFragment`.
    pub composite: Option<VarDebugInfoFragment>,

    /// Where the data for this user variable is to be found.
    pub value: VarDebugInfoContents,

    /// When present, indicates what argument number this variable is in the function that it
    /// originated from (starting from 1). Note, if MIR inlining is enabled, then this is the
    /// argument number in the original function before it was inlined.
    pub argument_index: Option<u16>,
}

impl VarDebugInfo {
    /// Return a local variable if this info is related to one.
    pub fn local(&self) -> Option<Local> {
        match &self.value {
            VarDebugInfoContents::Place(place) if place.projection.is_empty() => Some(place.local),
            VarDebugInfoContents::Place(_) | VarDebugInfoContents::Const(_) => None,
        }
    }

    /// Return a constant if this info is related to one.
    pub fn constant(&self) -> Option<&ConstOperand> {
        match &self.value {
            VarDebugInfoContents::Place(_) => None,
            VarDebugInfoContents::Const(const_op) => Some(const_op),
        }
    }
}
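
// A minimal sketch of using the debug info above: map a user variable name back to the
// local that backs it, if any. It assumes `Symbol` compares by value (it is a string alias
// in this crate); the helper is illustrative and not part of the upstream API.
#[allow(dead_code)]
fn local_for_variable(body: &Body, name: &Symbol) -> Option<Local> {
    body.var_debug_info.iter().find(|info| &info.name == name).and_then(VarDebugInfo::local)
}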

pub type SourceScope = u32;

#[derive(Clone, Debug, Eq, PartialEq, Serialize)]
pub struct SourceInfo {
    pub span: Span,
    pub scope: SourceScope,
}

#[derive(Clone, Debug, Eq, PartialEq, Serialize)]
pub struct VarDebugInfoFragment {
    pub ty: Ty,
    pub projection: Vec<ProjectionElem>,
}

#[derive(Clone, Debug, Eq, PartialEq, Serialize)]
pub enum VarDebugInfoContents {
    Place(Place),
    Const(ConstOperand),
}

// In MIR ProjectionElem is parameterized on the second Field argument and the Index argument. This
// is so it can be used for both Places (for which the projection elements are of type
// ProjectionElem<Local, Ty>) and user-provided type annotations (for which the projection elements
// are of type ProjectionElem<(), ()>). In SMIR we don't need this generality, so we just use
// ProjectionElem for Places.
#[derive(Clone, Debug, Eq, PartialEq, Serialize)]
pub enum ProjectionElem {
    /// Dereference projections (e.g. `*_1`) project to the address referenced by the base place.
    Deref,

    /// A field projection (e.g., `f` in `_1.f`) projects to a field in the base place. The field
    /// is referenced by its source-order index rather than by name. The field's type is also
    /// given.
    Field(FieldIdx, Ty),

    /// Index into a slice/array. The value of the index is computed at runtime from the
    /// given local.
    ///
    /// Note that this does not also dereference, and so it does not exactly correspond to slice
    /// indexing in Rust. In other words, in the below Rust code:
    ///
    /// ```rust
    /// let x = &[1, 2, 3, 4];
    /// let i = 2;
    /// x[i];
    /// ```
    ///
    /// The `x[i]` is turned into a `Deref` followed by an `Index`, not just an `Index`. The same
    /// thing is true of the `ConstantIndex` and `Subslice` projections below.
    Index(Local),

    /// Index into a slice/array given by offsets.
    ///
    /// These indices are generated by slice patterns. Easiest to explain by example:
    ///
    /// ```ignore (illustrative)
    /// [X, _, .._, _, _] => { offset: 0, min_length: 4, from_end: false },
    /// [_, X, .._, _, _] => { offset: 1, min_length: 4, from_end: false },
    /// [_, _, .._, X, _] => { offset: 2, min_length: 4, from_end: true },
    /// [_, _, .._, _, X] => { offset: 1, min_length: 4, from_end: true },
    /// ```
    ConstantIndex {
        /// index or -index (in Python terms), depending on from_end
        offset: u64,
        /// The thing being indexed must be at least this long -- otherwise, the
        /// projection is UB.
        ///
        /// For arrays this is always the exact length.
        min_length: u64,
        /// Counting backwards from end? This is always false when indexing an
        /// array.
        from_end: bool,
    },

    /// Projects a slice from the base place.
    ///
    /// These indices are generated by slice patterns. If `from_end` is true, this represents
    /// `slice[from..slice.len() - to]`. Otherwise it represents `array[from..to]`.
    Subslice {
        from: u64,
        to: u64,
        /// Whether `to` counts from the start or end of the array/slice.
        from_end: bool,
    },

    /// "Downcast" to a variant of an enum or a coroutine.
    Downcast(VariantIdx),

    /// Like an explicit cast from an opaque type to a concrete type, but without
    /// requiring an intermediate variable.
    OpaqueCast(Ty),

    /// A `Subtype(T)` projection is applied to any `StatementKind::Assign` where the type of the
    /// lvalue doesn't match the type of the rvalue; the primary goal is to make subtyping explicit
    /// during optimizations and codegen.
    ///
    /// This projection doesn't impact the runtime behavior of the program except for potentially changing
    /// some type metadata of the interpreter or codegen backend.
    Subtype(Ty),
}

#[derive(Clone, Debug, Eq, PartialEq, Serialize)]
pub struct UserTypeProjection {
    pub base: UserTypeAnnotationIndex,

    pub projection: Opaque,
}

pub type Local = usize;

pub const RETURN_LOCAL: Local = 0;

/// The source-order index of a field in a variant.
///
/// For example, in the following types,
/// ```ignore(illustrative)
/// enum Demo1 {
///    Variant0 { a: bool, b: i32 },
///    Variant1 { c: u8, d: u64 },
/// }
/// struct Demo2 { e: u8, f: u16, g: u8 }
/// ```
/// `a`'s `FieldIdx` is `0`,
/// `b`'s `FieldIdx` is `1`,
/// `c`'s `FieldIdx` is `0`, and
/// `g`'s `FieldIdx` is `2`.
pub type FieldIdx = usize;

type UserTypeAnnotationIndex = usize;

/// The possible branch sites of a [TerminatorKind::SwitchInt].
#[derive(Clone, Debug, Eq, PartialEq, Serialize)]
pub struct SwitchTargets {
    /// The conditional branches where the first element represents the value that guards this
    /// branch, and the second element is the branch target.
    branches: Vec<(u128, BasicBlockIdx)>,
    /// The `otherwise` branch which will be taken in case none of the conditional branches are
    /// satisfied.
    otherwise: BasicBlockIdx,
}

impl SwitchTargets {
    /// All possible targets including the `otherwise` target.
    pub fn all_targets(&self) -> Successors {
        self.branches.iter().map(|(_, target)| *target).chain(Some(self.otherwise)).collect()
    }

    /// The `otherwise` branch target.
    pub fn otherwise(&self) -> BasicBlockIdx {
        self.otherwise
    }

    /// The conditional targets which are only taken if the pattern matches the given value.
    pub fn branches(&self) -> impl Iterator<Item = (u128, BasicBlockIdx)> {
        self.branches.iter().copied()
    }

    /// The number of targets including `otherwise`.
    pub fn len(&self) -> usize {
        self.branches.len() + 1
    }

    /// Create a new SwitchTargets from the given branches and `otherwise` target.
    pub fn new(branches: Vec<(u128, BasicBlockIdx)>, otherwise: BasicBlockIdx) -> SwitchTargets {
        SwitchTargets { branches, otherwise }
    }
}
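
// A minimal sketch of the `SwitchTargets` API above, with arbitrary block indices.
// Illustrative only, not part of the upstream API.
#[cfg(test)]
mod switch_targets_examples {
    use super::SwitchTargets;

    #[test]
    fn all_targets_includes_otherwise() {
        let targets = SwitchTargets::new(vec![(0, 1), (1, 2)], 3);
        assert_eq!(targets.all_targets(), vec![1, 2, 3]);
        assert_eq!(targets.otherwise(), 3);
        assert_eq!(targets.len(), 3);
        assert_eq!(targets.branches().collect::<Vec<_>>(), vec![(0, 1), (1, 2)]);
    }
}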

#[derive(Copy, Clone, Debug, Eq, PartialEq, Serialize)]
pub enum BorrowKind {
    /// Data must be immutable and is aliasable.
    Shared,

    /// An immutable, aliasable borrow that is discarded after borrow-checking. Can behave either
    /// like a normal shared borrow or like a special shallow borrow (see [`FakeBorrowKind`]).
    Fake(FakeBorrowKind),

    /// Data is mutable and not aliasable.
    Mut {
        /// The kind of mutable borrow: a default borrow, a two-phase borrow (e.g. arising
        /// from method-call auto-ref), or a closure capture.
        kind: MutBorrowKind,
    },
}

impl BorrowKind {
    pub fn to_mutable_lossy(self) -> Mutability {
        match self {
            BorrowKind::Mut { .. } => Mutability::Mut,
            BorrowKind::Shared => Mutability::Not,
            // FIXME: There's no type corresponding to a shallow borrow, so use `&` as an approximation.
            BorrowKind::Fake(_) => Mutability::Not,
        }
    }
}

#[derive(Copy, Clone, Debug, Eq, PartialEq, Serialize)]
pub enum RawPtrKind {
    Mut,
    Const,
    FakeForPtrMetadata,
}

impl RawPtrKind {
    pub fn to_mutable_lossy(self) -> Mutability {
        match self {
            RawPtrKind::Mut => Mutability::Mut,
            RawPtrKind::Const => Mutability::Not,
            // FIXME: There's no pointer kind corresponding to a fake pointer, so use `*const` as an approximation.
            RawPtrKind::FakeForPtrMetadata => Mutability::Not,
        }
    }
}
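
// A minimal sketch of the lossy mutability conversions above. Illustrative only, not part
// of the upstream API.
#[cfg(test)]
mod mutability_examples {
    use super::*;

    #[test]
    fn lossy_conversions() {
        assert_eq!(BorrowKind::Shared.to_mutable_lossy(), Mutability::Not);
        assert_eq!(
            BorrowKind::Mut { kind: MutBorrowKind::Default }.to_mutable_lossy(),
            Mutability::Mut
        );
        assert_eq!(RawPtrKind::Mut.to_mutable_lossy(), Mutability::Mut);
        assert_eq!(RawPtrKind::FakeForPtrMetadata.to_mutable_lossy(), Mutability::Not);
    }
}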

#[derive(Copy, Clone, Debug, Eq, PartialEq, Serialize)]
pub enum MutBorrowKind {
    Default,
    TwoPhaseBorrow,
    ClosureCapture,
}

#[derive(Copy, Clone, Debug, Eq, PartialEq, Serialize)]
pub enum FakeBorrowKind {
    /// A shared (deep) borrow. Data must be immutable and is aliasable.
    Deep,
    /// The immediately borrowed place must be immutable, but projections from
    /// it don't need to be. This is used to prevent match guards from replacing
    /// the scrutinee. For example, a fake borrow of `a.b` doesn't
    /// conflict with a mutable borrow of `a.b.c`.
    Shallow,
}

#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, Serialize)]
pub enum Mutability {
    Not,
    Mut,
}

#[derive(Copy, Clone, Debug, Eq, PartialEq, Serialize)]
pub enum Safety {
    Safe,
    Unsafe,
}

#[derive(Copy, Clone, Debug, Eq, PartialEq, Serialize)]
pub enum PointerCoercion {
    /// Go from a fn-item type to a fn-pointer type.
    ReifyFnPointer,

    /// Go from a safe fn pointer to an unsafe fn pointer.
    UnsafeFnPointer,

    /// Go from a non-capturing closure to a fn pointer or an unsafe fn pointer.
    /// It cannot convert a closure that requires unsafe.
    ClosureFnPointer(Safety),

    /// Go from a mut raw pointer to a const raw pointer.
    MutToConstPointer,

    /// Go from `*const [T; N]` to `*const T`
    ArrayToPointer,

    /// Unsize a pointer/reference value, e.g., `&[T; n]` to
    /// `&[T]`. Note that the source could be a thin or wide pointer.
    /// This will do things like convert thin pointers to wide
    /// pointers, or convert structs containing thin pointers to
    /// structs containing wide pointers, or convert between wide
    /// pointers.
    Unsize,
}

#[derive(Copy, Clone, Debug, Eq, PartialEq, Serialize)]
pub enum CastKind {
    // FIXME(smir-rename): rename this to PointerExposeProvenance
    PointerExposeAddress,
    PointerWithExposedProvenance,
    PointerCoercion(PointerCoercion),
    // FIXME(smir-rename): change this to PointerCoercion(DynStar)
    DynStar,
    IntToInt,
    FloatToInt,
    FloatToFloat,
    IntToFloat,
    PtrToPtr,
    FnPtrToPtr,
    Transmute,
}

#[derive(Clone, Debug, Eq, PartialEq, Serialize)]
pub enum NullOp {
    /// Returns the size of a value of that type.
    SizeOf,
    /// Returns the minimum alignment of a type.
    AlignOf,
    /// Returns the offset of a field.
    OffsetOf(Vec<(VariantIdx, FieldIdx)>),
    /// cfg!(ub_checks), but at codegen time
    UbChecks,
    /// cfg!(contract_checks), but at codegen time
    ContractChecks,
}

impl Operand {
    /// Get the type of an operand relative to the local declaration.
    ///
    /// In order to retrieve the correct type, the `locals` argument must match the list of all
    /// locals from the function body where this operand originates from.
    ///
    /// Errors indicate a malformed operand or incompatible locals list.
    pub fn ty(&self, locals: &[LocalDecl]) -> Result<Ty, Error> {
        match self {
            Operand::Copy(place) | Operand::Move(place) => place.ty(locals),
            Operand::Constant(c) => Ok(c.ty()),
        }
    }
}

impl ConstOperand {
    pub fn ty(&self) -> Ty {
        self.const_.ty()
    }
}

impl Place {
    /// Resolve down the chain of projections to get the type referenced at the end of it.
    /// E.g.:
    /// Calling `ty()` on `var.field` should return the type of `field`.
    ///
    /// In order to retrieve the correct type, the `locals` argument must match the list of all
    /// locals from the function body where this place originates from.
    pub fn ty(&self, locals: &[LocalDecl]) -> Result<Ty, Error> {
        let start_ty = locals[self.local].ty;
        self.projection.iter().fold(Ok(start_ty), |place_ty, elem| elem.ty(place_ty?))
    }
}
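
// A minimal sketch of `Place::ty` in context: resolve the type of every call destination in
// a body, relative to that body's locals. The helper name is illustrative and not part of
// the upstream API.
#[allow(dead_code)]
fn call_destination_tys(body: &Body) -> Vec<Result<Ty, Error>> {
    body.blocks
        .iter()
        .filter_map(|bb| match &bb.terminator.kind {
            TerminatorKind::Call { destination, .. } => Some(destination.ty(body.locals())),
            _ => None,
        })
        .collect()
}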

impl ProjectionElem {
    /// Get the expected type after applying this projection to a given place type.
    pub fn ty(&self, place_ty: Ty) -> Result<Ty, Error> {
        let ty = place_ty;
        match &self {
            ProjectionElem::Deref => Self::deref_ty(ty),
            ProjectionElem::Field(_idx, fty) => Ok(*fty),
            ProjectionElem::Index(_) | ProjectionElem::ConstantIndex { .. } => Self::index_ty(ty),
            ProjectionElem::Subslice { from, to, from_end } => {
                Self::subslice_ty(ty, *from, *to, *from_end)
            }
            ProjectionElem::Downcast(_) => Ok(ty),
            ProjectionElem::OpaqueCast(ty) | ProjectionElem::Subtype(ty) => Ok(*ty),
        }
    }

    fn index_ty(ty: Ty) -> Result<Ty, Error> {
        ty.kind().builtin_index().ok_or_else(|| error!("Cannot index non-array type: {ty:?}"))
    }

    fn subslice_ty(ty: Ty, from: u64, to: u64, from_end: bool) -> Result<Ty, Error> {
        let ty_kind = ty.kind();
        match ty_kind {
            TyKind::RigidTy(RigidTy::Slice(..)) => Ok(ty),
            TyKind::RigidTy(RigidTy::Array(inner, _)) if !from_end => Ty::try_new_array(
                inner,
                to.checked_sub(from).ok_or_else(|| error!("Subslice overflow: {from}..{to}"))?,
            ),
            TyKind::RigidTy(RigidTy::Array(inner, size)) => {
                let size = size.eval_target_usize()?;
                let len = size - from - to;
                Ty::try_new_array(inner, len)
            }
            _ => Err(Error(format!("Cannot subslice non-array type: `{ty_kind:?}`"))),
        }
    }

    fn deref_ty(ty: Ty) -> Result<Ty, Error> {
        let deref_ty = ty
            .kind()
            .builtin_deref(true)
            .ok_or_else(|| error!("Cannot dereference type: {ty:?}"))?;
        Ok(deref_ty.ty)
    }
}