//! Codegen of MIR statements — `compiler/rustc_codegen_ssa/src/mir/statement.rs`.

use rustc_middle::mir::{self, NonDivergingIntrinsic, StmtDebugInfo};
use rustc_middle::span_bug;
use tracing::instrument;

use super::{FunctionCx, LocalRef};
use crate::traits::*;

impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
    // NOTE(review): the scraped file contained the full `tracing::instrument`
    // macro expansion of `codegen_statement` inlined here, duplicating the
    // function body below. The expansion is a build artifact, not source; the
    // attribute form below is the canonical source of truth, so the expansion
    // is removed and only the attribute is kept.
    #[instrument(level = "debug", skip(self, bx))]
10    pub(crate) fn codegen_statement(&mut self, bx: &mut Bx, statement: &mir::Statement<'tcx>) {
11        self.codegen_stmt_debuginfos(bx, &statement.debuginfos);
12        self.set_debug_loc(bx, statement.source_info);
13        match statement.kind {
14            mir::StatementKind::Assign(box (ref place, ref rvalue)) => {
15                if let Some(index) = place.as_local() {
16                    match self.locals[index] {
17                        LocalRef::Place(cg_dest) => self.codegen_rvalue(bx, cg_dest, rvalue),
18                        LocalRef::UnsizedPlace(cg_indirect_dest) => {
19                            let ty = cg_indirect_dest.layout.ty;
20                            span_bug!(
21                                statement.source_info.span,
22                                "cannot reallocate from `UnsizedPlace({ty})` \
23                                into `{rvalue:?}`; dynamic alloca is not supported",
24                            );
25                        }
26                        LocalRef::PendingOperand => {
27                            let operand = self.codegen_rvalue_operand(bx, rvalue);
28                            self.overwrite_local(index, LocalRef::Operand(operand));
29                            self.debug_introduce_local(bx, index);
30                        }
31                        LocalRef::Operand(op) => {
32                            if !op.layout.is_zst() {
33                                span_bug!(
34                                    statement.source_info.span,
35                                    "operand {:?} already assigned",
36                                    rvalue
37                                );
38                            }
39
40                            // If the type is zero-sized, it's already been set here,
41                            // but we still need to make sure we codegen the operand
42                            self.codegen_rvalue_operand(bx, rvalue);
43                        }
44                    }
45                } else {
46                    let cg_dest = self.codegen_place(bx, place.as_ref());
47                    self.codegen_rvalue(bx, cg_dest, rvalue);
48                }
49            }
50            mir::StatementKind::SetDiscriminant { box ref place, variant_index } => {
51                self.codegen_place(bx, place.as_ref()).codegen_set_discr(bx, variant_index);
52            }
53            mir::StatementKind::StorageLive(local) => {
54                if let LocalRef::Place(cg_place) = self.locals[local] {
55                    cg_place.storage_live(bx);
56                } else if let LocalRef::UnsizedPlace(cg_indirect_place) = self.locals[local] {
57                    cg_indirect_place.storage_live(bx);
58                }
59            }
60            mir::StatementKind::StorageDead(local) => {
61                if let LocalRef::Place(cg_place) = self.locals[local] {
62                    cg_place.storage_dead(bx);
63                } else if let LocalRef::UnsizedPlace(cg_indirect_place) = self.locals[local] {
64                    cg_indirect_place.storage_dead(bx);
65                }
66            }
67            mir::StatementKind::Coverage(ref kind) => {
68                self.codegen_coverage(bx, kind, statement.source_info.scope);
69            }
70            mir::StatementKind::Intrinsic(box NonDivergingIntrinsic::Assume(ref op)) => {
71                let op_val = self.codegen_operand(bx, op);
72                bx.assume(op_val.immediate());
73            }
74            mir::StatementKind::Intrinsic(box NonDivergingIntrinsic::CopyNonOverlapping(
75                mir::CopyNonOverlapping { ref count, ref src, ref dst },
76            )) => {
77                let dst_val = self.codegen_operand(bx, dst);
78                let src_val = self.codegen_operand(bx, src);
79                let count = self.codegen_operand(bx, count).immediate();
80                let pointee_layout = dst_val
81                    .layout
82                    .pointee_info_at(bx, rustc_abi::Size::ZERO)
83                    .expect("Expected pointer");
84                let bytes = bx.mul(count, bx.const_usize(pointee_layout.size.bytes()));
85
86                let align = pointee_layout.align;
87                let dst = dst_val.immediate();
88                let src = src_val.immediate();
89                bx.memcpy(dst, align, src, align, bytes, crate::MemFlags::empty(), None);
90            }
91            mir::StatementKind::FakeRead(..)
92            | mir::StatementKind::Retag { .. }
93            | mir::StatementKind::AscribeUserType(..)
94            | mir::StatementKind::ConstEvalCounter
95            | mir::StatementKind::PlaceMention(..)
96            | mir::StatementKind::BackwardIncompatibleDropHint { .. }
97            | mir::StatementKind::Nop => {}
98        }
99    }
100
101    pub(crate) fn codegen_stmt_debuginfo(&mut self, bx: &mut Bx, debuginfo: &StmtDebugInfo<'tcx>) {
102        match debuginfo {
103            StmtDebugInfo::AssignRef(dest, place) => {
104                let local_ref = match self.locals[place.local] {
105                    // For an rvalue like `&(_1.1)`, when `BackendRepr` is `BackendRepr::Memory`, we allocate a block of memory to this place.
106                    // The place is an indirect pointer, we can refer to it directly.
107                    LocalRef::Place(place_ref) => Some((place_ref, place.projection.as_slice())),
108                    // For an rvalue like `&((*_1).1)`, we are calculating the address of `_1.1`.
109                    // The deref projection is no-op here.
110                    LocalRef::Operand(operand_ref) if place.is_indirect_first_projection() => {
111                        Some((operand_ref.deref(bx.cx()), &place.projection[1..]))
112                    }
113                    // For an rvalue like `&1`, when `BackendRepr` is `BackendRepr::Scalar`,
114                    // we cannot get the address.
115                    // N.B. `non_ssa_locals` returns that this is an SSA local.
116                    LocalRef::Operand(_) => None,
117                    LocalRef::UnsizedPlace(_) | LocalRef::PendingOperand => None,
118                }
119                .filter(|(_, projection)| {
120                    // Drop unsupported projections.
121                    projection.iter().all(|p| p.can_use_in_debuginfo())
122                });
123                if let Some((base, projection)) = local_ref {
124                    self.debug_new_val_to_local(bx, *dest, base, projection);
125                } else {
126                    // If the address cannot be calculated, use poison to indicate that the value has been optimized out.
127                    self.debug_poison_to_local(bx, *dest);
128                }
129            }
130            StmtDebugInfo::InvalidAssign(local) => {
131                self.debug_poison_to_local(bx, *local);
132            }
133        }
134    }
135
136    pub(crate) fn codegen_stmt_debuginfos(
137        &mut self,
138        bx: &mut Bx,
139        debuginfos: &[StmtDebugInfo<'tcx>],
140    ) {
141        for debuginfo in debuginfos {
142            self.codegen_stmt_debuginfo(bx, debuginfo);
143        }
144    }
145}