rustc_codegen_ssa/mir/analyze.rs

//! An analysis to determine which locals require allocas and
//! which do not.
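//!
//! Locals that stay in SSA form are materialized directly as backend
//! immediates; the locals this analysis flags instead receive a stack slot
//! (an alloca or the backend's equivalent).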

use rustc_data_structures::graph::dominators::Dominators;
use rustc_index::bit_set::DenseBitSet;
use rustc_index::{IndexSlice, IndexVec};
use rustc_middle::mir::visit::{MutatingUseContext, NonMutatingUseContext, PlaceContext, Visitor};
use rustc_middle::mir::{self, DefLocation, Location, TerminatorKind, traversal};
use rustc_middle::ty::layout::{HasTyCtxt, LayoutOf};
use rustc_middle::{bug, span_bug};
use tracing::debug;

use super::FunctionCx;
use crate::traits::*;

pub(crate) fn non_ssa_locals<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    fx: &FunctionCx<'a, 'tcx, Bx>,
    traversal_order: &[mir::BasicBlock],
) -> DenseBitSet<mir::Local> {
    let mir = fx.mir;
    let dominators = mir.basic_blocks.dominators();
    let locals = mir
        .local_decls
        .iter()
        .map(|decl| {
            let ty = fx.monomorphize(decl.ty);
            let layout = fx.cx.spanned_layout_of(ty, decl.source_info.span);
            if layout.is_zst() { LocalKind::ZST } else { LocalKind::Unused }
        })
        .collect();

    let mut analyzer = LocalAnalyzer { fx, dominators, locals };

    // Arguments get assigned to by means of the function being called
    for arg in mir.args_iter() {
        analyzer.define(arg, DefLocation::Argument);
    }

    // If there exists a local definition that dominates all uses of that local,
    // the definition should be visited first. Traverse blocks in an order that
    // is a topological sort of the dominance partial order.
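    // For example, when `_1 = ...` and a later read of `_1` sit in the same
    // block, visiting the definition first lets the read find `SSA(def)` and
    // merely verify that the definition dominates it.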
    for bb in traversal_order.iter().copied() {
        let data = &mir.basic_blocks[bb];
        analyzer.visit_basic_block_data(bb, data);
    }

    let mut non_ssa_locals = DenseBitSet::new_empty(analyzer.locals.len());
    for (local, kind) in analyzer.locals.iter_enumerated() {
        if matches!(kind, LocalKind::Memory) {
            non_ssa_locals.insert(local);
        }
    }

    non_ssa_locals
}

#[derive(Copy, Clone, PartialEq, Eq)]
enum LocalKind {
    /// A local of zero-sized type, which needs neither memory nor an SSA value.
    ZST,
    /// A local that requires an alloca.
    Memory,
    /// A scalar or a scalar pair local that is neither defined nor used.
    Unused,
    /// A scalar or a scalar pair local with a single definition that dominates all uses.
    SSA(DefLocation),
}
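
// An illustrative sketch (not part of the analysis itself) of how `define`
// and `visit_local` move a non-ZST local between kinds:
//
//     Unused --first def--> SSA(def)   (backend-immediate or scalar pair layout)
//     Unused --first def--> Memory     (any other layout)
//     SSA(_) --second def--> Memory
//     SSA(def) --use not dominated by `def`--> Memory
//     SSA(_) --borrow, projection, or drop of a needs-drop type--> Memory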

struct LocalAnalyzer<'a, 'b, 'tcx, Bx: BuilderMethods<'b, 'tcx>> {
    fx: &'a FunctionCx<'b, 'tcx, Bx>,
    dominators: &'a Dominators<mir::BasicBlock>,
    locals: IndexVec<mir::Local, LocalKind>,
}

impl<'a, 'b, 'tcx, Bx: BuilderMethods<'b, 'tcx>> LocalAnalyzer<'a, 'b, 'tcx, Bx> {
    fn define(&mut self, local: mir::Local, location: DefLocation) {
        let fx = self.fx;
        let kind = &mut self.locals[local];
        let decl = &fx.mir.local_decls[local];
        match *kind {
            LocalKind::ZST => {}
            LocalKind::Memory => {}
            LocalKind::Unused => {
                let ty = fx.monomorphize(decl.ty);
                let layout = fx.cx.spanned_layout_of(ty, decl.source_info.span);
                *kind =
                    if fx.cx.is_backend_immediate(layout) || fx.cx.is_backend_scalar_pair(layout) {
                        LocalKind::SSA(location)
                    } else {
                        LocalKind::Memory
                    };
            }
            LocalKind::SSA(_) => *kind = LocalKind::Memory,
        }
    }

    fn process_place(
        &mut self,
        place_ref: &mir::PlaceRef<'tcx>,
        context: PlaceContext,
        location: Location,
    ) {
        let cx = self.fx.cx;

        if let Some((place_base, elem)) = place_ref.last_projection() {
            let mut base_context = if context.is_mutating_use() {
                PlaceContext::MutatingUse(MutatingUseContext::Projection)
            } else {
                PlaceContext::NonMutatingUse(NonMutatingUseContext::Projection)
            };

            // Allow uses of projections that are ZSTs or from scalar fields.
            let is_consume = matches!(
                context,
                PlaceContext::NonMutatingUse(
                    NonMutatingUseContext::Copy | NonMutatingUseContext::Move,
                )
            );
            if is_consume {
                let base_ty = place_base.ty(self.fx.mir, cx.tcx());
                let base_ty = self.fx.monomorphize(base_ty);

                // ZSTs don't require any actual memory access.
                let elem_ty = base_ty.projection_ty(cx.tcx(), self.fx.monomorphize(elem)).ty;
                let span = self.fx.mir.local_decls[place_ref.local].source_info.span;
                if cx.spanned_layout_of(elem_ty, span).is_zst() {
                    return;
                }

                if let mir::ProjectionElem::Field(..) = elem {
                    let layout = cx.spanned_layout_of(base_ty.ty, span);
                    if cx.is_backend_immediate(layout) || cx.is_backend_scalar_pair(layout) {
                        // Recurse with the same context, instead of `Projection`,
                        // potentially stopping at non-operand projections,
                        // which would trigger `not_ssa` on locals.
                        base_context = context;
                    }
                }
            }

            if let mir::ProjectionElem::Deref = elem {
                // Deref projections typically only read the pointer.
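                // For example, consuming `*_1` only copies the pointer out of
                // `_1`, so `_1` itself can remain an SSA value.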
                base_context = PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy);
            }

            self.process_place(&place_base, base_context, location);
            // HACK(eddyb) this emulates the old `visit_projection_elem`; this
            // entire `visit_place`-like `process_place` method should be rewritten
            // now that we have moved to the "slice of projections" representation.
            if let mir::ProjectionElem::Index(local) = elem {
                self.visit_local(
                    local,
                    PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy),
                    location,
                );
            }
        } else {
            self.visit_local(place_ref.local, context, location);
        }
    }
}

impl<'a, 'b, 'tcx, Bx: BuilderMethods<'b, 'tcx>> Visitor<'tcx> for LocalAnalyzer<'a, 'b, 'tcx, Bx> {
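    // Assigning directly to a whole local is a definition; assigning through
    // a projection is instead a `Store` use of the base local, which generally
    // forces it into memory (a trailing `Deref`, however, merely reads the
    // pointer out of the base local).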
    fn visit_assign(
        &mut self,
        place: &mir::Place<'tcx>,
        rvalue: &mir::Rvalue<'tcx>,
        location: Location,
    ) {
        debug!("visit_assign(place={:?}, rvalue={:?})", place, rvalue);

        if let Some(local) = place.as_local() {
            self.define(local, DefLocation::Assignment(location));
            if self.locals[local] != LocalKind::Memory {
                let decl_span = self.fx.mir.local_decls[local].source_info.span;
                if !self.fx.rvalue_creates_operand(rvalue, decl_span) {
                    self.locals[local] = LocalKind::Memory;
                }
            }
        } else {
            self.visit_place(place, PlaceContext::MutatingUse(MutatingUseContext::Store), location);
        }

        self.visit_rvalue(rvalue, location);
    }

    fn visit_place(&mut self, place: &mir::Place<'tcx>, context: PlaceContext, location: Location) {
        debug!("visit_place(place={:?}, context={:?})", place, context);
        self.process_place(&place.as_ref(), context, location);
    }

    fn visit_local(&mut self, local: mir::Local, context: PlaceContext, location: Location) {
        match context {
            PlaceContext::MutatingUse(MutatingUseContext::Call) => {
                let call = location.block;
                let TerminatorKind::Call { target, .. } =
                    self.fx.mir.basic_blocks[call].terminator().kind
                else {
                    bug!()
                };
                self.define(local, DefLocation::CallReturn { call, target });
            }

            PlaceContext::NonUse(_)
            | PlaceContext::NonMutatingUse(NonMutatingUseContext::PlaceMention)
            | PlaceContext::MutatingUse(MutatingUseContext::Retag) => {}

            PlaceContext::NonMutatingUse(
                NonMutatingUseContext::Copy | NonMutatingUseContext::Move,
            ) => match &mut self.locals[local] {
                LocalKind::ZST => {}
                LocalKind::Memory => {}
                LocalKind::SSA(def) if def.dominates(location, self.dominators) => {}
                // Reads from uninitialized variables (e.g., in dead code, after
                // optimizations) require locals to be in (uninitialized) memory.
                // N.B., there can be uninitialized reads of a local visited after
                // an assignment to that local, if they happen on disjoint paths.
                kind @ (LocalKind::Unused | LocalKind::SSA(_)) => {
                    *kind = LocalKind::Memory;
                }
            },

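            // All of the following contexts are handled conservatively: they
            // may expose the local's address or mutate it in place, so the
            // local is forced into memory.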
            PlaceContext::MutatingUse(
                MutatingUseContext::Store
                | MutatingUseContext::Deinit
                | MutatingUseContext::SetDiscriminant
                | MutatingUseContext::AsmOutput
                | MutatingUseContext::Borrow
                | MutatingUseContext::RawBorrow
                | MutatingUseContext::Projection,
            )
            | PlaceContext::NonMutatingUse(
                NonMutatingUseContext::Inspect
                | NonMutatingUseContext::SharedBorrow
                | NonMutatingUseContext::FakeBorrow
                | NonMutatingUseContext::RawBorrow
                | NonMutatingUseContext::Projection,
            ) => {
                self.locals[local] = LocalKind::Memory;
            }

            PlaceContext::MutatingUse(MutatingUseContext::Drop) => {
                let kind = &mut self.locals[local];
                if *kind != LocalKind::Memory {
                    let ty = self.fx.mir.local_decls[local].ty;
                    let ty = self.fx.monomorphize(ty);
                    if self.fx.cx.type_needs_drop(ty) {
                        // Only need the place if we're actually dropping it.
                        *kind = LocalKind::Memory;
                    }
                }
            }

            PlaceContext::MutatingUse(MutatingUseContext::Yield) => bug!(),
        }
    }
}

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub(crate) enum CleanupKind {
    /// A block that is not part of any cleanup code.
    NotCleanup,
    /// The entry block of a funclet.
    Funclet,
    /// A cleanup block that belongs to the given funclet.
    Internal { funclet: mir::BasicBlock },
}

impl CleanupKind {
    pub(crate) fn funclet_bb(self, for_bb: mir::BasicBlock) -> Option<mir::BasicBlock> {
        match self {
            CleanupKind::NotCleanup => None,
            CleanupKind::Funclet => Some(for_bb),
            CleanupKind::Internal { funclet } => Some(funclet),
        }
    }
}

/// MSVC requires unwinding code to be split into a tree of *funclets*, where each funclet can
/// only branch to itself or to its parent. Luckily, the code we generate matches this pattern.
/// Recover that structure in an analysis pass.
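/// For example, a cleanup block reachable from two different funclets cannot
/// be internal to either, so `propagate` promotes it to a funclet of its own
/// and records it as the successor of both original funclets.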
pub(crate) fn cleanup_kinds(mir: &mir::Body<'_>) -> IndexVec<mir::BasicBlock, CleanupKind> {
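    // Pass 1: mark every block targeted by an unwind edge as a funclet entry.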
    fn discover_masters<'tcx>(
        result: &mut IndexSlice<mir::BasicBlock, CleanupKind>,
        mir: &mir::Body<'tcx>,
    ) {
        for (bb, data) in mir.basic_blocks.iter_enumerated() {
            match data.terminator().kind {
                TerminatorKind::Goto { .. }
                | TerminatorKind::UnwindResume
                | TerminatorKind::UnwindTerminate(_)
                | TerminatorKind::Return
                | TerminatorKind::TailCall { .. }
                | TerminatorKind::CoroutineDrop
                | TerminatorKind::Unreachable
                | TerminatorKind::SwitchInt { .. }
                | TerminatorKind::Yield { .. }
                | TerminatorKind::FalseEdge { .. }
                | TerminatorKind::FalseUnwind { .. } => { /* nothing to do */ }
                TerminatorKind::Call { unwind, .. }
                | TerminatorKind::InlineAsm { unwind, .. }
                | TerminatorKind::Assert { unwind, .. }
                | TerminatorKind::Drop { unwind, .. } => {
                    if let mir::UnwindAction::Cleanup(unwind) = unwind {
                        debug!(
                            "cleanup_kinds: {:?}/{:?} registering {:?} as funclet",
                            bb, data, unwind
                        );
                        result[unwind] = CleanupKind::Funclet;
                    }
                }
            }
        }
    }

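    // Pass 2: walk the CFG in reverse postorder, pulling each cleanup block
    // into the funclet that reaches it; a block reached from two different
    // funclets is promoted to a funclet of its own.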
    fn propagate<'tcx>(
        result: &mut IndexSlice<mir::BasicBlock, CleanupKind>,
        mir: &mir::Body<'tcx>,
    ) {
        let mut funclet_succs = IndexVec::from_elem(None, &mir.basic_blocks);

        let mut set_successor = |funclet: mir::BasicBlock, succ| match funclet_succs[funclet] {
            ref mut s @ None => {
                debug!("set_successor: updating successor of {:?} to {:?}", funclet, succ);
                *s = Some(succ);
            }
            Some(s) => {
                if s != succ {
                    span_bug!(
                        mir.span,
                        "funclet {:?} has 2 parents - {:?} and {:?}",
                        funclet,
                        s,
                        succ
                    );
                }
            }
        };
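
        // Each funclet can transfer control to at most one other funclet, so
        // the closure above rejects any second, distinct successor.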

        for (bb, data) in traversal::reverse_postorder(mir) {
            let funclet = match result[bb] {
                CleanupKind::NotCleanup => continue,
                CleanupKind::Funclet => bb,
                CleanupKind::Internal { funclet } => funclet,
            };

            debug!(
                "cleanup_kinds: {:?}/{:?}/{:?} propagating funclet {:?}",
                bb, data, result[bb], funclet
            );

            for succ in data.terminator().successors() {
                let kind = result[succ];
                debug!("cleanup_kinds: propagating {:?} to {:?}/{:?}", funclet, succ, kind);
                match kind {
                    CleanupKind::NotCleanup => {
                        result[succ] = CleanupKind::Internal { funclet };
                    }
                    CleanupKind::Funclet => {
                        if funclet != succ {
                            set_successor(funclet, succ);
                        }
                    }
                    CleanupKind::Internal { funclet: succ_funclet } => {
                        if funclet != succ_funclet {
                            // `succ` has 2 different funclets going into it, so it must
                            // be a funclet by itself.

                            debug!(
                                "promoting {:?} to a funclet and updating {:?}",
                                succ, succ_funclet
                            );
                            result[succ] = CleanupKind::Funclet;
                            set_successor(succ_funclet, succ);
                            set_successor(funclet, succ);
                        }
                    }
                }
            }
        }
    }

    let mut result = IndexVec::from_elem(CleanupKind::NotCleanup, &mir.basic_blocks);

    discover_masters(&mut result, mir);
    propagate(&mut result, mir);
    debug!("cleanup_kinds: result={:?}", result);
    result
}