rustc_codegen_ssa/mir/analyze.rs

//! An analysis to determine which locals require allocas and
//! which do not.
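//!
//! For example (a sketch, not actual MIR): a local with a single dominating
//! definition and an operand-friendly layout, such as `_1 = const 5_i32`, can
//! be kept as an SSA value, while a local whose address is taken, e.g. by
//! `_2 = &_1`, must be spilled to an alloca.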

use rustc_data_structures::graph::dominators::Dominators;
use rustc_index::bit_set::DenseBitSet;
use rustc_index::{IndexSlice, IndexVec};
use rustc_middle::mir::visit::{MutatingUseContext, NonMutatingUseContext, PlaceContext, Visitor};
use rustc_middle::mir::{self, DefLocation, Location, TerminatorKind, traversal};
use rustc_middle::ty::layout::{HasTyCtxt, LayoutOf};
use rustc_middle::{bug, span_bug};
use tracing::debug;

use super::FunctionCx;
use crate::traits::*;

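/// Returns the set of locals that must be lowered to memory (given an alloca)
/// because they cannot be represented as SSA values.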
pub(crate) fn non_ssa_locals<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    fx: &FunctionCx<'a, 'tcx, Bx>,
    traversal_order: &[mir::BasicBlock],
) -> DenseBitSet<mir::Local> {
    let mir = fx.mir;
    let dominators = mir.basic_blocks.dominators();
    let locals = mir
        .local_decls
        .iter()
        .map(|decl| {
            let ty = fx.monomorphize(decl.ty);
            let layout = fx.cx.spanned_layout_of(ty, decl.source_info.span);
            if layout.is_zst() { LocalKind::ZST } else { LocalKind::Unused }
        })
        .collect();

    let mut analyzer = LocalAnalyzer { fx, dominators, locals };

    // Arguments get assigned to by means of the function being called
    for arg in mir.args_iter() {
        analyzer.define(arg, DefLocation::Argument);
    }

    // If there exists a local definition that dominates all uses of that local,
    // the definition should be visited first. Traverse blocks in an order that
    // is a topological sort of the dominance partial order.
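    // (A reverse postorder of the reachable CFG is one such order: every block
    // appears after all of the blocks that dominate it.)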
    for bb in traversal_order.iter().copied() {
        let data = &mir.basic_blocks[bb];
        analyzer.visit_basic_block_data(bb, data);
    }

    let mut non_ssa_locals = DenseBitSet::new_empty(analyzer.locals.len());
    for (local, kind) in analyzer.locals.iter_enumerated() {
        if matches!(kind, LocalKind::Memory) {
            non_ssa_locals.insert(local);
        }
    }

    non_ssa_locals
}

#[derive(Copy, Clone, PartialEq, Eq)]
enum LocalKind {
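    /// A local that is a zero-sized type and needs no backing storage.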
    ZST,
    /// A local that requires an alloca.
    Memory,
    /// A scalar or a scalar pair local that is neither defined nor used.
    Unused,
    /// A scalar or a scalar pair local with a single definition that dominates all uses.
    SSA(DefLocation),
}
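
// A rough sketch of the state machine implemented by `define` and `visit_local`
// below (the exact transitions also depend on layout and use context):
//
//   Unused --first def, operand-friendly layout--> SSA(def)
//   Unused --first def, other layout-------------> Memory
//   SSA(def) --second def or non-dominated use---> Memory
//   Unused --read before any def-----------------> Memory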

struct LocalAnalyzer<'a, 'b, 'tcx, Bx: BuilderMethods<'b, 'tcx>> {
    fx: &'a FunctionCx<'b, 'tcx, Bx>,
    dominators: &'a Dominators<mir::BasicBlock>,
    locals: IndexVec<mir::Local, LocalKind>,
}

impl<'a, 'b, 'tcx, Bx: BuilderMethods<'b, 'tcx>> LocalAnalyzer<'a, 'b, 'tcx, Bx> {
    fn define(&mut self, local: mir::Local, location: DefLocation) {
        let fx = self.fx;
        let kind = &mut self.locals[local];
        let decl = &fx.mir.local_decls[local];
        match *kind {
            LocalKind::ZST => {}
            LocalKind::Memory => {}
            LocalKind::Unused => {
                let ty = fx.monomorphize(decl.ty);
                let layout = fx.cx.spanned_layout_of(ty, decl.source_info.span);
                *kind =
                    if fx.cx.is_backend_immediate(layout) || fx.cx.is_backend_scalar_pair(layout) {
                        LocalKind::SSA(location)
                    } else {
                        LocalKind::Memory
                    };
            }
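            // A second definition means the local can no longer be SSA.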
            LocalKind::SSA(_) => *kind = LocalKind::Memory,
        }
    }

    fn process_place(
        &mut self,
        place_ref: &mir::PlaceRef<'tcx>,
        context: PlaceContext,
        location: Location,
    ) {
        let cx = self.fx.cx;

        if let Some((place_base, elem)) = place_ref.last_projection() {
            let mut base_context = if context.is_mutating_use() {
                PlaceContext::MutatingUse(MutatingUseContext::Projection)
            } else {
                PlaceContext::NonMutatingUse(NonMutatingUseContext::Projection)
            };

            // Allow uses of projections that are ZSTs or from scalar fields.
            let is_consume = matches!(
                context,
                PlaceContext::NonMutatingUse(
                    NonMutatingUseContext::Copy | NonMutatingUseContext::Move,
                )
            );
            if is_consume {
                let base_ty = place_base.ty(self.fx.mir, cx.tcx());
                let base_ty = self.fx.monomorphize(base_ty);

                // ZSTs don't require any actual memory access.
                let elem_ty = base_ty.projection_ty(cx.tcx(), self.fx.monomorphize(elem)).ty;
                let span = self.fx.mir.local_decls[place_ref.local].source_info.span;
                if cx.spanned_layout_of(elem_ty, span).is_zst() {
                    return;
                }

                if let mir::ProjectionElem::Field(..) = elem {
                    let layout = cx.spanned_layout_of(base_ty.ty, span);
                    if cx.is_backend_immediate(layout) || cx.is_backend_scalar_pair(layout) {
                        // Recurse with the same context, instead of `Projection`,
                        // potentially stopping at non-operand projections,
                        // which would trigger `not_ssa` on locals.
                        base_context = context;
                    }
                }
            }

            if let mir::ProjectionElem::Deref = elem {
                // Deref projections typically only read the pointer.
                base_context = PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy);
            }

            self.process_place(&place_base, base_context, location);
            // HACK(eddyb) this emulates the old `visit_projection_elem`, this
            // entire `visit_place`-like `process_place` method should be rewritten,
            // now that we have moved to the "slice of projections" representation.
            if let mir::ProjectionElem::Index(local) = elem {
                self.visit_local(
                    local,
                    PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy),
                    location,
                );
            }
        } else {
            self.visit_local(place_ref.local, context, location);
        }
    }
}

impl<'a, 'b, 'tcx, Bx: BuilderMethods<'b, 'tcx>> Visitor<'tcx> for LocalAnalyzer<'a, 'b, 'tcx, Bx> {
    fn visit_assign(
        &mut self,
        place: &mir::Place<'tcx>,
        rvalue: &mir::Rvalue<'tcx>,
        location: Location,
    ) {
        debug!("visit_assign(place={:?}, rvalue={:?})", place, rvalue);

        if let Some(local) = place.as_local() {
            self.define(local, DefLocation::Assignment(location));
            if self.locals[local] != LocalKind::Memory {
                let decl_span = self.fx.mir.local_decls[local].source_info.span;
                if !self.fx.rvalue_creates_operand(rvalue, decl_span) {
                    self.locals[local] = LocalKind::Memory;
                }
            }
        } else {
            self.visit_place(place, PlaceContext::MutatingUse(MutatingUseContext::Store), location);
        }

        self.visit_rvalue(rvalue, location);
    }

    fn visit_place(&mut self, place: &mir::Place<'tcx>, context: PlaceContext, location: Location) {
        debug!("visit_place(place={:?}, context={:?})", place, context);
        self.process_place(&place.as_ref(), context, location);
    }

    fn visit_local(&mut self, local: mir::Local, context: PlaceContext, location: Location) {
        match context {
            PlaceContext::MutatingUse(MutatingUseContext::Call) => {
                let call = location.block;
                let TerminatorKind::Call { target, .. } =
                    self.fx.mir.basic_blocks[call].terminator().kind
                else {
                    bug!()
                };
                self.define(local, DefLocation::CallReturn { call, target });
            }

            PlaceContext::NonUse(_)
            | PlaceContext::NonMutatingUse(NonMutatingUseContext::PlaceMention)
            | PlaceContext::MutatingUse(MutatingUseContext::Retag) => {}

            PlaceContext::NonMutatingUse(
                NonMutatingUseContext::Copy
                | NonMutatingUseContext::Move
                // Inspect covers things like `PtrMetadata` and `Discriminant`,
                // which we can treat like `Copy` uses for the purpose of
                // deciding whether a local can be an SSA variable.
                | NonMutatingUseContext::Inspect,
            ) => match &mut self.locals[local] {
                LocalKind::ZST => {}
                LocalKind::Memory => {}
                LocalKind::SSA(def) if def.dominates(location, self.dominators) => {}
                // Reads from uninitialized variables (e.g., in dead code, after
                // optimizations) require locals to be in (uninitialized) memory.
                // N.B., there can be uninitialized reads of a local visited after
                // an assignment to that local, if they happen on disjoint paths.
                kind @ (LocalKind::Unused | LocalKind::SSA(_)) => {
                    *kind = LocalKind::Memory;
                }
            },

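            // These uses need an actual place (e.g. to take its address or to
            // write through a projection), so the local must live in memory.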
            PlaceContext::MutatingUse(
                MutatingUseContext::Store
                | MutatingUseContext::Deinit
                | MutatingUseContext::SetDiscriminant
                | MutatingUseContext::AsmOutput
                | MutatingUseContext::Borrow
                | MutatingUseContext::RawBorrow
                | MutatingUseContext::Projection,
            )
            | PlaceContext::NonMutatingUse(
                NonMutatingUseContext::SharedBorrow
                | NonMutatingUseContext::FakeBorrow
                | NonMutatingUseContext::RawBorrow
                | NonMutatingUseContext::Projection,
            ) => {
                self.locals[local] = LocalKind::Memory;
            }

            PlaceContext::MutatingUse(MutatingUseContext::Drop) => {
                let kind = &mut self.locals[local];
                if *kind != LocalKind::Memory {
                    let ty = self.fx.mir.local_decls[local].ty;
                    let ty = self.fx.monomorphize(ty);
                    if self.fx.cx.type_needs_drop(ty) {
                        // Only need the place if we're actually dropping it.
                        *kind = LocalKind::Memory;
                    }
                }
            }

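            // Coroutines are lowered to state machines in earlier MIR passes,
            // so `Yield` can never be seen during codegen.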
            PlaceContext::MutatingUse(MutatingUseContext::Yield) => bug!(),
        }
    }
}

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub(crate) enum CleanupKind {
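    /// A regular block that is not part of any cleanup (unwind) code.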
    NotCleanup,
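    /// The root block of a funclet.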
    Funclet,
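    /// A block that belongs to the funclet rooted at `funclet`.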
    Internal { funclet: mir::BasicBlock },
}

impl CleanupKind {
    pub(crate) fn funclet_bb(self, for_bb: mir::BasicBlock) -> Option<mir::BasicBlock> {
        match self {
            CleanupKind::NotCleanup => None,
            CleanupKind::Funclet => Some(for_bb),
            CleanupKind::Internal { funclet } => Some(funclet),
        }
    }
}

/// MSVC requires unwinding code to be split into a tree of *funclets*, where each funclet can only
/// branch to itself or to its parent. Luckily, the code we generate matches this pattern.
/// Recover that structure in an analysis pass.
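///
/// For example (a sketch, not actual output): if a `Call` in `bb0` unwinds to
/// `bb1`, and `bb1` then jumps to `bb2`, this pass classifies `bb1` as
/// `CleanupKind::Funclet` and `bb2` as `CleanupKind::Internal { funclet: bb1 }`.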
pub(crate) fn cleanup_kinds(mir: &mir::Body<'_>) -> IndexVec<mir::BasicBlock, CleanupKind> {
    fn discover_masters<'tcx>(
        result: &mut IndexSlice<mir::BasicBlock, CleanupKind>,
        mir: &mir::Body<'tcx>,
    ) {
        for (bb, data) in mir.basic_blocks.iter_enumerated() {
            match data.terminator().kind {
                TerminatorKind::Goto { .. }
                | TerminatorKind::UnwindResume
                | TerminatorKind::UnwindTerminate(_)
                | TerminatorKind::Return
                | TerminatorKind::TailCall { .. }
                | TerminatorKind::CoroutineDrop
                | TerminatorKind::Unreachable
                | TerminatorKind::SwitchInt { .. }
                | TerminatorKind::Yield { .. }
                | TerminatorKind::FalseEdge { .. }
                | TerminatorKind::FalseUnwind { .. } => { /* nothing to do */ }
                TerminatorKind::Call { unwind, .. }
                | TerminatorKind::InlineAsm { unwind, .. }
                | TerminatorKind::Assert { unwind, .. }
                | TerminatorKind::Drop { unwind, .. } => {
                    if let mir::UnwindAction::Cleanup(unwind) = unwind {
                        debug!(
                            "cleanup_kinds: {:?}/{:?} registering {:?} as funclet",
                            bb, data, unwind
                        );
                        result[unwind] = CleanupKind::Funclet;
                    }
                }
            }
        }
    }

    fn propagate<'tcx>(
        result: &mut IndexSlice<mir::BasicBlock, CleanupKind>,
        mir: &mir::Body<'tcx>,
    ) {
        let mut funclet_succs = IndexVec::from_elem(None, &mir.basic_blocks);

        let mut set_successor = |funclet: mir::BasicBlock, succ| match funclet_succs[funclet] {
            ref mut s @ None => {
                debug!("set_successor: updating successor of {:?} to {:?}", funclet, succ);
                *s = Some(succ);
            }
            Some(s) => {
                if s != succ {
                    span_bug!(
                        mir.span,
                        "funclet {:?} has 2 parents - {:?} and {:?}",
                        funclet,
                        s,
                        succ
                    );
                }
            }
        };

        for (bb, data) in traversal::reverse_postorder(mir) {
            let funclet = match result[bb] {
                CleanupKind::NotCleanup => continue,
                CleanupKind::Funclet => bb,
                CleanupKind::Internal { funclet } => funclet,
            };

            debug!(
                "cleanup_kinds: {:?}/{:?}/{:?} propagating funclet {:?}",
                bb, data, result[bb], funclet
            );

            for succ in data.terminator().successors() {
                let kind = result[succ];
                debug!("cleanup_kinds: propagating {:?} to {:?}/{:?}", funclet, succ, kind);
                match kind {
                    CleanupKind::NotCleanup => {
                        result[succ] = CleanupKind::Internal { funclet };
                    }
                    CleanupKind::Funclet => {
                        if funclet != succ {
                            set_successor(funclet, succ);
                        }
                    }
                    CleanupKind::Internal { funclet: succ_funclet } => {
                        if funclet != succ_funclet {
                            // `succ` has 2 different funclets going into it, so it must
                            // be a funclet by itself.

                            debug!(
                                "promoting {:?} to a funclet and updating {:?}",
                                succ, succ_funclet
                            );
                            result[succ] = CleanupKind::Funclet;
                            set_successor(succ_funclet, succ);
                            set_successor(funclet, succ);
                        }
                    }
                }
            }
        }
    }

    let mut result = IndexVec::from_elem(CleanupKind::NotCleanup, &mir.basic_blocks);

    discover_masters(&mut result, mir);
    propagate(&mut result, mir);
    debug!("cleanup_kinds: result={:?}", result);
    result
}