rustc_mir_transform/elaborate_drops.rs

use std::fmt;

use rustc_abi::{FieldIdx, VariantIdx};
use rustc_index::IndexVec;
use rustc_index::bit_set::DenseBitSet;
use rustc_middle::mir::*;
use rustc_middle::ty::{self, TyCtxt};
use rustc_mir_dataflow::impls::{MaybeInitializedPlaces, MaybeUninitializedPlaces};
use rustc_mir_dataflow::move_paths::{LookupResult, MoveData, MovePathIndex};
use rustc_mir_dataflow::{
    Analysis, DropFlagState, MoveDataTypingEnv, ResultsCursor, on_all_children_bits,
    on_lookup_result_bits,
};
use rustc_span::Span;
use tracing::{debug, instrument};

use crate::deref_separator::deref_finder;
use crate::elaborate_drop::{DropElaborator, DropFlagMode, DropStyle, Unwind, elaborate_drop};
use crate::patch::MirPatch;

/// During MIR building, Drop terminators are inserted in every place where a drop may occur.
/// However, in this phase, the presence of these terminators does not guarantee that a destructor
/// will run, as the target of the drop may be uninitialized.
/// In general, the compiler cannot determine at compile time whether a destructor will run or not.
///
/// At a high level, this pass refines Drop to only run the destructor if the
/// target is initialized. The way this is achieved is by inserting drop flags for every variable
/// that may be dropped, and then using those flags to determine whether a destructor should run.
/// Once this is complete, Drop terminators in the MIR correspond to a call to the "drop glue" or
/// "drop shim" for the type of the dropped place.
///
/// This pass relies on dropped places having an associated move path, which is then used to
/// determine the initialization status of the place and its descendants.
/// It's worth noting that a MIR containing a Drop without an associated move path is probably
/// ill-formed, as it would allow running a destructor on a place behind a reference:
///
/// ```text
/// fn drop_term<T>(t: &mut T) {
///     mir! {
///         {
///             Drop(*t, exit)
///         }
///         exit = {
///             Return()
///         }
///     }
/// }
/// ```
pub(super) struct ElaborateDrops;

impl<'tcx> crate::MirPass<'tcx> for ElaborateDrops {
    #[instrument(level = "trace", skip(self, tcx, body))]
    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
        debug!("elaborate_drops({:?} @ {:?})", body.source, body.span);
        // FIXME(#132279): This is used during the phase transition from analysis
        // to runtime, so we have to manually specify the correct typing mode.
        let typing_env = ty::TypingEnv::post_analysis(tcx, body.source.def_id());
        // For types that do not need dropping, the behaviour is trivial, so we only need to track
        // init/uninit for types that do need dropping.
        let move_data = MoveData::gather_moves(body, tcx, |ty| ty.needs_drop(tcx, typing_env));
        let elaborate_patch = {
            let env = MoveDataTypingEnv { move_data, typing_env };

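            // Both analyses are needed: a path that is maybe-initialized *and*
            // maybe-uninitialized at a drop is only conditionally initialized,
            // and those are exactly the paths that get a runtime drop flag.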
            let mut inits = MaybeInitializedPlaces::new(tcx, body, &env.move_data)
                .skipping_unreachable_unwind()
                .iterate_to_fixpoint(tcx, body, Some("elaborate_drops"))
                .into_results_cursor(body);
            let dead_unwinds = compute_dead_unwinds(body, &mut inits);

            let uninits = MaybeUninitializedPlaces::new(tcx, body, &env.move_data)
                .mark_inactive_variants_as_uninit()
                .skipping_unreachable_unwind(dead_unwinds)
                .iterate_to_fixpoint(tcx, body, Some("elaborate_drops"))
                .into_results_cursor(body);

            let drop_flags = IndexVec::from_elem(None, &env.move_data.move_paths);
            ElaborateDropsCtxt {
                tcx,
                body,
                env: &env,
                init_data: InitializationData { inits, uninits },
                drop_flags,
                patch: MirPatch::new(body),
            }
            .elaborate()
        };
        elaborate_patch.apply(body);
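        // Elaboration may build places whose derefs are no longer in the first
        // projection position (e.g. when dropping a box's contents), so re-run
        // the deref separator on the patched body.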
        deref_finder(tcx, body);
    }

    fn is_required(&self) -> bool {
        true
    }
}

/// Records unwind edges which are known to be unreachable, because they are in `drop` terminators
/// that can't drop anything.
#[instrument(level = "trace", skip(body, flow_inits), ret)]
fn compute_dead_unwinds<'a, 'tcx>(
    body: &'a Body<'tcx>,
    flow_inits: &mut ResultsCursor<'a, 'tcx, MaybeInitializedPlaces<'a, 'tcx>>,
) -> DenseBitSet<BasicBlock> {
    // We only need to do this pass once, because unwind edges can only
    // reach cleanup blocks, which can't have unwind edges themselves.
    let mut dead_unwinds = DenseBitSet::new_empty(body.basic_blocks.len());
    for (bb, bb_data) in body.basic_blocks.iter_enumerated() {
        let TerminatorKind::Drop { place, unwind: UnwindAction::Cleanup(_), .. } =
            bb_data.terminator().kind
        else {
            continue;
        };

        flow_inits.seek_before_primary_effect(body.terminator_loc(bb));
        if flow_inits.analysis().is_unwind_dead(place, flow_inits.get()) {
            dead_unwinds.insert(bb);
        }
    }

    dead_unwinds
}

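/// Paired cursors into the maybe-initialized and maybe-uninitialized analyses,
/// kept at the same location so both bits can be read for a given move path.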
struct InitializationData<'a, 'tcx> {
    inits: ResultsCursor<'a, 'tcx, MaybeInitializedPlaces<'a, 'tcx>>,
    uninits: ResultsCursor<'a, 'tcx, MaybeUninitializedPlaces<'a, 'tcx>>,
}

impl InitializationData<'_, '_> {
    fn seek_before(&mut self, loc: Location) {
        self.inits.seek_before_primary_effect(loc);
        self.uninits.seek_before_primary_effect(loc);
    }

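    /// Returns `(maybe_init, maybe_uninit)` for `path` at the location most
    /// recently passed to [`Self::seek_before`].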
    fn maybe_init_uninit(&self, path: MovePathIndex) -> (bool, bool) {
        (self.inits.get().contains(path), self.uninits.get().contains(path))
    }
}

impl<'a, 'tcx> DropElaborator<'a, 'tcx> for ElaborateDropsCtxt<'a, 'tcx> {
    type Path = MovePathIndex;

    fn patch(&mut self) -> &mut MirPatch<'tcx> {
        &mut self.patch
    }

    fn body(&self) -> &'a Body<'tcx> {
        self.body
    }

    fn tcx(&self) -> TyCtxt<'tcx> {
        self.tcx
    }

    fn typing_env(&self) -> ty::TypingEnv<'tcx> {
        self.env.typing_env
    }

    #[instrument(level = "debug", skip(self), ret)]
    fn drop_style(&self, path: Self::Path, mode: DropFlagMode) -> DropStyle {
        let ((maybe_init, maybe_uninit), multipart) = match mode {
            DropFlagMode::Shallow => (self.init_data.maybe_init_uninit(path), false),
            DropFlagMode::Deep => {
                let mut some_maybe_init = false;
                let mut some_maybe_uninit = false;
                let mut children_count = 0;
                on_all_children_bits(self.move_data(), path, |child| {
                    let (maybe_init, maybe_uninit) = self.init_data.maybe_init_uninit(child);
                    debug!("elaborate_drop: state({:?}) = {:?}", child, (maybe_init, maybe_uninit));
                    some_maybe_init |= maybe_init;
                    some_maybe_uninit |= maybe_uninit;
                    children_count += 1;
                });
                ((some_maybe_init, some_maybe_uninit), children_count != 1)
            }
        };
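        // Dead: the place is definitely uninitialized, so the drop can be removed.
        // Static: definitely initialized, so the drop runs unconditionally.
        // Conditional: conditionally initialized as a whole, guarded by one flag.
        // Open: parts may be initialized independently, so the drop is opened up
        // into per-field/per-variant drops with their own flags.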
        match (maybe_init, maybe_uninit, multipart) {
            (false, _, _) => DropStyle::Dead,
            (true, false, _) => DropStyle::Static,
            (true, true, false) => DropStyle::Conditional,
            (true, true, true) => DropStyle::Open,
        }
    }

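    /// Marks `path` (shallow) or `path` and all of its children (deep) as
    /// uninitialized at `loc` by clearing the corresponding drop flags.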
    fn clear_drop_flag(&mut self, loc: Location, path: Self::Path, mode: DropFlagMode) {
        match mode {
            DropFlagMode::Shallow => {
                self.set_drop_flag(loc, path, DropFlagState::Absent);
            }
            DropFlagMode::Deep => {
                on_all_children_bits(self.move_data(), path, |child| {
                    self.set_drop_flag(loc, child, DropFlagState::Absent)
                });
            }
        }
    }

    fn field_subpath(&self, path: Self::Path, field: FieldIdx) -> Option<Self::Path> {
        rustc_mir_dataflow::move_path_children_matching(self.move_data(), path, |e| match e {
            ProjectionElem::Field(idx, _) => idx == field,
            _ => false,
        })
    }

    fn array_subpath(&self, path: Self::Path, index: u64, size: u64) -> Option<Self::Path> {
        rustc_mir_dataflow::move_path_children_matching(self.move_data(), path, |e| match e {
            ProjectionElem::ConstantIndex { offset, min_length, from_end } => {
                debug_assert!(size == min_length, "min_length should be exact for arrays");
                assert!(!from_end, "from_end should not be used for array element ConstantIndex");
                offset == index
            }
            _ => false,
        })
    }

    fn deref_subpath(&self, path: Self::Path) -> Option<Self::Path> {
        rustc_mir_dataflow::move_path_children_matching(self.move_data(), path, |e| {
            e == ProjectionElem::Deref
        })
    }

    fn downcast_subpath(&self, path: Self::Path, variant: VariantIdx) -> Option<Self::Path> {
        rustc_mir_dataflow::move_path_children_matching(self.move_data(), path, |e| match e {
            ProjectionElem::Downcast(_, idx) => idx == variant,
            _ => false,
        })
    }

    fn get_drop_flag(&mut self, path: Self::Path) -> Option<Operand<'tcx>> {
        self.drop_flag(path).map(Operand::Copy)
    }
}

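/// State shared by all drop-elaboration steps: the dataflow cursors, the drop
/// flag assigned (if any) to each move path, and the `MirPatch` that collects
/// every edit before it is applied to the body.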
struct ElaborateDropsCtxt<'a, 'tcx> {
    tcx: TyCtxt<'tcx>,
    body: &'a Body<'tcx>,
    env: &'a MoveDataTypingEnv<'tcx>,
    init_data: InitializationData<'a, 'tcx>,
    drop_flags: IndexVec<MovePathIndex, Option<Local>>,
    patch: MirPatch<'tcx>,
}

impl fmt::Debug for ElaborateDropsCtxt<'_, '_> {
    fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result {
        Ok(())
    }
}

impl<'a, 'tcx> ElaborateDropsCtxt<'a, 'tcx> {
    fn move_data(&self) -> &'a MoveData<'tcx> {
        &self.env.move_data
    }

    fn create_drop_flag(&mut self, index: MovePathIndex, span: Span) {
        let patch = &mut self.patch;
        debug!("create_drop_flag({:?})", self.body.span);
        self.drop_flags[index].get_or_insert_with(|| patch.new_temp(self.tcx.types.bool, span));
    }

    fn drop_flag(&mut self, index: MovePathIndex) -> Option<Place<'tcx>> {
        self.drop_flags[index].map(Place::from)
    }

    /// Creates a patch that elaborates all drops in the input MIR.
    fn elaborate(mut self) -> MirPatch<'tcx> {
        self.collect_drop_flags();

        self.elaborate_drops();

        self.drop_flags_on_init();
        self.drop_flags_for_fn_rets();
        self.drop_flags_for_args();
        self.drop_flags_for_locs();

        self.patch
    }

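    /// Walks every `Drop` terminator and allocates a boolean drop flag for each
    /// move path that is conditionally initialized (maybe-init *and*
    /// maybe-uninit) at that point.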
    fn collect_drop_flags(&mut self) {
        for (bb, data) in self.body.basic_blocks.iter_enumerated() {
            let terminator = data.terminator();
            let TerminatorKind::Drop { ref place, .. } = terminator.kind else { continue };

            let path = self.move_data().rev_lookup.find(place.as_ref());
            debug!("collect_drop_flags: {:?}, place {:?} ({:?})", bb, place, path);

            match path {
                LookupResult::Exact(path) => {
                    self.init_data.seek_before(self.body.terminator_loc(bb));
                    on_all_children_bits(self.move_data(), path, |child| {
                        let (maybe_init, maybe_uninit) = self.init_data.maybe_init_uninit(child);
                        debug!(
                            "collect_drop_flags: collecting {:?} from {:?}@{:?} - {:?}",
                            child,
                            place,
                            path,
                            (maybe_init, maybe_uninit)
                        );
                        if maybe_init && maybe_uninit {
                            self.create_drop_flag(child, terminator.source_info.span)
                        }
                    });
                }
                LookupResult::Parent(None) => {}
                LookupResult::Parent(Some(parent)) => {
                    if self.body.local_decls[place.local].is_deref_temp() {
                        continue;
                    }

                    self.init_data.seek_before(self.body.terminator_loc(bb));
                    let (_maybe_init, maybe_uninit) = self.init_data.maybe_init_uninit(parent);
                    if maybe_uninit {
                        self.tcx.dcx().span_delayed_bug(
                            terminator.source_info.span,
                            format!(
                                "drop of untracked, uninitialized value {bb:?}, place {place:?} ({path:?})"
                            ),
                        );
                    }
                }
            };
        }
    }

    fn elaborate_drops(&mut self) {
        // This function should mirror what `collect_drop_flags` does.
        for (bb, data) in self.body.basic_blocks.iter_enumerated() {
            let terminator = data.terminator();
            let TerminatorKind::Drop { place, target, unwind, replace } = terminator.kind else {
                continue;
            };

            // This place does not need dropping. It does not have an associated move-path, so the
            // match below will conservatively keep an unconditional drop. As that drop is useless,
            // just remove it here and now.
            if !place
                .ty(&self.body.local_decls, self.tcx)
                .ty
                .needs_drop(self.tcx, self.typing_env())
            {
                self.patch.patch_terminator(bb, TerminatorKind::Goto { target });
                continue;
            }

            let path = self.move_data().rev_lookup.find(place.as_ref());
            match path {
                LookupResult::Exact(path) => {
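                    // Translate the terminator's unwind action into the form used
                    // by drop elaboration, materializing the shared resume /
                    // unreachable / terminate blocks through the patch as needed.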
                    let unwind = match unwind {
                        _ if data.is_cleanup => Unwind::InCleanup,
                        UnwindAction::Cleanup(cleanup) => Unwind::To(cleanup),
                        UnwindAction::Continue => Unwind::To(self.patch.resume_block()),
                        UnwindAction::Unreachable => {
                            Unwind::To(self.patch.unreachable_cleanup_block())
                        }
                        UnwindAction::Terminate(reason) => {
                            debug_assert_ne!(
                                reason,
                                UnwindTerminateReason::InCleanup,
                                "we are not in a cleanup block, InCleanup reason should be impossible"
                            );
                            Unwind::To(self.patch.terminate_block(reason))
                        }
                    };
                    self.init_data.seek_before(self.body.terminator_loc(bb));
                    elaborate_drop(self, terminator.source_info, place, path, target, unwind, bb)
                }
                LookupResult::Parent(None) => {}
                LookupResult::Parent(Some(_)) => {
                    if !replace {
                        self.tcx.dcx().span_bug(
                            terminator.source_info.span,
                            format!("drop of untracked value {bb:?}"),
                        );
                    }
                    // A drop and replace behind a pointer/array/whatever.
                    // The borrow checker requires that these locations are initialized before the
                    // assignment, so we just leave an unconditional drop.
                    assert!(!data.is_cleanup);
                }
            }
        }
    }

    fn constant_bool(&self, span: Span, val: bool) -> Rvalue<'tcx> {
        Rvalue::Use(Operand::Constant(Box::new(ConstOperand {
            span,
            user_ty: None,
            const_: Const::from_bool(self.tcx, val),
        })))
    }

    fn set_drop_flag(&mut self, loc: Location, path: MovePathIndex, val: DropFlagState) {
        if let Some(flag) = self.drop_flags[path] {
            let span = self.patch.source_info_for_location(self.body, loc).span;
            let val = self.constant_bool(span, val.value());
            self.patch.add_assign(loc, Place::from(flag), val);
        }
    }

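    /// At function entry every drop flag starts out as "uninitialized" (false);
    /// `drop_flags_for_args` then flips the flags for places that are already
    /// initialized on entry, i.e. the arguments.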
    fn drop_flags_on_init(&mut self) {
        let loc = Location::START;
        let span = self.patch.source_info_for_location(self.body, loc).span;
        let false_ = self.constant_bool(span, false);
        for flag in self.drop_flags.iter().flatten() {
            self.patch.add_assign(loc, Place::from(*flag), false_.clone());
        }
    }

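    /// A call whose unwind edge goes to a cleanup block only initializes its
    /// destination on the return edge, so the drop flag is set at the start of
    /// the target block rather than before the call terminator.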
    fn drop_flags_for_fn_rets(&mut self) {
        for (bb, data) in self.body.basic_blocks.iter_enumerated() {
            if let TerminatorKind::Call {
                destination,
                target: Some(tgt),
                unwind: UnwindAction::Cleanup(_),
                ..
            } = data.terminator().kind
            {
                assert!(!self.patch.is_patched(bb));

                let loc = Location { block: tgt, statement_index: 0 };
                let path = self.move_data().rev_lookup.find(destination.as_ref());
                on_lookup_result_bits(self.move_data(), path, |child| {
                    self.set_drop_flag(loc, child, DropFlagState::Present)
                });
            }
        }
    }

    fn drop_flags_for_args(&mut self) {
        let loc = Location::START;
        rustc_mir_dataflow::drop_flag_effects_for_function_entry(
            self.body,
            &self.env.move_data,
            |path, ds| {
                self.set_drop_flag(loc, path, ds);
            },
        )
    }

    fn drop_flags_for_locs(&mut self) {
        // We intentionally iterate only over the *old* basic blocks.
        //
        // Basic blocks created by drop elaboration update their
        // drop flags by themselves, to avoid the drop flags being
        // clobbered before they are read.

        for (bb, data) in self.body.basic_blocks.iter_enumerated() {
            debug!("drop_flags_for_locs({:?})", data);
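            // Statement indices 0..len are the statements; index len is the
            // terminator of the block.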
            for i in 0..(data.statements.len() + 1) {
                debug!("drop_flags_for_locs: stmt {}", i);
                if i == data.statements.len() {
                    match data.terminator().kind {
                        TerminatorKind::Drop { .. } => {
                            // drop elaboration should handle that by itself
                            continue;
                        }
                        TerminatorKind::UnwindResume => {
                            // It is possible for `Resume` to be patched
                            // (in particular it can be patched to be replaced with
                            // a Goto; see `MirPatch::new`).
                        }
                        _ => {
                            assert!(!self.patch.is_patched(bb));
                        }
                    }
                }
                let loc = Location { block: bb, statement_index: i };
                rustc_mir_dataflow::drop_flag_effects_for_location(
                    self.body,
                    &self.env.move_data,
                    loc,
                    |path, ds| self.set_drop_flag(loc, path, ds),
                )
            }

            // There may be a critical edge after this call,
            // so mark the return as initialized *before* the
            // call.
            if let TerminatorKind::Call {
                destination,
                target: Some(_),
                unwind:
                    UnwindAction::Continue | UnwindAction::Unreachable | UnwindAction::Terminate(_),
                ..
            } = data.terminator().kind
            {
                assert!(!self.patch.is_patched(bb));

                let loc = Location { block: bb, statement_index: data.statements.len() };
                let path = self.move_data().rev_lookup.find(destination.as_ref());
                on_lookup_result_bits(self.move_data(), path, |child| {
                    self.set_drop_flag(loc, child, DropFlagState::Present)
                });
            }
        }
    }
}