rustc_mir_transform/elaborate_drops.rs

use std::fmt;

use rustc_abi::{FieldIdx, VariantIdx};
use rustc_index::IndexVec;
use rustc_index::bit_set::DenseBitSet;
use rustc_middle::mir::*;
use rustc_middle::ty::{self, TyCtxt};
use rustc_mir_dataflow::impls::{MaybeInitializedPlaces, MaybeUninitializedPlaces};
use rustc_mir_dataflow::move_paths::{LookupResult, MoveData, MovePathIndex};
use rustc_mir_dataflow::{
    Analysis, DropFlagState, MoveDataTypingEnv, ResultsCursor, on_all_children_bits,
    on_lookup_result_bits,
};
use rustc_span::Span;
use tracing::{debug, instrument};

use crate::deref_separator::deref_finder;
use crate::elaborate_drop::{DropElaborator, DropFlagMode, DropStyle, Unwind, elaborate_drop};
use crate::patch::MirPatch;

/// During MIR building, Drop terminators are inserted in every place where a drop may occur.
/// However, in this phase, the presence of these terminators does not guarantee that a destructor
/// will run, as the target of the drop may be uninitialized.
/// In general, the compiler cannot determine at compile time whether a destructor will run or not.
///
/// At a high level, this pass refines Drop to only run the destructor if the
/// target is initialized. The way this is achieved is by inserting drop flags for every variable
/// that may be dropped, and then using those flags to determine whether a destructor should run.
/// Once this is complete, Drop terminators in the MIR correspond to a call to the "drop glue" or
/// "drop shim" for the type of the dropped place.
///
/// This pass relies on dropped places having an associated move path, which is then used to
/// determine the initialization status of the place and its descendants.
/// It's worth noting that a MIR containing a Drop without an associated move path is probably ill
/// formed, as it would allow running a destructor on a place behind a reference:
///
/// ```text
/// fn drop_term<T>(t: &mut T) {
///     mir! {
///         {
///             Drop(*t, exit)
///         }
///         exit = {
///             Return()
///         }
///     }
/// }
/// ```
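///
/// For illustration, this is roughly the effect of elaborating a drop of a place that is only
/// maybe-initialized (a sketch, not the exact MIR the pass emits): the drop flag starts out
/// false, is set wherever the place becomes initialized, and guards the elaborated drop.
///
/// ```text
/// // before elaboration        // after elaboration (sketch)
/// ...                          _flag = const false          // on function entry
/// x = <init>                   x = <init>; _flag = const true
/// ...                          ...
/// Drop(x, target)              switchInt(_flag) [false: target, otherwise: drop x -> target]
/// ```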
pub(super) struct ElaborateDrops;

impl<'tcx> crate::MirPass<'tcx> for ElaborateDrops {
    #[instrument(level = "trace", skip(self, tcx, body))]
    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
        debug!("elaborate_drops({:?} @ {:?})", body.source, body.span);
        // FIXME(#132279): This is used during the phase transition from analysis
        // to runtime, so we have to manually specify the correct typing mode.
        let typing_env = ty::TypingEnv::post_analysis(tcx, body.source.def_id());
        // For types that do not need dropping, the behaviour is trivial. So we only need to track
        // init/uninit for types that do need dropping.
        let move_data = MoveData::gather_moves(body, tcx, |ty| ty.needs_drop(tcx, typing_env));
        let elaborate_patch = {
            let env = MoveDataTypingEnv { move_data, typing_env };

            let mut inits = MaybeInitializedPlaces::new(tcx, body, &env.move_data)
                .skipping_unreachable_unwind()
                .iterate_to_fixpoint(tcx, body, Some("elaborate_drops"))
                .into_results_cursor(body);
            let dead_unwinds = compute_dead_unwinds(body, &mut inits);

            let uninits = MaybeUninitializedPlaces::new(tcx, body, &env.move_data)
                .mark_inactive_variants_as_uninit()
                .skipping_unreachable_unwind(dead_unwinds)
                .iterate_to_fixpoint(tcx, body, Some("elaborate_drops"))
                .into_results_cursor(body);

            let drop_flags = IndexVec::from_elem(None, &env.move_data.move_paths);
            ElaborateDropsCtxt {
                tcx,
                body,
                env: &env,
                init_data: InitializationData { inits, uninits },
                drop_flags,
                patch: MirPatch::new(body),
            }
            .elaborate()
        };
        elaborate_patch.apply(body);
        deref_finder(tcx, body);
    }

    fn is_required(&self) -> bool {
        true
    }
}

/// Records unwind edges which are known to be unreachable, because they are in `drop` terminators
/// that can't drop anything.
#[instrument(level = "trace", skip(body, flow_inits), ret)]
fn compute_dead_unwinds<'a, 'tcx>(
    body: &'a Body<'tcx>,
    flow_inits: &mut ResultsCursor<'a, 'tcx, MaybeInitializedPlaces<'a, 'tcx>>,
) -> DenseBitSet<BasicBlock> {
    // We only need to do this pass once, because unwind edges can only
    // reach cleanup blocks, which can't have unwind edges themselves.
    let mut dead_unwinds = DenseBitSet::new_empty(body.basic_blocks.len());
    for (bb, bb_data) in body.basic_blocks.iter_enumerated() {
        let TerminatorKind::Drop { place, unwind: UnwindAction::Cleanup(_), .. } =
            bb_data.terminator().kind
        else {
            continue;
        };

        flow_inits.seek_before_primary_effect(body.terminator_loc(bb));
        if flow_inits.analysis().is_unwind_dead(place, flow_inits.get()) {
            dead_unwinds.insert(bb);
        }
    }

    dead_unwinds
}

struct InitializationData<'a, 'tcx> {
    inits: ResultsCursor<'a, 'tcx, MaybeInitializedPlaces<'a, 'tcx>>,
    uninits: ResultsCursor<'a, 'tcx, MaybeUninitializedPlaces<'a, 'tcx>>,
}

impl InitializationData<'_, '_> {
    fn seek_before(&mut self, loc: Location) {
        self.inits.seek_before_primary_effect(loc);
        self.uninits.seek_before_primary_effect(loc);
    }

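    /// Returns whether `path` is maybe-initialized and maybe-uninitialized at the location
    /// passed to the last `seek_before` call.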
    fn maybe_init_uninit(&self, path: MovePathIndex) -> (bool, bool) {
        (self.inits.get().contains(path), self.uninits.get().contains(path))
    }
}

impl<'a, 'tcx> DropElaborator<'a, 'tcx> for ElaborateDropsCtxt<'a, 'tcx> {
    type Path = MovePathIndex;

    fn patch_ref(&self) -> &MirPatch<'tcx> {
        &self.patch
    }

    fn patch(&mut self) -> &mut MirPatch<'tcx> {
        &mut self.patch
    }

    fn body(&self) -> &'a Body<'tcx> {
        self.body
    }

    fn tcx(&self) -> TyCtxt<'tcx> {
        self.tcx
    }

    fn typing_env(&self) -> ty::TypingEnv<'tcx> {
        self.env.typing_env
    }

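    /// Determines how the drop of `path` should be lowered, based on the maybe-init/maybe-uninit
    /// state at the location the dataflow cursors are currently positioned at.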
    #[instrument(level = "debug", skip(self), ret)]
    fn drop_style(&self, path: Self::Path, mode: DropFlagMode) -> DropStyle {
        let ((maybe_init, maybe_uninit), multipart) = match mode {
            DropFlagMode::Shallow => (self.init_data.maybe_init_uninit(path), false),
            DropFlagMode::Deep => {
                let mut some_maybe_init = false;
                let mut some_maybe_uninit = false;
                let mut children_count = 0;
                on_all_children_bits(self.move_data(), path, |child| {
                    let (maybe_init, maybe_uninit) = self.init_data.maybe_init_uninit(child);
                    debug!("elaborate_drop: state({:?}) = {:?}", child, (maybe_init, maybe_uninit));
                    some_maybe_init |= maybe_init;
                    some_maybe_uninit |= maybe_uninit;
                    children_count += 1;
                });
                ((some_maybe_init, some_maybe_uninit), children_count != 1)
            }
        };
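        // Map the dataflow state onto a drop style: a place that cannot be initialized here needs
        // no drop at all; a place that is definitely initialized can be dropped unconditionally;
        // a place that is only maybe-initialized must consult its drop flag, either as a single
        // conditional drop (when only the place itself is tracked) or as an open-coded drop that
        // lets each tracked subpath consult its own flag.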
        match (maybe_init, maybe_uninit, multipart) {
            (false, _, _) => DropStyle::Dead,
            (true, false, _) => DropStyle::Static,
            (true, true, false) => DropStyle::Conditional,
            (true, true, true) => DropStyle::Open,
        }
    }

    fn clear_drop_flag(&mut self, loc: Location, path: Self::Path, mode: DropFlagMode) {
        match mode {
            DropFlagMode::Shallow => {
                self.set_drop_flag(loc, path, DropFlagState::Absent);
            }
            DropFlagMode::Deep => {
                on_all_children_bits(self.move_data(), path, |child| {
                    self.set_drop_flag(loc, child, DropFlagState::Absent)
                });
            }
        }
    }

    fn field_subpath(&self, path: Self::Path, field: FieldIdx) -> Option<Self::Path> {
        rustc_mir_dataflow::move_path_children_matching(self.move_data(), path, |e| match e {
            ProjectionElem::Field(idx, _) => idx == field,
            _ => false,
        })
    }

    fn array_subpath(&self, path: Self::Path, index: u64, size: u64) -> Option<Self::Path> {
        rustc_mir_dataflow::move_path_children_matching(self.move_data(), path, |e| match e {
            ProjectionElem::ConstantIndex { offset, min_length, from_end } => {
                debug_assert!(size == min_length, "min_length should be exact for arrays");
                assert!(!from_end, "from_end should not be used for array element ConstantIndex");
                offset == index
            }
            _ => false,
        })
    }

    fn deref_subpath(&self, path: Self::Path) -> Option<Self::Path> {
        rustc_mir_dataflow::move_path_children_matching(self.move_data(), path, |e| {
            e == ProjectionElem::Deref
        })
    }

    fn downcast_subpath(&self, path: Self::Path, variant: VariantIdx) -> Option<Self::Path> {
        rustc_mir_dataflow::move_path_children_matching(self.move_data(), path, |e| match e {
            ProjectionElem::Downcast(_, idx) => idx == variant,
            _ => false,
        })
    }

    fn get_drop_flag(&mut self, path: Self::Path) -> Option<Operand<'tcx>> {
        self.drop_flag(path).map(Operand::Copy)
    }
}

struct ElaborateDropsCtxt<'a, 'tcx> {
    tcx: TyCtxt<'tcx>,
    body: &'a Body<'tcx>,
    env: &'a MoveDataTypingEnv<'tcx>,
    init_data: InitializationData<'a, 'tcx>,
    drop_flags: IndexVec<MovePathIndex, Option<Local>>,
    patch: MirPatch<'tcx>,
}

impl fmt::Debug for ElaborateDropsCtxt<'_, '_> {
    fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result {
        Ok(())
    }
}

impl<'a, 'tcx> ElaborateDropsCtxt<'a, 'tcx> {
    fn move_data(&self) -> &'a MoveData<'tcx> {
        &self.env.move_data
    }

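    /// Allocates a fresh boolean temporary to serve as the drop flag for `index`, unless one has
    /// already been created for it.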
    fn create_drop_flag(&mut self, index: MovePathIndex, span: Span) {
        let patch = &mut self.patch;
        debug!("create_drop_flag({:?})", self.body.span);
        self.drop_flags[index].get_or_insert_with(|| patch.new_temp(self.tcx.types.bool, span));
    }

    fn drop_flag(&mut self, index: MovePathIndex) -> Option<Place<'tcx>> {
        self.drop_flags[index].map(Place::from)
    }

    /// Creates a patch that elaborates all drops in the input MIR.
    fn elaborate(mut self) -> MirPatch<'tcx> {
        self.collect_drop_flags();

        self.elaborate_drops();

        self.drop_flags_on_init();
        self.drop_flags_for_fn_rets();
        self.drop_flags_for_args();
        self.drop_flags_for_locs();

        self.patch
    }

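    /// Creates a drop flag for every tracked child of a dropped place that is both
    /// maybe-initialized and maybe-uninitialized at its `Drop` terminator.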
    fn collect_drop_flags(&mut self) {
        for (bb, data) in self.body.basic_blocks.iter_enumerated() {
            let terminator = data.terminator();
            let TerminatorKind::Drop { ref place, .. } = terminator.kind else { continue };

            let path = self.move_data().rev_lookup.find(place.as_ref());
            debug!("collect_drop_flags: {:?}, place {:?} ({:?})", bb, place, path);

            match path {
                LookupResult::Exact(path) => {
                    self.init_data.seek_before(self.body.terminator_loc(bb));
                    on_all_children_bits(self.move_data(), path, |child| {
                        let (maybe_init, maybe_uninit) = self.init_data.maybe_init_uninit(child);
                        debug!(
                            "collect_drop_flags: collecting {:?} from {:?}@{:?} - {:?}",
                            child,
                            place,
                            path,
                            (maybe_init, maybe_uninit)
                        );
                        if maybe_init && maybe_uninit {
                            self.create_drop_flag(child, terminator.source_info.span)
                        }
                    });
                }
                LookupResult::Parent(None) => {}
                LookupResult::Parent(Some(parent)) => {
                    if self.body.local_decls[place.local].is_deref_temp() {
                        continue;
                    }

                    self.init_data.seek_before(self.body.terminator_loc(bb));
                    let (_maybe_init, maybe_uninit) = self.init_data.maybe_init_uninit(parent);
                    if maybe_uninit {
                        self.tcx.dcx().span_delayed_bug(
                            terminator.source_info.span,
                            format!(
                                "drop of untracked, uninitialized value {bb:?}, place {place:?} ({path:?})"
                            ),
                        );
                    }
                }
            };
        }
    }

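    /// Rewrites every `Drop` terminator: drops of places whose type does not need dropping become
    /// plain gotos, and the remaining drops are lowered by `elaborate_drop` according to their
    /// `DropStyle`.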
    fn elaborate_drops(&mut self) {
        // This function should mirror what `collect_drop_flags` does.
        for (bb, data) in self.body.basic_blocks.iter_enumerated() {
            let terminator = data.terminator();
            let TerminatorKind::Drop { place, target, unwind, replace } = terminator.kind else {
                continue;
            };

            // This place does not need dropping. It does not have an associated move-path, so the
            // match below will conservatively keep an unconditional drop. As that drop is useless,
            // just remove it here and now.
            if !place
                .ty(&self.body.local_decls, self.tcx)
                .ty
                .needs_drop(self.tcx, self.typing_env())
            {
                self.patch.patch_terminator(bb, TerminatorKind::Goto { target });
                continue;
            }

            let path = self.move_data().rev_lookup.find(place.as_ref());
            match path {
                LookupResult::Exact(path) => {
                    let unwind = match unwind {
                        _ if data.is_cleanup => Unwind::InCleanup,
                        UnwindAction::Cleanup(cleanup) => Unwind::To(cleanup),
                        UnwindAction::Continue => Unwind::To(self.patch.resume_block()),
                        UnwindAction::Unreachable => {
                            Unwind::To(self.patch.unreachable_cleanup_block())
                        }
                        UnwindAction::Terminate(reason) => {
                            debug_assert_ne!(
                                reason,
                                UnwindTerminateReason::InCleanup,
                                "we are not in a cleanup block, InCleanup reason should be impossible"
                            );
                            Unwind::To(self.patch.terminate_block(reason))
                        }
                    };
                    self.init_data.seek_before(self.body.terminator_loc(bb));
                    elaborate_drop(self, terminator.source_info, place, path, target, unwind, bb)
                }
                LookupResult::Parent(None) => {}
                LookupResult::Parent(Some(_)) => {
                    if !replace {
                        self.tcx.dcx().span_bug(
                            terminator.source_info.span,
                            format!("drop of untracked value {bb:?}"),
                        );
                    }
                    // A drop and replace behind a pointer/array/whatever.
                    // The borrow checker requires that these locations are initialized before the
                    // assignment, so we just leave an unconditional drop.
                    assert!(!data.is_cleanup);
                }
            }
        }
    }

    fn constant_bool(&self, span: Span, val: bool) -> Rvalue<'tcx> {
        Rvalue::Use(Operand::Constant(Box::new(ConstOperand {
            span,
            user_ty: None,
            const_: Const::from_bool(self.tcx, val),
        })))
    }

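    /// If `path` has a drop flag, emits an assignment at `loc` setting that flag to `val`.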
    fn set_drop_flag(&mut self, loc: Location, path: MovePathIndex, val: DropFlagState) {
        if let Some(flag) = self.drop_flags[path] {
            let span = self.patch.source_info_for_location(self.body, loc).span;
            let val = self.constant_bool(span, val.value());
            self.patch.add_assign(loc, Place::from(flag), val);
        }
    }

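    /// Initializes every drop flag to `false` at the start of the function.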
    fn drop_flags_on_init(&mut self) {
        let loc = Location::START;
        let span = self.patch.source_info_for_location(self.body, loc).span;
        let false_ = self.constant_bool(span, false);
        for flag in self.drop_flags.iter().flatten() {
            self.patch.add_assign(loc, Place::from(*flag), false_.clone());
        }
    }

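    /// For calls that have a cleanup edge, marks the call's destination as initialized at the
    /// start of the return block. Calls without a cleanup edge are handled in
    /// `drop_flags_for_locs`.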
    fn drop_flags_for_fn_rets(&mut self) {
        for (bb, data) in self.body.basic_blocks.iter_enumerated() {
            if let TerminatorKind::Call {
                destination,
                target: Some(tgt),
                unwind: UnwindAction::Cleanup(_),
                ..
            } = data.terminator().kind
            {
                assert!(!self.patch.is_term_patched(bb));

                let loc = Location { block: tgt, statement_index: 0 };
                let path = self.move_data().rev_lookup.find(destination.as_ref());
                on_lookup_result_bits(self.move_data(), path, |child| {
                    self.set_drop_flag(loc, child, DropFlagState::Present)
                });
            }
        }
    }

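    /// Marks the function's arguments as initialized at the start of the body.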
    fn drop_flags_for_args(&mut self) {
        let loc = Location::START;
        rustc_mir_dataflow::drop_flag_effects_for_function_entry(
            self.body,
            &self.env.move_data,
            |path, ds| {
                self.set_drop_flag(loc, path, ds);
            },
        )
    }

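    /// Emits drop-flag updates for every statement and terminator in the original basic blocks,
    /// reflecting how each location changes the initialization state of tracked move paths.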
    fn drop_flags_for_locs(&mut self) {
        // We intentionally iterate only over the *old* basic blocks.
        //
        // Basic blocks created by drop elaboration update their
        // drop flags by themselves, to avoid the drop flags being
        // clobbered before they are read.

        for (bb, data) in self.body.basic_blocks.iter_enumerated() {
            debug!("drop_flags_for_locs({:?})", data);
            for i in 0..(data.statements.len() + 1) {
                debug!("drop_flag_for_locs: stmt {}", i);
                if i == data.statements.len() {
                    match data.terminator().kind {
                        TerminatorKind::Drop { .. } => {
                            // drop elaboration should handle that by itself
                            continue;
                        }
                        TerminatorKind::UnwindResume => {
                            // It is possible for `Resume` to be patched
                            // (in particular it can be patched to be replaced with
                            // a Goto; see `MirPatch::new`).
                        }
                        _ => {
                            assert!(!self.patch.is_term_patched(bb));
                        }
                    }
                }
                let loc = Location { block: bb, statement_index: i };
                rustc_mir_dataflow::drop_flag_effects_for_location(
                    self.body,
                    &self.env.move_data,
                    loc,
                    |path, ds| self.set_drop_flag(loc, path, ds),
                )
            }

            // There may be a critical edge after this call,
            // so mark the return as initialized *before* the
            // call.
            if let TerminatorKind::Call {
                destination,
                target: Some(_),
                unwind:
                    UnwindAction::Continue | UnwindAction::Unreachable | UnwindAction::Terminate(_),
                ..
            } = data.terminator().kind
            {
                assert!(!self.patch.is_term_patched(bb));

                let loc = Location { block: bb, statement_index: data.statements.len() };
                let path = self.move_data().rev_lookup.find(destination.as_ref());
                on_lookup_result_bits(self.move_data(), path, |child| {
                    self.set_drop_flag(loc, child, DropFlagState::Present)
                });
            }
        }
    }
}