rustc_mir_transform/
elaborate_drop.rs

1use std::{fmt, iter, mem};
2
3use rustc_abi::{FIRST_VARIANT, FieldIdx, VariantIdx};
4use rustc_hir::lang_items::LangItem;
5use rustc_index::Idx;
6use rustc_middle::mir::*;
7use rustc_middle::span_bug;
8use rustc_middle::ty::adjustment::PointerCoercion;
9use rustc_middle::ty::util::IntTypeExt;
10use rustc_middle::ty::{self, GenericArgsRef, Ty, TyCtxt};
11use rustc_span::DUMMY_SP;
12use rustc_span::source_map::Spanned;
13use tracing::{debug, instrument};
14
15use crate::patch::MirPatch;
16
/// Describes how/if a value should be dropped.
#[derive(Debug)]
pub(crate) enum DropStyle {
    /// The value is already dead at the drop location, no drop will be executed.
    Dead,

    /// The value is known to always be initialized at the drop location, drop will always be
    /// executed.
    Static,

    /// Whether the value needs to be dropped depends on its drop flag, which must be checked
    /// at runtime.
    Conditional,

    /// An "open" drop is one where only the fields of a value are dropped.
    ///
    /// For example, this happens when moving out of a struct field: The rest of the struct will be
    /// dropped in such an "open" drop. It is also used to generate drop glue for the individual
    /// components of a value, for example for dropping array elements.
    Open,
}
37
/// Which drop flags to affect/check with an operation.
///
/// Passed to [`DropElaborator::drop_style`] and [`DropElaborator::clear_drop_flag`].
#[derive(Debug)]
pub(crate) enum DropFlagMode {
    /// Only affect the top-level drop flag, not that of any contained fields.
    Shallow,
    /// Affect all nested drop flags in addition to the top-level one.
    Deep,
}
46
/// Describes if unwinding is necessary and where to unwind to if a panic occurs.
#[derive(Copy, Clone, Debug)]
pub(crate) enum Unwind {
    /// Unwind to this block (translated to `UnwindAction::Cleanup`).
    To(BasicBlock),
    /// Already in an unwind path, any panic will cause an abort.
    InCleanup,
}
55
56impl Unwind {
57    fn is_cleanup(self) -> bool {
58        match self {
59            Unwind::To(..) => false,
60            Unwind::InCleanup => true,
61        }
62    }
63
64    fn into_action(self) -> UnwindAction {
65        match self {
66            Unwind::To(bb) => UnwindAction::Cleanup(bb),
67            Unwind::InCleanup => UnwindAction::Terminate(UnwindTerminateReason::InCleanup),
68        }
69    }
70
71    fn map<F>(self, f: F) -> Self
72    where
73        F: FnOnce(BasicBlock) -> BasicBlock,
74    {
75        match self {
76            Unwind::To(bb) => Unwind::To(f(bb)),
77            Unwind::InCleanup => Unwind::InCleanup,
78        }
79    }
80}
81
pub(crate) trait DropElaborator<'a, 'tcx>: fmt::Debug {
    /// The type representing paths that can be moved out of.
    ///
    /// Users can move out of individual fields of a struct, such as `a.b.c`. This type is used to
    /// represent such move paths. Sometimes tracking individual move paths is not necessary, in
    /// which case this may be set to (for example) `()`.
    type Path: Copy + fmt::Debug;

    // Accessors

    /// The patch that accumulates all MIR changes made during elaboration.
    fn patch(&mut self) -> &mut MirPatch<'tcx>;
    /// The body being elaborated.
    fn body(&self) -> &'a Body<'tcx>;
    fn tcx(&self) -> TyCtxt<'tcx>;
    /// The typing environment; elaboration asserts `TypingMode::PostAnalysis`.
    fn typing_env(&self) -> ty::TypingEnv<'tcx>;

    // Drop logic

    /// Returns how `path` should be dropped, given `mode`.
    fn drop_style(&self, path: Self::Path, mode: DropFlagMode) -> DropStyle;

    /// Returns the drop flag of `path` as a MIR `Operand` (or `None` if `path` has no drop flag).
    fn get_drop_flag(&mut self, path: Self::Path) -> Option<Operand<'tcx>>;

    /// Modifies the MIR patch so that the drop flag of `path` (if any) is cleared at `location`.
    ///
    /// If `mode` is deep, drop flags of all child paths should also be cleared by inserting
    /// additional statements.
    fn clear_drop_flag(&mut self, location: Location, path: Self::Path, mode: DropFlagMode);

    // Subpaths

    /// Returns the subpath of a field of `path` (or `None` if there is no dedicated subpath).
    ///
    /// If this returns `None`, `field` will not get a dedicated drop flag.
    fn field_subpath(&self, path: Self::Path, field: FieldIdx) -> Option<Self::Path>;

    /// Returns the subpath of a dereference of `path` (or `None` if there is no dedicated subpath).
    ///
    /// If this returns `None`, `*path` will not get a dedicated drop flag.
    ///
    /// This is only relevant for `Box<T>`, where the contained `T` can be moved out of the box.
    fn deref_subpath(&self, path: Self::Path) -> Option<Self::Path>;

    /// Returns the subpath of downcasting `path` to one of its variants.
    ///
    /// If this returns `None`, the downcast of `path` will not get a dedicated drop flag.
    fn downcast_subpath(&self, path: Self::Path, variant: VariantIdx) -> Option<Self::Path>;

    /// Returns the subpath of indexing a fixed-size array `path`.
    ///
    /// If this returns `None`, elements of `path` will not get a dedicated drop flag.
    ///
    /// This is only relevant for array patterns, which can move out of individual array elements.
    fn array_subpath(&self, path: Self::Path, index: u64, size: u64) -> Option<Self::Path>;
}
137
/// State for elaborating a single drop of `place`/`path`.
#[derive(Debug)]
struct DropCtxt<'a, 'b, 'tcx, D>
where
    D: DropElaborator<'b, 'tcx>,
{
    /// Supplies drop styles, drop flags, and subpaths, and owns the MIR patch
    /// into which all new blocks and statements are recorded.
    elaborator: &'a mut D,

    /// Source info attached to every statement and terminator created here.
    source_info: SourceInfo,

    /// The place being dropped.
    place: Place<'tcx>,
    /// The move path corresponding to `place`.
    path: D::Path,
    /// Block to continue to once the drop completes normally.
    succ: BasicBlock,
    /// Where to unwind to if the drop panics.
    unwind: Unwind,
}
152
153/// "Elaborates" a drop of `place`/`path` and patches `bb`'s terminator to execute it.
154///
155/// The passed `elaborator` is used to determine what should happen at the drop terminator. It
156/// decides whether the drop can be statically determined or whether it needs a dynamic drop flag,
157/// and whether the drop is "open", ie. should be expanded to drop all subfields of the dropped
158/// value.
159///
160/// When this returns, the MIR patch in the `elaborator` contains the necessary changes.
161pub(crate) fn elaborate_drop<'b, 'tcx, D>(
162    elaborator: &mut D,
163    source_info: SourceInfo,
164    place: Place<'tcx>,
165    path: D::Path,
166    succ: BasicBlock,
167    unwind: Unwind,
168    bb: BasicBlock,
169) where
170    D: DropElaborator<'b, 'tcx>,
171    'tcx: 'b,
172{
173    DropCtxt { elaborator, source_info, place, path, succ, unwind }.elaborate_drop(bb)
174}
175
176impl<'a, 'b, 'tcx, D> DropCtxt<'a, 'b, 'tcx, D>
177where
178    D: DropElaborator<'b, 'tcx>,
179    'tcx: 'b,
180{
    /// Returns the type of `place` as seen in the body being elaborated.
    #[instrument(level = "trace", skip(self), ret)]
    fn place_ty(&self, place: Place<'tcx>) -> Ty<'tcx> {
        place.ty(self.elaborator.body(), self.tcx()).ty
    }
185
    /// Convenience accessor for the type context.
    fn tcx(&self) -> TyCtxt<'tcx> {
        self.elaborator.tcx()
    }
189
190    /// This elaborates a single drop instruction, located at `bb`, and
191    /// patches over it.
192    ///
193    /// The elaborated drop checks the drop flags to only drop what
194    /// is initialized.
195    ///
196    /// In addition, the relevant drop flags also need to be cleared
197    /// to avoid double-drops. However, in the middle of a complex
198    /// drop, one must avoid clearing some of the flags before they
199    /// are read, as that would cause a memory leak.
200    ///
201    /// In particular, when dropping an ADT, multiple fields may be
202    /// joined together under the `rest` subpath. They are all controlled
203    /// by the primary drop flag, but only the last rest-field dropped
204    /// should clear it (and it must also not clear anything else).
205    //
206    // FIXME: I think we should just control the flags externally,
207    // and then we do not need this machinery.
208    #[instrument(level = "debug")]
209    fn elaborate_drop(&mut self, bb: BasicBlock) {
210        match self.elaborator.drop_style(self.path, DropFlagMode::Deep) {
211            DropStyle::Dead => {
212                self.elaborator
213                    .patch()
214                    .patch_terminator(bb, TerminatorKind::Goto { target: self.succ });
215            }
216            DropStyle::Static => {
217                self.elaborator.patch().patch_terminator(
218                    bb,
219                    TerminatorKind::Drop {
220                        place: self.place,
221                        target: self.succ,
222                        unwind: self.unwind.into_action(),
223                        replace: false,
224                    },
225                );
226            }
227            DropStyle::Conditional => {
228                let drop_bb = self.complete_drop(self.succ, self.unwind);
229                self.elaborator
230                    .patch()
231                    .patch_terminator(bb, TerminatorKind::Goto { target: drop_bb });
232            }
233            DropStyle::Open => {
234                let drop_bb = self.open_drop();
235                self.elaborator
236                    .patch()
237                    .patch_terminator(bb, TerminatorKind::Goto { target: drop_bb });
238            }
239        }
240    }
241
242    /// Returns the place and move path for each field of `variant`,
243    /// (the move path is `None` if the field is a rest field).
244    fn move_paths_for_fields(
245        &self,
246        base_place: Place<'tcx>,
247        variant_path: D::Path,
248        variant: &'tcx ty::VariantDef,
249        args: GenericArgsRef<'tcx>,
250    ) -> Vec<(Place<'tcx>, Option<D::Path>)> {
251        variant
252            .fields
253            .iter()
254            .enumerate()
255            .map(|(i, f)| {
256                let field = FieldIdx::new(i);
257                let subpath = self.elaborator.field_subpath(variant_path, field);
258                let tcx = self.tcx();
259
260                assert_eq!(self.elaborator.typing_env().typing_mode, ty::TypingMode::PostAnalysis);
261                let field_ty =
262                    tcx.normalize_erasing_regions(self.elaborator.typing_env(), f.ty(tcx, args));
263
264                (tcx.mk_place_field(base_place, field, field_ty), subpath)
265            })
266            .collect()
267    }
268
269    fn drop_subpath(
270        &mut self,
271        place: Place<'tcx>,
272        path: Option<D::Path>,
273        succ: BasicBlock,
274        unwind: Unwind,
275    ) -> BasicBlock {
276        if let Some(path) = path {
277            debug!("drop_subpath: for std field {:?}", place);
278
279            DropCtxt {
280                elaborator: self.elaborator,
281                source_info: self.source_info,
282                path,
283                place,
284                succ,
285                unwind,
286            }
287            .elaborated_drop_block()
288        } else {
289            debug!("drop_subpath: for rest field {:?}", place);
290
291            DropCtxt {
292                elaborator: self.elaborator,
293                source_info: self.source_info,
294                place,
295                succ,
296                unwind,
297                // Using `self.path` here to condition the drop on
298                // our own drop flag.
299                path: self.path,
300            }
301            .complete_drop(succ, unwind)
302        }
303    }
304
305    /// Creates one-half of the drop ladder for a list of fields, and return
306    /// the list of steps in it in reverse order, with the first step
307    /// dropping 0 fields and so on.
308    ///
309    /// `unwind_ladder` is such a list of steps in reverse order,
310    /// which is called if the matching step of the drop glue panics.
311    fn drop_halfladder(
312        &mut self,
313        unwind_ladder: &[Unwind],
314        mut succ: BasicBlock,
315        fields: &[(Place<'tcx>, Option<D::Path>)],
316    ) -> Vec<BasicBlock> {
317        iter::once(succ)
318            .chain(fields.iter().rev().zip(unwind_ladder).map(|(&(place, path), &unwind_succ)| {
319                succ = self.drop_subpath(place, path, succ, unwind_succ);
320                succ
321            }))
322            .collect()
323    }
324
325    fn drop_ladder_bottom(&mut self) -> (BasicBlock, Unwind) {
326        // Clear the "master" drop flag at the end. This is needed
327        // because the "master" drop protects the ADT's discriminant,
328        // which is invalidated after the ADT is dropped.
329        (self.drop_flag_reset_block(DropFlagMode::Shallow, self.succ, self.unwind), self.unwind)
330    }
331
332    /// Creates a full drop ladder, consisting of 2 connected half-drop-ladders
333    ///
334    /// For example, with 3 fields, the drop ladder is
335    ///
336    /// .d0:
337    ///     ELAB(drop location.0 [target=.d1, unwind=.c1])
338    /// .d1:
339    ///     ELAB(drop location.1 [target=.d2, unwind=.c2])
340    /// .d2:
341    ///     ELAB(drop location.2 [target=`self.succ`, unwind=`self.unwind`])
342    /// .c1:
343    ///     ELAB(drop location.1 [target=.c2])
344    /// .c2:
345    ///     ELAB(drop location.2 [target=`self.unwind`])
346    ///
347    /// NOTE: this does not clear the master drop flag, so you need
348    /// to point succ/unwind on a `drop_ladder_bottom`.
349    fn drop_ladder(
350        &mut self,
351        fields: Vec<(Place<'tcx>, Option<D::Path>)>,
352        succ: BasicBlock,
353        unwind: Unwind,
354    ) -> (BasicBlock, Unwind) {
355        debug!("drop_ladder({:?}, {:?})", self, fields);
356
357        let mut fields = fields;
358        fields.retain(|&(place, _)| {
359            self.place_ty(place).needs_drop(self.tcx(), self.elaborator.typing_env())
360        });
361
362        debug!("drop_ladder - fields needing drop: {:?}", fields);
363
364        let unwind_ladder = vec![Unwind::InCleanup; fields.len() + 1];
365        let unwind_ladder: Vec<_> = if let Unwind::To(target) = unwind {
366            let halfladder = self.drop_halfladder(&unwind_ladder, target, &fields);
367            halfladder.into_iter().map(Unwind::To).collect()
368        } else {
369            unwind_ladder
370        };
371
372        let normal_ladder = self.drop_halfladder(&unwind_ladder, succ, &fields);
373
374        (*normal_ladder.last().unwrap(), *unwind_ladder.last().unwrap())
375    }
376
377    fn open_drop_for_tuple(&mut self, tys: &[Ty<'tcx>]) -> BasicBlock {
378        debug!("open_drop_for_tuple({:?}, {:?})", self, tys);
379
380        let fields = tys
381            .iter()
382            .enumerate()
383            .map(|(i, &ty)| {
384                (
385                    self.tcx().mk_place_field(self.place, FieldIdx::new(i), ty),
386                    self.elaborator.field_subpath(self.path, FieldIdx::new(i)),
387                )
388            })
389            .collect();
390
391        let (succ, unwind) = self.drop_ladder_bottom();
392        self.drop_ladder(fields, succ, unwind).0
393    }
394
395    /// Drops the T contained in a `Box<T>` if it has not been moved out of
396    #[instrument(level = "debug", ret)]
397    fn open_drop_for_box_contents(
398        &mut self,
399        adt: ty::AdtDef<'tcx>,
400        args: GenericArgsRef<'tcx>,
401        succ: BasicBlock,
402        unwind: Unwind,
403    ) -> BasicBlock {
404        // drop glue is sent straight to codegen
405        // box cannot be directly dereferenced
406        let unique_ty = adt.non_enum_variant().fields[FieldIdx::ZERO].ty(self.tcx(), args);
407        let unique_variant = unique_ty.ty_adt_def().unwrap().non_enum_variant();
408        let nonnull_ty = unique_variant.fields[FieldIdx::ZERO].ty(self.tcx(), args);
409        let ptr_ty = Ty::new_imm_ptr(self.tcx(), args[0].expect_ty());
410
411        let unique_place = self.tcx().mk_place_field(self.place, FieldIdx::ZERO, unique_ty);
412        let nonnull_place = self.tcx().mk_place_field(unique_place, FieldIdx::ZERO, nonnull_ty);
413        let ptr_place = self.tcx().mk_place_field(nonnull_place, FieldIdx::ZERO, ptr_ty);
414        let interior = self.tcx().mk_place_deref(ptr_place);
415
416        let interior_path = self.elaborator.deref_subpath(self.path);
417
418        self.drop_subpath(interior, interior_path, succ, unwind)
419    }
420
421    #[instrument(level = "debug", ret)]
422    fn open_drop_for_adt(
423        &mut self,
424        adt: ty::AdtDef<'tcx>,
425        args: GenericArgsRef<'tcx>,
426    ) -> BasicBlock {
427        if adt.variants().is_empty() {
428            return self.elaborator.patch().new_block(BasicBlockData {
429                statements: vec![],
430                terminator: Some(Terminator {
431                    source_info: self.source_info,
432                    kind: TerminatorKind::Unreachable,
433                }),
434                is_cleanup: self.unwind.is_cleanup(),
435            });
436        }
437
438        let skip_contents = adt.is_union() || adt.is_manually_drop();
439        let contents_drop = if skip_contents {
440            (self.succ, self.unwind)
441        } else {
442            self.open_drop_for_adt_contents(adt, args)
443        };
444
445        if adt.is_box() {
446            // we need to drop the inside of the box before running the destructor
447            let succ = self.destructor_call_block(contents_drop);
448            let unwind = contents_drop
449                .1
450                .map(|unwind| self.destructor_call_block((unwind, Unwind::InCleanup)));
451
452            self.open_drop_for_box_contents(adt, args, succ, unwind)
453        } else if adt.has_dtor(self.tcx()) {
454            self.destructor_call_block(contents_drop)
455        } else {
456            contents_drop.0
457        }
458    }
459
460    fn open_drop_for_adt_contents(
461        &mut self,
462        adt: ty::AdtDef<'tcx>,
463        args: GenericArgsRef<'tcx>,
464    ) -> (BasicBlock, Unwind) {
465        let (succ, unwind) = self.drop_ladder_bottom();
466        if !adt.is_enum() {
467            let fields =
468                self.move_paths_for_fields(self.place, self.path, adt.variant(FIRST_VARIANT), args);
469            self.drop_ladder(fields, succ, unwind)
470        } else {
471            self.open_drop_for_multivariant(adt, args, succ, unwind)
472        }
473    }
474
    /// Builds the open drop for an enum: per-variant drop ladders dispatched
    /// from a discriminant switch, plus (when not already unwinding) a
    /// duplicated set of cleanup ladders behind an unwind-side switch.
    fn open_drop_for_multivariant(
        &mut self,
        adt: ty::AdtDef<'tcx>,
        args: GenericArgsRef<'tcx>,
        succ: BasicBlock,
        unwind: Unwind,
    ) -> (BasicBlock, Unwind) {
        let mut values = Vec::with_capacity(adt.variants().len());
        let mut normal_blocks = Vec::with_capacity(adt.variants().len());
        // Unwind-side blocks are only collected when we are not already on an
        // unwind path.
        let mut unwind_blocks =
            if unwind.is_cleanup() { None } else { Some(Vec::with_capacity(adt.variants().len())) };

        let mut have_otherwise_with_drop_glue = false;
        let mut have_otherwise = false;
        let tcx = self.tcx();

        for (variant_index, discr) in adt.discriminants(tcx) {
            let variant = &adt.variant(variant_index);
            let subpath = self.elaborator.downcast_subpath(self.path, variant_index);

            if let Some(variant_path) = subpath {
                // This variant has its own move path: build a dedicated drop
                // ladder over its fields.
                let base_place = tcx.mk_place_elem(
                    self.place,
                    ProjectionElem::Downcast(Some(variant.name), variant_index),
                );
                let fields = self.move_paths_for_fields(base_place, variant_path, variant, args);
                values.push(discr.val);
                if let Unwind::To(unwind) = unwind {
                    // We can't use the half-ladder from the original
                    // drop ladder, because this breaks the
                    // "funclet can't have 2 successor funclets"
                    // requirement from MSVC:
                    //
                    //           switch       unwind-switch
                    //          /      \         /        \
                    //         v1.0    v2.0  v2.0-unwind  v1.0-unwind
                    //         |        |      /             |
                    //    v1.1-unwind  v2.1-unwind           |
                    //      ^                                |
                    //       \-------------------------------/
                    //
                    // Create a duplicate half-ladder to avoid that. We
                    // could technically only do this on MSVC, but we
                    // want to minimize the divergence between MSVC
                    // and non-MSVC.

                    let unwind_blocks = unwind_blocks.as_mut().unwrap();
                    let unwind_ladder = vec![Unwind::InCleanup; fields.len() + 1];
                    let halfladder = self.drop_halfladder(&unwind_ladder, unwind, &fields);
                    unwind_blocks.push(halfladder.last().cloned().unwrap());
                }
                let (normal, _) = self.drop_ladder(fields, succ, unwind);
                normal_blocks.push(normal);
            } else {
                // No dedicated subpath: this variant will be handled by the
                // switch's `otherwise` target.
                have_otherwise = true;

                let typing_env = self.elaborator.typing_env();
                let have_field_with_drop_glue = variant
                    .fields
                    .iter()
                    .any(|field| field.ty(tcx, args).needs_drop(tcx, typing_env));
                if have_field_with_drop_glue {
                    have_otherwise_with_drop_glue = true;
                }
            }
        }

        if !have_otherwise {
            // Every variant got a dedicated block. `adt_switch_block` uses the
            // last block as the switch's `otherwise` target, so the last
            // discriminant value is dropped here.
            values.pop();
        } else if !have_otherwise_with_drop_glue {
            // `otherwise` variants exist but need no drop: plain gotos suffice.
            normal_blocks.push(self.goto_block(succ, unwind));
            if let Unwind::To(unwind) = unwind {
                unwind_blocks.as_mut().unwrap().push(self.goto_block(unwind, Unwind::InCleanup));
            }
        } else {
            // Some `otherwise` variant needs drop glue: emit drop blocks.
            normal_blocks.push(self.drop_block(succ, unwind));
            if let Unwind::To(unwind) = unwind {
                unwind_blocks.as_mut().unwrap().push(self.drop_block(unwind, Unwind::InCleanup));
            }
        }

        (
            self.adt_switch_block(adt, normal_blocks, &values, succ, unwind),
            unwind.map(|unwind| {
                self.adt_switch_block(
                    adt,
                    unwind_blocks.unwrap(),
                    &values,
                    unwind,
                    Unwind::InCleanup,
                )
            }),
        )
    }
569
570    fn adt_switch_block(
571        &mut self,
572        adt: ty::AdtDef<'tcx>,
573        blocks: Vec<BasicBlock>,
574        values: &[u128],
575        succ: BasicBlock,
576        unwind: Unwind,
577    ) -> BasicBlock {
578        // If there are multiple variants, then if something
579        // is present within the enum the discriminant, tracked
580        // by the rest path, must be initialized.
581        //
582        // Additionally, we do not want to switch on the
583        // discriminant after it is free-ed, because that
584        // way lies only trouble.
585        let discr_ty = adt.repr().discr_type().to_ty(self.tcx());
586        let discr = Place::from(self.new_temp(discr_ty));
587        let discr_rv = Rvalue::Discriminant(self.place);
588        let switch_block = BasicBlockData {
589            statements: vec![self.assign(discr, discr_rv)],
590            terminator: Some(Terminator {
591                source_info: self.source_info,
592                kind: TerminatorKind::SwitchInt {
593                    discr: Operand::Move(discr),
594                    targets: SwitchTargets::new(
595                        values.iter().copied().zip(blocks.iter().copied()),
596                        *blocks.last().unwrap(),
597                    ),
598                },
599            }),
600            is_cleanup: unwind.is_cleanup(),
601        };
602        let switch_block = self.elaborator.patch().new_block(switch_block);
603        self.drop_flag_test_block(switch_block, succ, unwind)
604    }
605
    /// Creates a block that calls `Drop::drop` on `self.place` through a fresh
    /// `&mut` borrow, then guards it behind a drop-flag test.
    fn destructor_call_block(&mut self, (succ, unwind): (BasicBlock, Unwind)) -> BasicBlock {
        debug!("destructor_call_block({:?}, {:?})", self, succ);
        let tcx = self.tcx();
        let drop_trait = tcx.require_lang_item(LangItem::Drop, None);
        // The `Drop` trait has a single method, so index 0 is `Drop::drop`.
        let drop_fn = tcx.associated_item_def_ids(drop_trait)[0];
        let ty = self.place_ty(self.place);

        let ref_ty = Ty::new_mut_ref(tcx, tcx.lifetimes.re_erased, ty);
        let ref_place = self.new_temp(ref_ty);
        let unit_temp = Place::from(self.new_temp(tcx.types.unit));

        let result = BasicBlockData {
            // First take `&mut self.place`, then call `Drop::drop(&mut ...)`.
            statements: vec![self.assign(
                Place::from(ref_place),
                Rvalue::Ref(
                    tcx.lifetimes.re_erased,
                    BorrowKind::Mut { kind: MutBorrowKind::Default },
                    self.place,
                ),
            )],
            terminator: Some(Terminator {
                kind: TerminatorKind::Call {
                    func: Operand::function_handle(
                        tcx,
                        drop_fn,
                        [ty.into()],
                        self.source_info.span,
                    ),
                    args: [Spanned { node: Operand::Move(Place::from(ref_place)), span: DUMMY_SP }]
                        .into(),
                    destination: unit_temp,
                    target: Some(succ),
                    unwind: unwind.into_action(),
                    call_source: CallSource::Misc,
                    fn_span: self.source_info.span,
                },
                source_info: self.source_info,
            }),
            is_cleanup: unwind.is_cleanup(),
        };

        let destructor_block = self.elaborator.patch().new_block(result);

        // Clear the shallow drop flag at the start of the destructor block —
        // presumably so the value is not dropped a second time if the
        // destructor unwinds (NOTE(review): confirm intent).
        let block_start = Location { block: destructor_block, statement_index: 0 };
        self.elaborator.clear_drop_flag(block_start, self.path, DropFlagMode::Shallow);

        self.drop_flag_test_block(destructor_block, succ, unwind)
    }
654
    /// Create a loop that drops an array:
    ///
    /// ```text
    /// loop-block:
    ///    can_go = cur == len
    ///    if can_go then succ else drop-block
    /// drop-block:
    ///    ptr = &raw mut P[cur]
    ///    cur = cur + 1
    ///    drop(ptr)
    /// ```
    ///
    /// `cur` and `len` are locals holding the running index and the length;
    /// `ety` is the element type. Control reaches `succ` once `cur == len`.
    fn drop_loop(
        &mut self,
        succ: BasicBlock,
        cur: Local,
        len: Local,
        ety: Ty<'tcx>,
        unwind: Unwind,
    ) -> BasicBlock {
        // Small helpers for building operands.
        let copy = |place: Place<'tcx>| Operand::Copy(place);
        let move_ = |place: Place<'tcx>| Operand::Move(place);
        let tcx = self.tcx();

        let ptr_ty = Ty::new_mut_ptr(tcx, ety);
        let ptr = Place::from(self.new_temp(ptr_ty));
        let can_go = Place::from(self.new_temp(tcx.types.bool));
        let one = self.constant_usize(1);

        // `drop_block` takes a raw pointer to the current element and bumps
        // the index; its real terminator is patched in below, once
        // `loop_block` exists to loop back to.
        let drop_block = BasicBlockData {
            statements: vec![
                self.assign(
                    ptr,
                    Rvalue::RawPtr(RawPtrKind::Mut, tcx.mk_place_index(self.place, cur)),
                ),
                self.assign(
                    cur.into(),
                    Rvalue::BinaryOp(BinOp::Add, Box::new((move_(cur.into()), one))),
                ),
            ],
            is_cleanup: unwind.is_cleanup(),
            terminator: Some(Terminator {
                source_info: self.source_info,
                // this gets overwritten by drop elaboration.
                kind: TerminatorKind::Unreachable,
            }),
        };
        let drop_block = self.elaborator.patch().new_block(drop_block);

        // `loop_block` tests `cur == len` and either exits to `succ` or
        // continues with `drop_block`.
        let loop_block = BasicBlockData {
            statements: vec![self.assign(
                can_go,
                Rvalue::BinaryOp(BinOp::Eq, Box::new((copy(Place::from(cur)), copy(len.into())))),
            )],
            is_cleanup: unwind.is_cleanup(),
            terminator: Some(Terminator {
                source_info: self.source_info,
                kind: TerminatorKind::if_(move_(can_go), succ, drop_block),
            }),
        };
        let loop_block = self.elaborator.patch().new_block(loop_block);

        // Now that `loop_block` exists, give `drop_block` its real terminator:
        // drop the current element and loop back to the test.
        self.elaborator.patch().patch_terminator(
            drop_block,
            TerminatorKind::Drop {
                place: tcx.mk_place_deref(ptr),
                target: loop_block,
                unwind: unwind.into_action(),
                replace: false,
            },
        );

        loop_block
    }
728
    /// Elaborates the drop of a fixed-size array.
    ///
    /// If the array's size is statically known and some elements have been
    /// individually moved out (array patterns), a drop ladder over subslices
    /// and kept elements is built. Otherwise the array is unsized to a slice
    /// and dropped via the slice drop loops.
    fn open_drop_for_array(
        &mut self,
        array_ty: Ty<'tcx>,
        ety: Ty<'tcx>,
        opt_size: Option<u64>,
    ) -> BasicBlock {
        debug!("open_drop_for_array({:?}, {:?}, {:?})", array_ty, ety, opt_size);
        let tcx = self.tcx();

        if let Some(size) = opt_size {
            enum ProjectionKind<Path> {
                // A contiguous run of elements dropped together as one subslice.
                Drop(std::ops::Range<u64>),
                // A single element that has its own move path (array pattern).
                Keep(u64, Path),
            }
            // Previously, we'd make a projection for every element in the array and create a drop
            // ladder if any `array_subpath` was `Some`, i.e. moving out with an array pattern.
            // This caused huge memory usage when generating the drops for large arrays, so we instead
            // record the *subslices* which are dropped and the *indexes* which are kept
            let mut drop_ranges = vec![];
            let mut dropping = true;
            let mut start = 0;
            for i in 0..size {
                let path = self.elaborator.array_subpath(self.path, i, size);
                if dropping && path.is_some() {
                    // A tracked element ends the current plain run.
                    drop_ranges.push(ProjectionKind::Drop(start..i));
                    dropping = false;
                } else if !dropping && path.is_none() {
                    // An untracked element starts a new plain run.
                    dropping = true;
                    start = i;
                }
                if let Some(path) = path {
                    drop_ranges.push(ProjectionKind::Keep(i, path));
                }
            }
            if !drop_ranges.is_empty() {
                if dropping {
                    // Flush the trailing plain run.
                    drop_ranges.push(ProjectionKind::Drop(start..size));
                }
                // The ladder is built back-to-front, so visit projections in
                // reverse order.
                let fields = drop_ranges
                    .iter()
                    .rev()
                    .map(|p| {
                        let (project, path) = match p {
                            ProjectionKind::Drop(r) => (
                                ProjectionElem::Subslice {
                                    from: r.start,
                                    to: r.end,
                                    from_end: false,
                                },
                                None,
                            ),
                            &ProjectionKind::Keep(offset, path) => (
                                ProjectionElem::ConstantIndex {
                                    offset,
                                    min_length: size,
                                    from_end: false,
                                },
                                Some(path),
                            ),
                        };
                        (tcx.mk_place_elem(self.place, project), path)
                    })
                    .collect::<Vec<_>>();
                let (succ, unwind) = self.drop_ladder_bottom();
                return self.drop_ladder(fields, succ, unwind).0;
            }
        }

        // No per-element move paths: unsize `*mut [T; N]` to `*mut [T]` and
        // delegate to the slice drop loops.
        let array_ptr_ty = Ty::new_mut_ptr(tcx, array_ty);
        let array_ptr = self.new_temp(array_ptr_ty);

        let slice_ty = Ty::new_slice(tcx, ety);
        let slice_ptr_ty = Ty::new_mut_ptr(tcx, slice_ty);
        let slice_ptr = self.new_temp(slice_ptr_ty);

        let mut delegate_block = BasicBlockData {
            statements: vec![
                self.assign(Place::from(array_ptr), Rvalue::RawPtr(RawPtrKind::Mut, self.place)),
                self.assign(
                    Place::from(slice_ptr),
                    Rvalue::Cast(
                        CastKind::PointerCoercion(
                            PointerCoercion::Unsize,
                            CoercionSource::Implicit,
                        ),
                        Operand::Move(Place::from(array_ptr)),
                        slice_ptr_ty,
                    ),
                ),
            ],
            is_cleanup: self.unwind.is_cleanup(),
            terminator: None,
        };

        // Temporarily retarget `self.place` at the deref of the slice pointer
        // while building the drop loops, then restore the original place.
        let array_place = mem::replace(
            &mut self.place,
            Place::from(slice_ptr).project_deeper(&[PlaceElem::Deref], tcx),
        );
        let slice_block = self.drop_loop_pair_for_slice(ety);
        self.place = array_place;

        delegate_block.terminator = Some(Terminator {
            source_info: self.source_info,
            kind: TerminatorKind::Goto { target: slice_block },
        });
        self.elaborator.patch().new_block(delegate_block)
    }
836
    /// Creates a pair of drop-loops of `place`, which drops its contents, even
    /// in the case of 1 panic.
    ///
    /// Two loops are emitted: one for the normal path and one for the cleanup
    /// (unwind) path, so that if dropping an element panics, the remaining
    /// elements are still dropped on the way out.
    ///
    /// `self.place` must be a single deref (`*_n`) of a pointer to the slice;
    /// this invariant is asserted below.
    fn drop_loop_pair_for_slice(&mut self, ety: Ty<'tcx>) -> BasicBlock {
        debug!("drop_loop_pair_for_slice({:?})", ety);
        let tcx = self.tcx();
        // Loop state shared by both loops: the slice length and the current
        // element index.
        let len = self.new_temp(tcx.types.usize);
        let cur = self.new_temp(tcx.types.usize);

        // Build the cleanup-path loop first: it drops the remaining elements
        // after a panic, and any further panic inside it aborts
        // (`Unwind::InCleanup`). `None` if we are already in a cleanup path.
        let unwind =
            self.unwind.map(|unwind| self.drop_loop(unwind, cur, len, ety, Unwind::InCleanup));

        // The normal-path loop; a panic while dropping an element diverts to
        // the cleanup loop built above.
        let loop_block = self.drop_loop(self.succ, cur, len, ety, unwind);

        // Sanity-check the invariant documented above: `self.place` must be
        // exactly `*local` so that `PtrMetadata` of the local yields the length.
        let [PlaceElem::Deref] = self.place.projection.as_slice() else {
            span_bug!(
                self.source_info.span,
                "Expected place for slice drop shim to be *_n, but it's {:?}",
                self.place,
            );
        };

        // Entry block: initialize `len` from the pointer's metadata and `cur`
        // to zero, then jump into the normal-path loop.
        let zero = self.constant_usize(0);
        let block = BasicBlockData {
            statements: vec![
                self.assign(
                    len.into(),
                    Rvalue::UnaryOp(
                        UnOp::PtrMetadata,
                        Operand::Copy(Place::from(self.place.local)),
                    ),
                ),
                self.assign(cur.into(), Rvalue::Use(zero)),
            ],
            is_cleanup: unwind.is_cleanup(),
            terminator: Some(Terminator {
                source_info: self.source_info,
                kind: TerminatorKind::Goto { target: loop_block },
            }),
        };

        let drop_block = self.elaborator.patch().new_block(block);
        // FIXME(#34708): handle partially-dropped array/slice elements.
        let reset_block = self.drop_flag_reset_block(DropFlagMode::Deep, drop_block, unwind);
        self.drop_flag_test_block(reset_block, self.succ, unwind)
    }
882
883    /// The slow-path - create an "open", elaborated drop for a type
884    /// which is moved-out-of only partially, and patch `bb` to a jump
885    /// to it. This must not be called on ADTs with a destructor,
886    /// as these can't be moved-out-of, except for `Box<T>`, which is
887    /// special-cased.
888    ///
889    /// This creates a "drop ladder" that drops the needed fields of the
890    /// ADT, both in the success case or if one of the destructors fail.
891    fn open_drop(&mut self) -> BasicBlock {
892        let ty = self.place_ty(self.place);
893        match ty.kind() {
894            ty::Closure(_, args) => self.open_drop_for_tuple(args.as_closure().upvar_tys()),
895            ty::CoroutineClosure(_, args) => {
896                self.open_drop_for_tuple(args.as_coroutine_closure().upvar_tys())
897            }
898            // Note that `elaborate_drops` only drops the upvars of a coroutine,
899            // and this is ok because `open_drop` here can only be reached
900            // within that own coroutine's resume function.
901            // This should only happen for the self argument on the resume function.
902            // It effectively only contains upvars until the coroutine transformation runs.
903            // See librustc_body/transform/coroutine.rs for more details.
904            ty::Coroutine(_, args) => self.open_drop_for_tuple(args.as_coroutine().upvar_tys()),
905            ty::Tuple(fields) => self.open_drop_for_tuple(fields),
906            ty::Adt(def, args) => self.open_drop_for_adt(*def, args),
907            ty::Dynamic(..) => self.complete_drop(self.succ, self.unwind),
908            ty::Array(ety, size) => {
909                let size = size.try_to_target_usize(self.tcx());
910                self.open_drop_for_array(ty, *ety, size)
911            }
912            ty::Slice(ety) => self.drop_loop_pair_for_slice(*ety),
913
914            _ => span_bug!(self.source_info.span, "open drop from non-ADT `{:?}`", ty),
915        }
916    }
917
918    fn complete_drop(&mut self, succ: BasicBlock, unwind: Unwind) -> BasicBlock {
919        debug!("complete_drop(succ={:?}, unwind={:?})", succ, unwind);
920
921        let drop_block = self.drop_block(succ, unwind);
922
923        self.drop_flag_test_block(drop_block, succ, unwind)
924    }
925
926    /// Creates a block that resets the drop flag. If `mode` is deep, all children drop flags will
927    /// also be cleared.
928    fn drop_flag_reset_block(
929        &mut self,
930        mode: DropFlagMode,
931        succ: BasicBlock,
932        unwind: Unwind,
933    ) -> BasicBlock {
934        debug!("drop_flag_reset_block({:?},{:?})", self, mode);
935
936        if unwind.is_cleanup() {
937            // The drop flag isn't read again on the unwind path, so don't
938            // bother setting it.
939            return succ;
940        }
941        let block = self.new_block(unwind, TerminatorKind::Goto { target: succ });
942        let block_start = Location { block, statement_index: 0 };
943        self.elaborator.clear_drop_flag(block_start, self.path, mode);
944        block
945    }
946
947    fn elaborated_drop_block(&mut self) -> BasicBlock {
948        debug!("elaborated_drop_block({:?})", self);
949        let blk = self.drop_block(self.succ, self.unwind);
950        self.elaborate_drop(blk);
951        blk
952    }
953
954    fn drop_block(&mut self, target: BasicBlock, unwind: Unwind) -> BasicBlock {
955        let block = TerminatorKind::Drop {
956            place: self.place,
957            target,
958            unwind: unwind.into_action(),
959            replace: false,
960        };
961        self.new_block(unwind, block)
962    }
963
964    fn goto_block(&mut self, target: BasicBlock, unwind: Unwind) -> BasicBlock {
965        let block = TerminatorKind::Goto { target };
966        self.new_block(unwind, block)
967    }
968
969    /// Returns the block to jump to in order to test the drop flag and execute the drop.
970    ///
971    /// Depending on the required `DropStyle`, this might be a generated block with an `if`
972    /// terminator (for dynamic/open drops), or it might be `on_set` or `on_unset` itself, in case
973    /// the drop can be statically determined.
974    fn drop_flag_test_block(
975        &mut self,
976        on_set: BasicBlock,
977        on_unset: BasicBlock,
978        unwind: Unwind,
979    ) -> BasicBlock {
980        let style = self.elaborator.drop_style(self.path, DropFlagMode::Shallow);
981        debug!(
982            "drop_flag_test_block({:?},{:?},{:?},{:?}) - {:?}",
983            self, on_set, on_unset, unwind, style
984        );
985
986        match style {
987            DropStyle::Dead => on_unset,
988            DropStyle::Static => on_set,
989            DropStyle::Conditional | DropStyle::Open => {
990                let flag = self.elaborator.get_drop_flag(self.path).unwrap();
991                let term = TerminatorKind::if_(flag, on_set, on_unset);
992                self.new_block(unwind, term)
993            }
994        }
995    }
996
997    fn new_block(&mut self, unwind: Unwind, k: TerminatorKind<'tcx>) -> BasicBlock {
998        self.elaborator.patch().new_block(BasicBlockData {
999            statements: vec![],
1000            terminator: Some(Terminator { source_info: self.source_info, kind: k }),
1001            is_cleanup: unwind.is_cleanup(),
1002        })
1003    }
1004
1005    fn new_temp(&mut self, ty: Ty<'tcx>) -> Local {
1006        self.elaborator.patch().new_temp(ty, self.source_info.span)
1007    }
1008
1009    fn constant_usize(&self, val: u16) -> Operand<'tcx> {
1010        Operand::Constant(Box::new(ConstOperand {
1011            span: self.source_info.span,
1012            user_ty: None,
1013            const_: Const::from_usize(self.tcx(), val.into()),
1014        }))
1015    }
1016
1017    fn assign(&self, lhs: Place<'tcx>, rhs: Rvalue<'tcx>) -> Statement<'tcx> {
1018        Statement {
1019            source_info: self.source_info,
1020            kind: StatementKind::Assign(Box::new((lhs, rhs))),
1021        }
1022    }
1023}