rustc_mir_transform/
elaborate_drop.rs

1use std::{fmt, iter, mem};
2
3use rustc_abi::{FIRST_VARIANT, FieldIdx, VariantIdx};
4use rustc_hir::lang_items::LangItem;
5use rustc_index::Idx;
6use rustc_middle::mir::*;
7use rustc_middle::span_bug;
8use rustc_middle::ty::adjustment::PointerCoercion;
9use rustc_middle::ty::util::IntTypeExt;
10use rustc_middle::ty::{self, GenericArgsRef, Ty, TyCtxt};
11use rustc_span::DUMMY_SP;
12use rustc_span::source_map::Spanned;
13use tracing::{debug, instrument};
14
15use crate::patch::MirPatch;
16
/// Describes how/if a value should be dropped.
#[derive(Debug)]
pub(crate) enum DropStyle {
    /// The value is already dead at the drop location, no drop will be executed.
    Dead,

    /// The value is known to always be initialized at the drop location, drop will always be
    /// executed.
    Static,

    /// Whether the value needs to be dropped depends on its drop flag.
    Conditional,

    /// An "open" drop is one where only the fields of a value are dropped.
    ///
    /// For example, this happens when moving out of a struct field: The rest of the struct will be
    /// dropped in such an "open" drop. It is also used to generate drop glue for the individual
    /// components of a value, for example for dropping array elements.
    Open,
}
37
/// Which drop flags to affect/check with an operation.
#[derive(Debug)]
pub(crate) enum DropFlagMode {
    /// Only affect the top-level drop flag, not that of any contained fields.
    Shallow,
    /// Affect all nested drop flags in addition to the top-level one.
    Deep,
}
46
/// Describes if unwinding is necessary and where to unwind to if a panic occurs.
#[derive(Copy, Clone, Debug)]
pub(crate) enum Unwind {
    /// Unwind to this block.
    To(BasicBlock),
    /// Already in an unwind path, any panic will cause an abort.
    InCleanup,
}
55
56impl Unwind {
57    fn is_cleanup(self) -> bool {
58        match self {
59            Unwind::To(..) => false,
60            Unwind::InCleanup => true,
61        }
62    }
63
64    fn into_action(self) -> UnwindAction {
65        match self {
66            Unwind::To(bb) => UnwindAction::Cleanup(bb),
67            Unwind::InCleanup => UnwindAction::Terminate(UnwindTerminateReason::InCleanup),
68        }
69    }
70
71    fn map<F>(self, f: F) -> Self
72    where
73        F: FnOnce(BasicBlock) -> BasicBlock,
74    {
75        match self {
76            Unwind::To(bb) => Unwind::To(f(bb)),
77            Unwind::InCleanup => Unwind::InCleanup,
78        }
79    }
80}
81
/// The interface drop elaboration uses to query initialization state, drop flags, and move
/// paths of the value being dropped.
pub(crate) trait DropElaborator<'a, 'tcx>: fmt::Debug {
    /// The type representing paths that can be moved out of.
    ///
    /// Users can move out of individual fields of a struct, such as `a.b.c`. This type is used to
    /// represent such move paths. Sometimes tracking individual move paths is not necessary, in
    /// which case this may be set to (for example) `()`.
    type Path: Copy + fmt::Debug;

    // Accessors

    /// Read-only view of the MIR patch accumulating this elaboration's changes.
    fn patch_ref(&self) -> &MirPatch<'tcx>;
    /// Mutable access to the MIR patch accumulating this elaboration's changes.
    fn patch(&mut self) -> &mut MirPatch<'tcx>;
    /// The body being elaborated.
    fn body(&self) -> &'a Body<'tcx>;
    fn tcx(&self) -> TyCtxt<'tcx>;
    fn typing_env(&self) -> ty::TypingEnv<'tcx>;

    // Drop logic

    /// Returns how `path` should be dropped, given `mode`.
    fn drop_style(&self, path: Self::Path, mode: DropFlagMode) -> DropStyle;

    /// Returns the drop flag of `path` as a MIR `Operand` (or `None` if `path` has no drop flag).
    fn get_drop_flag(&mut self, path: Self::Path) -> Option<Operand<'tcx>>;

    /// Modifies the MIR patch so that the drop flag of `path` (if any) is cleared at `location`.
    ///
    /// If `mode` is deep, drop flags of all child paths should also be cleared by inserting
    /// additional statements.
    fn clear_drop_flag(&mut self, location: Location, path: Self::Path, mode: DropFlagMode);

    // Subpaths

    /// Returns the subpath of a field of `path` (or `None` if there is no dedicated subpath).
    ///
    /// If this returns `None`, `field` will not get a dedicated drop flag.
    fn field_subpath(&self, path: Self::Path, field: FieldIdx) -> Option<Self::Path>;

    /// Returns the subpath of a dereference of `path` (or `None` if there is no dedicated subpath).
    ///
    /// If this returns `None`, `*path` will not get a dedicated drop flag.
    ///
    /// This is only relevant for `Box<T>`, where the contained `T` can be moved out of the box.
    fn deref_subpath(&self, path: Self::Path) -> Option<Self::Path>;

    /// Returns the subpath of downcasting `path` to one of its variants.
    ///
    /// If this returns `None`, the downcast of `path` will not get a dedicated drop flag.
    fn downcast_subpath(&self, path: Self::Path, variant: VariantIdx) -> Option<Self::Path>;

    /// Returns the subpath of indexing a fixed-size array `path`.
    ///
    /// If this returns `None`, elements of `path` will not get a dedicated drop flag.
    ///
    /// This is only relevant for array patterns, which can move out of individual array elements.
    fn array_subpath(&self, path: Self::Path, index: u64, size: u64) -> Option<Self::Path>;
}
138
/// State for elaborating a single drop: the value being dropped, where control continues
/// afterwards, and where to unwind to if dropping panics.
#[derive(Debug)]
struct DropCtxt<'a, 'b, 'tcx, D>
where
    D: DropElaborator<'b, 'tcx>,
{
    elaborator: &'a mut D,

    // Source info attached to every statement/terminator this elaboration emits.
    source_info: SourceInfo,

    // The place being dropped, and its move path in the elaborator.
    place: Place<'tcx>,
    path: D::Path,
    // Block to continue to once the drop has completed.
    succ: BasicBlock,
    // Unwind destination if the drop panics.
    unwind: Unwind,
}
153
154/// "Elaborates" a drop of `place`/`path` and patches `bb`'s terminator to execute it.
155///
156/// The passed `elaborator` is used to determine what should happen at the drop terminator. It
157/// decides whether the drop can be statically determined or whether it needs a dynamic drop flag,
158/// and whether the drop is "open", ie. should be expanded to drop all subfields of the dropped
159/// value.
160///
161/// When this returns, the MIR patch in the `elaborator` contains the necessary changes.
162pub(crate) fn elaborate_drop<'b, 'tcx, D>(
163    elaborator: &mut D,
164    source_info: SourceInfo,
165    place: Place<'tcx>,
166    path: D::Path,
167    succ: BasicBlock,
168    unwind: Unwind,
169    bb: BasicBlock,
170) where
171    D: DropElaborator<'b, 'tcx>,
172    'tcx: 'b,
173{
174    DropCtxt { elaborator, source_info, place, path, succ, unwind }.elaborate_drop(bb)
175}
176
177impl<'a, 'b, 'tcx, D> DropCtxt<'a, 'b, 'tcx, D>
178where
179    D: DropElaborator<'b, 'tcx>,
180    'tcx: 'b,
181{
182    #[instrument(level = "trace", skip(self), ret)]
183    fn place_ty(&self, place: Place<'tcx>) -> Ty<'tcx> {
184        if place.local < self.elaborator.body().local_decls.next_index() {
185            place.ty(self.elaborator.body(), self.tcx()).ty
186        } else {
187            // We don't have a slice with all the locals, since some are in the patch.
188            PlaceTy::from_ty(self.elaborator.patch_ref().local_ty(place.local))
189                .multi_projection_ty(self.elaborator.tcx(), place.projection)
190                .ty
191        }
192    }
193
    /// Shorthand for the elaborator's type context.
    fn tcx(&self) -> TyCtxt<'tcx> {
        self.elaborator.tcx()
    }
197
198    /// This elaborates a single drop instruction, located at `bb`, and
199    /// patches over it.
200    ///
201    /// The elaborated drop checks the drop flags to only drop what
202    /// is initialized.
203    ///
204    /// In addition, the relevant drop flags also need to be cleared
205    /// to avoid double-drops. However, in the middle of a complex
206    /// drop, one must avoid clearing some of the flags before they
207    /// are read, as that would cause a memory leak.
208    ///
209    /// In particular, when dropping an ADT, multiple fields may be
210    /// joined together under the `rest` subpath. They are all controlled
211    /// by the primary drop flag, but only the last rest-field dropped
212    /// should clear it (and it must also not clear anything else).
213    //
214    // FIXME: I think we should just control the flags externally,
215    // and then we do not need this machinery.
216    #[instrument(level = "debug")]
217    fn elaborate_drop(&mut self, bb: BasicBlock) {
218        match self.elaborator.drop_style(self.path, DropFlagMode::Deep) {
219            DropStyle::Dead => {
220                self.elaborator
221                    .patch()
222                    .patch_terminator(bb, TerminatorKind::Goto { target: self.succ });
223            }
224            DropStyle::Static => {
225                self.elaborator.patch().patch_terminator(
226                    bb,
227                    TerminatorKind::Drop {
228                        place: self.place,
229                        target: self.succ,
230                        unwind: self.unwind.into_action(),
231                        replace: false,
232                    },
233                );
234            }
235            DropStyle::Conditional => {
236                let drop_bb = self.complete_drop(self.succ, self.unwind);
237                self.elaborator
238                    .patch()
239                    .patch_terminator(bb, TerminatorKind::Goto { target: drop_bb });
240            }
241            DropStyle::Open => {
242                let drop_bb = self.open_drop();
243                self.elaborator
244                    .patch()
245                    .patch_terminator(bb, TerminatorKind::Goto { target: drop_bb });
246            }
247        }
248    }
249
    /// Returns the place and move path for each field of `variant`,
    /// (the move path is `None` if the field is a rest field).
    fn move_paths_for_fields(
        &self,
        base_place: Place<'tcx>,
        variant_path: D::Path,
        variant: &'tcx ty::VariantDef,
        args: GenericArgsRef<'tcx>,
    ) -> Vec<(Place<'tcx>, Option<D::Path>)> {
        variant
            .fields
            .iter()
            .enumerate()
            .map(|(i, f)| {
                let field = FieldIdx::new(i);
                let subpath = self.elaborator.field_subpath(variant_path, field);
                let tcx = self.tcx();

                // Drop elaboration runs post-analysis, so normalization is expected to succeed.
                assert_eq!(self.elaborator.typing_env().typing_mode, ty::TypingMode::PostAnalysis);
                // The type error for normalization may have been in dropck: see
                // `compute_drop_data` in rustc_borrowck, in which case we wouldn't have
                // deleted the MIR body and could have an error here as well.
                let field_ty = match tcx
                    .try_normalize_erasing_regions(self.elaborator.typing_env(), f.ty(tcx, args))
                {
                    Ok(t) => t,
                    Err(_) => Ty::new_error(
                        self.tcx(),
                        self.elaborator
                            .body()
                            .tainted_by_errors
                            .expect("Error in drop elaboration not found by dropck."),
                    ),
                };

                (tcx.mk_place_field(base_place, field, field_ty), subpath)
            })
            .collect()
    }
289
    /// Elaborates the drop of a single subfield `place`, continuing to `succ` on completion and
    /// to `unwind` on panic.
    ///
    /// If `path` is `Some`, the field has its own move path (and hence potentially its own drop
    /// flag) and is elaborated recursively; otherwise it is a "rest" field whose drop is
    /// conditioned on the parent's (`self.path`) drop flag.
    fn drop_subpath(
        &mut self,
        place: Place<'tcx>,
        path: Option<D::Path>,
        succ: BasicBlock,
        unwind: Unwind,
    ) -> BasicBlock {
        if let Some(path) = path {
            debug!("drop_subpath: for std field {:?}", place);

            DropCtxt {
                elaborator: self.elaborator,
                source_info: self.source_info,
                path,
                place,
                succ,
                unwind,
            }
            .elaborated_drop_block()
        } else {
            debug!("drop_subpath: for rest field {:?}", place);

            DropCtxt {
                elaborator: self.elaborator,
                source_info: self.source_info,
                place,
                succ,
                unwind,
                // Using `self.path` here to condition the drop on
                // our own drop flag.
                path: self.path,
            }
            .complete_drop(succ, unwind)
        }
    }
325
326    /// Creates one-half of the drop ladder for a list of fields, and return
327    /// the list of steps in it in reverse order, with the first step
328    /// dropping 0 fields and so on.
329    ///
330    /// `unwind_ladder` is such a list of steps in reverse order,
331    /// which is called if the matching step of the drop glue panics.
332    fn drop_halfladder(
333        &mut self,
334        unwind_ladder: &[Unwind],
335        mut succ: BasicBlock,
336        fields: &[(Place<'tcx>, Option<D::Path>)],
337    ) -> Vec<BasicBlock> {
338        iter::once(succ)
339            .chain(fields.iter().rev().zip(unwind_ladder).map(|(&(place, path), &unwind_succ)| {
340                succ = self.drop_subpath(place, path, succ, unwind_succ);
341                succ
342            }))
343            .collect()
344    }
345
346    fn drop_ladder_bottom(&mut self) -> (BasicBlock, Unwind) {
347        // Clear the "master" drop flag at the end. This is needed
348        // because the "master" drop protects the ADT's discriminant,
349        // which is invalidated after the ADT is dropped.
350        (self.drop_flag_reset_block(DropFlagMode::Shallow, self.succ, self.unwind), self.unwind)
351    }
352
353    /// Creates a full drop ladder, consisting of 2 connected half-drop-ladders
354    ///
355    /// For example, with 3 fields, the drop ladder is
356    ///
357    /// .d0:
358    ///     ELAB(drop location.0 [target=.d1, unwind=.c1])
359    /// .d1:
360    ///     ELAB(drop location.1 [target=.d2, unwind=.c2])
361    /// .d2:
362    ///     ELAB(drop location.2 [target=`self.succ`, unwind=`self.unwind`])
363    /// .c1:
364    ///     ELAB(drop location.1 [target=.c2])
365    /// .c2:
366    ///     ELAB(drop location.2 [target=`self.unwind`])
367    ///
368    /// NOTE: this does not clear the master drop flag, so you need
369    /// to point succ/unwind on a `drop_ladder_bottom`.
370    fn drop_ladder(
371        &mut self,
372        fields: Vec<(Place<'tcx>, Option<D::Path>)>,
373        succ: BasicBlock,
374        unwind: Unwind,
375    ) -> (BasicBlock, Unwind) {
376        debug!("drop_ladder({:?}, {:?})", self, fields);
377
378        let mut fields = fields;
379        fields.retain(|&(place, _)| {
380            self.place_ty(place).needs_drop(self.tcx(), self.elaborator.typing_env())
381        });
382
383        debug!("drop_ladder - fields needing drop: {:?}", fields);
384
385        let unwind_ladder = vec![Unwind::InCleanup; fields.len() + 1];
386        let unwind_ladder: Vec<_> = if let Unwind::To(target) = unwind {
387            let halfladder = self.drop_halfladder(&unwind_ladder, target, &fields);
388            halfladder.into_iter().map(Unwind::To).collect()
389        } else {
390            unwind_ladder
391        };
392
393        let normal_ladder = self.drop_halfladder(&unwind_ladder, succ, &fields);
394
395        (*normal_ladder.last().unwrap(), *unwind_ladder.last().unwrap())
396    }
397
398    fn open_drop_for_tuple(&mut self, tys: &[Ty<'tcx>]) -> BasicBlock {
399        debug!("open_drop_for_tuple({:?}, {:?})", self, tys);
400
401        let fields = tys
402            .iter()
403            .enumerate()
404            .map(|(i, &ty)| {
405                (
406                    self.tcx().mk_place_field(self.place, FieldIdx::new(i), ty),
407                    self.elaborator.field_subpath(self.path, FieldIdx::new(i)),
408                )
409            })
410            .collect();
411
412        let (succ, unwind) = self.drop_ladder_bottom();
413        self.drop_ladder(fields, succ, unwind).0
414    }
415
    /// Drops the T contained in a `Box<T>` if it has not been moved out of
    #[instrument(level = "debug", ret)]
    fn open_drop_for_box_contents(
        &mut self,
        adt: ty::AdtDef<'tcx>,
        args: GenericArgsRef<'tcx>,
        succ: BasicBlock,
        unwind: Unwind,
    ) -> BasicBlock {
        // drop glue is sent straight to codegen
        // box cannot be directly dereferenced
        //
        // Project through the box's field chain (named `Unique`/`NonNull` below) to reach the
        // raw pointer to the contents.
        let unique_ty = adt.non_enum_variant().fields[FieldIdx::ZERO].ty(self.tcx(), args);
        let unique_variant = unique_ty.ty_adt_def().unwrap().non_enum_variant();
        let nonnull_ty = unique_variant.fields[FieldIdx::ZERO].ty(self.tcx(), args);
        // `args[0]` is the boxed type `T`.
        let ptr_ty = Ty::new_imm_ptr(self.tcx(), args[0].expect_ty());

        let unique_place = self.tcx().mk_place_field(self.place, FieldIdx::ZERO, unique_ty);
        let nonnull_place = self.tcx().mk_place_field(unique_place, FieldIdx::ZERO, nonnull_ty);

        let ptr_local = self.new_temp(ptr_ty);

        let interior = self.tcx().mk_place_deref(Place::from(ptr_local));
        let interior_path = self.elaborator.deref_subpath(self.path);

        // Build the drop of `*ptr` first; the setup block below jumps to it.
        let do_drop_bb = self.drop_subpath(interior, interior_path, succ, unwind);

        // Setup block: transmute the non-null wrapper into a plain raw pointer in a fresh
        // temporary, then go drop the pointee.
        let setup_bbd = BasicBlockData {
            statements: vec![self.assign(
                Place::from(ptr_local),
                Rvalue::Cast(CastKind::Transmute, Operand::Copy(nonnull_place), ptr_ty),
            )],
            terminator: Some(Terminator {
                kind: TerminatorKind::Goto { target: do_drop_bb },
                source_info: self.source_info,
            }),
            is_cleanup: unwind.is_cleanup(),
        };
        self.elaborator.patch().new_block(setup_bbd)
    }
455
    /// Builds the open drop for an ADT: drops the active variant's fields (unless suppressed),
    /// then runs the destructor if the ADT has one. Boxes get special ordering: contents first,
    /// then the box's own destructor.
    #[instrument(level = "debug", ret)]
    fn open_drop_for_adt(
        &mut self,
        adt: ty::AdtDef<'tcx>,
        args: GenericArgsRef<'tcx>,
    ) -> BasicBlock {
        if adt.variants().is_empty() {
            // An uninhabited type can have no live value to drop.
            return self.elaborator.patch().new_block(BasicBlockData {
                statements: vec![],
                terminator: Some(Terminator {
                    source_info: self.source_info,
                    kind: TerminatorKind::Unreachable,
                }),
                is_cleanup: self.unwind.is_cleanup(),
            });
        }

        // Unions and `ManuallyDrop` do not drop their contents.
        let skip_contents = adt.is_union() || adt.is_manually_drop();
        let contents_drop = if skip_contents {
            (self.succ, self.unwind)
        } else {
            self.open_drop_for_adt_contents(adt, args)
        };

        if adt.is_box() {
            // we need to drop the inside of the box before running the destructor
            let succ = self.destructor_call_block(contents_drop);
            let unwind = contents_drop
                .1
                .map(|unwind| self.destructor_call_block((unwind, Unwind::InCleanup)));

            self.open_drop_for_box_contents(adt, args, succ, unwind)
        } else if adt.has_dtor(self.tcx()) {
            self.destructor_call_block(contents_drop)
        } else {
            contents_drop.0
        }
    }
494
495    fn open_drop_for_adt_contents(
496        &mut self,
497        adt: ty::AdtDef<'tcx>,
498        args: GenericArgsRef<'tcx>,
499    ) -> (BasicBlock, Unwind) {
500        let (succ, unwind) = self.drop_ladder_bottom();
501        if !adt.is_enum() {
502            let fields =
503                self.move_paths_for_fields(self.place, self.path, adt.variant(FIRST_VARIANT), args);
504            self.drop_ladder(fields, succ, unwind)
505        } else {
506            self.open_drop_for_multivariant(adt, args, succ, unwind)
507        }
508    }
509
    /// Builds the drop ladders for a multi-variant enum: a switch on the discriminant that
    /// jumps to a per-variant drop ladder (plus, when unwinding is possible, a duplicate
    /// switch/ladder set for the cleanup path).
    fn open_drop_for_multivariant(
        &mut self,
        adt: ty::AdtDef<'tcx>,
        args: GenericArgsRef<'tcx>,
        succ: BasicBlock,
        unwind: Unwind,
    ) -> (BasicBlock, Unwind) {
        let mut values = Vec::with_capacity(adt.variants().len());
        let mut normal_blocks = Vec::with_capacity(adt.variants().len());
        // No separate unwind ladders are needed when we are already on a cleanup path.
        let mut unwind_blocks =
            if unwind.is_cleanup() { None } else { Some(Vec::with_capacity(adt.variants().len())) };

        let mut have_otherwise_with_drop_glue = false;
        let mut have_otherwise = false;
        let tcx = self.tcx();

        for (variant_index, discr) in adt.discriminants(tcx) {
            let variant = &adt.variant(variant_index);
            let subpath = self.elaborator.downcast_subpath(self.path, variant_index);

            if let Some(variant_path) = subpath {
                let base_place = tcx.mk_place_elem(
                    self.place,
                    ProjectionElem::Downcast(Some(variant.name), variant_index),
                );
                let fields = self.move_paths_for_fields(base_place, variant_path, variant, args);
                values.push(discr.val);
                if let Unwind::To(unwind) = unwind {
                    // We can't use the half-ladder from the original
                    // drop ladder, because this breaks the
                    // "funclet can't have 2 successor funclets"
                    // requirement from MSVC:
                    //
                    //           switch       unwind-switch
                    //          /      \         /        \
                    //         v1.0    v2.0  v2.0-unwind  v1.0-unwind
                    //         |        |      /             |
                    //    v1.1-unwind  v2.1-unwind           |
                    //      ^                                |
                    //       \-------------------------------/
                    //
                    // Create a duplicate half-ladder to avoid that. We
                    // could technically only do this on MSVC, but we
                    // want to minimize the divergence between MSVC
                    // and non-MSVC.

                    let unwind_blocks = unwind_blocks.as_mut().unwrap();
                    let unwind_ladder = vec![Unwind::InCleanup; fields.len() + 1];
                    let halfladder = self.drop_halfladder(&unwind_ladder, unwind, &fields);
                    unwind_blocks.push(halfladder.last().cloned().unwrap());
                }
                let (normal, _) = self.drop_ladder(fields, succ, unwind);
                normal_blocks.push(normal);
            } else {
                // This variant has no dedicated subpath; it will be handled by the
                // switch's `otherwise` arm.
                have_otherwise = true;

                let typing_env = self.elaborator.typing_env();
                let have_field_with_drop_glue = variant
                    .fields
                    .iter()
                    .any(|field| field.ty(tcx, args).needs_drop(tcx, typing_env));
                if have_field_with_drop_glue {
                    have_otherwise_with_drop_glue = true;
                }
            }
        }

        if !have_otherwise {
            // The last block doubles as the switch's `otherwise` target, so its
            // discriminant value is not needed.
            values.pop();
        } else if !have_otherwise_with_drop_glue {
            normal_blocks.push(self.goto_block(succ, unwind));
            if let Unwind::To(unwind) = unwind {
                unwind_blocks.as_mut().unwrap().push(self.goto_block(unwind, Unwind::InCleanup));
            }
        } else {
            normal_blocks.push(self.drop_block(succ, unwind));
            if let Unwind::To(unwind) = unwind {
                unwind_blocks.as_mut().unwrap().push(self.drop_block(unwind, Unwind::InCleanup));
            }
        }

        (
            self.adt_switch_block(adt, normal_blocks, &values, succ, unwind),
            unwind.map(|unwind| {
                self.adt_switch_block(
                    adt,
                    unwind_blocks.unwrap(),
                    &values,
                    unwind,
                    Unwind::InCleanup,
                )
            }),
        )
    }
604
    /// Builds a block that reads the discriminant of `self.place` and switches to the matching
    /// per-variant block from `blocks`, guarded by this path's drop flag.
    fn adt_switch_block(
        &mut self,
        adt: ty::AdtDef<'tcx>,
        blocks: Vec<BasicBlock>,
        values: &[u128],
        succ: BasicBlock,
        unwind: Unwind,
    ) -> BasicBlock {
        // If there are multiple variants, then if something
        // is present within the enum the discriminant, tracked
        // by the rest path, must be initialized.
        //
        // Additionally, we do not want to switch on the
        // discriminant after it is free-ed, because that
        // way lies only trouble.
        let discr_ty = adt.repr().discr_type().to_ty(self.tcx());
        let discr = Place::from(self.new_temp(discr_ty));
        let discr_rv = Rvalue::Discriminant(self.place);
        let switch_block = BasicBlockData {
            statements: vec![self.assign(discr, discr_rv)],
            terminator: Some(Terminator {
                source_info: self.source_info,
                kind: TerminatorKind::SwitchInt {
                    discr: Operand::Move(discr),
                    targets: SwitchTargets::new(
                        values.iter().copied().zip(blocks.iter().copied()),
                        // The last block serves as the `otherwise` target.
                        *blocks.last().unwrap(),
                    ),
                },
            }),
            is_cleanup: unwind.is_cleanup(),
        };
        let switch_block = self.elaborator.patch().new_block(switch_block);
        // Only read the discriminant at all if the value is (possibly) initialized.
        self.drop_flag_test_block(switch_block, succ, unwind)
    }
640
    /// Builds a block that calls `Drop::drop` on `self.place` via a `&mut` borrow, guarded by
    /// this path's drop flag. Control continues to `succ` on return and to `unwind` on panic.
    fn destructor_call_block(&mut self, (succ, unwind): (BasicBlock, Unwind)) -> BasicBlock {
        debug!("destructor_call_block({:?}, {:?})", self, succ);
        let tcx = self.tcx();
        let drop_trait = tcx.require_lang_item(LangItem::Drop, None);
        // `drop` is the first (and only) associated item of the `Drop` trait.
        let drop_fn = tcx.associated_item_def_ids(drop_trait)[0];
        let ty = self.place_ty(self.place);

        let ref_ty = Ty::new_mut_ref(tcx, tcx.lifetimes.re_erased, ty);
        let ref_place = self.new_temp(ref_ty);
        let unit_temp = Place::from(self.new_temp(tcx.types.unit));

        let result = BasicBlockData {
            // Take a `&mut` borrow of the value, then call `Drop::drop` on it.
            statements: vec![self.assign(
                Place::from(ref_place),
                Rvalue::Ref(
                    tcx.lifetimes.re_erased,
                    BorrowKind::Mut { kind: MutBorrowKind::Default },
                    self.place,
                ),
            )],
            terminator: Some(Terminator {
                kind: TerminatorKind::Call {
                    func: Operand::function_handle(
                        tcx,
                        drop_fn,
                        [ty.into()],
                        self.source_info.span,
                    ),
                    args: [Spanned { node: Operand::Move(Place::from(ref_place)), span: DUMMY_SP }]
                        .into(),
                    destination: unit_temp,
                    target: Some(succ),
                    unwind: unwind.into_action(),
                    call_source: CallSource::Misc,
                    fn_span: self.source_info.span,
                },
                source_info: self.source_info,
            }),
            is_cleanup: unwind.is_cleanup(),
        };

        let destructor_block = self.elaborator.patch().new_block(result);

        // Clear the (shallow) drop flag at the start of this block, before the destructor runs.
        let block_start = Location { block: destructor_block, statement_index: 0 };
        self.elaborator.clear_drop_flag(block_start, self.path, DropFlagMode::Shallow);

        self.drop_flag_test_block(destructor_block, succ, unwind)
    }
689
    /// Create a loop that drops an array:
    ///
    /// ```text
    /// loop-block:
    ///    can_go = cur == len
    ///    if can_go then succ else drop-block
    /// drop-block:
    ///    ptr = &raw mut P[cur]
    ///    cur = cur + 1
    ///    drop(ptr)
    /// ```
    fn drop_loop(
        &mut self,
        succ: BasicBlock,
        cur: Local,
        len: Local,
        ety: Ty<'tcx>,
        unwind: Unwind,
    ) -> BasicBlock {
        let copy = |place: Place<'tcx>| Operand::Copy(place);
        let move_ = |place: Place<'tcx>| Operand::Move(place);
        let tcx = self.tcx();

        let ptr_ty = Ty::new_mut_ptr(tcx, ety);
        let ptr = Place::from(self.new_temp(ptr_ty));
        let can_go = Place::from(self.new_temp(tcx.types.bool));
        let one = self.constant_usize(1);

        // Body of the loop: take a raw pointer to the current element and advance `cur`.
        // The terminator is filled in below, once `loop_block` exists.
        let drop_block = BasicBlockData {
            statements: vec![
                self.assign(
                    ptr,
                    Rvalue::RawPtr(RawPtrKind::Mut, tcx.mk_place_index(self.place, cur)),
                ),
                self.assign(
                    cur.into(),
                    Rvalue::BinaryOp(BinOp::Add, Box::new((move_(cur.into()), one))),
                ),
            ],
            is_cleanup: unwind.is_cleanup(),
            terminator: Some(Terminator {
                source_info: self.source_info,
                // this gets overwritten by drop elaboration.
                kind: TerminatorKind::Unreachable,
            }),
        };
        let drop_block = self.elaborator.patch().new_block(drop_block);

        // Loop header: exit to `succ` once `cur == len`, otherwise run the body.
        let loop_block = BasicBlockData {
            statements: vec![self.assign(
                can_go,
                Rvalue::BinaryOp(BinOp::Eq, Box::new((copy(Place::from(cur)), copy(len.into())))),
            )],
            is_cleanup: unwind.is_cleanup(),
            terminator: Some(Terminator {
                source_info: self.source_info,
                kind: TerminatorKind::if_(move_(can_go), succ, drop_block),
            }),
        };
        let loop_block = self.elaborator.patch().new_block(loop_block);

        // Now that the header exists, give the body its real terminator: drop the
        // element behind `ptr` and jump back to the header.
        self.elaborator.patch().patch_terminator(
            drop_block,
            TerminatorKind::Drop {
                place: tcx.mk_place_deref(ptr),
                target: loop_block,
                unwind: unwind.into_action(),
                replace: false,
            },
        );

        loop_block
    }
763
    /// Creates the drop glue for an array (`[T; N]`).
    ///
    /// If the array's length is statically known (`opt_size` is `Some`) and some elements have
    /// been individually moved out (i.e. `array_subpath` returns `Some` for them), a drop ladder
    /// is built over the kept indices and the still-initialized subslices between them.
    /// Otherwise, the array is unsized to `[T]` via a raw-pointer cast and dropping is delegated
    /// to the slice drop loop.
    fn open_drop_for_array(
        &mut self,
        array_ty: Ty<'tcx>,
        ety: Ty<'tcx>,
        opt_size: Option<u64>,
    ) -> BasicBlock {
        debug!("open_drop_for_array({:?}, {:?}, {:?})", array_ty, ety, opt_size);
        let tcx = self.tcx();

        if let Some(size) = opt_size {
            // Either a contiguous run of elements to drop as one subslice, or a single
            // element index that has its own drop path (e.g. was bound by an array pattern).
            enum ProjectionKind<Path> {
                Drop(std::ops::Range<u64>),
                Keep(u64, Path),
            }
            // Previously, we'd make a projection for every element in the array and create a drop
            // ladder if any `array_subpath` was `Some`, i.e. moving out with an array pattern.
            // This caused huge memory usage when generating the drops for large arrays, so we instead
            // record the *subslices* which are dropped and the *indexes* which are kept
            let mut drop_ranges = vec![];
            // `dropping` tracks whether we are currently inside a run of elements without their
            // own subpath; `start` is the first index of that run.
            let mut dropping = true;
            let mut start = 0;
            for i in 0..size {
                let path = self.elaborator.array_subpath(self.path, i, size);
                if dropping && path.is_some() {
                    // A tracked element ends the current run; emit the preceding subslice
                    // (possibly empty, e.g. when the very first element is tracked).
                    drop_ranges.push(ProjectionKind::Drop(start..i));
                    dropping = false;
                } else if !dropping && path.is_none() {
                    // An untracked element starts a new run.
                    dropping = true;
                    start = i;
                }
                if let Some(path) = path {
                    drop_ranges.push(ProjectionKind::Keep(i, path));
                }
            }
            // `drop_ranges` is only non-empty if at least one element had its own subpath,
            // i.e. the array was partially moved out of. Otherwise fall through to the
            // slice-loop path below.
            if !drop_ranges.is_empty() {
                if dropping {
                    // Close the trailing run of untracked elements.
                    drop_ranges.push(ProjectionKind::Drop(start..size));
                }
                // Turn each entry into a (place, optional drop path) pair for the drop ladder.
                // NOTE(review): the `.rev()` matches the order `drop_ladder` expects its
                // fields in — presumably last-dropped-first; confirm against `drop_ladder`.
                let fields = drop_ranges
                    .iter()
                    .rev()
                    .map(|p| {
                        let (project, path) = match p {
                            ProjectionKind::Drop(r) => (
                                ProjectionElem::Subslice {
                                    from: r.start,
                                    to: r.end,
                                    from_end: false,
                                },
                                None,
                            ),
                            &ProjectionKind::Keep(offset, path) => (
                                ProjectionElem::ConstantIndex {
                                    offset,
                                    min_length: size,
                                    from_end: false,
                                },
                                Some(path),
                            ),
                        };
                        (tcx.mk_place_elem(self.place, project), path)
                    })
                    .collect::<Vec<_>>();
                let (succ, unwind) = self.drop_ladder_bottom();
                return self.drop_ladder(fields, succ, unwind).0;
            }
        }

        // No per-element tracking (or unknown length): unsize `*mut [T; N]` to `*mut [T]`
        // and reuse the slice drop loop.
        let array_ptr_ty = Ty::new_mut_ptr(tcx, array_ty);
        let array_ptr = self.new_temp(array_ptr_ty);

        let slice_ty = Ty::new_slice(tcx, ety);
        let slice_ptr_ty = Ty::new_mut_ptr(tcx, slice_ty);
        let slice_ptr = self.new_temp(slice_ptr_ty);

        // Block that materializes the slice pointer; its terminator is filled in below,
        // once the loop's entry block is known.
        let mut delegate_block = BasicBlockData {
            statements: vec![
                self.assign(Place::from(array_ptr), Rvalue::RawPtr(RawPtrKind::Mut, self.place)),
                self.assign(
                    Place::from(slice_ptr),
                    Rvalue::Cast(
                        CastKind::PointerCoercion(
                            PointerCoercion::Unsize,
                            CoercionSource::Implicit,
                        ),
                        Operand::Move(Place::from(array_ptr)),
                        slice_ptr_ty,
                    ),
                ),
            ],
            is_cleanup: self.unwind.is_cleanup(),
            terminator: None,
        };

        // Temporarily redirect `self.place` to `*slice_ptr` so the slice drop loop operates
        // on the unsized view; restore the array place afterwards.
        let array_place = mem::replace(
            &mut self.place,
            Place::from(slice_ptr).project_deeper(&[PlaceElem::Deref], tcx),
        );
        let slice_block = self.drop_loop_pair_for_slice(ety);
        self.place = array_place;

        delegate_block.terminator = Some(Terminator {
            source_info: self.source_info,
            kind: TerminatorKind::Goto { target: slice_block },
        });
        self.elaborator.patch().new_block(delegate_block)
    }
871
    /// Creates a pair of drop-loops of `place`, which drops its contents, even
    /// if one of the element drops panics: a forward loop for the happy path and
    /// a second loop (built with `Unwind::InCleanup`) that drops the remaining
    /// elements on the unwind path.
    ///
    /// Requires `self.place` to be exactly `*_n` (a deref of a slice pointer);
    /// anything else is an ICE.
    fn drop_loop_pair_for_slice(&mut self, ety: Ty<'tcx>) -> BasicBlock {
        debug!("drop_loop_pair_for_slice({:?})", ety);
        let tcx = self.tcx();
        // Loop state shared by both loops: total element count and current index.
        let len = self.new_temp(tcx.types.usize);
        let cur = self.new_temp(tcx.types.usize);

        // The unwind-path loop must exist first so the forward loop can target it
        // as its unwind destination.
        let unwind =
            self.unwind.map(|unwind| self.drop_loop(unwind, cur, len, ety, Unwind::InCleanup));

        let loop_block = self.drop_loop(self.succ, cur, len, ety, unwind);

        let [PlaceElem::Deref] = self.place.projection.as_slice() else {
            span_bug!(
                self.source_info.span,
                "Expected place for slice drop shim to be *_n, but it's {:?}",
                self.place,
            );
        };

        let zero = self.constant_usize(0);
        // Preamble block: `len = PtrMetadata(ptr)` (the slice length) and `cur = 0`,
        // then jump into the forward loop.
        let block = BasicBlockData {
            statements: vec![
                self.assign(
                    len.into(),
                    Rvalue::UnaryOp(
                        UnOp::PtrMetadata,
                        Operand::Copy(Place::from(self.place.local)),
                    ),
                ),
                self.assign(cur.into(), Rvalue::Use(zero)),
            ],
            is_cleanup: unwind.is_cleanup(),
            terminator: Some(Terminator {
                source_info: self.source_info,
                kind: TerminatorKind::Goto { target: loop_block },
            }),
        };

        let drop_block = self.elaborator.patch().new_block(block);
        // FIXME(#34708): handle partially-dropped array/slice elements.
        let reset_block = self.drop_flag_reset_block(DropFlagMode::Deep, drop_block, unwind);
        self.drop_flag_test_block(reset_block, self.succ, unwind)
    }
917
    /// The slow-path - create an "open", elaborated drop for a type
    /// which is moved-out-of only partially, and patch `bb` to a jump
    /// to it. This must not be called on ADTs with a destructor,
    /// as these can't be moved-out-of, except for `Box<T>`, which is
    /// special-cased.
    ///
    /// This creates a "drop ladder" that drops the needed fields of the
    /// ADT, both in the success case or if one of the destructors fail.
    fn open_drop(&mut self) -> BasicBlock {
        let ty = self.place_ty(self.place);
        // Dispatch on the type being dropped; each arm delegates to the matching
        // open-drop builder.
        match ty.kind() {
            // Closures drop like tuples of their captured upvars.
            ty::Closure(_, args) => self.open_drop_for_tuple(args.as_closure().upvar_tys()),
            ty::CoroutineClosure(_, args) => {
                self.open_drop_for_tuple(args.as_coroutine_closure().upvar_tys())
            }
            // Note that `elaborate_drops` only drops the upvars of a coroutine,
            // and this is ok because `open_drop` here can only be reached
            // within that own coroutine's resume function.
            // This should only happen for the self argument on the resume function.
            // It effectively only contains upvars until the coroutine transformation runs.
            // See librustc_body/transform/coroutine.rs for more details.
            ty::Coroutine(_, args) => self.open_drop_for_tuple(args.as_coroutine().upvar_tys()),
            ty::Tuple(fields) => self.open_drop_for_tuple(fields),
            ty::Adt(def, args) => self.open_drop_for_adt(*def, args),
            // Trait objects can't be moved out of partially; emit a complete drop.
            ty::Dynamic(..) => self.complete_drop(self.succ, self.unwind),
            ty::Array(ety, size) => {
                // `None` when the array length isn't a statically-known target usize.
                let size = size.try_to_target_usize(self.tcx());
                self.open_drop_for_array(ty, *ety, size)
            }
            ty::Slice(ety) => self.drop_loop_pair_for_slice(*ety),

            _ => span_bug!(self.source_info.span, "open drop from non-ADT `{:?}`", ty),
        }
    }
952
953    fn complete_drop(&mut self, succ: BasicBlock, unwind: Unwind) -> BasicBlock {
954        debug!("complete_drop(succ={:?}, unwind={:?})", succ, unwind);
955
956        let drop_block = self.drop_block(succ, unwind);
957
958        self.drop_flag_test_block(drop_block, succ, unwind)
959    }
960
961    /// Creates a block that resets the drop flag. If `mode` is deep, all children drop flags will
962    /// also be cleared.
963    fn drop_flag_reset_block(
964        &mut self,
965        mode: DropFlagMode,
966        succ: BasicBlock,
967        unwind: Unwind,
968    ) -> BasicBlock {
969        debug!("drop_flag_reset_block({:?},{:?})", self, mode);
970
971        if unwind.is_cleanup() {
972            // The drop flag isn't read again on the unwind path, so don't
973            // bother setting it.
974            return succ;
975        }
976        let block = self.new_block(unwind, TerminatorKind::Goto { target: succ });
977        let block_start = Location { block, statement_index: 0 };
978        self.elaborator.clear_drop_flag(block_start, self.path, mode);
979        block
980    }
981
982    fn elaborated_drop_block(&mut self) -> BasicBlock {
983        debug!("elaborated_drop_block({:?})", self);
984        let blk = self.drop_block(self.succ, self.unwind);
985        self.elaborate_drop(blk);
986        blk
987    }
988
989    fn drop_block(&mut self, target: BasicBlock, unwind: Unwind) -> BasicBlock {
990        let block = TerminatorKind::Drop {
991            place: self.place,
992            target,
993            unwind: unwind.into_action(),
994            replace: false,
995        };
996        self.new_block(unwind, block)
997    }
998
999    fn goto_block(&mut self, target: BasicBlock, unwind: Unwind) -> BasicBlock {
1000        let block = TerminatorKind::Goto { target };
1001        self.new_block(unwind, block)
1002    }
1003
1004    /// Returns the block to jump to in order to test the drop flag and execute the drop.
1005    ///
1006    /// Depending on the required `DropStyle`, this might be a generated block with an `if`
1007    /// terminator (for dynamic/open drops), or it might be `on_set` or `on_unset` itself, in case
1008    /// the drop can be statically determined.
1009    fn drop_flag_test_block(
1010        &mut self,
1011        on_set: BasicBlock,
1012        on_unset: BasicBlock,
1013        unwind: Unwind,
1014    ) -> BasicBlock {
1015        let style = self.elaborator.drop_style(self.path, DropFlagMode::Shallow);
1016        debug!(
1017            "drop_flag_test_block({:?},{:?},{:?},{:?}) - {:?}",
1018            self, on_set, on_unset, unwind, style
1019        );
1020
1021        match style {
1022            DropStyle::Dead => on_unset,
1023            DropStyle::Static => on_set,
1024            DropStyle::Conditional | DropStyle::Open => {
1025                let flag = self.elaborator.get_drop_flag(self.path).unwrap();
1026                let term = TerminatorKind::if_(flag, on_set, on_unset);
1027                self.new_block(unwind, term)
1028            }
1029        }
1030    }
1031
1032    fn new_block(&mut self, unwind: Unwind, k: TerminatorKind<'tcx>) -> BasicBlock {
1033        self.elaborator.patch().new_block(BasicBlockData {
1034            statements: vec![],
1035            terminator: Some(Terminator { source_info: self.source_info, kind: k }),
1036            is_cleanup: unwind.is_cleanup(),
1037        })
1038    }
1039
1040    fn new_temp(&mut self, ty: Ty<'tcx>) -> Local {
1041        self.elaborator.patch().new_temp(ty, self.source_info.span)
1042    }
1043
1044    fn constant_usize(&self, val: u16) -> Operand<'tcx> {
1045        Operand::Constant(Box::new(ConstOperand {
1046            span: self.source_info.span,
1047            user_ty: None,
1048            const_: Const::from_usize(self.tcx(), val.into()),
1049        }))
1050    }
1051
1052    fn assign(&self, lhs: Place<'tcx>, rhs: Rvalue<'tcx>) -> Statement<'tcx> {
1053        Statement {
1054            source_info: self.source_info,
1055            kind: StatementKind::Assign(Box::new((lhs, rhs))),
1056        }
1057    }
1058}