use std::{fmt, iter, mem};

use rustc_abi::{FIRST_VARIANT, FieldIdx, VariantIdx};
use rustc_hir::def::DefKind;
use rustc_hir::lang_items::LangItem;
use rustc_index::Idx;
use rustc_middle::mir::*;
use rustc_middle::ty::adjustment::PointerCoercion;
use rustc_middle::ty::util::IntTypeExt;
use rustc_middle::ty::{self, GenericArg, GenericArgsRef, Ty, TyCtxt};
use rustc_middle::{bug, span_bug, traits};
use rustc_span::DUMMY_SP;
use rustc_span::source_map::{Spanned, dummy_spanned};
use tracing::{debug, instrument};

use crate::patch::MirPatch;

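/// Describes how (or whether) a value should be dropped at a given drop site, as determined by
/// move/initialization analysis.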
#[derive(Debug)]
pub(crate) enum DropStyle {
    /// The value is statically known not to need dropping here (it was moved out or never
    /// initialized), so the drop becomes a plain jump.
    Dead,

    /// The value is known to always be initialized here, so it is dropped unconditionally.
    Static,

    /// Whether the value needs to be dropped is decided at runtime from its drop flag.
    Conditional,

    /// The drop is "opened up": each field/element is dropped separately, so parts that were
    /// individually moved out of are skipped.
    Open,
}

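/// Which drop flags an operation should affect or consult.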
#[derive(Debug)]
pub(crate) enum DropFlagMode {
    /// Affect only the top-level drop flag, not those of any contained fields.
    Shallow,
    /// Affect all nested drop flags in addition to the top-level one.
    Deep,
}

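/// Whether unwinding is possible from a drop, and where to unwind to if it panics.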
#[derive(Copy, Clone, Debug)]
pub(crate) enum Unwind {
    /// Unwind to the given block on panic.
    To(BasicBlock),
    /// Already on the unwind path; a further panic during the drop terminates the program.
    InCleanup,
}

impl Unwind {
    fn is_cleanup(self) -> bool {
        match self {
            Unwind::To(..) => false,
            Unwind::InCleanup => true,
        }
    }

    fn into_action(self) -> UnwindAction {
        match self {
            Unwind::To(bb) => UnwindAction::Cleanup(bb),
            Unwind::InCleanup => UnwindAction::Terminate(UnwindTerminateReason::InCleanup),
        }
    }

    fn map<F>(self, f: F) -> Self
    where
        F: FnOnce(BasicBlock) -> BasicBlock,
    {
        match self {
            Unwind::To(bb) => Unwind::To(f(bb)),
            Unwind::InCleanup => Unwind::InCleanup,
        }
    }
}

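/// The interface through which drop elaboration queries move/initialization information and
/// records its changes to the MIR body.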
pub(crate) trait DropElaborator<'a, 'tcx>: fmt::Debug {
    /// The type of move paths that can be tracked individually (e.g. a field chain such as
    /// `a.b.c`). Implementations that do not track individual paths may use a unit-like type.
    type Path: Copy + fmt::Debug;

    // Accessors

    fn patch_ref(&self) -> &MirPatch<'tcx>;
    fn patch(&mut self) -> &mut MirPatch<'tcx>;
    fn body(&self) -> &'a Body<'tcx>;
    fn tcx(&self) -> TyCtxt<'tcx>;
    fn typing_env(&self) -> ty::TypingEnv<'tcx>;
    fn allow_async_drops(&self) -> bool;

    /// Returns the `Location` of the terminator of block `bb`.
    fn terminator_loc(&self, bb: BasicBlock) -> Location;

    // Drop logic

    /// Returns how `path` should be dropped, given which drop flags `mode` takes into account.
    fn drop_style(&self, path: Self::Path, mode: DropFlagMode) -> DropStyle;

    /// Returns the drop flag of `path` as a MIR `Operand`, or `None` if it isn't tracked.
    fn get_drop_flag(&mut self, path: Self::Path) -> Option<Operand<'tcx>>;

    /// Modifies the MIR patch so that the drop flag(s) selected by `mode` are cleared at `location`.
    fn clear_drop_flag(&mut self, location: Location, path: Self::Path, mode: DropFlagMode);

    // Subpaths

    /// Returns the subpath for a field of `path`, or `None` if the field is not tracked separately.
    fn field_subpath(&self, path: Self::Path, field: FieldIdx) -> Option<Self::Path>;

    /// Returns the subpath for a dereference of `path`, or `None` if it is not tracked separately.
    fn deref_subpath(&self, path: Self::Path) -> Option<Self::Path>;

    /// Returns the subpath for a downcast of `path` to the given variant.
    fn downcast_subpath(&self, path: Self::Path, variant: VariantIdx) -> Option<Self::Path>;

    /// Returns the subpath for an array element of `path`.
    fn array_subpath(&self, path: Self::Path, index: u64, size: u64) -> Option<Self::Path>;
}

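/// The state for elaborating one drop: the place being dropped, its move path, and the
/// control-flow targets for normal continuation, unwinding, and (for async drops) the dropline.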
#[derive(Debug)]
struct DropCtxt<'a, 'b, 'tcx, D>
where
    D: DropElaborator<'b, 'tcx>,
{
    elaborator: &'a mut D,

    source_info: SourceInfo,

    place: Place<'tcx>,
    path: D::Path,
    succ: BasicBlock,
    unwind: Unwind,
    dropline: Option<BasicBlock>,
}

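/// "Elaborates" a drop of `place`/`path` and patches `bb`'s terminator to execute it.
///
/// The passed `elaborator` decides whether the drop can be statically determined, whether it
/// needs a runtime drop flag, and whether it must be "opened up" into drops of the value's
/// fields. When this returns, the required changes have been recorded in the elaborator's
/// `MirPatch`.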
pub(crate) fn elaborate_drop<'b, 'tcx, D>(
    elaborator: &mut D,
    source_info: SourceInfo,
    place: Place<'tcx>,
    path: D::Path,
    succ: BasicBlock,
    unwind: Unwind,
    bb: BasicBlock,
    dropline: Option<BasicBlock>,
) where
    D: DropElaborator<'b, 'tcx>,
    'tcx: 'b,
{
    DropCtxt { elaborator, source_info, place, path, succ, unwind, dropline }.elaborate_drop(bb)
}

impl<'a, 'b, 'tcx, D> DropCtxt<'a, 'b, 'tcx, D>
where
    D: DropElaborator<'b, 'tcx>,
    'tcx: 'b,
{
    #[instrument(level = "trace", skip(self), ret)]
    fn place_ty(&self, place: Place<'tcx>) -> Ty<'tcx> {
        if place.local < self.elaborator.body().local_decls.next_index() {
            place.ty(self.elaborator.body(), self.tcx()).ty
        } else {
            // The local is a temporary created by the patch and is not yet present in the body,
            // so its type has to come from the patch itself.
            PlaceTy::from_ty(self.elaborator.patch_ref().local_ty(place.local))
                .multi_projection_ty(self.elaborator.tcx(), place.projection)
                .ty
        }
    }

    fn tcx(&self) -> TyCtxt<'tcx> {
        self.elaborator.tcx()
    }

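    // Builds three blocks that drop `place` asynchronously:
    // * pin_obj_bb:   `obj_ref = &mut place; pin_obj = Pin::new_unchecked(obj_ref)`
    // * call_drop_bb: `fut = <T as AsyncDrop>::drop(pin_obj)` or `fut = async_drop_in_place::<T>(ptr)`
    // * drop_term_bb: a `Drop` terminator that polls `fut`; it is expanded into a poll loop later,
    //   during coroutine state transformation.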
    fn build_async_drop(
        &mut self,
        place: Place<'tcx>,
        drop_ty: Ty<'tcx>,
        bb: Option<BasicBlock>,
        succ: BasicBlock,
        unwind: Unwind,
        dropline: Option<BasicBlock>,
        call_destructor_only: bool,
    ) -> BasicBlock {
        let tcx = self.tcx();
        let span = self.source_info.span;

        let pin_obj_bb = bb.unwrap_or_else(|| {
            self.elaborator.patch().new_block(BasicBlockData::new(
                Some(Terminator {
                    source_info: self.source_info,
                    // Placeholder terminator; it is replaced below via `patch_terminator`.
                    kind: TerminatorKind::Return,
                }),
                false,
            ))
        });

        let (fut_ty, drop_fn_def_id, trait_args) = if call_destructor_only {
            let trait_ref =
                ty::TraitRef::new(tcx, tcx.require_lang_item(LangItem::AsyncDrop, span), [drop_ty]);
            let (drop_trait, trait_args) = match tcx.codegen_select_candidate(
                ty::TypingEnv::fully_monomorphized().as_query_input(trait_ref),
            ) {
                Ok(traits::ImplSource::UserDefined(traits::ImplSourceUserDefinedData {
                    impl_def_id,
                    args,
                    ..
                })) => (*impl_def_id, *args),
                impl_source => {
                    span_bug!(span, "invalid `AsyncDrop` impl_source: {:?}", impl_source);
                }
            };
            let Some(drop_fn_def_id) = tcx
                .associated_item_def_ids(drop_trait)
                .first()
                .and_then(|def_id| {
                    if tcx.def_kind(def_id) == DefKind::AssocFn
                        && tcx.check_args_compatible(*def_id, trait_args)
                    {
                        Some(def_id)
                    } else {
                        None
                    }
                })
                .copied()
            else {
                tcx.dcx().span_delayed_bug(
                    self.elaborator.body().span,
                    "AsyncDrop type without correct `async fn drop(...)`.",
                );
                self.elaborator.patch().patch_terminator(
                    pin_obj_bb,
                    TerminatorKind::Drop {
                        place,
                        target: succ,
                        unwind: unwind.into_action(),
                        replace: false,
                        drop: None,
                        async_fut: None,
                    },
                );
                return pin_obj_bb;
            };
            let drop_fn = Ty::new_fn_def(tcx, drop_fn_def_id, trait_args);
            let sig = drop_fn.fn_sig(tcx);
            let sig = tcx.instantiate_bound_regions_with_erased(sig);
            (sig.output(), drop_fn_def_id, trait_args)
        } else {
            let drop_fn_def_id = tcx.require_lang_item(LangItem::AsyncDropInPlace, span);
            let trait_args = tcx.mk_args(&[drop_ty.into()]);
            let sig = tcx.fn_sig(drop_fn_def_id).instantiate(tcx, trait_args);
            let sig = tcx.instantiate_bound_regions_with_erased(sig);
            (sig.output(), drop_fn_def_id, trait_args)
        };

        let fut = Place::from(self.new_temp(fut_ty));

        // `obj_ref = &mut place`
        let obj_ref_ty = Ty::new_mut_ref(tcx, tcx.lifetimes.re_erased, drop_ty);
        let obj_ref_place = Place::from(self.new_temp(obj_ref_ty));

        let term_loc = self.elaborator.terminator_loc(pin_obj_bb);
        self.elaborator.patch().add_assign(
            term_loc,
            obj_ref_place,
            Rvalue::Ref(
                tcx.lifetimes.re_erased,
                BorrowKind::Mut { kind: MutBorrowKind::Default },
                place,
            ),
        );

        // Prepare `Pin::new_unchecked` and the place holding the pinned object.
        let pin_obj_new_unchecked_fn = Ty::new_fn_def(
            tcx,
            tcx.require_lang_item(LangItem::PinNewUnchecked, span),
            [GenericArg::from(obj_ref_ty)],
        );
        let pin_obj_ty = pin_obj_new_unchecked_fn.fn_sig(tcx).output().no_bound_vars().unwrap();
        let pin_obj_place = Place::from(self.new_temp(pin_obj_ty));
        let pin_obj_new_unchecked_fn = Operand::Constant(Box::new(ConstOperand {
            span,
            user_ty: None,
            const_: Const::zero_sized(pin_obj_new_unchecked_fn),
        }));

        // The terminal block: a `Drop` that polls the returned future.
        let drop_term_bb = self.new_block(
            unwind,
            TerminatorKind::Drop {
                place,
                target: succ,
                unwind: unwind.into_action(),
                replace: false,
                drop: dropline,
                async_fut: Some(fut.local),
            },
        );

        let mut call_statements = Vec::new();
        let drop_arg = if call_destructor_only {
            pin_obj_place
        } else {
            let ty::Adt(adt_def, adt_args) = pin_obj_ty.kind() else {
                bug!();
            };
            let obj_ptr_ty = Ty::new_mut_ptr(tcx, drop_ty);
            let unwrap_ty = adt_def.non_enum_variant().fields[FieldIdx::ZERO].ty(tcx, adt_args);
            let obj_ref_place = Place::from(self.new_temp(unwrap_ty));
            call_statements.push(self.assign(
                obj_ref_place,
                Rvalue::Use(Operand::Copy(tcx.mk_place_field(
                    pin_obj_place,
                    FieldIdx::ZERO,
                    unwrap_ty,
                ))),
            ));

            let obj_ptr_place = Place::from(self.new_temp(obj_ptr_ty));

            let addr = Rvalue::RawPtr(RawPtrKind::Mut, tcx.mk_place_deref(obj_ref_place));
            call_statements.push(self.assign(obj_ptr_place, addr));
            obj_ptr_place
        };
        call_statements
            .push(Statement::new(self.source_info, StatementKind::StorageLive(fut.local)));

        let call_drop_bb = self.new_block_with_statements(
            unwind,
            call_statements,
            TerminatorKind::Call {
                func: Operand::function_handle(tcx, drop_fn_def_id, trait_args, span),
                args: [Spanned { node: Operand::Move(drop_arg), span: DUMMY_SP }].into(),
                destination: fut,
                target: Some(drop_term_bb),
                unwind: unwind.into_action(),
                call_source: CallSource::Misc,
                fn_span: self.source_info.span,
            },
        );

        // `StorageDead(fut)` on every path out of the drop terminator.
        self.elaborator.patch().add_statement(
            Location { block: self.succ, statement_index: 0 },
            StatementKind::StorageDead(fut.local),
        );
        if let Unwind::To(block) = unwind {
            self.elaborator.patch().add_statement(
                Location { block, statement_index: 0 },
                StatementKind::StorageDead(fut.local),
            );
        }
        if let Some(block) = dropline {
            self.elaborator.patch().add_statement(
                Location { block, statement_index: 0 },
                StatementKind::StorageDead(fut.local),
            );
        }

        // Replace `pin_obj_bb`'s placeholder terminator with the call to `Pin::new_unchecked`.
        self.elaborator.patch().patch_terminator(
            pin_obj_bb,
            TerminatorKind::Call {
                func: pin_obj_new_unchecked_fn,
                args: [dummy_spanned(Operand::Move(obj_ref_place))].into(),
                destination: pin_obj_place,
                target: Some(call_drop_bb),
                unwind: unwind.into_action(),
                call_source: CallSource::Misc,
                fn_span: span,
            },
        );
        pin_obj_bb
    }

    fn build_drop(&mut self, bb: BasicBlock) {
        let drop_ty = self.place_ty(self.place);
        if self.tcx().features().async_drop()
            && self.elaborator.body().coroutine.is_some()
            && self.elaborator.allow_async_drops()
            && !self.elaborator.patch_ref().block(self.elaborator.body(), bb).is_cleanup
            && drop_ty.needs_async_drop(self.tcx(), self.elaborator.typing_env())
        {
            self.build_async_drop(
                self.place,
                drop_ty,
                Some(bb),
                self.succ,
                self.unwind,
                self.dropline,
                false,
            );
        } else {
            self.elaborator.patch().patch_terminator(
                bb,
                TerminatorKind::Drop {
                    place: self.place,
                    target: self.succ,
                    unwind: self.unwind.into_action(),
                    replace: false,
                    drop: None,
                    async_fut: None,
                },
            );
        }
    }

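    /// Elaborates the drop instruction located at `bb`, patching over it.
    ///
    /// The elaborated drop consults the drop flags so that only initialized data is dropped, and
    /// clears the relevant flags afterwards to avoid double drops. Depending on the computed
    /// `DropStyle`, `bb` becomes a plain goto (dead), an unconditional drop (static), a
    /// flag-guarded drop (conditional), or a fully expanded field-by-field drop (open).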
    #[instrument(level = "debug")]
    fn elaborate_drop(&mut self, bb: BasicBlock) {
        match self.elaborator.drop_style(self.path, DropFlagMode::Deep) {
            DropStyle::Dead => {
                self.elaborator
                    .patch()
                    .patch_terminator(bb, TerminatorKind::Goto { target: self.succ });
            }
            DropStyle::Static => {
                self.build_drop(bb);
            }
            DropStyle::Conditional => {
                let drop_bb = self.complete_drop(self.succ, self.unwind);
                self.elaborator
                    .patch()
                    .patch_terminator(bb, TerminatorKind::Goto { target: drop_bb });
            }
            DropStyle::Open => {
                let drop_bb = self.open_drop();
                self.elaborator
                    .patch()
                    .patch_terminator(bb, TerminatorKind::Goto { target: drop_bb });
            }
        }
    }

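    /// Returns the place and move subpath for each field of `variant`; the subpath is `None` for
    /// fields that are not tracked individually.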
    fn move_paths_for_fields(
        &self,
        base_place: Place<'tcx>,
        variant_path: D::Path,
        variant: &'tcx ty::VariantDef,
        args: GenericArgsRef<'tcx>,
    ) -> Vec<(Place<'tcx>, Option<D::Path>)> {
        variant
            .fields
            .iter_enumerated()
            .map(|(field_idx, field)| {
                let subpath = self.elaborator.field_subpath(variant_path, field_idx);
                let tcx = self.tcx();

                assert_eq!(self.elaborator.typing_env().typing_mode, ty::TypingMode::PostAnalysis);
                let field_ty = match tcx.try_normalize_erasing_regions(
                    self.elaborator.typing_env(),
                    field.ty(tcx, args),
                ) {
                    Ok(t) => t,
                    Err(_) => Ty::new_error(
                        self.tcx(),
                        self.tcx().dcx().span_delayed_bug(
                            self.elaborator.body().span,
                            "Error normalizing in drop elaboration.",
                        ),
                    ),
                };

                (tcx.mk_place_field(base_place, field_idx, field_ty), subpath)
            })
            .collect()
    }

    fn drop_subpath(
        &mut self,
        place: Place<'tcx>,
        path: Option<D::Path>,
        succ: BasicBlock,
        unwind: Unwind,
        dropline: Option<BasicBlock>,
    ) -> BasicBlock {
        if let Some(path) = path {
            debug!("drop_subpath: for std field {:?}", place);

            DropCtxt {
                elaborator: self.elaborator,
                source_info: self.source_info,
                path,
                place,
                succ,
                unwind,
                dropline,
            }
            .elaborated_drop_block()
        } else {
            debug!("drop_subpath: for rest field {:?}", place);

            DropCtxt {
                elaborator: self.elaborator,
                source_info: self.source_info,
                place,
                succ,
                unwind,
                dropline,
                // Using `self.path` here conditions the drop on our own drop flag.
                path: self.path,
            }
            .complete_drop(succ, unwind)
        }
    }

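    /// Creates one half of the drop ladder for a list of fields: a chain of blocks, each dropping
    /// one field and continuing to the previous block. `unwind_ladder` and `dropline_ladder` give,
    /// per step, where to go if that step panics or (for async drops) if the coroutine is dropped
    /// at that point. Returns the blocks, starting with `succ` and ending with the ladder's entry.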
    fn drop_halfladder(
        &mut self,
        unwind_ladder: &[Unwind],
        dropline_ladder: &[Option<BasicBlock>],
        mut succ: BasicBlock,
        fields: &[(Place<'tcx>, Option<D::Path>)],
    ) -> Vec<BasicBlock> {
        iter::once(succ)
            .chain(itertools::izip!(fields.iter().rev(), unwind_ladder, dropline_ladder).map(
                |(&(place, path), &unwind_succ, &dropline_to)| {
                    succ = self.drop_subpath(place, path, succ, unwind_succ, dropline_to);
                    succ
                },
            ))
            .collect()
    }

    fn drop_ladder_bottom(&mut self) -> (BasicBlock, Unwind, Option<BasicBlock>) {
        // Clear the "master" drop flag at the end. This is needed because the "master" drop
        // protects the ADT's discriminant, which is invalidated after the ADT is dropped.
        (
            self.drop_flag_reset_block(DropFlagMode::Shallow, self.succ, self.unwind),
            self.unwind,
            self.dropline,
        )
    }

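    /// Creates a full drop ladder for `fields`: the normal half-ladder plus, when needed, matching
    /// half-ladders for the unwind path and the async-drop dropline, so that a panic while dropping
    /// one field still drops the remaining fields. Returns the entry of each half-ladder.
    ///
    /// NOTE: this does not clear the master drop flag, so `succ`/`unwind` should normally come
    /// from `drop_ladder_bottom`.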
    fn drop_ladder(
        &mut self,
        fields: Vec<(Place<'tcx>, Option<D::Path>)>,
        succ: BasicBlock,
        unwind: Unwind,
        dropline: Option<BasicBlock>,
    ) -> (BasicBlock, Unwind, Option<BasicBlock>) {
        debug!("drop_ladder({:?}, {:?})", self, fields);
        assert!(
            if unwind.is_cleanup() { dropline.is_none() } else { true },
            "Dropline is set for cleanup drop ladder"
        );

        let mut fields = fields;
        fields.retain(|&(place, _)| {
            self.place_ty(place).needs_drop(self.tcx(), self.elaborator.typing_env())
        });

        debug!("drop_ladder - fields needing drop: {:?}", fields);

        let dropline_ladder: Vec<Option<BasicBlock>> = vec![None; fields.len() + 1];
        let unwind_ladder = vec![Unwind::InCleanup; fields.len() + 1];
        let unwind_ladder: Vec<_> = if let Unwind::To(succ) = unwind {
            let halfladder = self.drop_halfladder(&unwind_ladder, &dropline_ladder, succ, &fields);
            halfladder.into_iter().map(Unwind::To).collect()
        } else {
            unwind_ladder
        };
        let dropline_ladder: Vec<_> = if let Some(succ) = dropline {
            let halfladder = self.drop_halfladder(&unwind_ladder, &dropline_ladder, succ, &fields);
            halfladder.into_iter().map(Some).collect()
        } else {
            dropline_ladder
        };

        let normal_ladder = self.drop_halfladder(&unwind_ladder, &dropline_ladder, succ, &fields);

        (
            *normal_ladder.last().unwrap(),
            *unwind_ladder.last().unwrap(),
            *dropline_ladder.last().unwrap(),
        )
    }

    fn open_drop_for_tuple(&mut self, tys: &[Ty<'tcx>]) -> BasicBlock {
        debug!("open_drop_for_tuple({:?}, {:?})", self, tys);

        let fields = tys
            .iter()
            .enumerate()
            .map(|(i, &ty)| {
                (
                    self.tcx().mk_place_field(self.place, FieldIdx::new(i), ty),
                    self.elaborator.field_subpath(self.path, FieldIdx::new(i)),
                )
            })
            .collect();

        let (succ, unwind, dropline) = self.drop_ladder_bottom();
        self.drop_ladder(fields, succ, unwind, dropline).0
    }

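    /// Drops the value a `Box` points to, without freeing the allocation; the box's own
    /// destructor (which frees it) is scheduled separately by the caller.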
    #[instrument(level = "debug", ret)]
    fn open_drop_for_box_contents(
        &mut self,
        adt: ty::AdtDef<'tcx>,
        args: GenericArgsRef<'tcx>,
        succ: BasicBlock,
        unwind: Unwind,
        dropline: Option<BasicBlock>,
    ) -> BasicBlock {
        // The box cannot be dereferenced directly here, so project to its `Unique`/`NonNull`
        // field, transmute that to a raw pointer, and drop `*ptr`.
        let unique_ty = adt.non_enum_variant().fields[FieldIdx::ZERO].ty(self.tcx(), args);
        let unique_variant = unique_ty.ty_adt_def().unwrap().non_enum_variant();
        let nonnull_ty = unique_variant.fields[FieldIdx::ZERO].ty(self.tcx(), args);
        let ptr_ty = Ty::new_imm_ptr(self.tcx(), args[0].expect_ty());

        let unique_place = self.tcx().mk_place_field(self.place, FieldIdx::ZERO, unique_ty);
        let nonnull_place = self.tcx().mk_place_field(unique_place, FieldIdx::ZERO, nonnull_ty);

        let ptr_local = self.new_temp(ptr_ty);

        let interior = self.tcx().mk_place_deref(Place::from(ptr_local));
        let interior_path = self.elaborator.deref_subpath(self.path);

        let do_drop_bb = self.drop_subpath(interior, interior_path, succ, unwind, dropline);

        let setup_bbd = BasicBlockData::new_stmts(
            vec![self.assign(
                Place::from(ptr_local),
                Rvalue::Cast(CastKind::Transmute, Operand::Copy(nonnull_place), ptr_ty),
            )],
            Some(Terminator {
                kind: TerminatorKind::Goto { target: do_drop_bb },
                source_info: self.source_info,
            }),
            unwind.is_cleanup(),
        );
        self.elaborator.patch().new_block(setup_bbd)
    }

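    /// Builds the open drop for an ADT: its `Drop` impl (if any) is invoked first, then the
    /// still-initialized fields of the active variant are dropped. `Box` is handled specially so
    /// that its contents are dropped before the allocation is freed.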
    #[instrument(level = "debug", ret)]
    fn open_drop_for_adt(
        &mut self,
        adt: ty::AdtDef<'tcx>,
        args: GenericArgsRef<'tcx>,
    ) -> BasicBlock {
        if adt.variants().is_empty() {
            return self.elaborator.patch().new_block(BasicBlockData::new(
                Some(Terminator {
                    source_info: self.source_info,
                    kind: TerminatorKind::Unreachable,
                }),
                self.unwind.is_cleanup(),
            ));
        }

        let skip_contents = adt.is_union() || adt.is_manually_drop();
        let contents_drop = if skip_contents {
            if adt.has_dtor(self.tcx()) && self.elaborator.get_drop_flag(self.path).is_some() {
                // A drop flag here would mean the value was partially moved out of, which must
                // not happen for a union (or `ManuallyDrop`) with a destructor.
                span_bug!(self.source_info.span, "open dropping partially moved union");
            }

            (self.succ, self.unwind, self.dropline)
        } else {
            self.open_drop_for_adt_contents(adt, args)
        };

        if adt.has_dtor(self.tcx()) {
            let destructor_block = if adt.is_box() {
                // For a `Box`, drop the contents first and then call the destructor, which frees
                // the allocation.
                let succ = self.destructor_call_block_sync((contents_drop.0, contents_drop.1));
                let unwind = contents_drop
                    .1
                    .map(|unwind| self.destructor_call_block_sync((unwind, Unwind::InCleanup)));
                let dropline = contents_drop
                    .2
                    .map(|dropline| self.destructor_call_block_sync((dropline, contents_drop.1)));
                self.open_drop_for_box_contents(adt, args, succ, unwind, dropline)
            } else {
                self.destructor_call_block(contents_drop)
            };

            self.drop_flag_test_block(destructor_block, contents_drop.0, contents_drop.1)
        } else {
            contents_drop.0
        }
    }

    fn open_drop_for_adt_contents(
        &mut self,
        adt: ty::AdtDef<'tcx>,
        args: GenericArgsRef<'tcx>,
    ) -> (BasicBlock, Unwind, Option<BasicBlock>) {
        let (succ, unwind, dropline) = self.drop_ladder_bottom();
        if !adt.is_enum() {
            let fields =
                self.move_paths_for_fields(self.place, self.path, adt.variant(FIRST_VARIANT), args);
            self.drop_ladder(fields, succ, unwind, dropline)
        } else {
            self.open_drop_for_multivariant(adt, args, succ, unwind, dropline)
        }
    }

    fn open_drop_for_multivariant(
        &mut self,
        adt: ty::AdtDef<'tcx>,
        args: GenericArgsRef<'tcx>,
        succ: BasicBlock,
        unwind: Unwind,
        dropline: Option<BasicBlock>,
    ) -> (BasicBlock, Unwind, Option<BasicBlock>) {
        let mut values = Vec::with_capacity(adt.variants().len());
        let mut normal_blocks = Vec::with_capacity(adt.variants().len());
        let mut unwind_blocks =
            if unwind.is_cleanup() { None } else { Some(Vec::with_capacity(adt.variants().len())) };
        let mut dropline_blocks =
            if dropline.is_none() { None } else { Some(Vec::with_capacity(adt.variants().len())) };

        let mut have_otherwise_with_drop_glue = false;
        let mut have_otherwise = false;
        let tcx = self.tcx();

        for (variant_index, discr) in adt.discriminants(tcx) {
            let variant = &adt.variant(variant_index);
            let subpath = self.elaborator.downcast_subpath(self.path, variant_index);

            if let Some(variant_path) = subpath {
                let base_place = tcx.mk_place_elem(
                    self.place,
                    ProjectionElem::Downcast(Some(variant.name), variant_index),
                );
                let fields = self.move_paths_for_fields(base_place, variant_path, variant, args);
                values.push(discr.val);
                if let Unwind::To(unwind) = unwind {
                    // Don't reuse the normal drop ladder's unwind half-ladder here: that would
                    // give one unwind funclet two successor funclets, which MSVC-style exception
                    // handling does not allow. Build a separate unwind half-ladder per variant.
                    let unwind_blocks = unwind_blocks.as_mut().unwrap();
                    let unwind_ladder = vec![Unwind::InCleanup; fields.len() + 1];
                    let dropline_ladder: Vec<Option<BasicBlock>> = vec![None; fields.len() + 1];
                    let halfladder =
                        self.drop_halfladder(&unwind_ladder, &dropline_ladder, unwind, &fields);
                    unwind_blocks.push(halfladder.last().cloned().unwrap());
                }
                let (normal, _, drop_bb) = self.drop_ladder(fields, succ, unwind, dropline);
                normal_blocks.push(normal);
                if dropline.is_some() {
                    dropline_blocks.as_mut().unwrap().push(drop_bb.unwrap());
                }
            } else {
                have_otherwise = true;

                let typing_env = self.elaborator.typing_env();
                let have_field_with_drop_glue = variant
                    .fields
                    .iter()
                    .any(|field| field.ty(tcx, args).needs_drop(tcx, typing_env));
                if have_field_with_drop_glue {
                    have_otherwise_with_drop_glue = true;
                }
            }
        }

        if !have_otherwise {
            values.pop();
        } else if !have_otherwise_with_drop_glue {
            normal_blocks.push(self.goto_block(succ, unwind));
            if let Unwind::To(unwind) = unwind {
                unwind_blocks.as_mut().unwrap().push(self.goto_block(unwind, Unwind::InCleanup));
            }
        } else {
            normal_blocks.push(self.drop_block(succ, unwind));
            if let Unwind::To(unwind) = unwind {
                unwind_blocks.as_mut().unwrap().push(self.drop_block(unwind, Unwind::InCleanup));
            }
        }

        (
            self.adt_switch_block(adt, normal_blocks, &values, succ, unwind),
            unwind.map(|unwind| {
                self.adt_switch_block(
                    adt,
                    unwind_blocks.unwrap(),
                    &values,
                    unwind,
                    Unwind::InCleanup,
                )
            }),
            dropline.map(|dropline| {
                self.adt_switch_block(adt, dropline_blocks.unwrap(), &values, dropline, unwind)
            }),
        )
    }

    fn adt_switch_block(
        &mut self,
        adt: ty::AdtDef<'tcx>,
        blocks: Vec<BasicBlock>,
        values: &[u128],
        succ: BasicBlock,
        unwind: Unwind,
    ) -> BasicBlock {
        let discr_ty = adt.repr().discr_type().to_ty(self.tcx());
        let discr = Place::from(self.new_temp(discr_ty));
        let discr_rv = Rvalue::Discriminant(self.place);
        let switch_block = BasicBlockData::new_stmts(
            vec![self.assign(discr, discr_rv)],
            Some(Terminator {
                source_info: self.source_info,
                kind: TerminatorKind::SwitchInt {
                    discr: Operand::Move(discr),
                    targets: SwitchTargets::new(
                        values.iter().copied().zip(blocks.iter().copied()),
                        *blocks.last().unwrap(),
                    ),
                },
            }),
            unwind.is_cleanup(),
        );
        let switch_block = self.elaborator.patch().new_block(switch_block);
        self.drop_flag_test_block(switch_block, succ, unwind)
    }

    fn destructor_call_block_sync(&mut self, (succ, unwind): (BasicBlock, Unwind)) -> BasicBlock {
        debug!("destructor_call_block_sync({:?}, {:?})", self, succ);
        let tcx = self.tcx();
        let drop_trait = tcx.require_lang_item(LangItem::Drop, DUMMY_SP);
        let drop_fn = tcx.associated_item_def_ids(drop_trait)[0];
        let ty = self.place_ty(self.place);

        let ref_ty = Ty::new_mut_ref(tcx, tcx.lifetimes.re_erased, ty);
        let ref_place = self.new_temp(ref_ty);
        let unit_temp = Place::from(self.new_temp(tcx.types.unit));

        let result = BasicBlockData::new_stmts(
            vec![self.assign(
                Place::from(ref_place),
                Rvalue::Ref(
                    tcx.lifetimes.re_erased,
                    BorrowKind::Mut { kind: MutBorrowKind::Default },
                    self.place,
                ),
            )],
            Some(Terminator {
                kind: TerminatorKind::Call {
                    func: Operand::function_handle(
                        tcx,
                        drop_fn,
                        [ty.into()],
                        self.source_info.span,
                    ),
                    args: [Spanned { node: Operand::Move(Place::from(ref_place)), span: DUMMY_SP }]
                        .into(),
                    destination: unit_temp,
                    target: Some(succ),
                    unwind: unwind.into_action(),
                    call_source: CallSource::Misc,
                    fn_span: self.source_info.span,
                },
                source_info: self.source_info,
            }),
            unwind.is_cleanup(),
        );

        self.elaborator.patch().new_block(result)
    }

    fn destructor_call_block(
        &mut self,
        (succ, unwind, dropline): (BasicBlock, Unwind, Option<BasicBlock>),
    ) -> BasicBlock {
        debug!("destructor_call_block({:?}, {:?})", self, succ);
        let ty = self.place_ty(self.place);
        if self.tcx().features().async_drop()
            && self.elaborator.body().coroutine.is_some()
            && self.elaborator.allow_async_drops()
            && !unwind.is_cleanup()
            && ty.is_async_drop(self.tcx(), self.elaborator.typing_env())
        {
            self.build_async_drop(self.place, ty, None, succ, unwind, dropline, true)
        } else {
            self.destructor_call_block_sync((succ, unwind))
        }
    }

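    /// Creates a loop that drops the elements of `self.place` through a raw pointer:
    ///
    /// loop-block:
    ///    can_go = cur == len
    ///    if can_go then succ else drop-block
    /// drop-block:
    ///    ptr = &raw mut P[cur]
    ///    cur = cur + 1
    ///    drop(*ptr), continuing to loop-block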
    fn drop_loop(
        &mut self,
        succ: BasicBlock,
        cur: Local,
        len: Local,
        ety: Ty<'tcx>,
        unwind: Unwind,
        dropline: Option<BasicBlock>,
    ) -> BasicBlock {
        let copy = |place: Place<'tcx>| Operand::Copy(place);
        let move_ = |place: Place<'tcx>| Operand::Move(place);
        let tcx = self.tcx();

        let ptr_ty = Ty::new_mut_ptr(tcx, ety);
        let ptr = Place::from(self.new_temp(ptr_ty));
        let can_go = Place::from(self.new_temp(tcx.types.bool));
        let one = self.constant_usize(1);

        let drop_block = BasicBlockData::new_stmts(
            vec![
                self.assign(
                    ptr,
                    Rvalue::RawPtr(RawPtrKind::Mut, tcx.mk_place_index(self.place, cur)),
                ),
                self.assign(
                    cur.into(),
                    Rvalue::BinaryOp(BinOp::Add, Box::new((move_(cur.into()), one))),
                ),
            ],
            Some(Terminator {
                source_info: self.source_info,
                // Placeholder terminator; replaced below with either an async drop or a
                // `Drop` of `*ptr`.
                kind: TerminatorKind::Unreachable,
            }),
            unwind.is_cleanup(),
        );
        let drop_block = self.elaborator.patch().new_block(drop_block);

        let loop_block = BasicBlockData::new_stmts(
            vec![self.assign(
                can_go,
                Rvalue::BinaryOp(BinOp::Eq, Box::new((copy(Place::from(cur)), copy(len.into())))),
            )],
            Some(Terminator {
                source_info: self.source_info,
                kind: TerminatorKind::if_(move_(can_go), succ, drop_block),
            }),
            unwind.is_cleanup(),
        );
        let loop_block = self.elaborator.patch().new_block(loop_block);

        let place = tcx.mk_place_deref(ptr);
        if self.tcx().features().async_drop()
            && self.elaborator.body().coroutine.is_some()
            && self.elaborator.allow_async_drops()
            && !unwind.is_cleanup()
            && ety.needs_async_drop(self.tcx(), self.elaborator.typing_env())
        {
            self.build_async_drop(
                place,
                ety,
                Some(drop_block),
                loop_block,
                unwind,
                dropline,
                false,
            );
        } else {
            self.elaborator.patch().patch_terminator(
                drop_block,
                TerminatorKind::Drop {
                    place,
                    target: loop_block,
                    unwind: unwind.into_action(),
                    replace: false,
                    drop: None,
                    async_fut: None,
                },
            );
        }
        loop_block
    }

    fn open_drop_for_array(
        &mut self,
        array_ty: Ty<'tcx>,
        ety: Ty<'tcx>,
        opt_size: Option<u64>,
    ) -> BasicBlock {
        debug!("open_drop_for_array({:?}, {:?}, {:?})", array_ty, ety, opt_size);
        let tcx = self.tcx();

        if let Some(size) = opt_size {
            enum ProjectionKind<Path> {
                Drop(std::ops::Range<u64>),
                Keep(u64, Path),
            }
            // For arrays of known size, split the elements into maximal runs with no individually
            // tracked move path (dropped together as a subslice) and single elements that do have
            // one (dropped individually, guarded by their own drop flag).
            let mut drop_ranges = vec![];
            let mut dropping = true;
            let mut start = 0;
            for i in 0..size {
                let path = self.elaborator.array_subpath(self.path, i, size);
                if dropping && path.is_some() {
                    drop_ranges.push(ProjectionKind::Drop(start..i));
                    dropping = false;
                } else if !dropping && path.is_none() {
                    dropping = true;
                    start = i;
                }
                if let Some(path) = path {
                    drop_ranges.push(ProjectionKind::Keep(i, path));
                }
            }
            if !drop_ranges.is_empty() {
                if dropping {
                    drop_ranges.push(ProjectionKind::Drop(start..size));
                }
                let fields = drop_ranges
                    .iter()
                    .rev()
                    .map(|p| {
                        let (project, path) = match p {
                            ProjectionKind::Drop(r) => (
                                ProjectionElem::Subslice {
                                    from: r.start,
                                    to: r.end,
                                    from_end: false,
                                },
                                None,
                            ),
                            &ProjectionKind::Keep(offset, path) => (
                                ProjectionElem::ConstantIndex {
                                    offset,
                                    min_length: size,
                                    from_end: false,
                                },
                                Some(path),
                            ),
                        };
                        (tcx.mk_place_elem(self.place, project), path)
                    })
                    .collect::<Vec<_>>();
                let (succ, unwind, dropline) = self.drop_ladder_bottom();
                return self.drop_ladder(fields, succ, unwind, dropline).0;
            }
        }

        // Otherwise (no statically known size, or no element is tracked individually), take a raw
        // pointer to the array, unsize it to `*mut [T]`, and delegate to the slice drop loop.
        let array_ptr_ty = Ty::new_mut_ptr(tcx, array_ty);
        let array_ptr = self.new_temp(array_ptr_ty);

        let slice_ty = Ty::new_slice(tcx, ety);
        let slice_ptr_ty = Ty::new_mut_ptr(tcx, slice_ty);
        let slice_ptr = self.new_temp(slice_ptr_ty);

        let mut delegate_block = BasicBlockData::new_stmts(
            vec![
                self.assign(Place::from(array_ptr), Rvalue::RawPtr(RawPtrKind::Mut, self.place)),
                self.assign(
                    Place::from(slice_ptr),
                    Rvalue::Cast(
                        CastKind::PointerCoercion(
                            PointerCoercion::Unsize,
                            CoercionSource::Implicit,
                        ),
                        Operand::Move(Place::from(array_ptr)),
                        slice_ptr_ty,
                    ),
                ),
            ],
            None,
            self.unwind.is_cleanup(),
        );

        let array_place = mem::replace(
            &mut self.place,
            Place::from(slice_ptr).project_deeper(&[PlaceElem::Deref], tcx),
        );
        let slice_block = self.drop_loop_trio_for_slice(ety);
        self.place = array_place;

        delegate_block.terminator = Some(Terminator {
            source_info: self.source_info,
            kind: TerminatorKind::Goto { target: slice_block },
        });
        self.elaborator.patch().new_block(delegate_block)
    }

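    /// Creates a drop loop over the slice `*self.place` for the normal path, plus matching loops
    /// for the unwind path and (for async drops) the dropline where needed, and guards the whole
    /// construct behind the drop flag.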
    fn drop_loop_trio_for_slice(&mut self, ety: Ty<'tcx>) -> BasicBlock {
        debug!("drop_loop_trio_for_slice({:?})", ety);
        let tcx = self.tcx();
        let len = self.new_temp(tcx.types.usize);
        let cur = self.new_temp(tcx.types.usize);

        let unwind = self
            .unwind
            .map(|unwind| self.drop_loop(unwind, cur, len, ety, Unwind::InCleanup, None));

        let dropline =
            self.dropline.map(|dropline| self.drop_loop(dropline, cur, len, ety, unwind, None));

        let loop_block = self.drop_loop(self.succ, cur, len, ety, unwind, dropline);

        let [PlaceElem::Deref] = self.place.projection.as_slice() else {
            span_bug!(
                self.source_info.span,
                "Expected place for slice drop shim to be *_n, but it's {:?}",
                self.place,
            );
        };

        let zero = self.constant_usize(0);
        let block = BasicBlockData::new_stmts(
            vec![
                self.assign(
                    len.into(),
                    Rvalue::UnaryOp(
                        UnOp::PtrMetadata,
                        Operand::Copy(Place::from(self.place.local)),
                    ),
                ),
                self.assign(cur.into(), Rvalue::Use(zero)),
            ],
            Some(Terminator {
                source_info: self.source_info,
                kind: TerminatorKind::Goto { target: loop_block },
            }),
            unwind.is_cleanup(),
        );

        let drop_block = self.elaborator.patch().new_block(block);
        let reset_block = self.drop_flag_reset_block(DropFlagMode::Deep, drop_block, unwind);
        self.drop_flag_test_block(reset_block, self.succ, unwind)
    }

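    /// Builds the "open" drop for `self.place`: its fields (or elements) are dropped one by one,
    /// so that parts which were individually moved out of are skipped.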
    fn open_drop(&mut self) -> BasicBlock {
        let ty = self.place_ty(self.place);
        match ty.kind() {
            ty::Closure(_, args) => self.open_drop_for_tuple(args.as_closure().upvar_tys()),
            ty::CoroutineClosure(_, args) => {
                self.open_drop_for_tuple(args.as_coroutine_closure().upvar_tys())
            }
            ty::Coroutine(_, args) => self.open_drop_for_tuple(args.as_coroutine().upvar_tys()),
            ty::Tuple(fields) => self.open_drop_for_tuple(fields),
            ty::Adt(def, args) => self.open_drop_for_adt(*def, args),
            ty::Dynamic(..) => self.complete_drop(self.succ, self.unwind),
            ty::Array(ety, size) => {
                let size = size.try_to_target_usize(self.tcx());
                self.open_drop_for_array(ty, *ety, size)
            }
            ty::Slice(ety) => self.drop_loop_trio_for_slice(*ety),

            ty::UnsafeBinder(_) => {
                self.tcx().dcx().span_delayed_bug(
                    self.source_info.span,
                    "open drop for unsafe binder shouldn't be encountered",
                );
                self.elaborator.patch().new_block(BasicBlockData::new(
                    Some(Terminator {
                        source_info: self.source_info,
                        kind: TerminatorKind::Unreachable,
                    }),
                    self.unwind.is_cleanup(),
                ))
            }

            _ => span_bug!(self.source_info.span, "open drop from non-ADT `{:?}`", ty),
        }
    }

    fn complete_drop(&mut self, succ: BasicBlock, unwind: Unwind) -> BasicBlock {
        debug!("complete_drop(succ={:?}, unwind={:?})", succ, unwind);

        let drop_block = self.drop_block(succ, unwind);

        self.drop_flag_test_block(drop_block, succ, unwind)
    }

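    /// Creates a block that clears the drop flag(s) selected by `mode` and then jumps to `succ`.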
    fn drop_flag_reset_block(
        &mut self,
        mode: DropFlagMode,
        succ: BasicBlock,
        unwind: Unwind,
    ) -> BasicBlock {
        debug!("drop_flag_reset_block({:?},{:?})", self, mode);

        if unwind.is_cleanup() {
            // The drop flag isn't read again on the unwind path, so don't bother clearing it.
            return succ;
        }
        let block = self.new_block(unwind, TerminatorKind::Goto { target: succ });
        let block_start = Location { block, statement_index: 0 };
        self.elaborator.clear_drop_flag(block_start, self.path, mode);
        block
    }

    fn elaborated_drop_block(&mut self) -> BasicBlock {
        debug!("elaborated_drop_block({:?})", self);
        let blk = self.drop_block_simple(self.succ, self.unwind);
        self.elaborate_drop(blk);
        blk
    }

    fn drop_block_simple(&mut self, target: BasicBlock, unwind: Unwind) -> BasicBlock {
        let block = TerminatorKind::Drop {
            place: self.place,
            target,
            unwind: unwind.into_action(),
            replace: false,
            drop: self.dropline,
            async_fut: None,
        };
        self.new_block(unwind, block)
    }

    fn drop_block(&mut self, target: BasicBlock, unwind: Unwind) -> BasicBlock {
        let drop_ty = self.place_ty(self.place);
        if self.tcx().features().async_drop()
            && self.elaborator.body().coroutine.is_some()
            && self.elaborator.allow_async_drops()
            && !unwind.is_cleanup()
            && drop_ty.needs_async_drop(self.tcx(), self.elaborator.typing_env())
        {
            self.build_async_drop(
                self.place,
                drop_ty,
                None,
                self.succ,
                unwind,
                self.dropline,
                false,
            )
        } else {
            let block = TerminatorKind::Drop {
                place: self.place,
                target,
                unwind: unwind.into_action(),
                replace: false,
                drop: None,
                async_fut: None,
            };
            self.new_block(unwind, block)
        }
    }

    fn goto_block(&mut self, target: BasicBlock, unwind: Unwind) -> BasicBlock {
        let block = TerminatorKind::Goto { target };
        self.new_block(unwind, block)
    }

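    /// Returns a block that tests the drop flag and branches to `on_set` or `on_unset`. If the
    /// drop style is statically known, no test is emitted and the corresponding block is returned
    /// directly.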
    fn drop_flag_test_block(
        &mut self,
        on_set: BasicBlock,
        on_unset: BasicBlock,
        unwind: Unwind,
    ) -> BasicBlock {
        let style = self.elaborator.drop_style(self.path, DropFlagMode::Shallow);
        debug!(
            "drop_flag_test_block({:?},{:?},{:?},{:?}) - {:?}",
            self, on_set, on_unset, unwind, style
        );

        match style {
            DropStyle::Dead => on_unset,
            DropStyle::Static => on_set,
            DropStyle::Conditional | DropStyle::Open => {
                let flag = self.elaborator.get_drop_flag(self.path).unwrap();
                let term = TerminatorKind::if_(flag, on_set, on_unset);
                self.new_block(unwind, term)
            }
        }
    }

    fn new_block(&mut self, unwind: Unwind, k: TerminatorKind<'tcx>) -> BasicBlock {
        self.elaborator.patch().new_block(BasicBlockData::new(
            Some(Terminator { source_info: self.source_info, kind: k }),
            unwind.is_cleanup(),
        ))
    }

    fn new_block_with_statements(
        &mut self,
        unwind: Unwind,
        statements: Vec<Statement<'tcx>>,
        k: TerminatorKind<'tcx>,
    ) -> BasicBlock {
        self.elaborator.patch().new_block(BasicBlockData::new_stmts(
            statements,
            Some(Terminator { source_info: self.source_info, kind: k }),
            unwind.is_cleanup(),
        ))
    }

    fn new_temp(&mut self, ty: Ty<'tcx>) -> Local {
        self.elaborator.patch().new_temp(ty, self.source_info.span)
    }

    fn constant_usize(&self, val: u16) -> Operand<'tcx> {
        Operand::Constant(Box::new(ConstOperand {
            span: self.source_info.span,
            user_ty: None,
            const_: Const::from_usize(self.tcx(), val.into()),
        }))
    }

    fn assign(&self, lhs: Place<'tcx>, rhs: Rvalue<'tcx>) -> Statement<'tcx> {
        Statement::new(self.source_info, StatementKind::Assign(Box::new((lhs, rhs))))
    }
}